diff --git a/.docker-home/.gitignore b/.docker-home/.gitignore deleted file mode 100644 index c96a04f008e..00000000000 --- a/.docker-home/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore \ No newline at end of file diff --git a/.docker/Dockerfile-alpine b/.docker/Dockerfile-alpine index d3309736b2f..251ce2e0d57 100644 --- a/.docker/Dockerfile-alpine +++ b/.docker/Dockerfile-alpine @@ -1,21 +1,22 @@ -FROM alpine:3.16 +FROM alpine:3.21 -RUN addgroup -S ory; \ - adduser -S ory -G ory -D -H -s /bin/nologin -RUN apk --no-cache --upgrade --latest add ca-certificates +RUN <<HEREDOC + apk --no-cache --upgrade --latest add ca-certificates + echo 'hosts: files dns' > /etc/nsswitch.conf + # Add a user/group for nonroot with a stable UID + GID. Values are from nonroot from distroless + # for interoperability with other containers. + addgroup --system --gid 65532 nonroot + adduser --system --uid 65532 \ + --gecos "nonroot User" \ + --home /home/nonroot \ + --ingroup nonroot \ + --shell /sbin/nologin \ + nonroot +HEREDOC -# By creating the sqlite folder as the ory user, the mounted volume will be owned by ory:ory, which -# is required for read/write of SQLite. -RUN mkdir -p /var/lib/sqlite && \ - chown ory:ory /var/lib/sqlite +COPY hydra /usr/bin/hydra -USER ory +USER nonroot ENTRYPOINT ["hydra"] CMD ["serve", "all"] diff --git a/.docker/Dockerfile-build b/.docker/Dockerfile-build deleted file mode 100644 index addd0f7c335..00000000000 --- a/.docker/Dockerfile-build +++ /dev/null @@ -1,46 +0,0 @@ -FROM golang:1.19-alpine3.16 AS builder - -RUN apk -U --no-cache --upgrade --latest add build-base git gcc bash - -WORKDIR /go/src/github.com/ory/hydra -RUN mkdir -p ./internal/httpclient - -COPY go.mod go.sum ./ -COPY internal/httpclient/go.* ./internal/httpclient - -ENV GO111MODULE on -ENV CGO_ENABLED 1 - -RUN go mod download - -COPY . . - -RUN go build -tags sqlite,json1 -o /usr/bin/hydra - -FROM alpine:3.15 - -RUN addgroup -S ory; \ - adduser -S ory -G ory -D -h /home/ory -s /bin/nologin; \ - chown -R ory:ory /home/ory - -COPY --from=builder /usr/bin/hydra /usr/bin/hydra - -# By creating the sqlite folder as the ory user, the mounted volume will be owned by ory:ory, which -# is required for read/write of SQLite. -RUN mkdir -p /var/lib/sqlite && \ - chown ory:ory /var/lib/sqlite - -VOLUME /var/lib/sqlite - -# Exposing the ory home directory -VOLUME /home/ory - -# Declare the standard ports used by hydra (4444 for public service endpoint, 4445 for admin service endpoint) -EXPOSE 4444 4445 - -USER ory - -ENTRYPOINT ["hydra"] -CMD ["serve"] - - diff --git a/.docker/Dockerfile-distroless-static b/.docker/Dockerfile-distroless-static new file mode 100644 index 00000000000..054c9c79758 --- /dev/null +++ b/.docker/Dockerfile-distroless-static @@ -0,0 +1,8 @@ +FROM gcr.io/distroless/static-debian12:nonroot + +COPY hydra /usr/bin/hydra +# Declare the standard ports used by hydra (4444 for public service endpoint, 4445 for admin service endpoint) +EXPOSE 4444 4445 + +ENTRYPOINT ["hydra"] +CMD ["serve", "all"] diff --git a/.docker/Dockerfile-hsm b/.docker/Dockerfile-hsm deleted file mode 100644 index fd20986fe1d..00000000000 --- a/.docker/Dockerfile-hsm +++ /dev/null @@ -1,59 +0,0 @@ -FROM golang:1.19-alpine3.16 AS builder - -RUN apk -U --no-cache --upgrade --latest add build-base git gcc bash - -WORKDIR /go/src/github.com/ory/hydra -RUN mkdir -p ./internal/httpclient - -COPY go.mod go.sum ./ -COPY internal/httpclient/go.* ./internal/httpclient - -ENV GO111MODULE on -ENV CGO_ENABLED 1 - -RUN go mod download - -COPY . . 
- -FROM builder as build-hydra -RUN go build -tags sqlite,json1,hsm -o /usr/bin/hydra - -FROM builder as test-hsm -ENV HSM_ENABLED=true -ENV HSM_LIBRARY=/usr/lib/softhsm/libsofthsm2.so -ENV HSM_TOKEN_LABEL=hydra -ENV HSM_PIN=1234 - -RUN apk --no-cache --upgrade --latest add softhsm opensc; \ - pkcs11-tool --module /usr/lib/softhsm/libsofthsm2.so --slot 0 --init-token --so-pin 0000 --init-pin --pin 1234 --label hydra; \ - go test -p 1 -v -failfast -short -tags=sqlite,hsm ./... - -FROM alpine:3.15 - -RUN apk --no-cache --upgrade --latest add softhsm opensc; \ - pkcs11-tool --module /usr/lib/softhsm/libsofthsm2.so --slot 0 --init-token --so-pin 0000 --init-pin --pin 1234 --label hydra - -RUN addgroup -S ory; \ - adduser -S ory -G ory -D -h /home/ory -s /bin/nologin; \ - chown -R ory:ory /home/ory; \ - chown -R ory:ory /var/lib/softhsm/tokens - -COPY --from=build-hydra /usr/bin/hydra /usr/bin/hydra - -# By creating the sqlite folder as the ory user, the mounted volume will be owned by ory:ory, which -# is required for read/write of SQLite. -RUN mkdir -p /var/lib/sqlite && \ - chown ory:ory /var/lib/sqlite - -VOLUME /var/lib/sqlite - -# Exposing the ory home directory -VOLUME /home/ory - -# Declare the standard ports used by hydra (4444 for public service endpoint, 4445 for admin service endpoint) -EXPOSE 4444 4445 - -USER ory - -ENTRYPOINT ["hydra"] -CMD ["serve"] diff --git a/.docker/Dockerfile-local-build b/.docker/Dockerfile-local-build new file mode 100644 index 00000000000..368b7542666 --- /dev/null +++ b/.docker/Dockerfile-local-build @@ -0,0 +1,32 @@ +FROM golang:1.25 AS builder + +WORKDIR /go/src/github.com/ory/hydra + +COPY oryx/go.mod oryx/go.mod +COPY oryx/go.sum oryx/go.sum + + +RUN apt-get update && apt-get upgrade -y &&\ + mkdir -p ./internal/httpclient + +COPY go.mod go.sum ./ +COPY internal/httpclient/go.* ./internal/httpclient/ + +ENV CGO_ENABLED=1 + +RUN go mod download + +COPY . . +RUN go build -ldflags="-extldflags=-static" -tags sqlite,sqlite_omit_load_extension -o /usr/bin/hydra + +######################### + +FROM gcr.io/distroless/static-debian12:debug-nonroot AS runner + +COPY --from=builder /usr/bin/hydra /usr/bin/hydra + +# Declare the standard ports used by hydra (4444 for public service endpoint, 4445 for admin service endpoint) +EXPOSE 4444 4445 + +ENTRYPOINT ["hydra"] +CMD ["serve", "all"] diff --git a/.docker/Dockerfile-scratch b/.docker/Dockerfile-scratch deleted file mode 100644 index d3e510436a3..00000000000 --- a/.docker/Dockerfile-scratch +++ /dev/null @@ -1,27 +0,0 @@ -FROM alpine:3.16 - -RUN apk --no-cache --upgrade --latest add ca-certificates - -# set up nsswitch.conf for Go's "netgo" implementation -# - https://github.com/golang/go/blob/go1.9.1/src/net/conf.go#L194-L275 -RUN [ ! 
-e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf - -RUN addgroup -S ory; \ - adduser -S ory -G ory -D -h /home/ory -s /bin/nologin; - -RUN mkdir -p /var/lib/sqlite && \ - chown -R ory:ory /var/lib/sqlite - -FROM scratch - -COPY --from=0 /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=0 /etc/nsswitch.conf /etc/nsswitch.conf -COPY --from=0 /etc/passwd /etc/passwd -COPY --from=0 /var/lib/sqlite /var/lib/sqlite - -COPY hydra /usr/bin/hydra - -USER ory - -ENTRYPOINT ["hydra"] -CMD ["serve", "all"] diff --git a/.docker/Dockerfile-sqlite b/.docker/Dockerfile-sqlite deleted file mode 100644 index 83fd6a23215..00000000000 --- a/.docker/Dockerfile-sqlite +++ /dev/null @@ -1,35 +0,0 @@ -FROM alpine:3.16 - -# Because this image is built for SQLite, we create /home/ory and /home/ory/sqlite which is owned by the ory user -# and declare /home/ory/sqlite a volume. -# -# To get SQLite and Docker Volumes working with this image, mount the volume where SQLite should be written to at: -# -# /home/ory/sqlite/some-file. - -RUN addgroup -S ory; \ - adduser -S ory -G ory -D -h /home/ory -s /bin/nologin; \ - chown -R ory:ory /home/ory && \ - apk --no-cache --upgrade --latest add ca-certificates sqlite - -WORKDIR /home/ory - -COPY hydra /usr/bin/hydra - -# By creating the sqlite folder as the ory user, the mounted volume will be owned by ory:ory, which -# is required for read/write of SQLite. -RUN mkdir -p /var/lib/sqlite && \ - chown ory:ory /var/lib/sqlite - -VOLUME /var/lib/sqlite - -# Exposing the ory home directory -VOLUME /home/ory - -# Declare the standard ports used by Hydra (4444 for public service endpoint, 4445 for admin service endpoint) -EXPOSE 4444 4445 - -USER ory - -ENTRYPOINT ["hydra"] -CMD ["serve"] diff --git a/.docker/Dockerfile-test-hsm b/.docker/Dockerfile-test-hsm new file mode 100644 index 00000000000..d5dfd5d1c30 --- /dev/null +++ b/.docker/Dockerfile-test-hsm @@ -0,0 +1,32 @@ +FROM golang:1.25-alpine3.21 AS builder + +RUN apk add --no-cache build-base git gcc bash + +WORKDIR /go/src/github.com/ory/hydra +RUN mkdir -p ./internal/httpclient + +COPY go.mod go.sum ./ +COPY internal/httpclient/go.* ./internal/httpclient + +ENV CGO_ENABLED 1 + +RUN go mod download +COPY . . + +RUN go build -tags sqlite,hsm -o /usr/bin/hydra + +ENV HSM_ENABLED=true +ENV HSM_LIBRARY=/usr/lib/softhsm/libsofthsm2.so +ENV HSM_TOKEN_LABEL=hydra +ENV HSM_PIN=1234 + +RUN apk add --no-cache softhsm opensc +RUN pkcs11-tool --module "$HSM_LIBRARY" --slot 0 --init-token --so-pin 0000 --init-pin --pin "$HSM_PIN" --label "$HSM_TOKEN_LABEL" + +FROM builder as test-hsm + +RUN go test -p 1 -failfast -short -tags=sqlite,hsm ./... + +FROM builder AS test-refresh-hsm + +RUN UPDATE_SNAPSHOTS=true go test -p 1 -failfast -short -tags=sqlite,hsm,refresh ./... diff --git a/.docker/README.md b/.docker/README.md new file mode 100644 index 00000000000..04d148002f5 --- /dev/null +++ b/.docker/README.md @@ -0,0 +1,6 @@ +This directory contains Dockerfiles for various targets: + +- `Dockerfile-distroless` and `Dockerfile-alpine` are published to Docker Hub + via GoReleaser. +- The other `Dockerfile` variants are intended only for local development and + tests. 
diff --git a/.dockerignore b/.dockerignore index 4d913fbbc91..cf7558fc017 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,7 +3,6 @@ docs node_modules .circleci -.docker-home .github scripts sdk/js diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a65fa85549a..ef90d000d7f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1 @@ -* @aeneasr - -/docs/ @ory/documenters +* @aeneasr @ory/product-development diff --git a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml index 569061946b9..2f1841bc958 100644 --- a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml +++ b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml @@ -24,15 +24,21 @@ body: "I have read and am following this repository's [Contribution Guidelines](https://github.com/ory/hydra/blob/master/CONTRIBUTING.md)." required: true - - label: - "This issue affects my [Ory Network](https://www.ory.sh/) project." - label: "I have joined the [Ory Community Slack](https://slack.ory.sh)." - label: "I am signed up to the [Ory Security Patch - Newsletter](https://ory.us10.list-manage.com/subscribe?u=ffb1a878e4ec6c0ed312a3480&id=f605a41b53)." + Newsletter](https://www.ory.sh/l/sign-up-newsletter)." id: checklist type: checkboxes + - attributes: + description: + "Enter the slug or API URL of the affected Ory Network project. Leave + empty when you are self-hosting." + label: "Ory Network Project" + placeholder: "https://.projects.oryapis.com" + id: ory-network-project + type: input - attributes: description: "A clear and concise description of what the bug is." label: "Describe the bug" diff --git a/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml b/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml index 7712e775ef3..d4f478c7abd 100644 --- a/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml +++ b/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml @@ -35,15 +35,21 @@ body: "I have read and am following this repository's [Contribution Guidelines](https://github.com/ory/hydra/blob/master/CONTRIBUTING.md)." required: true - - label: - "This issue affects my [Ory Network](https://www.ory.sh/) project." - label: "I have joined the [Ory Community Slack](https://slack.ory.sh)." - label: "I am signed up to the [Ory Security Patch - Newsletter](https://ory.us10.list-manage.com/subscribe?u=ffb1a878e4ec6c0ed312a3480&id=f605a41b53)." + Newsletter](https://www.ory.sh/l/sign-up-newsletter)." id: checklist type: checkboxes + - attributes: + description: + "Enter the slug or API URL of the affected Ory Network project. Leave + empty when you are self-hosting." + label: "Ory Network Project" + placeholder: "https://.projects.oryapis.com" + id: ory-network-project + type: input - attributes: description: | This section gives the reader a very rough overview of the landscape in which the new system is being built and what is actually being built. This isn’t a requirements doc. Keep it succinct! The goal is that readers are brought up to speed but some previous knowledge can be assumed and detailed info can be linked to. This section should be entirely focused on objective background facts. diff --git a/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml index 4053a53db82..4dc8b73daab 100644 --- a/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml +++ b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml @@ -28,15 +28,21 @@ body: "I have read and am following this repository's [Contribution Guidelines](https://github.com/ory/hydra/blob/master/CONTRIBUTING.md)." 
required: true - - label: - "This issue affects my [Ory Network](https://www.ory.sh/) project." - label: "I have joined the [Ory Community Slack](https://slack.ory.sh)." - label: "I am signed up to the [Ory Security Patch - Newsletter](https://ory.us10.list-manage.com/subscribe?u=ffb1a878e4ec6c0ed312a3480&id=f605a41b53)." + Newsletter](https://www.ory.sh/l/sign-up-newsletter)." id: checklist type: checkboxes + - attributes: + description: + "Enter the slug or API URL of the affected Ory Network project. Leave + empty when you are self-hosting." + label: "Ory Network Project" + placeholder: "https://.projects.oryapis.com" + id: ory-network-project + type: input - attributes: description: "Is your feature request related to a problem? Please describe." diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 498ed2510c8..0839488da4c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -6,6 +6,7 @@ on: tags: - "*" pull_request: + merge_group: # Cancel in-progress runs in current workflow. concurrency: @@ -23,82 +24,65 @@ jobs: # We must fetch at least the immediate parents so that if this is # a pull request then we can checkout the head. fetch-depth: 2 - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v6 with: - go-version: "1.19" + go-version: "1.25" - name: Start service run: ./test/conformance/start.sh - name: Run tests run: ./test/conformance/test.sh -v -short -parallel 16 - sdk-generate: - name: Generate SDKs - runs-on: ubuntu-latest - outputs: - sdk-cache-key: ${{ steps.sdk-generate.outputs.sdk-cache-key }} - steps: - - uses: ory/ci/sdk/generate@master - with: - token: ${{ secrets.ORY_BOT_PAT }} - id: sdk-generate - test: name: Run tests and lints runs-on: ubuntu-latest - needs: - - sdk-generate services: postgres: - image: postgres:11.8 + image: postgres:18 env: POSTGRES_DB: postgres - POSTGRES_PASSWORD: test - POSTGRES_USER: test + POSTGRES_PASSWORD: secret + POSTGRES_USER: postgres ports: - 5432:5432 mysql: - image: mysql:8.0.26 + image: mysql:9.4 env: - MYSQL_ROOT_PASSWORD: test + MYSQL_ROOT_PASSWORD: secret ports: - 3306:3306 env: - TEST_DATABASE_POSTGRESQL: "postgres://test:test@localhost:5432/postgres?sslmode=disable" - TEST_DATABASE_MYSQL: "mysql://root:test@(localhost:3306)/mysql?multiStatements=true&parseTime=true" + TEST_DATABASE_POSTGRESQL: "postgres://postgres:secret@localhost:5432/postgres?sslmode=disable" + TEST_DATABASE_MYSQL: "mysql://root:secret@(localhost:3306)/mysql?multiStatements=true&parseTime=true" TEST_DATABASE_COCKROACHDB: "cockroach://root@localhost:26257/defaultdb?sslmode=disable" steps: - run: | docker create --name cockroach -p 26257:26257 \ - cockroachdb/cockroach:v22.1.10 start-single-node --insecure + cockroachdb/cockroach:latest-v25.4 start-single-node --insecure docker start cockroach name: Start CockroachDB - uses: ory/ci/checkout@master with: fetch-depth: 2 - - uses: actions/cache@v2 + - uses: actions/setup-go@v6 with: - path: | - internal/httpclient - key: ${{ needs.sdk-generate.outputs.sdk-cache-key }} - - uses: actions/setup-go@v2 - with: - go-version: "1.19" + go-version: "1.25" - run: go list -json > go.list - name: Run nancy - uses: sonatype-nexus-community/nancy-github-action@v1.0.2 + uses: sonatype-nexus-community/nancy-github-action@v1.0.3 + with: + nancyVersion: v1.0.42 - name: Run golangci-lint - uses: golangci/golangci-lint-action@v2 + if: ${{ github.ref_type != 'tag' }} + uses: golangci/golangci-lint-action@v9 env: GOGC: 100 with: args: --timeout 10m0s - version: v1.47.3 - 
skip-go-installation: true - skip-pkg-cache: true - - name: Run go-acc (tests) + version: "v2.4.0" + only-new-issues: "true" + - name: Run go tests run: | - make .bin/go-acc - .bin/go-acc -o coverage.out ./... -- -failfast -timeout=20m -tags sqlite,json1 + go test -coverprofile coverage.out -failfast -timeout=20m -tags sqlite,sqlite_omit_load_extension ./... - name: Submit to Codecov run: | bash <(curl -s https://codecov.io/bash) @@ -107,8 +91,6 @@ jobs: test-hsm: name: Run HSM tests - needs: - - sdk-generate runs-on: ubuntu-latest env: HSM_ENABLED: true @@ -117,17 +99,12 @@ jobs: HSM_PIN: 1234 steps: - uses: ory/ci/checkout@master - - uses: actions/cache@v2 + - uses: actions/setup-go@v6 with: - path: | - internal/httpclient - key: ${{ needs.sdk-generate.outputs.sdk-cache-key }} - - uses: actions/setup-go@v2 - with: - go-version: "1.19" + go-version: "1.25" - name: Setup HSM libs and packages run: | - sudo apt install -y softhsm opensc + sudo apt install -y softhsm2 opensc sudo rm -rf /var/lib/softhsm/tokens sudo mkdir -p /var/lib/softhsm/tokens sudo chmod -R a+rwx /var/lib/softhsm @@ -141,15 +118,13 @@ jobs: test-e2e: name: Run end-to-end tests runs-on: ubuntu-latest - needs: - - sdk-generate strategy: matrix: database: ["memory", "postgres", "mysql", "cockroach"] args: ["", "--jwt"] services: postgres: - image: postgres:11.8 + image: postgres:18 env: POSTGRES_DB: postgres POSTGRES_PASSWORD: test @@ -157,7 +132,7 @@ jobs: ports: - 5432:5432 mysql: - image: mysql:8.0.26 + image: mysql:9.4 env: MYSQL_ROOT_PASSWORD: test ports: @@ -169,22 +144,17 @@ jobs: steps: - run: | docker create --name cockroach -p 26257:26257 \ - cockroachdb/cockroach:v22.1.10 start-single-node --insecure + cockroachdb/cockroach:latest-v25.4 start-single-node --insecure docker start cockroach name: Start CockroachDB - uses: ory/ci/checkout@master - - uses: actions/setup-go@v2 + - uses: actions/setup-go@v6 with: - go-version: "1.19" - - uses: actions/cache@v2 + go-version: "1.25" + - uses: actions/cache@v5 with: path: ./test/e2e/hydra key: ${{ runner.os }}-hydra - - uses: actions/cache@v2 - with: - path: | - internal/httpclient - key: ${{ needs.sdk-generate.outputs.sdk-cache-key }} - run: ./test/e2e/circle-ci.bash ${{ matrix.database }} ${{ matrix.args }} docs-cli: @@ -198,34 +168,6 @@ jobs: token: ${{ secrets.ORY_BOT_PAT }} output-dir: docs/hydra/cli - changelog: - name: Generate changelog - runs-on: ubuntu-latest - if: ${{ github.ref_type == 'tag' || github.ref_name == 'master' }} - needs: - - test - - test-hsm - - test-e2e - steps: - - uses: ory/ci/changelog@master - with: - token: ${{ secrets.ORY_BOT_PAT }} - - sdk-release: - name: Release SDKs - runs-on: ubuntu-latest - if: ${{ github.ref_type == 'tag' }} - needs: - - test - - test-hsm - - sdk-generate - - release - steps: - - uses: ory/ci/sdk/release@master - with: - swag-spec-location: spec/api.json - token: ${{ secrets.ORY_BOT_PAT }} - release: name: Generate release runs-on: ubuntu-latest @@ -235,7 +177,6 @@ jobs: - test - test-hsm - test-e2e - - changelog steps: - uses: ory/ci/releaser@master with: @@ -245,18 +186,6 @@ jobs: docker_username: ${{ secrets.DOCKERHUB_USERNAME }} docker_password: ${{ secrets.DOCKERHUB_PASSWORD }} - render-version-schema: - name: Render version schema - runs-on: ubuntu-latest - if: ${{ github.ref_type == 'tag' }} - needs: - - release - steps: - - uses: ory/ci/releaser/render-version-schema@master - with: - schema-path: spec/config.json - token: ${{ secrets.ORY_BOT_PAT }} - newsletter-draft: name: Draft newsletter runs-on: 
ubuntu-latest diff --git a/.github/workflows/closed_references.yml b/.github/workflows/closed_references.yml index 9a1b48350a8..7d1fe776cbe 100644 --- a/.github/workflows/closed_references.yml +++ b/.github/workflows/closed_references.yml @@ -19,10 +19,10 @@ jobs: runs-on: ubuntu-latest name: Find closed references steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2-beta + - uses: actions/checkout@v6 + - uses: actions/setup-node@v6 with: - node-version: "14" + node-version: "24" - uses: ory/closed-reference-notifier@v1 with: token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index f4b6dd2b4d3..6ac3fea036d 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -13,6 +13,7 @@ on: branches: [master] schedule: - cron: "0 11 * * 6" + merge_group: jobs: analyze: @@ -30,7 +31,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v6 with: # We must fetch at least the immediate parents so that if this is # a pull request then we can checkout the head. @@ -41,9 +42,14 @@ jobs: - run: git checkout HEAD^2 if: ${{ github.event_name == 'pull_request' }} + - uses: actions/setup-go@v6 + with: + go-version: "1.25" + - run: go version + # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v1 + uses: github/codeql-action/init@v4 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -54,7 +60,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v1 + uses: github/codeql-action/autobuild@v4 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -68,4 +74,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 + uses: github/codeql-action/analyze@v4 diff --git a/.github/workflows/conventional_commits.yml b/.github/workflows/conventional_commits.yml index c4d39051176..12129a8d853 100644 --- a/.github/workflows/conventional_commits.yml +++ b/.github/workflows/conventional_commits.yml @@ -24,7 +24,7 @@ jobs: name: Validate PR title runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v6 - id: config uses: ory/ci/conventional_commit_config@master with: @@ -46,7 +46,7 @@ jobs: deps docs default_require_scope: false - - uses: amannn/action-semantic-pull-request@v4 + - uses: amannn/action-semantic-pull-request@v6 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/cve-scan.yaml b/.github/workflows/cve-scan.yaml index be50bee5ca5..4d7c38c2a8a 100644 --- a/.github/workflows/cve-scan.yaml +++ b/.github/workflows/cve-scan.yaml @@ -1,5 +1,9 @@ +# AUTO-GENERATED, DO NOT EDIT! 
+# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/server/.github/workflows/cve-scan.yaml + name: Docker Image Scanners on: + workflow_dispatch: push: branches: - "master" @@ -9,56 +13,122 @@ on: branches: - "master" +permissions: + contents: read + security-events: write + jobs: scanners: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v6 - name: Setup Env id: vars shell: bash run: | - echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})" - echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" + # Store values in local variables + SHA_SHORT=$(git rev-parse --short HEAD) + REPO_NAME=${{ github.event.repository.name }} + + IMAGE_NAME="oryd/${REPO_NAME}:${SHA_SHORT}" + + # Output values for debugging + echo "Values to be set:" + echo "SHA_SHORT: ${SHA_SHORT}" + echo "REPO_NAME: ${REPO_NAME}" + echo "IMAGE_NAME: ${IMAGE_NAME}" + + # Set GitHub Environment variables + echo "SHA_SHORT=${SHA_SHORT}" >> "${GITHUB_ENV}" + echo "IMAGE_NAME=${IMAGE_NAME}" >> "${GITHUB_ENV}" - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v3 - name: Build images shell: bash run: | - touch hydra - DOCKER_BUILDKIT=1 docker build -f .docker/Dockerfile-alpine --build-arg=COMMIT=${{ steps.vars.outputs.sha_short }} -t oryd/hydra:${{ steps.vars.outputs.sha_short }} . - rm hydra + IMAGE_TAG="${{ env.SHA_SHORT }}" make docker + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Configure Trivy + run: | + mkdir -p "$HOME/.cache/trivy" + echo "TRIVY_USERNAME=${{ github.actor }}" >> "$GITHUB_ENV" + echo "TRIVY_PASSWORD=${{ secrets.GITHUB_TOKEN }}" >> "$GITHUB_ENV" + - name: Anchore Scanner - uses: anchore/scan-action@v3 + uses: anchore/scan-action@v7 id: grype-scan with: - image: oryd/hydra:${{ steps.vars.outputs.sha_short }} + image: ${{ env.IMAGE_NAME }} fail-build: true severity-cutoff: high - debug: false - acs-report-enable: true + add-cpes-if-none: true + - name: Inspect action SARIF report + shell: bash + if: ${{ always() }} + run: | + echo "::group::Anchore Scan Details" + jq '.runs[0].results' ${{ steps.grype-scan.outputs.sarif }} + echo "::endgroup::" - name: Anchore upload scan SARIF report if: always() - uses: github/codeql-action/upload-sarif@v1 + uses: github/codeql-action/upload-sarif@v4 with: sarif_file: ${{ steps.grype-scan.outputs.sarif }} + - name: Kubescape scanner + uses: kubescape/github-action@main + id: kubescape + with: + image: ${{ env.IMAGE_NAME }} + verbose: true + format: pretty-printer + # can't whitelist CVE yet: https://github.com/kubescape/kubescape/pull/1568 + severityThreshold: critical - name: Trivy Scanner uses: aquasecurity/trivy-action@master if: ${{ always() }} with: - image-ref: oryd/hydra:${{ steps.vars.outputs.sha_short }} + image-ref: ${{ env.IMAGE_NAME }} format: "table" exit-code: "42" ignore-unfixed: true vuln-type: "os,library" severity: "CRITICAL,HIGH" + scanners: "vuln,secret,misconfig" + env: + TRIVY_SKIP_JAVA_DB_UPDATE: "true" + TRIVY_DISABLE_VEX_NOTICE: "true" + TRIVY_DB_REPOSITORY: ghcr.io/aquasecurity/trivy-db,public.ecr.aws/aquasecurity/trivy-db + - name: Dockle Linter - uses: erzz/dockle-action@v1.3.1 + uses: erzz/dockle-action@v1 if: ${{ always() }} 
with: - image: oryd/hydra:${{ steps.vars.outputs.sha_short }} + image: ${{ env.IMAGE_NAME }} exit-code: 42 - failure-threshold: fatal + failure-threshold: high + - name: Hadolint + uses: hadolint/hadolint-action@v3.3.0 + id: hadolint + if: ${{ always() }} + with: + dockerfile: .docker/Dockerfile-local-build + verbose: true + format: "json" + failure-threshold: "error" + - name: View Hadolint results + if: ${{ always() }} + shell: bash + run: | + echo "::group::Hadolint Scan Details" + echo "${HADOLINT_RESULTS}" | jq '.' + echo "::endgroup::" diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index a7a720ebc0a..8a6d2cdd0a3 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -3,15 +3,16 @@ name: Format on: pull_request: push: + merge_group: jobs: format: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v3 + - uses: actions/checkout@v6 + - uses: actions/setup-go@v6 with: - go-version: 1.19 + go-version: "1.25" - run: make format - name: Indicate formatting issues run: git diff HEAD --exit-code --color diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index e903667d45c..efa436020d1 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v6 - name: Synchronize Issue Labels uses: ory/label-sync-action@v0 with: diff --git a/.github/workflows/licenses.yml b/.github/workflows/licenses.yml deleted file mode 100644 index a4592c63ced..00000000000 --- a/.github/workflows/licenses.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Licenses - -on: - pull_request: - push: - branches: - - main - - master - -jobs: - check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-go@v2 - with: - go-version: "1.18" - - uses: actions/setup-node@v2 - with: - node-version: "18" - - run: make licenses diff --git a/.github/workflows/milestone.yml b/.github/workflows/milestone.yml index 5d25a715ddd..913dc92e03f 100644 --- a/.github/workflows/milestone.yml +++ b/.github/workflows/milestone.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v6 with: token: ${{ secrets.TOKEN_PRIVILEGED }} - name: Milestone Documentation Generator @@ -24,7 +24,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} outputFile: docs/docs/milestones.md - name: Commit Milestone Documentation - uses: EndBug/add-and-commit@v4.4.0 + uses: EndBug/add-and-commit@v9.1.4 with: message: "autogen(docs): update milestone document" author_name: aeneasr diff --git a/.github/workflows/pm.yml b/.github/workflows/pm.yml new file mode 100644 index 00000000000..cec5a917c75 --- /dev/null +++ b/.github/workflows/pm.yml @@ -0,0 +1,30 @@ +name: Synchronize with product board + +on: + issues: + types: + - opened + pull_request: + types: + - opened + - ready_for_review + +jobs: + automate: + if: github.event.pull_request.head.repo.fork == false + name: Add issue to project + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: ory-corp/planning-automation-action@v0.2 + with: + organization: ory-corp + project: 5 + token: ${{ secrets.ORY_BOT_PAT }} + todoLabel: "Needs Triage" + statusName: Status + prStatusValue: "Needs Triage" + issueStatusValue: "Needs Triage" + includeEffort: "false" + monthlyMilestoneName: Roadmap Monthly + quarterlyMilestoneName: Roadmap diff --git a/.github/workflows/stale.yml 
b/.github/workflows/stale.yml index ac48a5e509b..c1f52b1f645 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -12,7 +12,7 @@ jobs: if: github.repository_owner == 'ory' runs-on: ubuntu-latest steps: - - uses: actions/stale@v4 + - uses: actions/stale@v10 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-message: | diff --git a/.gitignore b/.gitignore index cd6f8d1e4a7..5dcb9bfb525 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .bin/ .idea/ +.vscode/ node_modules/ *.iml *.exe @@ -24,8 +25,11 @@ LICENSE.txt hydra-login-consent-node ./cypress/screenshots *-packr.go +consent/csrf_flagka.go packrd/ persistence/sql/migrations/schema.sql cypress/videos cypress/screenshots BENCHMARKS.md +*.sqlite +hydra diff --git a/.golangci.yml b/.golangci.yml index c3461c51f45..b3fda053685 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,20 +1,21 @@ +version: "2" + linters: enable: - gosec - errcheck - - gosimple - - bodyclose - - staticcheck - # Disabled due to Go 1.19 changes and Go-Swagger incompatibility - # https://github.com/ory/hydra/issues/3227 - # - goimports - disable: - ineffassign - - deadcode + - staticcheck - unused - - structcheck - -run: - skip-files: - - ".+_test.go" - - ".+_test_.+.go" + settings: + staticcheck: + checks: + - "-SA1019" + exclusions: + rules: + - path: '_test\.go' + linters: + - gosec + - path: "internal/httpclient" + linters: + - errcheck diff --git a/.goreleaser.yml b/.goreleaser.yml index a2320feef2b..d9e1a65edbc 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -1,3 +1,5 @@ +version: 2 + includes: - from_url: url: https://raw.githubusercontent.com/ory/xgoreleaser/master/build.tmpl.yml @@ -5,9 +7,9 @@ includes: variables: brew_name: hydra brew_description: "The Ory OAuth2 and OpenID Connect Platform (Ory Hydra)" - buildinfo_hash: "github.com/ory/hydra/driver/config.Commit" - buildinfo_tag: "github.com/ory/hydra/driver/config.Version" - buildinfo_date: "github.com/ory/hydra/driver/config.Date" - dockerfile: ".docker/Dockerfile-alpine" - + buildinfo_hash: "github.com/ory/hydra/v2/driver/config.Commit" + buildinfo_tag: "github.com/ory/hydra/v2/driver/config.Version" + buildinfo_date: "github.com/ory/hydra/v2/driver/config.Date" + dockerfile_alpine: ".docker/Dockerfile-alpine" + dockerfile_static: ".docker/Dockerfile-distroless-static" project_name: hydra diff --git a/.grype.yml b/.grype.yml new file mode 100644 index 00000000000..56d262246ac --- /dev/null +++ b/.grype.yml @@ -0,0 +1,2 @@ +ignore: + - vulnerability: CVE-2023-2650 diff --git a/.orycli.yml b/.orycli.yml index 6d41798dba8..d59f9b0599d 100644 --- a/.orycli.yml +++ b/.orycli.yml @@ -1 +1,4 @@ project: hydra + +pre_release_hooks: + - ./script/render-schemas.sh diff --git a/.prettierignore b/.prettierignore index 45ba88688da..0240ced0ac8 100644 --- a/.prettierignore +++ b/.prettierignore @@ -4,3 +4,4 @@ CHANGELOG.md spec fixtures internal/httpclient/ +oryx diff --git a/.reports/dep-licenses.csv b/.reports/dep-licenses.csv new file mode 100644 index 00000000000..02fed54445c --- /dev/null +++ b/.reports/dep-licenses.csv @@ -0,0 +1,6 @@ +"module name","licenses" + +"github.com/ory/x","Apache-2.0" +"github.com/stretchr/testify","MIT" +"go.opentelemetry.io/otel/sdk","Apache-2.0" + diff --git a/.schema/config.schema.json b/.schema/config.schema.json new file mode 100644 index 00000000000..8b1d6800c81 --- /dev/null +++ b/.schema/config.schema.json @@ 
-0,0 +1,1098 @@ +{ + "$id": "https://github.com/ory/hydra/spec/config.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Ory Hydra Configuration", + "type": "object", + "definitions": { + "http_method": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + }, + "duration": { + "type": "string", + "pattern": "^(\\d+(ns|us|ms|s|m|h))+$", + "examples": [ + "1h", + "1h5m1s" + ] + }, + "webhook_config": { + "type": "object", + "additionalProperties": false, + "description": "Configures a webhook.", + "required": ["url"], + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "The URL to send the webhook to." + }, + "auth": { + "type": "object", + "additionalProperties": false, + "required": ["type", "config"], + "properties": { + "type": { + "type": "string", + "const": "api_key" + }, + "config": { + "type": "object", + "additionalProperties": false, + "required": ["name", "value"], + "properties": { + "in": { + "type": "string", + "enum": ["header", "cookie"] + }, + "name": { + "description": "The header or cookie name.", + "type": "string" + }, + "value": { + "description": "The header or cookie value.", + "type": "string" + } + } + } + } + } + } + } + }, + "properties": { + "db": { + "type": "object", + "additionalProperties": false, + "description": "Configures the database connection", + "properties": { + "ignore_unknown_table_columns": { + "type": "boolean", + "description": "Ignore scan errors when columns in the SQL result have no fields in the destination struct", + "default": false + } + } + }, + "log": { + "type": "object", + "additionalProperties": false, + "description": "Configures the logger", + "properties": { + "level": { + "type": "string", + "description": "Sets the log level.", + "enum": ["panic", "fatal", "error", "warn", "info", "debug", "trace"], + "default": "info" + }, + "leak_sensitive_values": { + "type": "boolean", + "description": "Logs sensitive values such as cookie and URL parameter.", + "default": false + }, + "redaction_text": { + "type": "string", + "title": "Sensitive log value redaction text", + "description": "Text to use, when redacting sensitive log value." + }, + "format": { + "type": "string", + "description": "Sets the log format.", + "enum": ["json", "json_pretty", "text"], + "default": "text" + } + } + }, + "serve": { + "type": "object", + "additionalProperties": false, + "description": "Controls the configuration for the http(s) daemon(s).", + "properties": { + "public": { + "allOf": [ + { + "$ref": "ory://serve-config" + }, + { + "properties": { + "cors": { + "$ref": "ory://cors-config" + } + } + } + ] + }, + "admin": { + "allOf": [ + { + "$ref": "ory://serve-config" + }, + { + "properties": { + "cors": { + "$ref": "ory://cors-config" + } + } + } + ] + }, + "tls": { + "$ref": "ory://tls-config" + }, + "cookies": { + "type": "object", + "additionalProperties": false, + "properties": { + "same_site_mode": { + "type": "string", + "description": "Specify the SameSite mode that cookies should be sent with.", + "enum": ["Strict", "Lax", "None"], + "default": "None" + }, + "same_site_legacy_workaround": { + "type": "boolean", + "description": "Some older browser versions don’t work with SameSite=None. 
This option enables the workaround defined in https://web.dev/samesite-cookie-recipes/ which essentially stores a second cookie without SameSite as a fallback.", + "default": false, + "examples": [ + true + ] + }, + "domain": { + "title": "HTTP Cookie Domain", + "description": "Sets the cookie domain for session and CSRF cookies. Useful when dealing with subdomains. Use with care!", + "type": "string" + }, + "secure": { + "title": "HTTP Cookie Secure Flag in Development Mode", + "description": "Sets the HTTP Cookie secure flag in development mode. HTTP Cookies always have the secure flag in production mode.", + "type": "boolean", + "default": false + }, + "names": { + "title": "Cookie Names", + "description": "Sets the session cookie name. Use with care!", + "type": "object", + "properties": { + "device_csrf": { + "type": "string", + "title": "CSRF Cookie Name", + "default": "ory_hydra_device_csrf" + }, + "login_csrf": { + "type": "string", + "title": "CSRF Cookie Name", + "default": "ory_hydra_login_csrf" + }, + "consent_csrf": { + "type": "string", + "title": "CSRF Cookie Name", + "default": "ory_hydra_consent_csrf" + }, + "session": { + "type": "string", + "title": "Session Cookie Name", + "default": "ory_hydra_session" + } + } + }, + "paths": { + "title": "Cookie Paths", + "description": "Sets the path for which session cookie is scoped. Use with care!", + "type": "object", + "properties": { + "session": { + "type": "string", + "title": "Session Cookie Path", + "default": "/" + } + } + } + } + } + } + }, + "dsn": { + "type": "string", + "description": "Sets the data source name. This configures the backend where Ory Hydra persists data. If dsn is `memory`, data will be written to memory and is lost when you restart this instance. Ory Hydra supports popular SQL databases. For more detailed configuration information go to: https://www.ory.sh/docs/hydra/dependencies-environment#sql" + }, + "clients": { + "title": "Global outgoing network settings", + "description": "Configure how outgoing network calls behave.", + "type": "object", + "additionalProperties": false, + "properties": { + "http": { + "title": "Global HTTP client configuration", + "description": "Configure how outgoing HTTP calls behave.", + "type": "object", + "additionalProperties": false, + "properties": { + "disallow_private_ip_ranges": { + "title": "Disallow private IP ranges", + "description": "Disallow all outgoing HTTP calls to private IP ranges. This feature can help protect against SSRF attacks.", + "type": "boolean", + "default": false + }, + "private_ip_exception_urls": { + "title": "Add exempt URLs to private IP ranges", + "description": "Allows the given URLs to be called despite them being in the private IP range. URLs need to have an exact and case-sensitive match to be excempt.", + "type": "array", + "items": { + "type": "string", + "format": "uri-reference" + }, + "default": [] + } + } + } + } + }, + "hsm": { + "type": "object", + "additionalProperties": false, + "description": "Configures Hardware Security Module.", + "properties": { + "enabled": { + "type": "boolean" + }, + "library": { + "type": "string", + "description": "Full path (including file extension) of the HSM vendor PKCS#11 library" + }, + "pin": { + "type": "string", + "description": "PIN code for token operations" + }, + "slot": { + "type": "integer", + "description": "Slot ID of the token to use (if label is not specified)" + }, + "token_label": { + "type": "string", + "description": "Label of the token to use (if slot is not specified). 
If both slot and label are set, token label takes preference over slot. In this case first slot, that contains this label is used." + }, + "key_set_prefix": { + "type": "string", + "description": "Key set prefix can be used in case of multiple Ory Hydra instances need to store keys on the same HSM partition. For example if `hsm.key_set_prefix=app1.` then key set `hydra.openid.id-token` would be generated/requested/deleted on HSM with `CKA_LABEL=app1.hydra.openid.id-token`.", + "default": "" + } + } + }, + "webfinger": { + "type": "object", + "additionalProperties": false, + "description": "Configures ./well-known/ settings.", + "properties": { + "jwks": { + "type": "object", + "additionalProperties": false, + "description": "Configures the /.well-known/jwks.json endpoint.", + "properties": { + "broadcast_keys": { + "type": "array", + "description": "A list of JSON Web Keys that should be exposed at that endpoint. This is usually the public key for verifying OpenID Connect ID Tokens. However, you might want to add additional keys here as well.", + "items": { + "type": "string" + }, + "default": ["hydra.openid.id-token"], + "examples": ["hydra.jwt.access-token"] + } + } + }, + "oidc_discovery": { + "type": "object", + "additionalProperties": false, + "description": "Configures OpenID Connect Discovery (/.well-known/openid-configuration).", + "properties": { + "jwks_url": { + "type": "string", + "description": "Overwrites the JWKS URL", + "format": "uri-reference", + "examples": [ + "https://my-service.com/.well-known/jwks.json" + ] + }, + "token_url": { + "type": "string", + "description": "Overwrites the OAuth2 Token URL", + "format": "uri-reference", + "examples": [ + "https://my-service.com/oauth2/token" + ] + }, + "auth_url": { + "type": "string", + "description": "Overwrites the OAuth2 Auth URL", + "format": "uri-reference", + "examples": [ + "https://my-service.com/oauth2/auth" + ] + }, + "device_authorization_url": { + "type": "string", + "description": "Overwrites the OAuth2 Device Auth URL", + "format": "uri-reference", + "examples": [ + "https://my-service.com/oauth2/device/auth" + ] + }, + "client_registration_url": { + "description": "Sets the OpenID Connect Dynamic Client Registration Endpoint", + "type": "string", + "format": "uri-reference", + "examples": [ + "https://my-service.com/clients" + ] + }, + "supported_claims": { + "type": "array", + "description": "A list of supported claims to be broadcasted. Claim `sub` is always included.", + "items": { + "type": "string" + }, + "examples": [["email", "username"]] + }, + "supported_scope": { + "type": "array", + "description": "The scope OAuth 2.0 Clients may request. Scope `offline`, `offline_access`, and `openid` are always included.", + "items": { + "type": "string" + }, + "examples": [["email", "whatever", "read.photos"]] + }, + "userinfo_url": { + "type": "string", + "description": "A URL of the userinfo endpoint to be advertised at the OpenID Connect Discovery endpoint /.well-known/openid-configuration. Defaults to Ory Hydra's userinfo endpoint at /userinfo. Set this value if you want to handle this endpoint yourself.", + "format": "uri-reference", + "examples": [ + "https://example.org/my-custom-userinfo-endpoint" + ] + } + } + } + } + }, + "oidc": { + "type": "object", + "additionalProperties": false, + "description": "Configures OpenID Connect features.", + "properties": { + "subject_identifiers": { + "type": "object", + "additionalProperties": false, + "description": "Configures the Subject Identifier algorithm. 
For more information please head over to the documentation: https://www.ory.sh/docs/hydra/advanced#subject-identifier-algorithms", + "properties": { + "supported_types": { + "type": "array", + "description": "A list of algorithms to enable.", + "default": ["public"], + "items": { + "type": "string", + "enum": ["public", "pairwise"] + } + }, + "pairwise": { + "type": "object", + "additionalProperties": false, + "description": "Configures the pairwise algorithm.", + "properties": { + "salt": { + "type": "string" + } + }, + "required": ["salt"] + } + }, + "anyOf": [ + { + "if": { + "properties": { + "supported_types": { + "contains": { + "type": "string", + "const": "pairwise" + } + } + } + }, + "then": { + "required": [ + "pairwise" + ] + } + }, + { + "not": { + "required": ["supported_types"] + } + } + ], + "examples": [ + { + "supported_types": ["public", "pairwise"], + "pairwise": { + "salt": "some-random-salt" + } + } + ] + }, + "dynamic_client_registration": { + "type": "object", + "additionalProperties": false, + "description": "Configures OpenID Connect Dynamic Client Registration (exposed as admin endpoints /clients/...).", + "properties": { + "enabled": { + "type": "boolean", + "description": "Enable dynamic client registration.", + "default": false + }, + "default_scope": { + "type": "array", + "description": "The OpenID Connect Dynamic Client Registration specification has no concept of whitelisting OAuth 2.0 Scope. If you want to expose Dynamic Client Registration, you should set the default scope enabled for newly registered clients. Keep in mind that users can overwrite this default by setting the `scope` key in the registration payload, effectively disabling the concept of whitelisted scopes.", + "items": { + "type": "string" + }, + "examples": [["openid", "offline", "offline_access"]] + } + } + } + } + }, + "urls": { + "type": "object", + "additionalProperties": false, + "properties": { + "self": { + "type": "object", + "additionalProperties": false, + "properties": { + "issuer": { + "type": "string", + "description": "This value will be used as the `issuer` in access and ID tokens. It must be specified and using HTTPS protocol, unless --dev is set. This should typically be equal to the public value.", + "format": "uri", + "examples": ["https://localhost:4444/"] + }, + "public": { + "type": "string", + "description": "This is the base location of the public endpoints of your Ory Hydra installation. This should typically be equal to the issuer value. If left unspecified, it falls back to the issuer value.", + "format": "uri", + "examples": [ + "https://localhost:4444/" + ] + }, + "admin": { + "type": "string", + "description": "This is the base location of the admin endpoints of your Ory Hydra installation.", + "format": "uri", + "examples": [ + "https://localhost:4445/" + ] + } + } + }, + "login": { + "type": "string", + "description": "Sets the OAuth2 Login Endpoint URL of the OAuth2 User Login & Consent flow. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-login.app/login", + "/ui/login" + ] + }, + "registration": { + "type": "string", + "description": "Sets the OAuth2 Registration Endpoint URL of the OAuth2 User Login & Consent flow. Defaults to the same value as `login`. 
The registration URL is used if the authorization request was started with the `prompt=registration` parameter.", + "format": "uri-reference", + "examples": [ + "https://my-login.app/registration", + "/ui/registration" + ] + }, + "consent": { + "type": "string", + "description": "Sets the consent endpoint of the User Login & Consent flow. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-consent.app/consent", + "/ui/consent" + ] + }, + "logout": { + "type": "string", + "description": "Sets the logout endpoint. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-logout.app/logout", + "/ui/logout" + ] + }, + "device": { + "type": "object", + "description": "Configure URLs for the OAuth 2.0 Device Code Flow.", + "properties": { + "verification": { + "type": "string", + "description": "Sets the device user code verification endpoint. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-logout.app/device_verification", + "/ui/device_verification" + ] + }, + "success": { + "type": "string", + "description": "Sets the post device authentication endpoint. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-logout.app/device_done", + "/ui/device_done" + ] + } + } + }, + "error": { + "type": "string", + "description": "Sets the error endpoint. The error ui will be shown when an OAuth2 error occurs that which can not be sent back to the client. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-error.app/error", + "/ui/error" + ] + }, + "post_logout_redirect": { + "type": "string", + "description": "When a user agent requests to logout, it will be redirected to this url afterwards per default.", + "format": "uri-reference", + "examples": [ + "https://my-example.app/logout-successful", + "/ui" + ] + }, + "identity_provider": { + "type": "object", + "additionalProperties": false, + "properties": { + "url": { + "title": "The admin URL of the ORY Kratos instance.", + "description": "If set, ORY Hydra will use this URL to log out the user in addition to removing the Hydra session.", + "type": "string", + "format": "uri", + "examples": [ + "https://kratos.example.com/admin" + ] + }, + "publicUrl": { + "title": "The public URL of the ORY Kratos instance.", + "type": "string", + "format": "uri", + "examples": [ + "https://kratos.example.com/public" + ] + }, + "headers": { + "title": "HTTP Request Headers", + "description": "These headers will be passed in HTTP requests to the Identity Provider.", + "type": "object", + "additionalProperties": { + "type": "string" + }, + "examples": [ + { + "Authorization": "Bearer some-token" + } + ] + } + } + } + } + }, + "strategies": { + "type": "object", + "additionalProperties": false, + "properties": { + "scope": { + "type": "string", + "description": "Defines how scopes are matched. For more details have a look at https://github.com/ory/fosite#scopes", + "enum": [ + "exact", + "wildcard" + ], + "default": "wildcard" + }, + "access_token": { + "type": "string", + "description": "Defines access token type. 
jwt is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token", + "enum": ["opaque", "jwt"], + "default": "opaque" + }, + "jwt": { + "type": "object", + "additionalProperties": false, + "properties": { + "scope_claim": { + "type": "string", + "description": "Defines how the scope claim is represented within a JWT access token", + "enum": ["list", "string", "both"], + "default": "list" + } + } + } + } + }, + "ttl": { + "type": "object", + "additionalProperties": false, + "description": "Configures time to live.", + "properties": { + "login_consent_request": { + "description": "Configures how long a user login and consent flow may take.", + "default": "30m", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "authentication_session": { + "description": "Configures how long the authentication session cookie will be valid after login has been remembered. The larger this value is, the more database storage is needed. Defaults to 30 days.", + "default": "720h", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "access_token": { + "description": "Configures how long access tokens are valid. The larger this value is, the more database storage is needed.", + "default": "1h", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "refresh_token": { + "description": "Configures how long refresh tokens are valid. The larger this value is, the more database storage is needed. Set to -1 for refresh tokens to never expire, which is not recommended as the database can not be cleaned from stale tokens.", + "default": "720h", + "oneOf": [ + { + "$ref": "#/definitions/duration" + }, + { + "enum": [ + "-1", + -1 + ] + } + ] + }, + "id_token": { + "description": "Configures how long id tokens are valid.", + "default": "1h", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "auth_code": { + "description": "Configures how long auth codes are valid. The larger this value is, the more database storage is needed.", + "default": "10m", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "device_user_code": { + "description": "Configures how long device & user codes are valid. The larger this value is, the more database storage is needed.", + "default": "10m", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + } + } + }, + "oauth2": { + "type": "object", + "additionalProperties": false, + "properties": { + "expose_internal_errors": { + "type": "boolean", + "description": "Set this to true if you want to share error debugging information with your OAuth 2.0 clients. Keep in mind that debug information is very valuable when dealing with errors, but might also expose database error codes and similar errors.", + "default": false, + "examples": [true] + }, + "session": { + "type": "object", + "properties": { + "encrypt_at_rest": { + "type": "boolean", + "default": true, + "title": "Encrypt OAuth2 Session", + "description": "If set to true (default) Ory Hydra encrypt OAuth2 and OpenID Connect session data using AES-GCM and the system secret before persisting it in the database." 
+ } + } + }, + "exclude_not_before_claim": { + "type": "boolean", + "description": "Set to true if you want to exclude claim `nbf (not before)` part of access token.", + "default": false, + "examples": [true] + }, + "allowed_top_level_claims": { + "type": "array", + "description": "A list of custom claims which are allowed to be added top level to the Access Token. They cannot override reserved claims.", + "items": { + "type": "string" + }, + "examples": [["username", "email", "user_uuid"]] + }, + "mirror_top_level_claims": { + "type": "boolean", + "description": "Set to false if you don't want to mirror custom claims under 'ext'", + "default": true, + "examples": [false] + }, + "hashers": { + "type": "object", + "additionalProperties": false, + "description": "Configures hashing algorithms. Supports only BCrypt and PBKDF2 at the moment.", + "properties": { + "algorithm": { + "title": "Password hashing algorithm", + "description": "One of the values: pbkdf2, bcrypt.\n\nWarning! This value can not be changed once set as all existing OAuth 2.0 Clients will not be able to sign in any more.", + "type": "string", + "default": "pbkdf2", + "enum": [ + "pbkdf2", + "bcrypt" + ] + }, + "bcrypt": { + "type": "object", + "additionalProperties": false, + "description": "Configures the BCrypt hashing algorithm used for hashing OAuth 2.0 Client Secrets.", + "properties": { + "cost": { + "type": "integer", + "description": "Sets the BCrypt cost. The higher the value, the more CPU time is being used to generate hashes.", + "default": 10, + "minimum": 4, + "maximum": 31 + } + } + }, + "pbkdf2": { + "type": "object", + "additionalProperties": false, + "description": "Configures the PBKDF2 hashing algorithm used for hashing OAuth 2.0 Client Secrets.", + "properties": { + "iterations": { + "type": "integer", + "description": "Sets the PBKDF2 iterations. The higher the value, the more CPU time is being used to generate hashes.", + "default": 25000, + "minimum": 1 + } + } + } + } + }, + "pkce": { + "type": "object", + "additionalProperties": false, + "properties": { + "enforced": { + "type": "boolean", + "description": "Sets whether PKCE should be enforced for all clients.", + "examples": [true] + }, + "enforced_for_public_clients": { + "type": "boolean", + "description": "Sets whether PKCE should be enforced for public clients.", + "examples": [true] + } + } + }, + "client_credentials": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_grant_allowed_scope": { + "type": "boolean", + "description": "Automatically grant authorized OAuth2 Scope in OAuth2 Client Credentials Flow. Each OAuth2 Client is allowed to request a predefined OAuth2 Scope (for example `read write`). If this option is enabled, the full\nscope is automatically granted when performing the OAuth2 Client Credentials flow.\n\nIf disabled, the OAuth2 Client has to request the scope in the OAuth2 request by providing the `scope` query parameter. Setting this option to true is common if you need compatibility with MITREid.", + "examples": [ + false + ] + } + } + }, + "grant": { + "type": "object", + "additionalProperties": false, + "properties": { + "refresh_token": { + "type": "object", + "properties": { + "rotation_grace_period": { + "title": "Refresh Token Rotation Grace Period", + "description": "Configures how long a Refresh Token remains valid after it has been used. 
The maximum value is 5 minutes, unless also a reuse count is configured, in which case the maximum is 180 days.", + "default": "0s", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "rotation_grace_reuse_count": { + "title": "Refresh Token Rotation Grace Period Reuse Count", + "description": "Configures how many times a Refresh Token can be reused during the grace period. This is only effective if combined with a rotation grace period.", + "default": 0, + "type": "integer", + "minimum": 0 + } + } + }, + "jwt": { + "type": "object", + "additionalProperties": false, + "description": "Authorization Grants using JWT configuration", + "properties": { + "jti_optional": { + "type": "boolean", + "description": "Configures if the JSON Web Token ID (`jti`) claim is required in the JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants (RFC7523). If set to `false`, the `jti` claim is required. Set this value to `true` only after careful consideration.", + "default": false + }, + "iat_optional": { + "type": "boolean", + "description": "Configures if the issued at (`iat`) claim is required in the JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants (RFC7523). If set to `false`, the `iat` claim is required. Set this value to `true` only after careful consideration.", + "default": false + }, + "max_ttl": { + "description": "Configures what the maximum age of a JWT assertion used in the JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants (RFC7523) can be. This feature uses the `exp` claim and `iat` claim to calculate assertion age. Assertions exceeding the max age will be denied. Useful as a safety measure and recommended to keep below 720h. This governs the `grant.jwt.max_ttl` setting.", + "default": "720h", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + } + } + } + } + }, + "refresh_token_hook": { + "description": "Sets the refresh token hook endpoint. If set it will be called during token refresh to receive updated token claims.", + "examples": ["https://my-example.app/token-refresh-hook"], + "oneOf": [ + { + "type": "string", + "format": "uri" + }, + { + "$ref": "#/definitions/webhook_config" + } + ] + }, + "device_authorization": { + "type": "object", + "additionalProperties": false, + "properties": { + "token_polling_interval": { + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ], + "default": "5s", + "description": "Configures how often a non-interactive device should poll the device token endpoint, this is a purely informational configuration and does not enforce rate-limiting.", + "examples": ["5s", "15s", "1m"] + }, + "user_code": { + "type": "object", + "description": "Configures the user code settings.", + "oneOf": [ + { + "properties": { + "entropy_preset": { + "type": "string", + "description": "Presets for the user-code length and character set.", + "enum": ["high", "medium", "low"] + } + }, + "required": ["entropy_preset"], + "additionalProperties": false + }, + { + "properties": { + "length": { + "type": "integer", + "description": "The length of the user code.", + "minimum": 6 + }, + "character_set": { + "type": "string", + "description": "The character set to use for the user code. 
Provide the raw characters that should be used.", + "examples": ["ABCDEFGHJKLMNPQRSTUVWXYZ23456789"], + "minLength": 8 + } + }, + "required": ["length", "character_set"], + "additionalProperties": false + } + ] + } + } + }, + "token_hook": { + "description": "Sets the token hook endpoint for all grant types. If set, it will be called when a token is issued to customize claims.", + "examples": ["https://my-example.app/token-hook"], + "oneOf": [ + { + "type": "string", + "format": "uri" + }, + { + "$ref": "#/definitions/webhook_config" + } + ] + } + } + }, + "secrets": { + "type": "object", + "additionalProperties": false, + "description": "The secrets section configures secrets used for encryption and signing of several systems. All secrets can be rotated; for more information on this topic go to: https://www.ory.sh/docs/hydra/advanced#rotation-of-hmac-token-signing-and-database-and-cookie-encryption-keys", + "properties": { + "system": { + "description": "The system secret must be at least 16 characters long. If none is provided, one will be generated. The key is used to encrypt sensitive data using AES-GCM (256 bit) and validate HMAC signatures. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "type": "array", + "items": { + "type": "string", + "minLength": 16 + }, + "examples": [ + [ + "this-is-the-primary-secret", + "this-is-an-old-secret", + "this-is-another-old-secret" + ] + ] + }, + "cookie": { + "type": "array", + "description": "Secrets that are used for cookie session encryption. Defaults to secrets.system. It is recommended to use a separate secret in production. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "items": { + "type": "string", + "minLength": 16 + }, + "examples": [ + [ + "this-is-the-primary-secret", + "this-is-an-old-secret", + "this-is-another-old-secret" + ] + ] + }, + "pagination": { + "type": "array", + "description": "Secrets that are used for pagination token encryption. Defaults to secrets.system. It is recommended to use a separate secret in production. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "items": { + "type": "string", + "minLength": 16 + }, + "examples": [ + [ + "this-is-the-primary-secret", + "this-is-an-old-secret", + "this-is-another-old-secret" + ] + ] + } + } + }, + "profiling": { + "type": "string", + "description": "Enables profiling if set.
For more details on profiling, head over to: https://blog.golang.org/profiling-go-programs", + "enum": ["cpu", "mem"], + "examples": ["cpu"] + }, + "tracing": { + "$ref": "https://raw.githubusercontent.com/ory/hydra/ab35ad47/oryx/otelx/config.schema.json" + }, + "sqa": { + "type": "object", + "additionalProperties": true, + "description": "Software Quality Assurance telemetry configuration section", + "properties": { + "opt_out": { + "type": "boolean", + "description": "Disables anonymized telemetry reports - for more information please visit https://www.ory.sh/docs/ecosystem/sqa", + "default": false, + "examples": [true] + } + }, + "examples": [ + { + "opt_out": true + } + ] + }, + "version": { + "type": "string", + "title": "The Hydra version this config is written for.", + "description": "SemVer according to https://semver.org/ prefixed with `v` as in our releases.", + "pattern": "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" + }, + "cgroups": { + "type": "object", + "additionalProperties": false, + "description": "Ory Hydra can respect Linux container CPU quota", + "properties": { + "v1": { + "type": "object", + "additionalProperties": false, + "description": "Configures parameters using cgroups v1 hierarchy", + "properties": { + "auto_max_procs_enabled": { + "type": "boolean", + "description": "Set GOMAXPROCS automatically according to cgroups limits", + "default": false, + "examples": [true] + } + } + } + } + }, + "dev": { + "type": "boolean", + "title": "Enable development mode", + "description": "If true, disables critical security measures to allow easier local development. Do not use in production.", + "default": false + }, + "feature_flags": { + "title": "Feature flags", + "type": "object", + "additionalProperties": true + } + }, + "additionalProperties": false +} diff --git a/.schema/openapi/patches/oauth2.yaml b/.schema/openapi/patches/oauth2.yaml index 3f4f5648015..bf4decd7a89 100644 --- a/.schema/openapi/patches/oauth2.yaml +++ b/.schema/openapi/patches/oauth2.yaml @@ -6,26 +6,6 @@ path: /components/schemas/acceptOAuth2ConsentRequestSession/properties/id_token/additionalProperties - op: remove path: /components/schemas/acceptOAuth2ConsentRequestSession/properties/id_token/type -- op: replace - path: /components/schemas/oAuth2ConsentSession/properties/expires_at - value: - type: object - properties: - access_token: - format: date-time - type: string - refresh_token: - format: date-time - type: string - authorize_code: - format: date-time - type: string - id_token: - format: date-time - type: string - par_context: - format: date-time - type: string - op: replace path: /components/schemas/nullDuration @@ -41,3 +21,7 @@ description: "Specify a time duration in milliseconds, seconds, minutes, hours." 
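For orientation, here is a minimal sketch of how a few of the options defined in the configuration schema above could appear in a Hydra YAML configuration file. The key paths follow the schema in this patch; the concrete values and the hook URL are illustrative only, not defaults.

```yaml
# Illustrative Hydra configuration sketch (values are examples only).
version: v2.3.0

ttl:
  login_consent_request: 30m
  access_token: 1h
  refresh_token: 720h
  auth_code: 10m

oauth2:
  expose_internal_errors: false
  pkce:
    enforced_for_public_clients: true
  hashers:
    algorithm: pbkdf2
  # Called for all grant types when a token is issued (URI form; a structured
  # webhook_config object is also accepted per the schema).
  token_hook: https://my-example.app/token-hook

secrets:
  # The first entry signs and encrypts; older entries are kept only for
  # verification and decryption, which is what makes rotation possible.
  system:
    - this-is-the-primary-secret
    - this-is-an-old-secret
```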
type: string pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" +- op: replace + path: /components/schemas/oAuth2Client/properties/jwks + value: + "$ref": "#/components/schemas/jsonWebKeySet" diff --git a/.schema/version.schema.json b/.schema/version.schema.json index cbc0c50f8b2..113db4ff737 100644 --- a/.schema/version.schema.json +++ b/.schema/version.schema.json @@ -1,7 +1,126 @@ { - "$id": "https://github.com/ory/kratos/.schema/versions.config.schema.json", + "$id": "https://github.com/ory/hydra/.schema/versions.config.schema.json", "$schema": "http://json-schema.org/draft-07/schema#", "oneOf": [ + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.3.0" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.3.0/.schema/config.schema.json" + } + ] + }, + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.2.0" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.2.0/.schema/config.schema.json" + } + ] + }, + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.2.0-rc.3" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.2.0-rc.3/.schema/config.schema.json" + } + ] + }, + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.2.0-rc.2" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.2.0-rc.2/.schema/config.schema.json" + } + ] + }, + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.1.2" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.1.2/.schema/config.schema.json" + } + ] + }, + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.1.1" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.1.1/.schema/config.schema.json" + } + ] + }, + { + "allOf": [ + { + "properties": { + "version": { + "const": "v2.1.0" + } + }, + "required": [ + "version" + ] + }, + { + "$ref": "https://raw.githubusercontent.com/ory/hydra/v2.1.0/.schema/config.schema.json" + } + ] + }, { "allOf": [ { diff --git a/.trivyignore b/.trivyignore new file mode 100644 index 00000000000..73859219e24 --- /dev/null +++ b/.trivyignore @@ -0,0 +1 @@ +CVE-2023-2650 diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a4dd882914..3f10ced1107 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,242 +4,307 @@ **Table of Contents** -- [0.0.0 (2022-12-23)](#000-2022-12-23) -- [2.0.3 (2022-12-08)](#203-2022-12-08) +- [0.0.0 (2025-11-07)](#000-2025-11-07) +- [25.4.0 (2025-11-06)](#2540-2025-11-06) + - [Breaking Changes](#breaking-changes) + - [Related issue(s)](#related-issues) + - [Checklist](#checklist) - [Bug Fixes](#bug-fixes) - - [Code Generation](#code-generation) + - [Chores](#chores) + - [Code Refactoring](#code-refactoring) - [Features](#features) -- [2.0.2 (2022-11-10)](#202-2022-11-10) + - [Performance Improvements](#performance-improvements) + - [Tests](#tests) + - [Unclassified](#unclassified) +- [2.3.0 (2025-01-17)](#230-2025-01-17) + - [Breaking Changes](#breaking-changes-1) - [Bug Fixes](#bug-fixes-1) - - [Code Generation](#code-generation-1) + - [Code Generation](#code-generation) - [Documentation](#documentation) - [Features](#features-1) - - [Tests](#tests) -- [2.0.1 (2022-10-27)](#201-2022-10-27) + - [Reverts](#reverts) + - [Tests](#tests-1) + - [Unclassified](#unclassified-1) +- [2.2.0 
(2024-02-12)](#220-2024-02-12) - [Bug Fixes](#bug-fixes-2) + - [Code Generation](#code-generation-1) +- [2.2.0-pre.1 (2024-02-01)](#220-pre1-2024-02-01) + - [Bug Fixes](#bug-fixes-3) - [Code Generation](#code-generation-2) - [Documentation](#documentation-1) -- [2.0.0 (2022-10-27)](#200-2022-10-27) - - [Breaking Changes](#breaking-changes) - - [Bug Fixes](#bug-fixes-3) - - [Code Generation](#code-generation-3) - - [Code Refactoring](#code-refactoring) - - [Documentation](#documentation-2) - [Features](#features-2) - - [Tests](#tests-1) - - [Unclassified](#unclassified) -- [1.11.10 (2022-08-25)](#11110-2022-08-25) +- [2.2.0-rc.3 (2023-08-16)](#220-rc3-2023-08-16) - [Bug Fixes](#bug-fixes-4) + - [Code Generation](#code-generation-3) + - [Features](#features-3) +- [2.2.0-pre.0 (2023-06-22)](#220-pre0-2023-06-22) - [Code Generation](#code-generation-4) -- [1.11.9 (2022-08-01)](#1119-2022-08-01) + - [Features](#features-4) +- [2.2.0-rc.2 (2023-06-13)](#220-rc2-2023-06-13) - [Bug Fixes](#bug-fixes-5) - [Code Generation](#code-generation-5) - - [Documentation](#documentation-3) - - [Features](#features-3) -- [1.11.8 (2022-05-04)](#1118-2022-05-04) + - [Features](#features-5) +- [2.2.0-rc.1 (2023-06-12)](#220-rc1-2023-06-12) + - [Breaking Changes](#breaking-changes-2) - [Bug Fixes](#bug-fixes-6) - [Code Generation](#code-generation-6) - - [Documentation](#documentation-4) - - [Features](#features-4) - - [Tests](#tests-2) -- [1.11.7 (2022-02-23)](#1117-2022-02-23) - - [Code Generation](#code-generation-7) -- [1.11.6 (2022-02-23)](#1116-2022-02-23) + - [Features](#features-6) + - [Unclassified](#unclassified-2) +- [2.1.2 (2023-05-24)](#212-2023-05-24) - [Bug Fixes](#bug-fixes-7) - - [Code Generation](#code-generation-8) -- [1.11.5 (2022-02-21)](#1115-2022-02-21) + - [Code Generation](#code-generation-7) + - [Documentation](#documentation-2) + - [Features](#features-7) +- [2.1.1 (2023-04-11)](#211-2023-04-11) - [Bug Fixes](#bug-fixes-8) - - [Code Generation](#code-generation-9) -- [1.11.4 (2022-02-16)](#1114-2022-02-16) + - [Code Generation](#code-generation-8) +- [2.1.0 (2023-04-06)](#210-2023-04-06) - [Bug Fixes](#bug-fixes-9) + - [Code Generation](#code-generation-9) +- [2.1.0-pre.2 (2023-04-03)](#210-pre2-2023-04-03) - [Code Generation](#code-generation-10) -- [1.11.3 (2022-02-15)](#1113-2022-02-15) - - [Bug Fixes](#bug-fixes-10) +- [2.1.0-pre.1 (2023-04-03)](#210-pre1-2023-04-03) - [Code Generation](#code-generation-11) -- [1.11.2 (2022-02-11)](#1112-2022-02-11) +- [2.1.0-pre.0 (2023-03-31)](#210-pre0-2023-03-31) + - [Bug Fixes](#bug-fixes-10) - [Code Generation](#code-generation-12) -- [1.11.1 (2022-02-11)](#1111-2022-02-11) + - [Documentation](#documentation-3) + - [Features](#features-8) +- [2.0.3 (2022-12-08)](#203-2022-12-08) - [Bug Fixes](#bug-fixes-11) - [Code Generation](#code-generation-13) - - [Code Refactoring](#code-refactoring-1) - - [Documentation](#documentation-5) -- [1.11.0 (2022-01-21)](#1110-2022-01-21) - - [Breaking Changes](#breaking-changes-1) + - [Features](#features-9) +- [2.0.2 (2022-11-10)](#202-2022-11-10) - [Bug Fixes](#bug-fixes-12) - [Code Generation](#code-generation-14) - - [Documentation](#documentation-6) - - [Features](#features-5) -- [1.10.7 (2021-10-27)](#1107-2021-10-27) - - [Breaking Changes](#breaking-changes-2) + - [Documentation](#documentation-4) + - [Features](#features-10) + - [Tests](#tests-2) +- [2.0.1 (2022-10-27)](#201-2022-10-27) - [Bug Fixes](#bug-fixes-13) - [Code Generation](#code-generation-15) - - [Code Refactoring](#code-refactoring-2) - - 
[Documentation](#documentation-7) - - [Features](#features-6) -- [1.10.6 (2021-08-28)](#1106-2021-08-28) + - [Documentation](#documentation-5) +- [2.0.0 (2022-10-27)](#200-2022-10-27) + - [Breaking Changes](#breaking-changes-3) - [Bug Fixes](#bug-fixes-14) - [Code Generation](#code-generation-16) - - [Documentation](#documentation-8) -- [1.10.5 (2021-08-13)](#1105-2021-08-13) + - [Code Refactoring](#code-refactoring-1) + - [Documentation](#documentation-6) + - [Features](#features-11) + - [Tests](#tests-3) + - [Unclassified](#unclassified-3) +- [1.11.10 (2022-08-25)](#11110-2022-08-25) - [Bug Fixes](#bug-fixes-15) - [Code Generation](#code-generation-17) - - [Documentation](#documentation-9) - - [Features](#features-7) -- [1.10.3 (2021-07-14)](#1103-2021-07-14) +- [1.11.9 (2022-08-01)](#1119-2022-08-01) - [Bug Fixes](#bug-fixes-16) - [Code Generation](#code-generation-18) - - [Code Refactoring](#code-refactoring-3) - - [Documentation](#documentation-10) - - [Features](#features-8) -- [1.10.2 (2021-05-04)](#1102-2021-05-04) - - [Breaking Changes](#breaking-changes-3) + - [Documentation](#documentation-7) + - [Features](#features-12) +- [1.11.8 (2022-05-04)](#1118-2022-05-04) - [Bug Fixes](#bug-fixes-17) - [Code Generation](#code-generation-19) - - [Code Refactoring](#code-refactoring-4) - - [Documentation](#documentation-11) - - [Features](#features-9) -- [1.10.1 (2021-03-25)](#1101-2021-03-25) - - [Bug Fixes](#bug-fixes-18) + - [Documentation](#documentation-8) + - [Features](#features-13) + - [Tests](#tests-4) +- [1.11.7 (2022-02-23)](#1117-2022-02-23) - [Code Generation](#code-generation-20) - - [Documentation](#documentation-12) - - [Features](#features-10) - - [Tests](#tests-3) - - [Unclassified](#unclassified-1) -- [1.9.2 (2021-01-29)](#192-2021-01-29) +- [1.11.6 (2022-02-23)](#1116-2022-02-23) + - [Bug Fixes](#bug-fixes-18) - [Code Generation](#code-generation-21) - - [Features](#features-11) -- [1.9.1 (2021-01-27)](#191-2021-01-27) +- [1.11.5 (2022-02-21)](#1115-2022-02-21) + - [Bug Fixes](#bug-fixes-19) - [Code Generation](#code-generation-22) - - [Documentation](#documentation-13) -- [1.9.0 (2021-01-12)](#190-2021-01-12) +- [1.11.4 (2022-02-16)](#1114-2022-02-16) + - [Bug Fixes](#bug-fixes-20) - [Code Generation](#code-generation-23) -- [1.9.0-rc.0 (2021-01-12)](#190-rc0-2021-01-12) +- [1.11.3 (2022-02-15)](#1113-2022-02-15) + - [Bug Fixes](#bug-fixes-21) - [Code Generation](#code-generation-24) -- [1.9.0-alpha.4.pre.0 (2021-01-12)](#190-alpha4pre0-2021-01-12) - - [Bug Fixes](#bug-fixes-19) +- [1.11.2 (2022-02-11)](#1112-2022-02-11) - [Code Generation](#code-generation-25) - - [Documentation](#documentation-14) -- [1.9.0-alpha.3 (2020-12-08)](#190-alpha3-2020-12-08) - - [Breaking Changes](#breaking-changes-4) - - [Bug Fixes](#bug-fixes-20) +- [1.11.1 (2022-02-11)](#1111-2022-02-11) + - [Bug Fixes](#bug-fixes-22) - [Code Generation](#code-generation-26) - - [Code Refactoring](#code-refactoring-5) - - [Documentation](#documentation-15) - - [Features](#features-12) - - [Tests](#tests-4) - - [Unclassified](#unclassified-2) -- [1.9.0-alpha.2 (2020-10-29)](#190-alpha2-2020-10-29) - - [Bug Fixes](#bug-fixes-21) + - [Code Refactoring](#code-refactoring-2) + - [Documentation](#documentation-9) +- [1.11.0 (2022-01-21)](#1110-2022-01-21) + - [Breaking Changes](#breaking-changes-4) + - [Bug Fixes](#bug-fixes-23) - [Code Generation](#code-generation-27) - - [Documentation](#documentation-16) - - [Features](#features-13) - - [Tests](#tests-5) -- [1.9.0-alpha.1 
(2020-10-20)](#190-alpha1-2020-10-20) - - [Bug Fixes](#bug-fixes-22) - - [Code Generation](#code-generation-28) - - [Code Refactoring](#code-refactoring-6) - - [Documentation](#documentation-17) + - [Documentation](#documentation-10) - [Features](#features-14) - - [Tests](#tests-6) -- [1.8.5 (2020-10-03)](#185-2020-10-03) - - [Code Generation](#code-generation-29) -- [1.8.0-pre.1 (2020-10-03)](#180-pre1-2020-10-03) - - [Bug Fixes](#bug-fixes-23) - - [Code Generation](#code-generation-30) - - [Features](#features-15) -- [1.8.0-pre.0 (2020-10-02)](#180-pre0-2020-10-02) +- [1.10.7 (2021-10-27)](#1107-2021-10-27) - [Breaking Changes](#breaking-changes-5) - [Bug Fixes](#bug-fixes-24) - - [Code Generation](#code-generation-31) - - [Documentation](#documentation-18) - - [Features](#features-16) -- [1.7.4 (2020-08-31)](#174-2020-08-31) + - [Code Generation](#code-generation-28) + - [Code Refactoring](#code-refactoring-3) + - [Documentation](#documentation-11) + - [Features](#features-15) +- [1.10.6 (2021-08-28)](#1106-2021-08-28) - [Bug Fixes](#bug-fixes-25) - - [Code Generation](#code-generation-32) -- [1.7.3 (2020-08-31)](#173-2020-08-31) - - [Code Generation](#code-generation-33) -- [1.7.1 (2020-08-31)](#171-2020-08-31) - - [Breaking Changes](#breaking-changes-6) + - [Code Generation](#code-generation-29) + - [Documentation](#documentation-12) +- [1.10.5 (2021-08-13)](#1105-2021-08-13) - [Bug Fixes](#bug-fixes-26) - - [Code Generation](#code-generation-34) - - [Code Refactoring](#code-refactoring-7) - - [Documentation](#documentation-19) - - [Features](#features-17) - - [Unclassified](#unclassified-3) -- [1.7.0 (2020-08-14)](#170-2020-08-14) - - [Breaking Changes](#breaking-changes-7) + - [Code Generation](#code-generation-30) + - [Documentation](#documentation-13) + - [Features](#features-16) +- [1.10.3 (2021-07-14)](#1103-2021-07-14) - [Bug Fixes](#bug-fixes-27) - - [Code Generation](#code-generation-35) - - [Code Refactoring](#code-refactoring-8) - - [Documentation](#documentation-20) + - [Code Generation](#code-generation-31) + - [Code Refactoring](#code-refactoring-4) + - [Documentation](#documentation-14) + - [Features](#features-17) +- [1.10.2 (2021-05-04)](#1102-2021-05-04) + - [Breaking Changes](#breaking-changes-6) + - [Bug Fixes](#bug-fixes-28) + - [Code Generation](#code-generation-32) + - [Code Refactoring](#code-refactoring-5) + - [Documentation](#documentation-15) - [Features](#features-18) +- [1.10.1 (2021-03-25)](#1101-2021-03-25) + - [Bug Fixes](#bug-fixes-29) + - [Code Generation](#code-generation-33) + - [Documentation](#documentation-16) + - [Features](#features-19) + - [Tests](#tests-5) - [Unclassified](#unclassified-4) -- [1.6.0 (2020-07-20)](#160-2020-07-20) - - [Bug Fixes](#bug-fixes-28) +- [1.9.2 (2021-01-29)](#192-2021-01-29) + - [Code Generation](#code-generation-34) + - [Features](#features-20) +- [1.9.1 (2021-01-27)](#191-2021-01-27) + - [Code Generation](#code-generation-35) + - [Documentation](#documentation-17) +- [1.9.0 (2021-01-12)](#190-2021-01-12) - [Code Generation](#code-generation-36) - - [Documentation](#documentation-21) - - [Unclassified](#unclassified-5) -- [1.5.2 (2020-06-23)](#152-2020-06-23) - - [Bug Fixes](#bug-fixes-29) +- [1.9.0-rc.0 (2021-01-12)](#190-rc0-2021-01-12) - [Code Generation](#code-generation-37) - - [Features](#features-19) -- [1.5.1 (2020-06-16)](#151-2020-06-16) - - [Code Generation](#code-generation-38) -- [1.5.0 (2020-06-16)](#150-2020-06-16) +- [1.9.0-alpha.4.pre.0 (2021-01-12)](#190-alpha4pre0-2021-01-12) - [Bug 
Fixes](#bug-fixes-30) - - [Chores](#chores) - - [Documentation](#documentation-22) - - [Features](#features-20) - - [Unclassified](#unclassified-6) -- [1.5.0-beta.5 (2020-05-28)](#150-beta5-2020-05-28) + - [Code Generation](#code-generation-38) + - [Documentation](#documentation-18) +- [1.9.0-alpha.3 (2020-12-08)](#190-alpha3-2020-12-08) + - [Breaking Changes](#breaking-changes-7) - [Bug Fixes](#bug-fixes-31) - - [Chores](#chores-1) - - [Documentation](#documentation-23) + - [Code Generation](#code-generation-39) + - [Code Refactoring](#code-refactoring-6) + - [Documentation](#documentation-19) - [Features](#features-21) -- [1.5.0-beta.3 (2020-05-23)](#150-beta3-2020-05-23) - - [Chores](#chores-2) -- [1.5.0-beta.2 (2020-05-23)](#150-beta2-2020-05-23) + - [Tests](#tests-6) + - [Unclassified](#unclassified-5) +- [1.9.0-alpha.2 (2020-10-29)](#190-alpha2-2020-10-29) - [Bug Fixes](#bug-fixes-32) - - [Chores](#chores-3) - - [Code Refactoring](#code-refactoring-9) - - [Documentation](#documentation-24) -- [1.5.0-beta.1 (2020-04-30)](#150-beta1-2020-04-30) - - [Breaking Changes](#breaking-changes-8) - - [Chores](#chores-4) - - [Code Refactoring](#code-refactoring-10) -- [1.4.10 (2020-04-30)](#1410-2020-04-30) + - [Code Generation](#code-generation-40) + - [Documentation](#documentation-20) + - [Features](#features-22) + - [Tests](#tests-7) +- [1.9.0-alpha.1 (2020-10-20)](#190-alpha1-2020-10-20) - [Bug Fixes](#bug-fixes-33) - - [Chores](#chores-5) - - [Documentation](#documentation-25) - - [Unclassified](#unclassified-7) -- [1.4.9 (2020-04-25)](#149-2020-04-25) + - [Code Generation](#code-generation-41) + - [Code Refactoring](#code-refactoring-7) + - [Documentation](#documentation-21) + - [Features](#features-23) + - [Tests](#tests-8) +- [1.8.5 (2020-10-03)](#185-2020-10-03) + - [Code Generation](#code-generation-42) +- [1.8.0-pre.1 (2020-10-03)](#180-pre1-2020-10-03) - [Bug Fixes](#bug-fixes-34) - - [Chores](#chores-6) -- [1.4.8 (2020-04-24)](#148-2020-04-24) + - [Code Generation](#code-generation-43) + - [Features](#features-24) +- [1.8.0-pre.0 (2020-10-02)](#180-pre0-2020-10-02) + - [Breaking Changes](#breaking-changes-8) - [Bug Fixes](#bug-fixes-35) - - [Chores](#chores-7) - - [Documentation](#documentation-26) - - [Features](#features-22) -- [1.4.7 (2020-04-24)](#147-2020-04-24) + - [Code Generation](#code-generation-44) + - [Documentation](#documentation-22) + - [Features](#features-25) +- [1.7.4 (2020-08-31)](#174-2020-08-31) - [Bug Fixes](#bug-fixes-36) - - [Chores](#chores-8) - - [Documentation](#documentation-27) -- [1.4.6 (2020-04-17)](#146-2020-04-17) + - [Code Generation](#code-generation-45) +- [1.7.3 (2020-08-31)](#173-2020-08-31) + - [Code Generation](#code-generation-46) +- [1.7.1 (2020-08-31)](#171-2020-08-31) + - [Breaking Changes](#breaking-changes-9) - [Bug Fixes](#bug-fixes-37) - - [Documentation](#documentation-28) -- [1.4.5 (2020-04-16)](#145-2020-04-16) + - [Code Generation](#code-generation-47) + - [Code Refactoring](#code-refactoring-8) + - [Documentation](#documentation-23) + - [Features](#features-26) + - [Unclassified](#unclassified-6) +- [1.7.0 (2020-08-14)](#170-2020-08-14) + - [Breaking Changes](#breaking-changes-10) - [Bug Fixes](#bug-fixes-38) - - [Documentation](#documentation-29) -- [1.4.3 (2020-04-16)](#143-2020-04-16) + - [Code Generation](#code-generation-48) + - [Code Refactoring](#code-refactoring-9) + - [Documentation](#documentation-24) + - [Features](#features-27) + - [Unclassified](#unclassified-7) +- [1.6.0 (2020-07-20)](#160-2020-07-20) - [Bug 
Fixes](#bug-fixes-39) + - [Code Generation](#code-generation-49) + - [Documentation](#documentation-25) + - [Unclassified](#unclassified-8) +- [1.5.2 (2020-06-23)](#152-2020-06-23) + - [Bug Fixes](#bug-fixes-40) + - [Code Generation](#code-generation-50) + - [Features](#features-28) +- [1.5.1 (2020-06-16)](#151-2020-06-16) + - [Code Generation](#code-generation-51) +- [1.5.0 (2020-06-16)](#150-2020-06-16) + - [Bug Fixes](#bug-fixes-41) + - [Chores](#chores-1) + - [Documentation](#documentation-26) + - [Features](#features-29) + - [Unclassified](#unclassified-9) +- [1.5.0-beta.5 (2020-05-28)](#150-beta5-2020-05-28) + - [Bug Fixes](#bug-fixes-42) + - [Chores](#chores-2) + - [Documentation](#documentation-27) + - [Features](#features-30) +- [1.5.0-beta.3 (2020-05-23)](#150-beta3-2020-05-23) + - [Chores](#chores-3) +- [1.5.0-beta.2 (2020-05-23)](#150-beta2-2020-05-23) + - [Bug Fixes](#bug-fixes-43) + - [Chores](#chores-4) + - [Code Refactoring](#code-refactoring-10) + - [Documentation](#documentation-28) +- [1.5.0-beta.1 (2020-04-30)](#150-beta1-2020-04-30) + - [Breaking Changes](#breaking-changes-11) + - [Chores](#chores-5) - [Code Refactoring](#code-refactoring-11) +- [1.4.10 (2020-04-30)](#1410-2020-04-30) + - [Bug Fixes](#bug-fixes-44) + - [Chores](#chores-6) + - [Documentation](#documentation-29) + - [Unclassified](#unclassified-10) +- [1.4.9 (2020-04-25)](#149-2020-04-25) + - [Bug Fixes](#bug-fixes-45) + - [Chores](#chores-7) +- [1.4.8 (2020-04-24)](#148-2020-04-24) + - [Bug Fixes](#bug-fixes-46) + - [Chores](#chores-8) - [Documentation](#documentation-30) - - [Features](#features-23) -- [1.4.2 (2020-04-03)](#142-2020-04-03) + - [Features](#features-31) +- [1.4.7 (2020-04-24)](#147-2020-04-24) + - [Bug Fixes](#bug-fixes-47) - [Chores](#chores-9) - [Documentation](#documentation-31) +- [1.4.6 (2020-04-17)](#146-2020-04-17) + - [Bug Fixes](#bug-fixes-48) + - [Documentation](#documentation-32) +- [1.4.5 (2020-04-16)](#145-2020-04-16) + - [Bug Fixes](#bug-fixes-49) + - [Documentation](#documentation-33) +- [1.4.3 (2020-04-16)](#143-2020-04-16) + - [Bug Fixes](#bug-fixes-50) + - [Code Refactoring](#code-refactoring-12) + - [Documentation](#documentation-34) + - [Features](#features-32) +- [1.4.2 (2020-04-03)](#142-2020-04-03) + - [Chores](#chores-10) + - [Documentation](#documentation-35) - [1.4.1 (2020-04-02)](#141-2020-04-02) - - [Bug Fixes](#bug-fixes-40) + - [Bug Fixes](#bug-fixes-51) - [1.4.0 (2020-04-02)](#140-2020-04-02) - [GHSA-3p3g-vpw6-4w66](#ghsa-3p3g-vpw6-4w66) - [Impact](#impact) @@ -248,7 +313,7 @@ - [Workarounds](#workarounds) - [References](#references) - [Upstream](#upstream) - - [Breaking Changes](#breaking-changes-9) + - [Breaking Changes](#breaking-changes-12) - [GHSA-3p3g-vpw6-4w66](#ghsa-3p3g-vpw6-4w66-1) - [Impact](#impact-1) - [Severity](#severity-1) @@ -256,49 +321,49 @@ - [Workarounds](#workarounds-1) - [References](#references-1) - [Upstream](#upstream-1) - - [Bug Fixes](#bug-fixes-41) - - [Code Refactoring](#code-refactoring-12) - - [Documentation](#documentation-32) - - [Features](#features-24) - - [Unclassified](#unclassified-8) + - [Bug Fixes](#bug-fixes-52) + - [Code Refactoring](#code-refactoring-13) + - [Documentation](#documentation-36) + - [Features](#features-33) + - [Unclassified](#unclassified-11) - [1.3.2 (2020-02-17)](#132-2020-02-17) - - [Bug Fixes](#bug-fixes-42) - - [Chores](#chores-10) - - [Documentation](#documentation-33) + - [Bug Fixes](#bug-fixes-53) + - [Chores](#chores-11) + - [Documentation](#documentation-37) - [1.3.1 
(2020-02-16)](#131-2020-02-16) - [Continuous Integration](#continuous-integration) - [1.3.0 (2020-02-14)](#130-2020-02-14) - - [Bug Fixes](#bug-fixes-43) - - [Documentation](#documentation-34) - - [Features](#features-25) - - [Unclassified](#unclassified-9) + - [Bug Fixes](#bug-fixes-54) + - [Documentation](#documentation-38) + - [Features](#features-34) + - [Unclassified](#unclassified-12) - [1.2.3 (2020-01-31)](#123-2020-01-31) - - [Unclassified](#unclassified-10) + - [Unclassified](#unclassified-13) - [1.2.2 (2020-01-23)](#122-2020-01-23) - - [Documentation](#documentation-35) - - [Unclassified](#unclassified-11) + - [Documentation](#documentation-39) + - [Unclassified](#unclassified-14) - [1.2.1 (2020-01-15)](#121-2020-01-15) - - [Unclassified](#unclassified-12) + - [Unclassified](#unclassified-15) - [1.2.0 (2020-01-08)](#120-2020-01-08) - - [Unclassified](#unclassified-13) + - [Unclassified](#unclassified-16) - [1.2.0-alpha.3 (2020-01-08)](#120-alpha3-2020-01-08) - - [Unclassified](#unclassified-14) + - [Unclassified](#unclassified-17) - [1.2.0-alpha.2 (2020-01-08)](#120-alpha2-2020-01-08) - [Continuous Integration](#continuous-integration-1) - [1.2.0-alpha.1 (2020-01-07)](#120-alpha1-2020-01-07) - - [Documentation](#documentation-36) - - [Unclassified](#unclassified-15) + - [Documentation](#documentation-40) + - [Unclassified](#unclassified-18) - [1.1.1 (2019-12-19)](#111-2019-12-19) - - [Documentation](#documentation-37) - - [Unclassified](#unclassified-16) + - [Documentation](#documentation-41) + - [Unclassified](#unclassified-19) - [1.1.0 (2019-12-16)](#110-2019-12-16) - - [Documentation](#documentation-38) - - [Unclassified](#unclassified-17) + - [Documentation](#documentation-42) + - [Unclassified](#unclassified-20) - [1.0.9 (2019-11-02)](#109-2019-11-02) - - [Documentation](#documentation-39) - - [Unclassified](#unclassified-18) + - [Documentation](#documentation-43) + - [Unclassified](#unclassified-21) - [1.0.8 (2019-10-04)](#108-2019-10-04) - - [Unclassified](#unclassified-19) + - [Unclassified](#unclassified-22) - [1.0.7 (2019-09-29)](#107-2019-09-29) - [Continuous Integration](#continuous-integration-2) - [1.0.6 (2019-09-29)](#106-2019-09-29) @@ -306,367 +371,1847 @@ - [1.0.5 (2019-09-28)](#105-2019-09-28) - [Continuous Integration](#continuous-integration-4) - [1.0.4 (2019-09-26)](#104-2019-09-26) - - [Unclassified](#unclassified-20) + - [Unclassified](#unclassified-23) - [1.0.3 (2019-09-23)](#103-2019-09-23) - - [Unclassified](#unclassified-21) + - [Unclassified](#unclassified-24) - [1.0.2 (2019-09-18)](#102-2019-09-18) - - [Unclassified](#unclassified-22) + - [Unclassified](#unclassified-25) - [1.0.1 (2019-09-04)](#101-2019-09-04) - - [Documentation](#documentation-40) - - [Unclassified](#unclassified-23) + - [Documentation](#documentation-44) + - [Unclassified](#unclassified-26) - [1.0.0 (2019-06-24)](#100-2019-06-24) - - [Documentation](#documentation-41) - - [Unclassified](#unclassified-24) + - [Documentation](#documentation-45) + - [Unclassified](#unclassified-27) - [1.0.0-rc.16 (2019-06-13)](#100-rc16-2019-06-13) - - [Documentation](#documentation-42) - - [Unclassified](#unclassified-25) + - [Documentation](#documentation-46) + - [Unclassified](#unclassified-28) - [1.0.0-rc.15 (2019-06-05)](#100-rc15-2019-06-05) - - [Documentation](#documentation-43) - - [Unclassified](#unclassified-26) + - [Documentation](#documentation-47) + - [Unclassified](#unclassified-29) - [1.0.0-rc.14 (2019-05-18)](#100-rc14-2019-05-18) - [Continuous Integration](#continuous-integration-5) - 
- [Documentation](#documentation-44) - - [Unclassified](#unclassified-27) + - [Documentation](#documentation-48) + - [Unclassified](#unclassified-30) - [1.0.0-rc.12 (2019-05-10)](#100-rc12-2019-05-10) - - [Unclassified](#unclassified-28) + - [Unclassified](#unclassified-31) - [0.0.1 (2019-05-08)](#001-2019-05-08) - - [Documentation](#documentation-45) - - [Unclassified](#unclassified-29) + - [Documentation](#documentation-49) + - [Unclassified](#unclassified-32) - [1.0.0-rc.11 (2019-05-02)](#100-rc11-2019-05-02) - - [Documentation](#documentation-46) - - [Unclassified](#unclassified-30) + - [Documentation](#documentation-50) + - [Unclassified](#unclassified-33) - [1.0.0-rc.10 (2019-04-29)](#100-rc10-2019-04-29) - - [Documentation](#documentation-47) - - [Unclassified](#unclassified-31) + - [Documentation](#documentation-51) + - [Unclassified](#unclassified-34) - [1.0.0-rc.9+oryOS.10 (2019-04-18)](#100-rc9oryos10-2019-04-18) - - [Documentation](#documentation-48) - - [Unclassified](#unclassified-32) + - [Documentation](#documentation-52) + - [Unclassified](#unclassified-35) - [1.0.0-rc.8+oryOS.10 (2019-04-03)](#100-rc8oryos10-2019-04-03) - [Continuous Integration](#continuous-integration-6) - - [Documentation](#documentation-49) + - [Documentation](#documentation-53) - [1.0.0-rc.7+oryOS.10 (2019-04-02)](#100-rc7oryos10-2019-04-02) - [Continuous Integration](#continuous-integration-7) - - [Documentation](#documentation-50) - - [Unclassified](#unclassified-33) + - [Documentation](#documentation-54) + - [Unclassified](#unclassified-36) - [1.0.0-rc.6+oryOS.10 (2018-12-18)](#100-rc6oryos10-2018-12-18) - - [Documentation](#documentation-51) - - [Unclassified](#unclassified-34) + - [Documentation](#documentation-55) + - [Unclassified](#unclassified-37) - [1.0.0-rc.5+oryOS.10 (2018-12-13)](#100-rc5oryos10-2018-12-13) - - [Documentation](#documentation-52) - - [Unclassified](#unclassified-35) + - [Documentation](#documentation-56) + - [Unclassified](#unclassified-38) - [1.0.0-rc.4+oryOS.9 (2018-12-12)](#100-rc4oryos9-2018-12-12) - - [Documentation](#documentation-53) - - [Unclassified](#unclassified-36) + - [Documentation](#documentation-57) + - [Unclassified](#unclassified-39) - [1.0.0-rc.3+oryOS.9 (2018-12-06)](#100-rc3oryos9-2018-12-06) - - [Documentation](#documentation-54) - - [Unclassified](#unclassified-37) + - [Documentation](#documentation-58) + - [Unclassified](#unclassified-40) - [1.0.0-rc.2+oryOS.9 (2018-11-21)](#100-rc2oryos9-2018-11-21) - - [Documentation](#documentation-55) - - [Unclassified](#unclassified-38) + - [Documentation](#documentation-59) + - [Unclassified](#unclassified-41) - [1.0.0-rc.1+oryOS.9 (2018-11-21)](#100-rc1oryos9-2018-11-21) - [Build System](#build-system) - - [Documentation](#documentation-56) - - [Unclassified](#unclassified-39) + - [Documentation](#documentation-60) + - [Unclassified](#unclassified-42) - [1.0.0-beta.9 (2018-09-01)](#100-beta9-2018-09-01) - - [Documentation](#documentation-57) - - [Unclassified](#unclassified-40) + - [Documentation](#documentation-61) + - [Unclassified](#unclassified-43) - [1.0.0-beta.8 (2018-08-10)](#100-beta8-2018-08-10) - - [Documentation](#documentation-58) - - [Unclassified](#unclassified-41) + - [Documentation](#documentation-62) + - [Unclassified](#unclassified-44) - [1.0.0-beta.7 (2018-07-16)](#100-beta7-2018-07-16) - - [Documentation](#documentation-59) - - [Unclassified](#unclassified-42) + - [Documentation](#documentation-63) + - [Unclassified](#unclassified-45) - [1.0.0-beta.6 (2018-07-11)](#100-beta6-2018-07-11) - 
- [Documentation](#documentation-60) - - [Unclassified](#unclassified-43) + - [Documentation](#documentation-64) + - [Unclassified](#unclassified-46) - [1.0.0-beta.5 (2018-07-07)](#100-beta5-2018-07-07) - - [Documentation](#documentation-61) - - [Unclassified](#unclassified-44) + - [Documentation](#documentation-65) + - [Unclassified](#unclassified-47) - [1.0.0-beta.4 (2018-06-13)](#100-beta4-2018-06-13) - - [Documentation](#documentation-62) + - [Documentation](#documentation-66) - [1.0.0-beta.3 (2018-06-13)](#100-beta3-2018-06-13) - [Continuous Integration](#continuous-integration-8) - - [Documentation](#documentation-63) - - [Unclassified](#unclassified-45) + - [Documentation](#documentation-67) + - [Unclassified](#unclassified-48) - [1.0.0-beta.2 (2018-05-29)](#100-beta2-2018-05-29) - [Continuous Integration](#continuous-integration-9) - [1.0.0-beta.1 (2018-05-29)](#100-beta1-2018-05-29) - [Build System](#build-system-1) - - [Documentation](#documentation-64) - - [Unclassified](#unclassified-46) + - [Documentation](#documentation-68) + - [Unclassified](#unclassified-49) - [0.11.10 (2018-03-19)](#01110-2018-03-19) - - [Documentation](#documentation-65) - - [Unclassified](#unclassified-47) + - [Documentation](#documentation-69) + - [Unclassified](#unclassified-50) - [0.11.12 (2018-04-08)](#01112-2018-04-08) - - [Documentation](#documentation-66) - - [Unclassified](#unclassified-48) + - [Documentation](#documentation-70) + - [Unclassified](#unclassified-51) - [0.11.9 (2018-03-10)](#0119-2018-03-10) - - [Unclassified](#unclassified-49) + - [Unclassified](#unclassified-52) - [0.11.7 (2018-03-03)](#0117-2018-03-03) - - [Unclassified](#unclassified-50) + - [Unclassified](#unclassified-53) - [0.11.6 (2018-02-07)](#0116-2018-02-07) - - [Unclassified](#unclassified-51) + - [Unclassified](#unclassified-54) - [0.11.10 (2018-03-19)](#01110-2018-03-19-1) - - [Documentation](#documentation-67) - - [Unclassified](#unclassified-52) + - [Documentation](#documentation-71) + - [Unclassified](#unclassified-55) - [0.11.9 (2018-03-10)](#0119-2018-03-10-1) - - [Unclassified](#unclassified-53) + - [Unclassified](#unclassified-56) - [0.11.7 (2018-03-03)](#0117-2018-03-03-1) - - [Unclassified](#unclassified-54) + - [Unclassified](#unclassified-57) - [0.11.6 (2018-02-07)](#0116-2018-02-07-1) - - [Unclassified](#unclassified-55) + - [Unclassified](#unclassified-58) - [0.11.4 (2018-01-23)](#0114-2018-01-23) - - [Documentation](#documentation-68) + - [Documentation](#documentation-72) - [0.11.3 (2018-01-23)](#0113-2018-01-23) - - [Documentation](#documentation-69) - - [Unclassified](#unclassified-56) + - [Documentation](#documentation-73) + - [Unclassified](#unclassified-59) - [0.11.2 (2018-01-22)](#0112-2018-01-22) - - [Unclassified](#unclassified-57) + - [Unclassified](#unclassified-60) - [0.11.1 (2018-01-18)](#0111-2018-01-18) - - [Unclassified](#unclassified-58) + - [Unclassified](#unclassified-61) - [0.11.0 (2018-01-08)](#0110-2018-01-08) - - [Documentation](#documentation-70) - - [Unclassified](#unclassified-59) + - [Documentation](#documentation-74) + - [Unclassified](#unclassified-62) - [0.10.10 (2017-12-16)](#01010-2017-12-16) - - [Documentation](#documentation-71) - - [Unclassified](#unclassified-60) + - [Documentation](#documentation-75) + - [Unclassified](#unclassified-63) - [0.10.9 (2017-12-13)](#0109-2017-12-13) - - [Documentation](#documentation-72) - - [Unclassified](#unclassified-61) + - [Documentation](#documentation-76) + - [Unclassified](#unclassified-64) - [0.10.8 (2017-12-12)](#0108-2017-12-12) 
- - [Documentation](#documentation-73) - - [Unclassified](#unclassified-62) + - [Documentation](#documentation-77) + - [Unclassified](#unclassified-65) - [0.10.7 (2017-12-09)](#0107-2017-12-09) - - [Documentation](#documentation-74) - - [Unclassified](#unclassified-63) + - [Documentation](#documentation-78) + - [Unclassified](#unclassified-66) - [0.10.6 (2017-12-09)](#0106-2017-12-09) - - [Unclassified](#unclassified-64) + - [Unclassified](#unclassified-67) - [0.10.5 (2017-12-09)](#0105-2017-12-09) - - [Documentation](#documentation-75) - - [Unclassified](#unclassified-65) + - [Documentation](#documentation-79) + - [Unclassified](#unclassified-68) - [0.10.4 (2017-12-09)](#0104-2017-12-09) - - [Documentation](#documentation-76) - - [Unclassified](#unclassified-66) + - [Documentation](#documentation-80) + - [Unclassified](#unclassified-69) - [0.10.3 (2017-12-08)](#0103-2017-12-08) - - [Documentation](#documentation-77) + - [Documentation](#documentation-81) - [0.10.2 (2017-12-08)](#0102-2017-12-08) - [Continuous Integration](#continuous-integration-10) - [0.10.1 (2017-12-08)](#0101-2017-12-08) - [Continuous Integration](#continuous-integration-11) - [0.10.0 (2017-12-08)](#0100-2017-12-08) - [Continuous Integration](#continuous-integration-12) - - [Documentation](#documentation-78) - - [Unclassified](#unclassified-67) + - [Documentation](#documentation-82) + - [Unclassified](#unclassified-70) - [0.10.0-alpha.21 (2017-11-27)](#0100-alpha21-2017-11-27) - - [Unclassified](#unclassified-68) + - [Unclassified](#unclassified-71) - [0.10.0-alpha.20 (2017-11-26)](#0100-alpha20-2017-11-26) - - [Unclassified](#unclassified-69) + - [Unclassified](#unclassified-72) - [0.10.0-alpha.19 (2017-11-26)](#0100-alpha19-2017-11-26) - - [Documentation](#documentation-79) - - [Unclassified](#unclassified-70) + - [Documentation](#documentation-83) + - [Unclassified](#unclassified-73) - [0.10.0-alpha.18 (2017-11-06)](#0100-alpha18-2017-11-06) - [Continuous Integration](#continuous-integration-13) - [0.10.0-alpha.17 (2017-11-06)](#0100-alpha17-2017-11-06) - [Continuous Integration](#continuous-integration-14) - [0.10.0-alpha.16 (2017-11-06)](#0100-alpha16-2017-11-06) - [Continuous Integration](#continuous-integration-15) - - [Documentation](#documentation-80) - - [Unclassified](#unclassified-71) + - [Documentation](#documentation-84) + - [Unclassified](#unclassified-74) - [0.10.0-alpha.15 (2017-11-06)](#0100-alpha15-2017-11-06) - - [Unclassified](#unclassified-72) + - [Unclassified](#unclassified-75) - [0.10.0-alpha.14 (2017-11-06)](#0100-alpha14-2017-11-06) - - [Unclassified](#unclassified-73) + - [Unclassified](#unclassified-76) - [0.10.0-alpha.13 (2017-11-06)](#0100-alpha13-2017-11-06) - - [Unclassified](#unclassified-74) + - [Unclassified](#unclassified-77) - [0.10.0-alpha.12 (2017-11-06)](#0100-alpha12-2017-11-06) - - [Documentation](#documentation-81) - - [Unclassified](#unclassified-75) + - [Documentation](#documentation-85) + - [Unclassified](#unclassified-78) - [0.10.0-alpha.10 (2017-10-26)](#0100-alpha10-2017-10-26) - [Continuous Integration](#continuous-integration-16) - - [Documentation](#documentation-82) + - [Documentation](#documentation-86) - [0.10.0-alpha.9 (2017-10-25)](#0100-alpha9-2017-10-25) - - [Documentation](#documentation-83) - - [Unclassified](#unclassified-76) + - [Documentation](#documentation-87) + - [Unclassified](#unclassified-79) - [0.10.0-alpha.8 (2017-10-18)](#0100-alpha8-2017-10-18) - - [Documentation](#documentation-84) - - [Unclassified](#unclassified-77) + - 
[Documentation](#documentation-88) + - [Unclassified](#unclassified-80) - [0.9.14 (2017-10-06)](#0914-2017-10-06) - - [Documentation](#documentation-85) - - [Unclassified](#unclassified-78) + - [Documentation](#documentation-89) + - [Unclassified](#unclassified-81) - [0.10.0-alpha.7 (2017-10-06)](#0100-alpha7-2017-10-06) - - [Unclassified](#unclassified-79) + - [Unclassified](#unclassified-82) - [0.10.0-alpha.6 (2017-10-05)](#0100-alpha6-2017-10-05) - - [Unclassified](#unclassified-80) + - [Unclassified](#unclassified-83) - [0.10.0-alpha.5 (2017-10-05)](#0100-alpha5-2017-10-05) - - [Unclassified](#unclassified-81) + - [Unclassified](#unclassified-84) - [0.10.0-alpha.4 (2017-10-05)](#0100-alpha4-2017-10-05) - - [Unclassified](#unclassified-82) + - [Unclassified](#unclassified-85) - [0.10.0-alpha.3 (2017-10-05)](#0100-alpha3-2017-10-05) - - [Unclassified](#unclassified-83) + - [Unclassified](#unclassified-86) - [0.10.0-alpha.2 (2017-10-05)](#0100-alpha2-2017-10-05) - - [Documentation](#documentation-86) - - [Unclassified](#unclassified-84) + - [Documentation](#documentation-90) + - [Unclassified](#unclassified-87) - [0.10.0-alpha.1 (2017-10-05)](#0100-alpha1-2017-10-05) - - [Documentation](#documentation-87) - - [Unclassified](#unclassified-85) + - [Documentation](#documentation-91) + - [Unclassified](#unclassified-88) - [0.9.13 (2017-09-26)](#0913-2017-09-26) - - [Documentation](#documentation-88) - - [Unclassified](#unclassified-86) + - [Documentation](#documentation-92) + - [Unclassified](#unclassified-89) - [0.9.12 (2017-07-06)](#0912-2017-07-06) - - [Documentation](#documentation-89) - - [Unclassified](#unclassified-87) + - [Documentation](#documentation-93) + - [Unclassified](#unclassified-90) - [0.9.11 (2017-06-30)](#0911-2017-06-30) - - [Documentation](#documentation-90) - - [Unclassified](#unclassified-88) + - [Documentation](#documentation-94) + - [Unclassified](#unclassified-91) - [0.9.10 (2017-06-29)](#0910-2017-06-29) - - [Documentation](#documentation-91) - - [Unclassified](#unclassified-89) + - [Documentation](#documentation-95) + - [Unclassified](#unclassified-92) - [0.9.9 (2017-06-17)](#099-2017-06-17) - - [Unclassified](#unclassified-90) + - [Unclassified](#unclassified-93) - [0.9.8 (2017-06-17)](#098-2017-06-17) - - [Documentation](#documentation-92) - - [Unclassified](#unclassified-91) + - [Documentation](#documentation-96) + - [Unclassified](#unclassified-94) - [0.9.7 (2017-06-16)](#097-2017-06-16) - - [Documentation](#documentation-93) - - [Unclassified](#unclassified-92) + - [Documentation](#documentation-97) + - [Unclassified](#unclassified-95) - [0.9.6 (2017-06-15)](#096-2017-06-15) - - [Unclassified](#unclassified-93) + - [Unclassified](#unclassified-96) - [0.9.5 (2017-06-15)](#095-2017-06-15) - - [Unclassified](#unclassified-94) + - [Unclassified](#unclassified-97) - [0.9.4 (2017-06-14)](#094-2017-06-14) - - [Documentation](#documentation-94) - - [Unclassified](#unclassified-95) + - [Documentation](#documentation-98) + - [Unclassified](#unclassified-98) - [0.9.3 (2017-06-14)](#093-2017-06-14) - - [Documentation](#documentation-95) - - [Unclassified](#unclassified-96) + - [Documentation](#documentation-99) + - [Unclassified](#unclassified-99) - [0.9.2 (2017-06-13)](#092-2017-06-13) - - [Unclassified](#unclassified-97) + - [Unclassified](#unclassified-100) - [0.9.1 (2017-06-12)](#091-2017-06-12) - - [Unclassified](#unclassified-98) + - [Unclassified](#unclassified-101) - [0.9.0 (2017-06-07)](#090-2017-06-07) - - [Documentation](#documentation-96) - - 
[Unclassified](#unclassified-99) + - [Documentation](#documentation-100) + - [Unclassified](#unclassified-102) - [0.8.7 (2017-06-05)](#087-2017-06-05) - - [Unclassified](#unclassified-100) + - [Unclassified](#unclassified-103) - [0.8.6 (2017-06-05)](#086-2017-06-05) - - [Documentation](#documentation-97) - - [Unclassified](#unclassified-101) + - [Documentation](#documentation-101) + - [Unclassified](#unclassified-104) - [0.8.5 (2017-06-01)](#085-2017-06-01) - - [Unclassified](#unclassified-102) + - [Unclassified](#unclassified-105) - [0.8.4 (2017-05-24)](#084-2017-05-24) - - [Documentation](#documentation-98) - - [Unclassified](#unclassified-103) + - [Documentation](#documentation-102) + - [Unclassified](#unclassified-106) - [0.8.3 (2017-05-23)](#083-2017-05-23) - - [Documentation](#documentation-99) - - [Unclassified](#unclassified-104) + - [Documentation](#documentation-103) + - [Unclassified](#unclassified-107) - [0.8.2 (2017-05-10)](#082-2017-05-10) - - [Unclassified](#unclassified-105) + - [Unclassified](#unclassified-108) - [0.8.1 (2017-05-08)](#081-2017-05-08) - [Continuous Integration](#continuous-integration-17) - [0.8.0 (2017-05-07)](#080-2017-05-07) - [Continuous Integration](#continuous-integration-18) - - [Documentation](#documentation-100) - - [Unclassified](#unclassified-106) + - [Documentation](#documentation-104) + - [Unclassified](#unclassified-109) - [0.7.13 (2017-05-03)](#0713-2017-05-03) - - [Documentation](#documentation-101) - - [Unclassified](#unclassified-107) + - [Documentation](#documentation-105) + - [Unclassified](#unclassified-110) - [0.7.12 (2017-04-30)](#0712-2017-04-30) - - [Unclassified](#unclassified-108) + - [Unclassified](#unclassified-111) - [0.7.11 (2017-04-28)](#0711-2017-04-28) - - [Unclassified](#unclassified-109) + - [Unclassified](#unclassified-112) - [0.7.10 (2017-04-14)](#0710-2017-04-14) - - [Documentation](#documentation-102) - - [Unclassified](#unclassified-110) + - [Documentation](#documentation-106) + - [Unclassified](#unclassified-113) - [0.7.9 (2017-04-02)](#079-2017-04-02) - - [Unclassified](#unclassified-111) + - [Unclassified](#unclassified-114) - [0.7.8 (2017-03-24)](#078-2017-03-24) - - [Documentation](#documentation-103) - - [Unclassified](#unclassified-112) + - [Documentation](#documentation-107) + - [Unclassified](#unclassified-115) - [0.7.7 (2017-02-11)](#077-2017-02-11) - - [Unclassified](#unclassified-113) + - [Unclassified](#unclassified-116) - [0.7.6 (2017-02-11)](#076-2017-02-11) - - [Unclassified](#unclassified-114) + - [Unclassified](#unclassified-117) - [0.7.3 (2017-01-22)](#073-2017-01-22) - - [Unclassified](#unclassified-115) + - [Unclassified](#unclassified-118) - [0.7.2 (2017-01-02)](#072-2017-01-02) - - [Unclassified](#unclassified-116) + - [Unclassified](#unclassified-119) - [0.7.1 (2016-12-30)](#071-2016-12-30) - - [Unclassified](#unclassified-117) + - [Unclassified](#unclassified-120) - [0.7.0 (2016-12-30)](#070-2016-12-30) - - [Documentation](#documentation-104) - - [Unclassified](#unclassified-118) + - [Documentation](#documentation-108) + - [Unclassified](#unclassified-121) - [0.6.10 (2016-12-26)](#0610-2016-12-26) - - [Unclassified](#unclassified-119) + - [Unclassified](#unclassified-122) - [0.6.9 (2016-12-20)](#069-2016-12-20) - - [Documentation](#documentation-105) - - [Unclassified](#unclassified-120) + - [Documentation](#documentation-109) + - [Unclassified](#unclassified-123) - [0.6.8 (2016-12-06)](#068-2016-12-06) - - [Unclassified](#unclassified-121) + - [Unclassified](#unclassified-124) - [0.6.7 
(2016-12-04)](#067-2016-12-04) - - [Unclassified](#unclassified-122) + - [Unclassified](#unclassified-125) - [0.6.6 (2016-12-04)](#066-2016-12-04) - - [Documentation](#documentation-106) - - [Unclassified](#unclassified-123) + - [Documentation](#documentation-110) + - [Unclassified](#unclassified-126) - [0.6.5 (2016-11-28)](#065-2016-11-28) - - [Unclassified](#unclassified-124) + - [Unclassified](#unclassified-127) - [0.6.4 (2016-11-22)](#064-2016-11-22) - - [Unclassified](#unclassified-125) + - [Unclassified](#unclassified-128) - [0.6.3 (2016-11-17)](#063-2016-11-17) - - [Documentation](#documentation-107) - - [Unclassified](#unclassified-126) + - [Documentation](#documentation-111) + - [Unclassified](#unclassified-129) - [0.6.2 (2016-11-05)](#062-2016-11-05) - - [Unclassified](#unclassified-127) + - [Unclassified](#unclassified-130) - [0.6.1 (2016-10-26)](#061-2016-10-26) - - [Documentation](#documentation-108) - - [Unclassified](#unclassified-128) + - [Documentation](#documentation-112) + - [Unclassified](#unclassified-131) - [0.6.0 (2016-10-25)](#060-2016-10-25) - - [Unclassified](#unclassified-129) + - [Unclassified](#unclassified-132) - [0.5.8 (2016-10-06)](#058-2016-10-06) - - [Unclassified](#unclassified-130) + - [Unclassified](#unclassified-133) - [0.5.7 (2016-10-04)](#057-2016-10-04) - - [Unclassified](#unclassified-131) + - [Unclassified](#unclassified-134) - [0.5.6 (2016-10-03)](#056-2016-10-03) - - [Unclassified](#unclassified-132) + - [Unclassified](#unclassified-135) - [0.5.5 (2016-09-29)](#055-2016-09-29) - - [Unclassified](#unclassified-133) + - [Unclassified](#unclassified-136) - [0.5.4 (2016-09-29)](#054-2016-09-29) - - [Unclassified](#unclassified-134) + - [Unclassified](#unclassified-137) - [0.5.3 (2016-09-29)](#053-2016-09-29) - - [Documentation](#documentation-109) - - [Unclassified](#unclassified-135) + - [Documentation](#documentation-113) + - [Unclassified](#unclassified-138) - [0.5.2 (2016-09-23)](#052-2016-09-23) - - [Unclassified](#unclassified-136) + - [Unclassified](#unclassified-139) - [0.5.1 (2016-09-22)](#051-2016-09-22) - - [Documentation](#documentation-110) - - [Unclassified](#unclassified-137) + - [Documentation](#documentation-114) + - [Unclassified](#unclassified-140) - [0.4.3 (2016-09-03)](#043-2016-09-03) - - [Unclassified](#unclassified-138) + - [Unclassified](#unclassified-141) - [0.4.2-alpha.3 (2016-09-02)](#042-alpha3-2016-09-02) - - [Unclassified](#unclassified-139) + - [Unclassified](#unclassified-142) - [0.4.2-alpha.2 (2016-09-01)](#042-alpha2-2016-09-01) - - [Unclassified](#unclassified-140) + - [Unclassified](#unclassified-143) - [0.4.2-alpha.1 (2016-09-01)](#042-alpha1-2016-09-01) - - [Unclassified](#unclassified-141) + - [Unclassified](#unclassified-144) - [0.4.2-alpha (2016-09-01)](#042-alpha-2016-09-01) - - [Documentation](#documentation-111) - - [Unclassified](#unclassified-142) + - [Documentation](#documentation-115) + - [Unclassified](#unclassified-145) - [0.4.1 (2016-08-18)](#041-2016-08-18) - - [Unclassified](#unclassified-143) + - [Unclassified](#unclassified-146) - [0.3.1 (2016-08-17)](#031-2016-08-17) - - [Documentation](#documentation-112) - - [Unclassified](#unclassified-144) + - [Documentation](#documentation-116) + - [Unclassified](#unclassified-147) - [0.3.0 (2016-08-09)](#030-2016-08-09) - - [Unclassified](#unclassified-145) + - [Unclassified](#unclassified-148) - [0.2.0 (2016-08-09)](#020-2016-08-09) - - [Documentation](#documentation-113) - - [Unclassified](#unclassified-146) + - [Documentation](#documentation-117) + - 
[Unclassified](#unclassified-149) + + + +# [0.0.0](https://github.com/ory/hydra/compare/v25.4.0...v0.0.0) (2025-11-07) + + + +# [25.4.0](https://github.com/ory/hydra/compare/v2.3.0...v25.4.0) (2025-11-06) + +v25.4.0 + + + +## Breaking Changes + +This patch changes the behavior of configuration item `foo` to do bar. To keep the existing +behavior please do baz. +``` +--> + +## Related issue(s) + + + +## Checklist + + + +- [ ] I have read the [contributing +guidelines](../blob/master/CONTRIBUTING.md). +- [ ] I have referenced an issue containing the design document if my +change + introduces a new feature. +- [ ] I am following the +[contributing code + +This patch changes the behavior of configuration item `foo` to do bar. To keep the existing +behavior please do baz. +``` +--> + +## Related issue(s) + + + +## Checklist + + + +- [ ] I have read the [contributing +guidelines](../blob/master/CONTRIBUTING.md). +- [ ] I have referenced an issue containing the design document if my +change + introduces a new feature. +- [ ] I am following the +[contributing code + + + +### Bug Fixes + +* Add repo syncing for polis ([46d17f8](https://github.com/ory/hydra/commit/46d17f8bfdc59e2185e9ce65823eb2652e01f1b8)): + + GitOrigin-RevId: e277a25d594b512b800d39dd18f36ea3d99fcf84 + +* Add virtual expiry column to flow for easy cross-db querying ([1c402e3](https://github.com/ory/hydra/commit/1c402e392cb08da78ccab2c229d19dce2d414b08)): + + GitOrigin-RevId: 5b41402e367523009eec0e25c8cf1ea129e6b96e + +* Allow updating when JWKS URI is set ([#3935](https://github.com/ory/hydra/issues/3935)) ([#3946](https://github.com/ory/hydra/issues/3946)) ([fb1655b](https://github.com/ory/hydra/commit/fb1655ba86077b10141132ed332ba8d6f8c70582)): + + The client validator no longer rejects PATCH and PUT updates when `JSONWebKeysURI` is non-empty and `JSONWebKeys` is not nil. 
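To make the JWKS fix above concrete, here is an illustrative client update payload, shown as YAML for readability although the admin API itself exchanges JSON. The field names (`jwks_uri`, `jwks`, `token_endpoint_auth_method`) follow the public OAuth2 client schema; the values are invented. After the fix, a PUT or PATCH carrying a non-empty `jwks_uri` is no longer rejected just because the stored client also has `jwks` set.

```yaml
# Illustrative OAuth2 client update payload (values are examples only).
client_name: my-jwt-client
token_endpoint_auth_method: private_key_jwt
jwks_uri: https://my-example.app/.well-known/jwks.json
```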
+ +* Always use EC private keys in tests ([7481827](https://github.com/ory/hydra/commit/748182721768a6bf331e51a6989420f20383ae05)): + + GitOrigin-RevId: a82b6e636e84d543db3b035194c3a5dc85286afc + +* Better tracing in proxy HTTP ([0d8a797](https://github.com/ory/hydra/commit/0d8a7976ac51e8392520ceeeea340a973b4c2f73)): + + GitOrigin-RevId: e66493762481986aefa8c73c676b1f7515cd29cb + +* Case-insensitive user_code scrubbing ([#3979](https://github.com/ory/hydra/issues/3979)) ([d389fd0](https://github.com/ory/hydra/commit/d389fd0269f93c8b7c787f1b3683ae4c6e9e1909)) +* **changelog-oel:** Cap grace period for refresh token rotation at 30d ([35d5d58](https://github.com/ory/hydra/commit/35d5d586aa19780116b6ef20321433af7ba45325)): + + GitOrigin-RevId: a8785b2760897612d8b72d62b95622f35ee8ac36 + +* **changelog-oel:** Reduce rows read when checking past consents ([ace80c2](https://github.com/ory/hydra/commit/ace80c2c145ddb541dd06110ace4039439ab3139)): + + GitOrigin-RevId: 2df16d3053a1b4fb0bb0eb9e4f829f6861a55f1b + +* **changelog-oel:** Replace `returning *` with defined column names ([0b26e27](https://github.com/ory/hydra/commit/0b26e279d8b9c5b27961ba3b943e8a94d338d50b)): + + GitOrigin-RevId: 8fa1912556293bba8f9c841ec316da18a52ea61e + +* **changelog-oel:** Update expires_at on token use ([0588744](https://github.com/ory/hydra/commit/0588744a246e924012f46200b31b993796b5f3a6)): + + GitOrigin-RevId: c4ea129061ba34aaae5ed63403ee32221aee5556 + +* **changelog-oel:** Use keyset pagination instead of offset ([cbf14c0](https://github.com/ory/hydra/commit/cbf14c0b9c763e60424949358126258cbf3c316a)): + + GitOrigin-RevId: 61645585277edd95914705499afd7211a85983eb + +* CLI usage help examples ([#3943](https://github.com/ory/hydra/issues/3943)) ([e24f9a7](https://github.com/ory/hydra/commit/e24f9a704c22c72690bc20c498439865181d9239)) +* Copybara script ([7b33358](https://github.com/ory/hydra/commit/7b333585bb44a069bf47267c853aa2e91db0efa3)): + + GitOrigin-RevId: 14665e01451ac5fcdda148b473b8fc35d4fe21ef + +* Correct multiple instances of 'stragegy' typo ([#3906](https://github.com/ory/hydra/issues/3906)) ([50eefbc](https://github.com/ory/hydra/commit/50eefbc21c2c43d221b6079bbd78a33ef8c754c4)): + + This commit addresses several occurrences where 'strategy' was + misspelled as 'stragegy' throughout the codebase. + + Additionally, a similar issue was found and corrected in the Ory + documentation repository (ory/docs), with a corresponding pull request + submitted. 
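The grace-period cap mentioned above ties into the `oauth2.grant.refresh_token` settings defined in the configuration schema earlier in this patch. A minimal sketch with illustrative values:

```yaml
oauth2:
  grant:
    refresh_token:
      # How long an already-used refresh token remains valid after rotation.
      rotation_grace_period: 30s
      # How many times it may be reused within that window; only effective
      # when combined with a non-zero grace period.
      rotation_grace_reuse_count: 2
```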
+ +* Deduplicate down migrations ([02baf36](https://github.com/ory/hydra/commit/02baf364c5f1fe09a74edb879c711983f761dc7f)): + + GitOrigin-RevId: 94c68daeded4f3b6f42d079d71415d8935a74e69 + +* **deps:** Update go-x ([582a3c5](https://github.com/ory/hydra/commit/582a3c5f2de833a7996812d4873b305d162e1c7b)): + + GitOrigin-RevId: 2d32f7710b9c6111a30f4e0d3cc0abc967d7dfb6 + +* Escape IPv6 regex string ([0ba326a](https://github.com/ory/hydra/commit/0ba326ad59ab227bc6e9f47a846fbf1ac75c0f1c)): + + GitOrigin-RevId: cf04d7cae93aea32950a149527e2b1319af97b39 + +* Failing CI in OSS repos ([c900985](https://github.com/ory/hydra/commit/c9009858dc96edbbec1dd256cb1a734beb4f90aa)): + + GitOrigin-RevId: 3d1f84b0f0d006971aea9489322b3e0f32a6a7e3 + +* Fix expires_at timestamp not in UTC leading to local test failures ([337000a](https://github.com/ory/hydra/commit/337000aacd07a1af6e77918a011903b6f39701a3)): + + GitOrigin-RevId: 560d958391b12ace6db9d4c05074719e96e0329e + +* Fixed typo in description of api ([4551eb6](https://github.com/ory/hydra/commit/4551eb6e34cae866f66adf5c74469ed0d3ab2543)): + + GitOrigin-RevId: 020354a01d85ec411d879d7ebf260b7fce71c539 + +* Force autocommit for device auth code migration ([#3991](https://github.com/ory/hydra/issues/3991)) ([29761f4](https://github.com/ory/hydra/commit/29761f4ac7586478ea6f553cb571ac11b0275e6d)), closes [../blob/master/CONTRIBUTING.md#contributing-code](https://github.com/../blob/master/CONTRIBUTING.md/issues/contributing-code): + + +* Correct id token type in token exchange response ([#3625](https://github.com/ory/hydra/issues/3625)) ([d1f9ba8](https://github.com/ory/hydra/commit/d1f9ba8edee45323e1f13dcf9c67eefbd524dc81)): + + Closes https://github.com/ory/client-go/issues/2 + +* Dropped persistence/sql test errors ([#3670](https://github.com/ory/hydra/issues/3670)) ([22f0119](https://github.com/ory/hydra/commit/22f0119ad300c1e09c03e966a3d3411e57db444f)) +* Handle logout double-submit gracefully ([#3675](https://github.com/ory/hydra/issues/3675)) ([5133cf9](https://github.com/ory/hydra/commit/5133cf972ecfca18d7799c00a7afeae6a4386fbf)) +* Handle subject mismatch gracefully ([#3619](https://github.com/ory/hydra/issues/3619)) ([af0d477](https://github.com/ory/hydra/commit/af0d477e0eb1e336b01fa8d1321e9dce098c82a8)): + + We now redirect to the original request URL if the subjects between + the remembered Hydra session and what was confirmed by the login + screen does not match. 
+ +* Handle token hook auth config ([#3677](https://github.com/ory/hydra/issues/3677)) ([1a40833](https://github.com/ory/hydra/commit/1a40833e2c87c98541d053f7c54b38f791dbb448)): + + * fix: handle token hook auth config + + * fix: bump golangci-lint + +* Improved SSRF protection ([#3669](https://github.com/ory/hydra/issues/3669)) ([24c3be5](https://github.com/ory/hydra/commit/24c3be574a11a76e69f09a24754f20cf644b624c)) +* Incorrect down migration ([#3708](https://github.com/ory/hydra/issues/3708)) ([8812e0e](https://github.com/ory/hydra/commit/8812e0e67b1f192de4ab6819c8f2bb98e6a5b7a7)): + + See https://github.com/ory/hydra/pull/3705#discussion_r1471514014 + +* Remove required mark ([#3693](https://github.com/ory/hydra/issues/3693)) ([3a764a0](https://github.com/ory/hydra/commit/3a764a053a3d7eab698668cf63d387ea76c1db40)) +* Timeout in jwt-bearer grants when too many grants are available ([#3692](https://github.com/ory/hydra/issues/3692)) ([a748797](https://github.com/ory/hydra/commit/a748797761f5503b048df1b57bcc406f16cd40a3)) +* Verifiable credentials JWT format ([#3614](https://github.com/ory/hydra/issues/3614)) ([0176adc](https://github.com/ory/hydra/commit/0176adc17848ab1dd021910ea31202dbdcd51737)) + +### Code Generation + +* Pin v2.2.0-pre.1 release commit ([8168ee3](https://github.com/ory/hydra/commit/8168ee31161784b8a5e686a9a2c42f323e40b7bf)) + +### Documentation + +* Fix typo ([#3649](https://github.com/ory/hydra/issues/3649)) ([f0501d2](https://github.com/ory/hydra/commit/f0501d2cd7f30c550cc07f6f583118efc9f12a5f)) + +### Features + +* Add --skip-logout-consent flag to CLI ([#3709](https://github.com/ory/hydra/issues/3709)) ([f502d6e](https://github.com/ory/hydra/commit/f502d6e38747986cca2ce42b0854f194e85ed103)) +* Add authentication options to hooks ([#3633](https://github.com/ory/hydra/issues/3633)) ([5c8e792](https://github.com/ory/hydra/commit/5c8e7923ed22f6d231ca748bb76e4261a87afb08)) +* Add flag to export public keys ([#3684](https://github.com/ory/hydra/issues/3684)) ([62c006b](https://github.com/ory/hydra/commit/62c006b916351e7f74fb00e0006ea112801143d7)) +* Add missing index for jwk table ([#3691](https://github.com/ory/hydra/issues/3691)) ([39ee5e1](https://github.com/ory/hydra/commit/39ee5e1f0cfa7fae5c4f9e1663a930cb5b8c2bc9)) +* Add prompt=registration ([#3636](https://github.com/ory/hydra/issues/3636)) ([19857d2](https://github.com/ory/hydra/commit/19857d20b1d7d3b918de5388f17076de0660a6be)): + + Ory Hydra now supports a `registration` value for the `prompt` parameter of + the authorization request. When specifying `prompt=registration`, Ory Hydra + will redirect the user to the URL found under `urls.registration` + (instead of `urls.login`). + +* Add skip_logout_consent option to clients ([#3705](https://github.com/ory/hydra/issues/3705)) ([2a653e6](https://github.com/ory/hydra/commit/2a653e66803ddb03de02d981dbc8ea57b2ac0936)): + + Adds a special field which disables the logout consent screen when performing OIDC logout. 
+ +* Allow injecting extra fosite strategies ([#3646](https://github.com/ory/hydra/issues/3646)) ([88b0b7c](https://github.com/ory/hydra/commit/88b0b7cfdf1a1968bf3a720cb2e6640451e2956b)) +* Re-enable legacy client IDs ([#3628](https://github.com/ory/hydra/issues/3628)) ([5dd7d30](https://github.com/ory/hydra/commit/5dd7d306ba8181b1fff1225d056a2ee69183392e)): + + This patch changes the primary key of the `hydra_client` table. We do not expect issues, as that table is probably not overly huge in any deployment. We do however highly recommend to test the migration performance on a staging environment with a similar database setup. + +* Remove flow cookie ([#3639](https://github.com/ory/hydra/issues/3639)) ([cde3a30](https://github.com/ory/hydra/commit/cde3a30a92bc30cf072763503e9780a79ba44e43)): + + This patch removes the flow cookie. All information is already tracked in the request query parameters as part of the {login|consent}_{challenge|verifier}. + +* Remove login session cookie during consent flow ([#3667](https://github.com/ory/hydra/issues/3667)) ([5f41949](https://github.com/ory/hydra/commit/5f41949ad209c90d114dc427bd6ccde5e08f05da)) +* Support multiple token URLs ([#3676](https://github.com/ory/hydra/issues/3676)) ([95cc273](https://github.com/ory/hydra/commit/95cc2735ed18374cc01d625c66417e42b600778d)) + + +# [2.2.0-rc.3](https://github.com/ory/hydra/compare/v2.2.0-pre.0...v2.2.0-rc.3) (2023-08-16) + +Introduces logout compatibility with Ory Kratos. + + + + + +### Bug Fixes + +* Add exceptions for internal IP addresses ([#3608](https://github.com/ory/hydra/issues/3608)) ([1f1121c](https://github.com/ory/hydra/commit/1f1121caef6dd2c99c2ab551bfeb82e3cd2d8cf2)) +* Add kid to verifiable credential header ([#3606](https://github.com/ory/hydra/issues/3606)) ([9f1c8d1](https://github.com/ory/hydra/commit/9f1c8d192004e0e7d7f5c3383d4dd1df222dec81)) +* Deflake ttl test ([6741a49](https://github.com/ory/hydra/commit/6741a49f7b4d55a270f3eb968921894b1e5f2989)) +* Docker build ([#3609](https://github.com/ory/hydra/issues/3609)) ([01ff9da](https://github.com/ory/hydra/commit/01ff9da87e231a3cef8933c16a28ed425daa3355)) +* Enable CORS with hot-reloaded origins ([#3601](https://github.com/ory/hydra/issues/3601)) ([6f592fc](https://github.com/ory/hydra/commit/6f592fc8425887fb403516cbb03838b63f85f87e)) +* Only query access tokens by hashed signature ([a21e945](https://github.com/ory/hydra/commit/a21e94519416cc7801995b0804696348b18fa844)) +* Racy random string generation ([#3555](https://github.com/ory/hydra/issues/3555)) ([1b26c4c](https://github.com/ory/hydra/commit/1b26c4cb96400b333fe214d2da892fc045bbc69f)) +* Reject invalid JWKS in client configuration / dependency cleanup and bump ([#3603](https://github.com/ory/hydra/issues/3603)) ([1d73d83](https://github.com/ory/hydra/commit/1d73d83eb03e4ceef6edb4bd0738959007053118)) +* Restore ability to override auth and token urls for exemplary app ([#3590](https://github.com/ory/hydra/issues/3590)) ([dfb129a](https://github.com/ory/hydra/commit/dfb129a5b7c8ae01e1c490fce1a127697abc7bee)) +* Return proper error when the grant request cannot be parsed 
([#3558](https://github.com/ory/hydra/issues/3558)) ([26f2d34](https://github.com/ory/hydra/commit/26f2d34459f55444e880e6e27e081c002d630246)) +* Use correct tracer in middleware ([#3567](https://github.com/ory/hydra/issues/3567)) ([807cbd2](https://github.com/ory/hydra/commit/807cbd209af376b9b2d18c278cc927d1c43e6865)) + +### Code Generation + +* Pin v2.2.0-rc.3 release commit ([ad8a4ba](https://github.com/ory/hydra/commit/ad8a4bab63b352c0b259a97d7b3f23247b0238b1)) + +### Features + +* Add `hydra migrate status` subcommand ([#3579](https://github.com/ory/hydra/issues/3579)) ([749eb8d](https://github.com/ory/hydra/commit/749eb8db40fb8b2d6333d917fac6c25b6e5574ef)) +* Add more resolution to events and collect client metrics ([#3568](https://github.com/ory/hydra/issues/3568)) ([466e66b](https://github.com/ory/hydra/commit/466e66bd1df7bf589c5a74ad5be399b1eaa80d9b)) +* Add state override ([b8b9154](https://github.com/ory/hydra/commit/b8b9154077963492dad3ed0350a4d93d09a95602)) +* Add support for OIDC VC ([#3575](https://github.com/ory/hydra/issues/3575)) ([219a7c0](https://github.com/ory/hydra/commit/219a7c068fa0ec423923f157553f430c80934c45)): + + This adds initial support for issuing verifiable credentials + as specified in https://openid.net/specs/openid-connect-userinfo-vc-1_0.html. + + Because the spec is still in draft, public identifiers are + suffixed with `draft_00`. + +* Allow additional SQL migrations ([#3587](https://github.com/ory/hydra/issues/3587)) ([8900cbb](https://github.com/ory/hydra/commit/8900cbb770d6f39a5c3322fce488675ca6d0138a)) +* Allow Go migrations ([#3602](https://github.com/ory/hydra/issues/3602)) ([8eed306](https://github.com/ory/hydra/commit/8eed306800fa330a1cda752dbb11ddf09faf25ad)) +* Allow to disable claim mirroring ([#3563](https://github.com/ory/hydra/issues/3563)) ([c72a316](https://github.com/ory/hydra/commit/c72a31641ee79f090a2ac1b64a276be58312b2ee)): + + This PR introduces another config option called `oauth2:mirror_top_level_claims` which may be used to disable the mirroring of custom claims into the `ext` claim of the jwt. + This new config option is an opt-in. If unused the behavior remains as-is to ensure backwards compatibility. 
+ + Example: + + ```yaml + oauth2: + allowed_top_level_claims: + - test_claim + mirror_top_level_claims: false # -> this will prevent test_claim from being mirrored within ext + ``` + + Closes https://github.com/ory/hydra/issues/3348 + +* Bump fosite and add some more tracing ([0b56f53](https://github.com/ory/hydra/commit/0b56f53a491e165f68a53f013989328ce86928ba)) +* **cmd:** Add route that redirects to the auth code url ([4db6416](https://github.com/ory/hydra/commit/4db64161699e4301c003b2787baecae22c912c17)) +* Parallel generation of JSON web key set ([#3561](https://github.com/ory/hydra/issues/3561)) ([5bd9002](https://github.com/ory/hydra/commit/5bd9002db7baa2fe2c2529fee38825d66a68991f)) +* Propagate logout to identity provider ([#3596](https://github.com/ory/hydra/issues/3596)) ([c004fee](https://github.com/ory/hydra/commit/c004fee69497a5a0f8af5ccb6a2ab8d104fd9249)): + + * feat: propagate logout to identity provider + + This commit improves the integration between Hydra and Kratos when logging + out the user. + + This adds a new configuration key for configuring a Kratos admin URL. + Additionally, Kratos can send a session ID when accepting a login request. + If a session ID was specified and a Kratos admin URL was configured, + Hydra will disable the corresponding Kratos session through the admin API + if a frontchannel or backchannel logout was triggered. + + * fix: add special case for MySQL + + * chore: update sdk + + * chore: consistent naming + + * fix: cleanup persister + +* Support different jwt scope claim strategies ([#3531](https://github.com/ory/hydra/issues/3531)) ([45da11e](https://github.com/ory/hydra/commit/45da11e4fb4f0a2f939f11682c095b8dbfcddb78)) + + +# [2.2.0-pre.0](https://github.com/ory/hydra/compare/v2.2.0-rc.2...v2.2.0-pre.0) (2023-06-22) + +Test release + + + + + + +### Code Generation + +* Pin v2.2.0-pre.0 release commit ([116c1e8](https://github.com/ory/hydra/commit/116c1e89c423eebc333e2a9ff3e582090c5798a5)) + +### Features + +* Add distroless docker image ([#3539](https://github.com/ory/hydra/issues/3539)) ([c1e1a56](https://github.com/ory/hydra/commit/c1e1a569621d88365dceee7372ca49ecd119f939)) +* Add event tracing ([#3546](https://github.com/ory/hydra/issues/3546)) ([44ed0ac](https://github.com/ory/hydra/commit/44ed0ac89558bd83513e5240e8c937c908514d76)) + + +# [2.2.0-rc.2](https://github.com/ory/hydra/compare/v2.2.0-rc.1...v2.2.0-rc.2) (2023-06-13) + +This release optimizes the performance of authorization code grant flows by minimizing the number of database queries. We achieve this by storing the flow in an AEAD-encoded cookie and AEAD-encoded request parameters for the authentication and consent screens. + +BREAKING CHANGE: + +* The client that is used as part of the authorization grant flow is stored in the AEAD-encoding. Therefore, running flows will not observe updates to the client after they were started. +* Because the login and consent challenge values now include the AEAD-encoded flow, their size increased to around 1kB for a flow without any metadata (and increases linearly with the amount of metadata). Please adjust your ingress / gateway accordingly.
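As a rough way to gauge what "adjust your ingress / gateway accordingly" means for a concrete deployment, the hedged sketch below starts an authorization request and measures the size of the AEAD-encoded `login_challenge`. It is not part of the release notes: the host name, client ID, and redirect URI are placeholders, and it assumes an OAuth2 client with that ID and redirect URI is already registered.

```shell
# Hedged sketch (placeholder host, client, and redirect URI): trigger an authorization
# request and measure the login_challenge that Hydra forwards to the login UI.
# Ingress / gateway URL and header limits must accommodate at least this size.
HYDRA_PUBLIC_URL="https://hydra.example.org"      # placeholder
CLIENT_ID="example-client"                        # placeholder, must exist in Hydra
REDIRECT_URI="https://app.example.org/callback"   # placeholder, must be registered

# Follow no redirects; just capture the Location header Hydra answers with.
location=$(curl -s -o /dev/null -w '%{redirect_url}' -G \
  --data-urlencode "client_id=${CLIENT_ID}" \
  --data-urlencode "response_type=code" \
  --data-urlencode "scope=openid" \
  --data-urlencode "state=$(openssl rand -hex 16)" \
  --data-urlencode "redirect_uri=${REDIRECT_URI}" \
  "${HYDRA_PUBLIC_URL}/oauth2/auth")

# Extract the login_challenge query parameter and print its length.
challenge=$(printf '%s' "$location" | sed -n 's/.*login_challenge=\([^&]*\).*/\1/p')
printf 'login_challenge is %s bytes long\n' "${#challenge}"
```

If this length, plus the rest of the login URL, approaches the default URL or header limits of your proxy, raise those limits before rolling out this version.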
+ + + + + +### Bug Fixes + +* Version clash in apk install ([24ebdd3](https://github.com/ory/hydra/commit/24ebdd3feb302f655000a243dad032b04cf25afc)) + +### Code Generation + +* Pin v2.2.0-rc.2 release commit ([b183040](https://github.com/ory/hydra/commit/b183040a0d6c33abd4db01eb21a1bb0e141ea9ec)) + +### Features + +* Hot-reload Oauth2 CORS settings ([#3537](https://github.com/ory/hydra/issues/3537)) ([a8ecf80](https://github.com/ory/hydra/commit/a8ecf807b2c6bfa6cc2d8b474f527a2fda12daef)) +* Sqa metrics v2 ([#3533](https://github.com/ory/hydra/issues/3533)) ([3ec683d](https://github.com/ory/hydra/commit/3ec683d7cf582443f29bd93c4c88392b3ce692a4)) + + +# [2.2.0-rc.1](https://github.com/ory/hydra/compare/v2.1.2...v2.2.0-rc.1) (2023-06-12) + +This release optimizes the performance of authorization code grant flows by minimizing the number of database queries. We achieve this by storing the flow in an AEAD-encoded cookie and AEAD-encoded request parameters for the authentication and consent screens. + +BREAKING CHANGE: + +* The client that is used as part of the authorization grant flow is stored in the AEAD-encoding. Therefore, running flows will not observe updates to the client after they were started. +* Because the login and consent challenge values now include the AEAD-encoded flow, their size increased to around 1kB for a flow without any metadata (and increases linearly with the amount of metadata). Please adjust your ingress / gateway accordingly. + + + +## Breaking Changes + +* The client that is used as part of the authorization grant flow is stored in the AEAD-encoding. Therefore, running flows will not observe updates to the client after they were started. +* Because the login and consent challenge values now include the AEAD-encoded flow, their size increased to around 1kB for a flow without any metadata (and increases linearly with the amount of metadata). Please adjust your ingress / gateway accordingly. + + + +### Bug Fixes + +* Cockroach migration error when hydra upgrades v2 ([#3536](https://github.com/ory/hydra/issues/3536)) ([be6e005](https://github.com/ory/hydra/commit/be6e005e8eb245d3844eba133d1f78f9e21b0d0d)): + + Referring to issue https://github.com/ory/hydra/issues/3535 this PR is + intended to fix the Cockroach migration bug when upgrading Hydra from + v1.11.10 to v2. + + +### Code Generation + +* Pin v2.2.0-rc.1 release commit ([262ebbb](https://github.com/ory/hydra/commit/262ebbb5a7a585a26117a8c0fba6c257fc97b7b4)) + +### Features + +* Add metrics to disabled access log ([#3526](https://github.com/ory/hydra/issues/3526)) ([fc7af90](https://github.com/ory/hydra/commit/fc7af904407b27d1b5c0e5e62f82fd81ab81ecb2)) +* Stateless authorization code flow ([#3515](https://github.com/ory/hydra/issues/3515)) ([f29fe3a](https://github.com/ory/hydra/commit/f29fe3af97fb72061f2d6d7a2fc454cea5e870e9)): + + This patch optimizes the performance of authorization code grant flows by minimizing the number of database queries. We achieve this by storing the flow in an AEAD-encoded cookie and AEAD-encoded request parameters for the authentication and consent screens.
+ + +### Unclassified + +* Revert "fix: cockroach migration error when hydra upgrades v2 (#3536)" (#3542) ([4d8622f](https://github.com/ory/hydra/commit/4d8622fedcd54308c2e3a402a54f9f6eb751c9ce)), closes [#3536](https://github.com/ory/hydra/issues/3536) [#3542](https://github.com/ory/hydra/issues/3542): + + This reverts commit be6e005e8eb245d3844eba133d1f78f9e21b0d0d. + + + +# [2.1.2](https://github.com/ory/hydra/compare/v2.1.1...v2.1.2) (2023-05-24) + +We are excited to announce the next Ory Hydra release! This release includes the following important changes: + +- Fixed a memory leak in the OpenTelemetry implementation, improving overall memory usage and stability. +- Added a missing index for faster janitor cleanup, resulting in quicker and more efficient cleanup operations. +- Fixed a bug related to SameSite in dev mode, ensuring proper functionality and consistency in handling SameSite attributes during development. + +We appreciate your continuous support and feedback. Please feel free to reach out to us with any further suggestions or issues. -# [0.0.0](https://github.com/ory/hydra/compare/v2.0.3...v0.0.0) (2022-12-23) + +### Bug Fixes + +* Add index on requested_at for refresh tokens and use it in janitor ([#3516](https://github.com/ory/hydra/issues/3516)) ([5b8e712](https://github.com/ory/hydra/commit/5b8e7121c49a0dfed6312b599a617e692f324fdb)) +* Disable health check request logs ([#3496](https://github.com/ory/hydra/issues/3496)) ([eddf7f3](https://github.com/ory/hydra/commit/eddf7f3867e8977e58d09681c583e99bca503448)) +* Do not use prepared SQL statements and bump deps ([#3506](https://github.com/ory/hydra/issues/3506)) ([31b9e66](https://github.com/ory/hydra/commit/31b9e663b183f8244d86ddd1ae9f55267e190a69)) +* Proper SameSite=None in dev mode ([#3502](https://github.com/ory/hydra/issues/3502)) ([5751fae](https://github.com/ory/hydra/commit/5751fae7b37a2692ad484c785356e702928f1b9b)) +* Sqa config values unified across projects ([#3490](https://github.com/ory/hydra/issues/3490)) ([1b1899e](https://github.com/ory/hydra/commit/1b1899e9472fecfbdeb07f5e99c27713b82478e5)) +* **sql:** Incorrect JWK query ([#3499](https://github.com/ory/hydra/issues/3499)) ([13ce0d6](https://github.com/ory/hydra/commit/13ce0d6f39febed83c6b1e10b45b0be2ed75a415)): + + `persister_grant_jwk` had an OR statement without bracket leading to not using the last part of the query. + + +### Code Generation + +* Pin v2.1.2 release commit ([d94ed6e](https://github.com/ory/hydra/commit/d94ed6e4486ee270d8903e6e9376134931a742d9)) + +### Documentation + +* Incorrect json output format example ([#3497](https://github.com/ory/hydra/issues/3497)) ([b71a36b](https://github.com/ory/hydra/commit/b71a36bf5c063a719a9e31ff348af594d87dc794)) + +### Features + +* Add --skip-consent flag to hydra cli ([#3492](https://github.com/ory/hydra/issues/3492)) ([083d518](https://github.com/ory/hydra/commit/083d518cf51240c8977f0d9226897a9886cfbb50)) + + +# [2.1.1](https://github.com/ory/hydra/compare/v2.1.0...v2.1.1) (2023-04-11) + +Resolve a regression in looking up access and refresh tokens. 
+ + + + + +### Bug Fixes + +* Double-hashed access token signatures ([#3486](https://github.com/ory/hydra/issues/3486)) ([8720b25](https://github.com/ory/hydra/commit/8720b250b92b49c651d87f6e727beda31c227dfe)), closes [#3485](https://github.com/ory/hydra/issues/3485) + +### Code Generation + +* Pin v2.1.1 release commit ([6efae7c](https://github.com/ory/hydra/commit/6efae7cfa7430cecaa145e2e71958699a2394115)) + + +# [2.1.0](https://github.com/ory/hydra/compare/v2.1.0-pre.2...v2.1.0) (2023-04-06) + +We are excited to share this year's Q1 release of Ory Hydra: v2.1.0! + +Highlights: + +* Support for Datadog tracing (#3431). +* Ability to skip consent for trusted clients (#3451). +* Setting access token type in the OAuth2 Client is now possible (#3446). +* Revoke login sessions by SessionID (#3450). +* Session lifespan extended on session refresh (#3464). +* Token request hooks added for all grant types (#3427). +* Reduced SQL tracing noise (#3481). + +Don't want to run the upgrade yourself? Switch to [Ory Network](https://console.ory.sh/registration?flow=d1ae4761-3493-4dd9-b0ce-3200916b38aa)! + + + + + +### Bug Fixes + +* Reduce SQL tracing noise ([#3481](https://github.com/ory/hydra/issues/3481)) ([6e1f545](https://github.com/ory/hydra/commit/6e1f5454be3ff00b0016e3d72b121701ccd23625)) + +### Code Generation + +* Pin v2.1.0 release commit ([3649832](https://github.com/ory/hydra/commit/3649832421bff09b5e4c172b37dc61027dac0869)) + + +# [2.1.0-pre.2](https://github.com/ory/hydra/compare/v2.1.0-pre.1...v2.1.0-pre.2) (2023-04-03) + +autogen: pin v2.1.0-pre.2 release commit + + + + + +### Code Generation + +* Pin v2.1.0-pre.2 release commit ([3b1d87e](https://github.com/ory/hydra/commit/3b1d87e3a16dd4b4b55725c5c78eb062fefc8f2f)) + + +# [2.1.0-pre.1](https://github.com/ory/hydra/compare/v2.1.0-pre.0...v2.1.0-pre.1) (2023-04-03) + +autogen: pin v2.1.0-pre.1 release commit + + + + + +### Code Generation + +* Pin v2.1.0-pre.1 release commit ([2289e6b](https://github.com/ory/hydra/commit/2289e6b8159becde96b31fc99aa2a218631d70ea)) + + +# [2.1.0-pre.0](https://github.com/ory/hydra/compare/v2.0.3...v2.1.0-pre.0) (2023-03-31) + +autogen: pin v2.1.0-pre.0 release commit + + + + + +### Bug Fixes + +* Append /v2 to module path ([f56e5fa](https://github.com/ory/hydra/commit/f56e5fad74632c1f0c5f3768a0de8465f351a533)) +* Broken OIDC compliance images ([#3454](https://github.com/ory/hydra/issues/3454)) ([50bc1b4](https://github.com/ory/hydra/commit/50bc1b4267045a19845816af295b638179be9c2c)) +* Clean up unused code ([488f930](https://github.com/ory/hydra/commit/488f930e4f2c39386b1c1ff68dd60d1aaf968cb9)) +* Ensure RSA key length fullfills 4096bit requirement ([#2905](https://github.com/ory/hydra/issues/2905)) ([#3402](https://github.com/ory/hydra/issues/3402)) ([a663927](https://github.com/ory/hydra/commit/a6639277fcdee7ee2101bc6e40ab7facd7265d54)) +* Migration typo ([#3453](https://github.com/ory/hydra/issues/3453)) ([ed27c10](https://github.com/ory/hydra/commit/ed27c1016fe8f8fea5a99a0e2203552c3bdc0ab3)) +* No longer use separate public and private keys in HSM key manager ([#3401](https://github.com/ory/hydra/issues/3401)) 
([375bd5a](https://github.com/ory/hydra/commit/375bd5a69c0ece3aea0714ab7374ff8d09672c10)) +* Pin nancy ([0156556](https://github.com/ory/hydra/commit/0156556bb35278fcbc416b02504bc04511c468a7)) +* Release issue ([115da11](https://github.com/ory/hydra/commit/115da11930ed3723c53a1334eca47fd5ab6160ac)) +* Support allowed_cors_origins with client_secret_post ([#3457](https://github.com/ory/hydra/issues/3457)) ([ffe4943](https://github.com/ory/hydra/commit/ffe49430e31eee98ace65e829be5db3188c8fd4b)), closes [#3456](https://github.com/ory/hydra/issues/3456) +* Use correct default value ([#3469](https://github.com/ory/hydra/issues/3469)) ([2796d53](https://github.com/ory/hydra/commit/2796d53798c3a2fa36738fe40d287f93480f08d7)), closes [#3420](https://github.com/ory/hydra/issues/3420) + +### Code Generation + +* Pin v2.1.0-pre.0 release commit ([61f342c](https://github.com/ory/hydra/commit/61f342c2d9f266774885cf1242db796cb671ecad)) + +### Documentation + +* Update security email ([#3465](https://github.com/ory/hydra/issues/3465)) ([751c8e8](https://github.com/ory/hydra/commit/751c8e8a2f7393c52cd395e899b8852595f8682a)) + +### Features + +* Add ability to revoke login sessions by SessionID ([#3450](https://github.com/ory/hydra/issues/3450)) ([b42482b](https://github.com/ory/hydra/commit/b42482b7260d4e1771d01fc719e8216f5961ce65)), closes [#3448](https://github.com/ory/hydra/issues/3448): + + API `revokeOAuth2LoginSessions` can now revoke a single session by a SessionID (`sid` claim in the id_token) and execute an OpenID Connect Back-channel logout. + +* Add session cookie path configuration ([#3475](https://github.com/ory/hydra/issues/3475)) ([af9fa81](https://github.com/ory/hydra/commit/af9fa81ac0b3a877fe1a67505b6ae54d4ef58c00)), closes [#3473](https://github.com/ory/hydra/issues/3473) +* Add token request hooks for all grant types ([#3427](https://github.com/ory/hydra/issues/3427)) ([9bdf225](https://github.com/ory/hydra/commit/9bdf225d8f04c0b16dcdc4bbcc2d7bebc7534b4d)), closes [#3244](https://github.com/ory/hydra/issues/3244): + + Added a generic token hook that is called for all grant types and includes `payload` with a single allowed value - `assertion` to cover the `jwt-bearer` grant type customization. + + The existing `refresh token hook` is left unchanged and is considered to be deprecated in favor of the new hook logic. The `refresh token hook` will at some point be removed. + +* Allow setting access token type in client ([#3446](https://github.com/ory/hydra/issues/3446)) ([a6beed4](https://github.com/ory/hydra/commit/a6beed4659febd0917379d6da1e51d8ef75bc859)): + + The access token type (`jwt` or `opaque`) can now be set in the client configuration. The value set here will overwrite the global value for all flows concerning that client. + +* Allow skipping consent for trusted clients ([#3451](https://github.com/ory/hydra/issues/3451)) ([4f65365](https://github.com/ory/hydra/commit/4f65365f14ea28f979ebab7eb9c3396cbb25d619)): + + This adds a new boolean parameter `skip_consent` to the admin APIs of + the OAuth clients. This parameter will be forwarded to the consent app + as `client.skip_consent`. 
+ + It is up to the consent app to act on this parameter, but the canonical + implementation accepts the consent on the user's behalf, similar to + when `skip` is set. + +* Extend session lifespan on session refresh ([#3464](https://github.com/ory/hydra/issues/3464)) ([7511436](https://github.com/ory/hydra/commit/751143644dbc842c5928b1961d2c04d55b76b06b)), closes [#1690](https://github.com/ory/hydra/issues/1690) [#1557](https://github.com/ory/hydra/issues/1557) [#2246](https://github.com/ory/hydra/issues/2246) [#2848](https://github.com/ory/hydra/issues/2848): + + It is now possible to extend session lifespans when accepting login challenges. + +* Render complete config schema during CI ([#3433](https://github.com/ory/hydra/issues/3433)) ([ae3e781](https://github.com/ory/hydra/commit/ae3e7811ae2ba031fc4f1569a889d8b4ba0c96fd)): + + * chore: bump ory/x + + * chore: script to render the complete config + +* Support datadog tracing ([#3431](https://github.com/ory/hydra/issues/3431)) ([3ea014f](https://github.com/ory/hydra/commit/3ea014f98f72b1456909838e8f7c40ceade7b2f6)) + + # [2.0.3](https://github.com/ory/hydra/compare/v2.0.2...v2.0.3) (2022-12-08) Bugfixes for migration and pagination regressions and a new endpoint. @@ -1701,7 +3246,7 @@ Signed-off-by: Grant Zvolsky * Respect local DNS restrictions ([7eb1d1c](https://github.com/ory/hydra/commit/7eb1d1c0ff7189bcd76792ac38e7425e9b7c6f86)) * **sdk:** Add missing bearer security definition ([a85bc7a](https://github.com/ory/hydra/commit/a85bc7ab52aa6bce20eec52985a465fc31544b57)) * **sdk:** Type nulls ([fe70395](https://github.com/ory/hydra/commit/fe70395ae58e52a573bfac7385941d4504a1e403)) -* Support alternate hashing algorithms for client secrets ([ddba42f](https://github.com/ory/hydra/commit/ddba42f49837c48d4ee9bf9203ffa81f3b31757c)), closes [rfc6819#section-5](https://github.com/rfc6819/issues/section-5) [/datatracker.ietf.org/doc/html/rfc6819#section-5](https://github.com//datatracker.ietf.org/doc/html/rfc6819/issues/section-5): +* Support alternate hashing algorithms for client secrets ([ddba42f](https://github.com/ory/hydra/commit/ddba42f49837c48d4ee9bf9203ffa81f3b31757c)): This patch adds support for hashing client secrets using pbkdf2 instead of bcrypt, which might be a more appropriate algorithm in certain settings. As we assume that most environments fall in this category, we also changed the default to pbkdf2 with 25.000 rounds (roughly 1-3ms per hash on an Apple M1 Max core). @@ -1710,6 +3255,16 @@ Signed-off-by: Grant Zvolsky As most client secrets are auto-generated, using high hash costs is not useful. The password (OAuth2 Client Secret) is not user chosen and unlikely to be reused. As such, there is little point in using excessive hash costs to protect users. High hash costs in a system like Ory Hydra will cause high CPU costs from mostly automated traffic (OAuth2 Client interactions). It has also been a point of critizism from some who wish for better RPS on specific endpoints. Other systems like Keycloak do not [hash client secrets at all](https://groups.google.com/g/keycloak-dev/c/TmsNfnol0_g), referencing more secure authentication mechanisms such as assertion-based client authentication. 
+ + We and the IETF disagree though, as [rfc6819#section-5.1.4.1.3](https://datatracker.ietf.org/doc/html/rfc6819#section-5.1.4.1.3) states: + + > The authorization server should not store credentials in clear text. + Typical approaches are to store hashes instead or to encrypt + credentials. If the credential lacks a reasonable entropy level + (because it is a user password), an additional salt will harden the + storage to make offline dictionary attacks more difficult. + + For that reason, cleartext storage of client secrets is not going to be supported. * Support ES256 for generating JWTs ([9a080ad](https://github.com/ory/hydra/commit/9a080ad2fa75c932da6ec0a40602cbfdeee8fd94)) * Switch to otelx ([#3108](https://github.com/ory/hydra/issues/3108)) ([05eaf6d](https://github.com/ory/hydra/commit/05eaf6d3be68f52cbed4de2a8586bfa777d1187f)) @@ -2029,9 +3584,11 @@ Ory Hydra moved from CircleCI to GitHub Actions! * Add cloud ([76d4d80](https://github.com/ory/hydra/commit/76d4d805b5f25bc5b9f8fdf2ab3b1660968f3ad3)) * Add options for using SQLite & Cockroach DB to 5min tutorial, fix typo in contribution guidelines ([#2970](https://github.com/ory/hydra/issues/2970)) ([05038de](https://github.com/ory/hydra/commit/05038deebc170258813839ea04caa351aec03639)) -* Recommend to start with one container in prod to complete first-time setup. ([#2945](https://github.com/ory/hydra/issues/2945)) ([e257f3e](https://github.com/ory/hydra/commit/e257f3e6a4549b07533557aab941e5a1aa45337e)), closes [/github.com/ory/hydra/discussions/2943#discussioncomment-1997531](https://github.com//github.com/ory/hydra/discussions/2943/issues/discussioncomment-1997531): +* Recommend to start with one container in prod to complete first-time setup. ([#2945](https://github.com/ory/hydra/issues/2945)) ([e257f3e](https://github.com/ory/hydra/commit/e257f3e6a4549b07533557aab941e5a1aa45337e)): This is to ensure multiple concurrent workers don't both generate JWKs needlessly, for example. + + See https://github.com/ory/hydra/discussions/2943#discussioncomment-1997531 * Update readme ([2b1fb64](https://github.com/ory/hydra/commit/2b1fb6421dd25f38aacc6192895be950874fcb7e)) @@ -2437,10 +3994,7 @@ This patch changes how issuer and public URLs are used. Please be aware that goi * Do not use error_hint anymore ([#2450](https://github.com/ory/hydra/issues/2450)) ([ff90c47](https://github.com/ory/hydra/commit/ff90c47ff52c30ffeb0f9740b870be0f5313fd04)) * Handled requests respond with 410 Gone and include redirect URL ([#2473](https://github.com/ory/hydra/issues/2473)) ([e3d9158](https://github.com/ory/hydra/commit/e3d9158aebb750386c4dd2ebed0dfdaf5b374805)), closes [#1569](https://github.com/ory/hydra/issues/1569) * Link in documentation ([#2478](https://github.com/ory/hydra/issues/2478)) ([5fdd913](https://github.com/ory/hydra/commit/5fdd91302a8068956515c750a7d160dfa10057a6)) -* Login and consent redirect behavior change since 1.9.x ([#2457](https://github.com/ory/hydra/issues/2457)) ([2f3a1af](https://github.com/ory/hydra/commit/2f3a1afb09c96400484f0e4b397c6b811fe72fe4)), closes [#2363](https://github.com/ory/hydra/issues/2363): - - Allow #fragment in configured url to keep backwards compatibility. 
- +* Login and consent redirect behavior change since 1.9.x ([#2457](https://github.com/ory/hydra/issues/2457)) ([2f3a1af](https://github.com/ory/hydra/commit/2f3a1afb09c96400484f0e4b397c6b811fe72fe4)), closes [#fragment](https://github.com/ory/hydra/issues/fragment) [#2363](https://github.com/ory/hydra/issues/2363) * Make token user command work with public clients ([#2479](https://github.com/ory/hydra/issues/2479)) ([a033d6a](https://github.com/ory/hydra/commit/a033d6a732c13b2d15ba073f582a994d174e299c)) * Resolve clidoc issues ([f6e5958](https://github.com/ory/hydra/commit/f6e59589eba86f179ac4462f1b00fc1d2066d4b5)) * Resolve specignore issues ([1431167](https://github.com/ory/hydra/commit/143116732bdf86ba92a1e42928519edb23ed53b7)) @@ -2566,7 +4120,12 @@ To follow OAuth2 best-practice, refresh tokens will now invalidate the whole acc This patch phases out the `/oauth2/flush` endpoint as the janitor is better suited for background tasks, is easier to run in a targeted fashion (e.g. as a singleton job), and does not cause HTTP timeouts. -* Flush refresh tokens for service oauth2/flush ([#2373](https://github.com/ory/hydra/issues/2373)) ([b46a14c](https://github.com/ory/hydra/commit/b46a14cd6d260a7dee748de34abfea54908f1a0b)), closes [/github.com/ory/hydra/issues/1574#issuecomment-736684327](https://github.com//github.com/ory/hydra/issues/1574/issues/issuecomment-736684327) +* Flush refresh tokens for service oauth2/flush ([#2373](https://github.com/ory/hydra/issues/2373)) ([b46a14c](https://github.com/ory/hydra/commit/b46a14cd6d260a7dee748de34abfea54908f1a0b)): + + See https://github.com/ory/hydra/issues/1574#issuecomment-736684327 + + + * Move to go 1.16 and static embed files ([6fa591c](https://github.com/ory/hydra/commit/6fa591c849c3d63b036d7a4001496f42f02b821b)) * Refresh token reuse detection ([#2383](https://github.com/ory/hydra/issues/2383)) ([bc349f1](https://github.com/ory/hydra/commit/bc349f1fbaf19340081d9a6c097de2b76e848e46)), closes [#2022](https://github.com/ory/hydra/issues/2022): @@ -3516,9 +5075,15 @@ If you haven't yet, consider joining our [Slack family](https://slack.ory.sh)! 
* Add config schema for log.leak_sensitive_values ([#1905](https://github.com/ory/hydra/issues/1905)) ([d954649](https://github.com/ory/hydra/commit/d954649cd382728b7ec8b58b56e75d2f0913d75a)) * Properly return when subject is empty ([#1909](https://github.com/ory/hydra/issues/1909)) ([5b54519](https://github.com/ory/hydra/commit/5b5451929196eaa09a2fa21b1fbb5797693bf897)), closes [#1842](https://github.com/ory/hydra/issues/1842) * Remove duplicated tracing logger ([#1900](https://github.com/ory/hydra/issues/1900)) ([48c2c6d](https://github.com/ory/hydra/commit/48c2c6de27a7ec73c77cb29c86578b8ca78885e8)) -* Same site legacy workaround on iOS 12 ([#1908](https://github.com/ory/hydra/issues/1908)) ([128ad98](https://github.com/ory/hydra/commit/128ad987d548e719b62e789264a82ef5e611ff59)), closes [#1810](https://github.com/ory/hydra/issues/1810) [/github.com/golang/go/blob/release-branch.go1.14/src/net/http/cookie.go#L221](https://github.com//github.com/golang/go/blob/release-branch.go1.14/src/net/http/cookie.go/issues/L221) [/tools.ietf.org/html/draft-ietf-httpbis-cookie-same-site-00#section-4](https://github.com//tools.ietf.org/html/draft-ietf-httpbis-cookie-same-site-00/issues/section-4) [239226#L118](https://github.com/239226/issues/L118) [#1907](https://github.com/ory/hydra/issues/1907): +* Same site legacy workaround on iOS 12 ([#1908](https://github.com/ory/hydra/issues/1908)) ([128ad98](https://github.com/ory/hydra/commit/128ad987d548e719b62e789264a82ef5e611ff59)), closes [#1907](https://github.com/ory/hydra/issues/1907): Enables legacy compatibility on iOS version < 13 and macOS version < 10.15 + + #1810 incorrectly implements https://web.dev/samesite-cookie-recipes/#handling-incompatible-clients + + Notice Set-cookie: 3pcookie-legacy=value; Secure the cookie does not have the SameSite attribute present. 
The http.SameSiteDefaultMode used in hydra implementation results in attribute without the value, see https://github.com/golang/go/blob/release-branch.go1.14/src/net/http/cookie.go#L221 + + That triggers the problems with the older iOS and macOS versions, as Apple did not follow the https://tools.ietf.org/html/draft-ietf-httpbis-cookie-same-site-00#section-4.1 see https://trac.webkit.org/browser/webkit/trunk/Source/WebInspectorUI/UserInterface/Models/Cookie.js?rev=239226#L118 * Use .bin in PATH and improve CI tasks ([#1897](https://github.com/ory/hydra/issues/1897)) ([9c6eba8](https://github.com/ory/hydra/commit/9c6eba8d0611fb4a79820a90c31f72eb578ca3d5)) @@ -5178,7 +6743,12 @@ ci: Use yaml in configuration docs runner -* Bump Golang to 1.12.1 ([#1315](https://github.com/ory/hydra/issues/1315)) ([a073966](https://github.com/ory/hydra/commit/a0739661340f67ff541a4987e1c8bd224d8b9851)), closes [/golang.org/doc/devel/release.html#go1](https://github.com//golang.org/doc/devel/release.html/issues/go1) +* Bump Golang to 1.12.1 ([#1315](https://github.com/ory/hydra/issues/1315)) ([a073966](https://github.com/ory/hydra/commit/a0739661340f67ff541a4987e1c8bd224d8b9851)): + + https://golang.org/doc/devel/release.html#go1.12.minor + + + * Bump ory/x to 0.0.35 ([#1267](https://github.com/ory/hydra/issues/1267)) ([b503e15](https://github.com/ory/hydra/commit/b503e151f25021958099e31ba2162d879d3cc7d3)), closes [#1266](https://github.com/ory/hydra/issues/1266) * Bump testify and crypto ([#1262](https://github.com/ory/hydra/issues/1262)) ([5eadbe5](https://github.com/ory/hydra/commit/5eadbe5d0409cfc0805cd15d50f57a57cc5e2248)) * Disable modules temporarily when fetching a tool ([#1302](https://github.com/ory/hydra/issues/1302)) ([bd5b90b](https://github.com/ory/hydra/commit/bd5b90b1a71fb431cc917640acca230bcf09cbfd)) @@ -5207,7 +6777,12 @@ ci: Use yaml in configuration docs runner * Fix description of clients create --subject-type option ([#1305](https://github.com/ory/hydra/issues/1305)) ([fa40b43](https://github.com/ory/hydra/commit/fa40b43571a29da398103b13c3b175c6f81ef9c6)) * Fix disable-telemetry check ([#1258](https://github.com/ory/hydra/issues/1258)) ([d7be0c7](https://github.com/ory/hydra/commit/d7be0c7328bfda9e24c5aeb02389aca814e40de1)) -* Fix docker-compose wrong restart values ([#1313](https://github.com/ory/hydra/issues/1313)) ([4d004bf](https://github.com/ory/hydra/commit/4d004bf67e2ec5c8fe533adea4f3bbe797060879)), closes [#1312](https://github.com/ory/hydra/issues/1312) +* Fix docker-compose wrong restart values ([#1313](https://github.com/ory/hydra/issues/1313)) ([4d004bf](https://github.com/ory/hydra/commit/4d004bf67e2ec5c8fe533adea4f3bbe797060879)): + + Replace unless-stopped values as they are not supported in used compose version (v2), for values supported in this compose version: on-failure. Closes #1312. 
+ + + * Fix no-open inverted flag check ([#1306](https://github.com/ory/hydra/issues/1306)) ([1aad679](https://github.com/ory/hydra/commit/1aad67920c63669ae9e8e23c4d505477a72f19e7)) * Fix swagger documentation for oauth2/token ([#1284](https://github.com/ory/hydra/issues/1284)) ([3db25f6](https://github.com/ory/hydra/commit/3db25f6a69bfe09d929556a447a27b12348159e6)), closes [#1274](https://github.com/ory/hydra/issues/1274) * Login revokation is exposed at public not admin ([#1333](https://github.com/ory/hydra/issues/1333)) ([7c4b6d4](https://github.com/ory/hydra/commit/7c4b6d4a61191fcfe947acca8b4dbf942fec3b15)), closes [#1329](https://github.com/ory/hydra/issues/1329) @@ -5222,7 +6797,12 @@ ci: Use yaml in configuration docs runner * Return proper refresh token expiration time ([#1300](https://github.com/ory/hydra/issues/1300)) ([a18c44e](https://github.com/ory/hydra/commit/a18c44ef3b77f0beec7590ba6f9b1e32387c5b3e)), closes [#1296](https://github.com/ory/hydra/issues/1296) * Support multi proxies between TLS termination proxy and hydra ([#1283](https://github.com/ory/hydra/issues/1283)) ([769491d](https://github.com/ory/hydra/commit/769491deecde28c75de16069218d15627f034e8e)), closes [#1282](https://github.com/ory/hydra/issues/1282) * Support transactions in SQL store ([#1277](https://github.com/ory/hydra/issues/1277)) ([65415ff](https://github.com/ory/hydra/commit/65415ff731658b822ccd9628d4d497fb6f7634db)), closes [#1247](https://github.com/ory/hydra/issues/1247) [#1247](https://github.com/ory/hydra/issues/1247) [#1247](https://github.com/ory/hydra/issues/1247) [#1247](https://github.com/ory/hydra/issues/1247) [#1247](https://github.com/ory/hydra/issues/1247) [#1247](https://github.com/ory/hydra/issues/1247) -* Update docker-compose to v3 ([d5993cb](https://github.com/ory/hydra/commit/d5993cbe29ef674ca621d847d8b75ef1452e2679)), closes [#1321](https://github.com/ory/hydra/issues/1321) +* Update docker-compose to v3 ([d5993cb](https://github.com/ory/hydra/commit/d5993cbe29ef674ca621d847d8b75ef1452e2679)): + + Update docker-compose to v3. Closes #1321. + + + # [1.0.0-rc.6+oryOS.10](https://github.com/ory/hydra/compare/v1.0.0-rc.5+oryOS.10...v1.0.0-rc.6+oryOS.10) (2018-12-18) @@ -9318,7 +10898,12 @@ all: resolve and test for issues in rethinkdb coldstart - closes #207 * Fixed nil pointer issue ([f09bc08](https://github.com/ory/hydra/commit/f09bc08d152a98eba5a31b9266c3b65237b17520)) * Fixed null pointer in cli call to oauthHandler ([e7a827f](https://github.com/ory/hydra/commit/e7a827f3b7adf4a91c46afa8e918da34580e2b43)) * Fixed permission typo and tests ([5a4ec4a](https://github.com/ory/hydra/commit/5a4ec4ab6968df4ca53a415a7ec1ea1f7974ae89)) -* Fixed smaller bugs and typos in RethinkDB and PostgreSQL. ([aebd9d6](https://github.com/ory/hydra/commit/aebd9d61c939e3e56cf8363c5f969d7f3bb68694)), closes [#53](https://github.com/ory/hydra/issues/53) +* Fixed smaller bugs and typos in RethinkDB and PostgreSQL. 
([aebd9d6](https://github.com/ory/hydra/commit/aebd9d61c939e3e56cf8363c5f969d7f3bb68694)): + + Storage/RethinkDB: Refactored smaller parts in conformance to discussions in #53. + + + * Fixed tests on linux hosts ([82c7431](https://github.com/ory/hydra/commit/82c74319d223e4e9c1143f5cd015001058be5501)) * Fixed typos, improved instructions ([546a109](https://github.com/ory/hydra/commit/546a1094d1acbc3ba4b61afaacbc8f699944cdd6)) * Fosite note ([f09cf2d](https://github.com/ory/hydra/commit/f09cf2dedfc5ef888efee40f1da47fa4668ccf51)) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 4861c9d1844..9cebaf358e3 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -39,6 +39,16 @@ Examples of unacceptable behavior include: - Other conduct which could reasonably be considered inappropriate in a professional setting +## Open Source Community Support + +Ory Open source software is collaborative and based on contributions by +developers in the Ory community. There is no obligation from Ory to help with +individual problems. If Ory open source software is used in production in a +for-profit company or enterprise environment, we mandate a paid support contract +where Ory is obligated under their service level agreements (SLAs) to offer a +defined level of availability and responsibility. For more information about +paid support please contact us at sales@ory.sh. + ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0911efc6a8d..13e063a394f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,7 +10,7 @@ - [FAQ](#faq) - [How can I contribute?](#how-can-i-contribute) - [Communication](#communication) -- [Contribute examples](#contribute-examples) +- [Contribute examples or community projects](#contribute-examples-or-community-projects) - [Contribute code](#contribute-code) - [Contribute documentation](#contribute-documentation) - [Disclosing vulnerabilities](#disclosing-vulnerabilities) @@ -64,7 +64,7 @@ won't clash with Ory Hydra's direction. A great way to do this is via [a Contributors License Agreement?](https://cla-assistant.io/ory/hydra) - I would like updates about new versions of Ory Hydra. - [How are new releases announced?](https://ory.us10.list-manage.com/subscribe?u=ffb1a878e4ec6c0ed312a3480&id=f605a41b53) + [How are new releases announced?](https://www.ory.sh/l/sign-up-newsletter) ## How can I contribute? @@ -123,32 +123,16 @@ the projects that you are interested in. Also, [follow us on Twitter](https://twitter.com/orycorp). -## Contribute examples +## Contribute examples or community projects -One of the most impactful ways to contribute is by adding examples. You can find -an overview of examples using Ory services on the -[documentation examples page](https://www.ory.sh/docs/examples). Source code for -examples can be found in most cases in the -[ory/examples](https://github.com/ory/examples) repository. +One of the most impactful ways to contribute is by adding code examples or other +Ory-related code. You can find an overview of community code in the +[awesome-ory](https://github.com/ory/awesome-ory) repository. _If you would like to contribute a new example, we would love to hear from you!_ -Please [open an issue](https://github.com/ory/examples/issues/new/choose) to -describe your example before you start working on it. 
We would love to provide -guidance to make for a pleasant contribution experience. Go through this -checklist to contribute an example: - -1. Create a GitHub issue proposing a new example and make sure it's different - from an existing one. -1. Fork the repo and create a feature branch off of `master` so that changes do - not get mixed up. -1. Add a descriptive prefix to commits. This ensures a uniform commit history - and helps structure the changelog. Please refer to this - [list of prefixes for Hydra](https://github.com/ory/hydra/blob/master/.github/semantic.yml) - for an overview. -1. Create a `README.md` that explains how to use the example. (Use - [the README template](https://github.com/ory/examples/blob/master/_common/README)). -1. Open a pull request and maintainers will review and merge your example. +Please [open a pull request at awesome-ory](https://github.com/ory/awesome-ory/) +to add your example or Ory-related project to the awesome-ory README. ## Contribute code @@ -172,8 +156,10 @@ request, go through this checklist: 1. Run `make format` 1. Add a descriptive prefix to commits. This ensures a uniform commit history and helps structure the changelog. Please refer to this - [list of prefixes for Hydra](https://github.com/ory/hydra/blob/master/.github/semantic.yml) - for an overview. + [Conventional Commits configuration](https://github.com/ory/hydra/blob/master/.github/workflows/conventional_commits.yml) + for the list of accepted prefixes. You can read more about the Conventional + Commit specification + [at their site](https://www.conventionalcommits.org/en/v1.0.0/). If a pull request is not ready to be reviewed yet [it should be marked as a "Draft"](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request). diff --git a/DEVELOP.md b/DEVELOP.md new file mode 100644 index 00000000000..0b1feb88d12 --- /dev/null +++ b/DEVELOP.md @@ -0,0 +1,221 @@ +# Development + +This document explains how to develop Ory Hydra, run tests, and work with the +tooling around it. + +## Upgrading and changelog + +New releases might introduce breaking changes. To help you identify and +incorporate those changes, we document them in +[CHANGELOG.md](./CHANGELOG.md). + +## Command line documentation + +To see available commands and flags, run: + +```shell +hydra -h +# or +hydra help +``` + +## Contribution guidelines + +We encourage all contributions. Before opening a pull request, read the +[contribution guidelines](./CONTRIBUTING.md). + +## Prerequisites + +You need Go 1.13+ with `GO111MODULE=on` and, for the test suites: + +- Docker and Docker Compose +- Makefile +- Node.js and npm + +It is possible to develop Ory Hydra on Windows, but please be aware that all +guides assume a Unix shell like bash or zsh. + +## Formatting code + +Format all code using: + +```shell +make format +``` + +The continuous integration pipeline checks code formatting. + +## Running tests + +There are three types of tests: + +- Short tests that do not require a SQL database +- Regular tests that require PostgreSQL, MySQL, and CockroachDB +- End-to-end tests that use real databases and a test browser + +### Short tests + +Short tests run fairly quickly and use SQLite in-memory. + +Because all short tests run against an in-memory SQLite database, the +`-tags sqlite` build tag is required.
+ +Run all short tests: + +```shell +go test -v -failfast -short -tags sqlite ./... +``` + +Run short tests in a specific module: + +```shell +go test -v -failfast -short -tags sqlite ./client +``` + +Run a specific test: + +```shell +go test -v -failfast -short -tags sqlite -run ^TestName$ ./... +``` + +### Regular tests + +Regular tests require a database setup. + +The test suite can use [ory/dockertest](https://github.com/ory/dockertest) to +work with Docker directly, but we encourage using the Makefile instead. Using +dockertest can bloat the number of Docker images on your system and is quite +slow. + +Run the full test suite: + +```shell +make test +``` + +> Note: `make test` recreates the databases every time. This can be annoying if +> you are trying to fix something very specific and need the database tests all +> the time. + +If you want to reuse databases across test runs, initialize them once: + +```shell +make test-resetdb +export TEST_DATABASE_MYSQL='mysql://root:secret@(127.0.0.1:3444)/mysql?parseTime=true&multiStatements=true' +export TEST_DATABASE_POSTGRESQL='postgres://postgres:secret@127.0.0.1:3445/postgres?sslmode=disable' +export TEST_DATABASE_COCKROACHDB='cockroach://root@127.0.0.1:3446/defaultdb?sslmode=disable' +``` + +Then you can run Go tests directly as often as needed: + +```shell +go test -p 1 ./... + +# or in a module: +cd client +go test . +``` + +### End-to-end tests + +The E2E tests use [Cypress](https://www.cypress.io) to run full browser tests. + +Run e2e tests: + +```shell +make e2e +``` + +The runner will not show the browser window, as it runs in CI mode (in the +background). That makes debugging this type of test very difficult, but +thankfully you can run the e2e tests in the browser, which helps with debugging: + +```shell +./test/e2e/circle-ci.bash memory --watch + +# Or for the JSON Web Token Access Token strategy: +# ./test/e2e/circle-ci.bash memory-jwt --watch +``` + +Or if you would like to test one of the databases: + +```shell +make test-resetdb +export TEST_DATABASE_MYSQL='mysql://root:secret@(127.0.0.1:3444)/mysql?parseTime=true&multiStatements=true' +export TEST_DATABASE_POSTGRESQL='postgres://postgres:secret@127.0.0.1:3445/postgres?sslmode=disable' +export TEST_DATABASE_COCKROACHDB='cockroach://root@127.0.0.1:3446/defaultdb?sslmode=disable' + +# You can test against each individual database: +./test/e2e/circle-ci.bash postgres --watch +./test/e2e/circle-ci.bash memory --watch +./test/e2e/circle-ci.bash mysql --watch +# ... +``` + +Once you run the script, a Cypress window will appear. Hit the button "Run all +Specs"! + +The code for these tests is located in +[./cypress/integration](./cypress/integration), +[./cypress/support](./cypress/support), and +[./cypress/helpers](./cypress/helpers). The website you're seeing is located in +[./test/e2e/oauth2-client](./test/e2e/oauth2-client). + +#### OpenID Connect conformity tests + +To run Ory Hydra against the OpenID Connect conformity suite, run: + +```shell +./test/conformity/start.sh --build +``` + +and then in a separate shell: + +```shell +./test/conformity/test.sh +``` + +Running these tests takes a significant amount of time, which is why they are +not part of the CI pipeline. + +## Build Docker image + +To build a development Docker image: + +```shell +make docker +``` + +> [!WARNING] If you already have a production image (e.g.
`oryd/hydra:v2.2.0`) +> pulled, the above `make docker` command will replace it with a local build of +> the image that is more equivalent to the `-distroless` variant on Docker Hub. +> +> You can pull the production image any time using `docker pull` + +## Run the Docker Compose quickstarts + +If you wish to check your code changes against any of the docker-compose +quickstart files, run: + +```shell +docker compose -f quickstart.yml up --build +``` + +## Add a new migration + +1. `mkdir persistence/sql/src/YYYYMMDD000001_migration_name/` +2. Put the migration files into this directory, following the standard naming + conventions. If you wish to execute different parts of a migration in + separate transactions, add split marks (lines with the text `--split`) where + desired. Why this might be necessary is explained in + https://github.com/gobuffalo/fizz/issues/104. +3. Run `make persistence/sql/migrations/` to generate migration + fragments. +4. If an update causes the migration to have fewer fragments than the number + already generated, run + `make persistence/sql/migrations/-clean`. This is equivalent to + a `rm` command with the right parameters, but comes with better tab + completion. +5. Before committing generated migration fragments, run the above clean command + and generate a fresh copy of migration fragments to make sure the `sql/src` + and `sql/migrations` directories are consistent. diff --git a/Makefile b/Makefile index 88aa8faf17a..250430fe0bb 100644 --- a/Makefile +++ b/Makefile @@ -1,25 +1,13 @@ SHELL=/bin/bash -o pipefail -export GO111MODULE := on -export PATH := .bin:${PATH} -export PWD := $(shell pwd) +export PATH := .bin:${PATH} +export PWD := $(shell pwd) +export IMAGE_TAG := $(if $(IMAGE_TAG),$(IMAGE_TAG),latest) -GOLANGCI_LINT_VERSION = 1.46.2 +GOLANGCI_LINT_VERSION = 2.4.0 -GO_DEPENDENCIES = github.com/ory/go-acc \ - github.com/golang/mock/mockgen \ - github.com/go-swagger/go-swagger/cmd/swagger \ - github.com/go-bindata/go-bindata/go-bindata - -define make-go-dependency - # go install is responsible for not re-building when the code hasn't changed - .bin/$(notdir $1): go.sum go.mod - GOBIN=$(PWD)/.bin/ go install $1 -endef - -.bin/golangci-lint-$(GOLANGCI_LINT_VERSION): +.bin/golangci-lint: Makefile curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b .bin v$(GOLANGCI_LINT_VERSION) - mv .bin/golangci-lint .bin/golangci-lint-$(GOLANGCI_LINT_VERSION) $(foreach dep, $(GO_DEPENDENCIES), $(eval $(call make-go-dependency, $(dep)))) @@ -27,56 +15,47 @@ node_modules: package-lock.json npm ci touch node_modules -.PHONY: .bin/yq -.bin/yq: - go build -o .bin/yq github.com/mikefarah/yq/v4 - .bin/clidoc: go.mod go build -o .bin/clidoc ./cmd/clidoc/. docs/cli: .bin/clidoc clidoc . 
-.bin/goimports: go.sum Makefile - GOBIN=$(shell pwd)/.bin go install golang.org/x/tools/cmd/goimports@latest - .bin/licenses: Makefile - curl https://raw.githubusercontent.com/ory/ci/master/licenses/install | sh + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/ci/master/licenses/install | sh .bin/ory: Makefile - curl https://raw.githubusercontent.com/ory/meta/master/install.sh | bash -s -- -b .bin ory v0.1.48 + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/meta/master/install.sh | bash -s -- -b .bin ory v0.2.2 touch .bin/ory .PHONY: lint -lint: .bin/golangci-lint-$(GOLANGCI_LINT_VERSION) - .bin/golangci-lint-$(GOLANGCI_LINT_VERSION) run -v ./... +lint: .bin/golangci-lint + golangci-lint run -v ./... # Runs full test suite including tests where databases are enabled .PHONY: test -test: .bin/go-acc +test: make test-resetdb - source scripts/test-env.sh && go-acc ./... -- -failfast -timeout=20m -tags sqlite,json1 + source scripts/test-env.sh && go test -failfast -timeout=20m -tags sqlite,sqlite_omit_load_extension ./... docker rm -f hydra_test_database_mysql docker rm -f hydra_test_database_postgres docker rm -f hydra_test_database_cockroach # Resets the test databases .PHONY: test-resetdb -test-resetdb: node_modules - docker kill hydra_test_database_mysql || true - docker kill hydra_test_database_postgres || true - docker kill hydra_test_database_cockroach || true - docker rm -f hydra_test_database_mysql || true - docker rm -f hydra_test_database_postgres || true - docker rm -f hydra_test_database_cockroach || true - docker run --rm --name hydra_test_database_mysql --platform linux/amd64 -p 3444:3306 -e MYSQL_ROOT_PASSWORD=secret -d mysql:8.0.26 - docker run --rm --name hydra_test_database_postgres --platform linux/amd64 -p 3445:5432 -e POSTGRES_PASSWORD=secret -e POSTGRES_DB=postgres -d postgres:11.8 - docker run --rm --name hydra_test_database_cockroach --platform linux/amd64 -p 3446:26257 -d cockroachdb/cockroach:v22.1.10 start-single-node --insecure +test-resetdb: + docker rm --force --volumes hydra_test_database_mysql || true + docker rm --force --volumes hydra_test_database_postgres || true + docker rm --force --volumes hydra_test_database_cockroach || true + docker run --rm --name hydra_test_database_mysql -p 3444:3306 -e MYSQL_ROOT_PASSWORD=secret -d mysql:8.0 + docker run --rm --name hydra_test_database_postgres -p 3445:5432 -e POSTGRES_PASSWORD=secret -e POSTGRES_DB=postgres -d postgres:16 + docker run --rm --name hydra_test_database_cockroach -p 3446:26257 -d cockroachdb/cockroach:latest-v25.4 start-single-node --insecure # Build local docker images .PHONY: docker docker: - docker build -f .docker/Dockerfile-build -t oryd/hydra:latest-sqlite . + DOCKER_CONTENT_TRUST=1 docker build --progress=plain -f .docker/Dockerfile-local-build -t oryd/hydra:${IMAGE_TAG} . + echo "Local development image has been built." .PHONY: e2e e2e: node_modules test-resetdb @@ -89,45 +68,51 @@ e2e: node_modules test-resetdb # Runs tests in short mode, without database adapters .PHONY: quicktest quicktest: - go test -failfast -short -tags sqlite,json1 ./... + go test -failfast -short -tags sqlite,sqlite_omit_load_extension ./... .PHONY: quicktest-hsm quicktest-hsm: - docker build --progress=plain -f .docker/Dockerfile-hsm --target test-hsm . + DOCKER_CONTENT_TRUST=1 docker build --progress=plain -f .docker/Dockerfile-test-hsm --target test-hsm -t oryd/hydra:${IMAGE_TAG} --target test-hsm . 
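+# Note: quicktest-hsm runs the HSM-enabled tests inside the Docker build of
+# the test-hsm stage in .docker/Dockerfile-test-hsm.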
+ +.PHONY: test-refresh +test-refresh: + UPDATE_SNAPSHOTS=true go test -short -tags sqlite,sqlite_omit_load_extension ./... + DOCKER_CONTENT_TRUST=1 docker build --progress=plain -f .docker/Dockerfile-test-hsm --target test-refresh-hsm -t oryd/hydra:${IMAGE_TAG} --target test-refresh-hsm . authors: # updates the AUTHORS file - curl https://raw.githubusercontent.com/ory/ci/master/authors/authors.sh | env PRODUCT="Ory Hydra" bash + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/ci/master/authors/authors.sh | env PRODUCT="Ory Hydra" bash # Formats the code .PHONY: format -format: .bin/goimports .bin/ory node_modules - .bin/ory dev headers copyright --type=open-source --exclude=internal/httpclient - .bin/goimports -w --local github.com/ory . +format: .bin/ory node_modules + ory dev headers copyright --type=open-source --exclude=internal/httpclient --exclude=oryx + go tool goimports -w --local github.com/ory . npm exec -- prettier --write . # Generates mocks .PHONY: mocks -mocks: .bin/mockgen - mockgen -package oauth2_test -destination oauth2/oauth2_provider_mock_test.go github.com/ory/fosite OAuth2Provider - mockgen -package jwk_test -destination jwk/registry_mock_test.go -source=jwk/registry.go +mocks: + go tool mockgen -package oauth2_test -destination oauth2/oauth2_provider_mock_test.go github.com/ory/fosite OAuth2Provider + go tool mockgen -package jwk_test -destination jwk/registry_mock_test.go -source=jwk/registry.go go generate ./... # Generates the SDKs .PHONY: sdk -sdk: .bin/swagger .bin/ory node_modules - swagger generate spec -m -o spec/swagger.json \ - -c github.com/ory/hydra/client \ - -c github.com/ory/hydra/consent \ - -c github.com/ory/hydra/health \ - -c github.com/ory/hydra/jwk \ - -c github.com/ory/hydra/oauth2 \ - -c github.com/ory/hydra/x \ +sdk: .bin/ory node_modules + go tool swagger generate spec -m -o spec/swagger.json \ + -c github.com/ory/hydra/v2/client \ + -c github.com/ory/hydra/v2/consent \ + -c github.com/ory/hydra/v2/flow \ + -c github.com/ory/hydra/v2/health \ + -c github.com/ory/hydra/v2/jwk \ + -c github.com/ory/hydra/v2/oauth2 \ + -c github.com/ory/hydra/v2/x \ -c github.com/ory/x/healthx \ -c github.com/ory/x/openapix \ -c github.com/ory/x/pagination \ -c github.com/ory/herodot ory dev swagger sanitize ./spec/swagger.json - swagger validate ./spec/swagger.json + go tool swagger validate ./spec/swagger.json CIRCLE_PROJECT_USERNAME=ory CIRCLE_PROJECT_REPONAME=hydra \ ory dev openapi migrate \ --health-path-tags metadata \ @@ -141,12 +126,20 @@ sdk: .bin/swagger .bin/ory node_modules spec/swagger.json spec/api.json rm -rf "internal/httpclient" - npm run openapi-generator-cli -- generate -i "spec/api.json" \ + npx openapi-generator-cli generate -i "spec/api.json" \ -g go \ -o "internal/httpclient" \ --git-user-id ory \ - --git-repo-id hydra-client-go \ - --git-host github.com + --git-repo-id hydra-client-go/v2 \ + --git-host github.com \ + --api-name-suffix "API" \ + --global-property apiTests=false + + (cd internal/httpclient;\ + go get golang.org/x/net@latest;\ + go get google.golang.org/protobuf@latest;\ + go get golang.org/x/oauth2@latest;\ + go mod tidy) make format @@ -175,24 +168,25 @@ 
$(MIGRATIONS_DST_DIR:%/=%-clean): $(MIGRATION_CLEAN_TARGETS) install-stable: HYDRA_LATEST=$$(git describe --abbrev=0 --tags) git checkout $$HYDRA_LATEST - GO111MODULE=on go install \ - -tags sqlite,json1 \ - -ldflags "-X github.com/ory/hydra/driver/config.Version=$$HYDRA_LATEST -X github.com/ory/hydra/driver/config.Date=`TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ'` -X github.com/ory/hydra/driver/config.Commit=`git rev-parse HEAD`" \ + go install \ + -tags sqlite,sqlite_omit_load_extension \ + -ldflags "-X github.com/ory/hydra/v2/driver/config.Version=$$HYDRA_LATEST -X github.com/ory/hydra/v2/driver/config.Date=`TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ'` -X github.com/ory/hydra/v2/driver/config.Commit=`git rev-parse HEAD`" \ . git checkout master .PHONY: install install: - GO111MODULE=on go install -tags sqlite,json1 . + go install -tags sqlite,sqlite_omit_load_extension . -.PHONY: post-release -post-release: .bin/yq - yq e '.services.hydra.image = "oryd/hydra:'$$DOCKER_TAG'"' -i quickstart.yml - yq e '.services.hydra-migrate.image = "oryd/hydra:'$$DOCKER_TAG'"' -i quickstart.yml - yq e '.services.consent.image = "oryd/hydra-login-consent-node:'$$DOCKER_TAG'"' -i quickstart.yml +.PHONY: pre-release +pre-release: + go tool yq e '.services.hydra.image = "oryd/hydra:'$$DOCKER_TAG'"' -i quickstart.yml + go tool yq e '.services.hydra-migrate.image = "oryd/hydra:'$$DOCKER_TAG'"' -i quickstart.yml + go tool yq e '.services.consent.image = "oryd/hydra-login-consent-node:'$$DOCKER_TAG'"' -i quickstart.yml -generate: .bin/mockgen - go generate ./... +.PHONY: post-release +post-release: + echo "nothing to do" licenses: .bin/licenses node_modules # checks open-source licenses .bin/licenses diff --git a/README.md b/README.md index a8efa2d6953..a194642badc 100644 --- a/README.md +++ b/README.md @@ -1,558 +1,554 @@ -

Ory Hydra - Open Source OAuth 2 and OpenID Connect server

+

+ Ory Hydra - Open Source OAuth 2 and OpenID Connect server +

- Chat | - Discussions | - Newsletter

- Guide | - API Docs | - Code Docs

- Support this project!

- Work in Open Source, Ory is hiring! + Chat · + Discussions · + Newsletter · + Docs · + Try Ory Network · + Jobs

+Ory Hydra is a hardened, OpenID Certified OAuth 2.0 Server and OpenID Connect +Provider optimized for low-latency, high throughput, and low resource +consumption. It connects to your existing identity provider through a login and +consent app, giving you absolute control over the user interface and experience. + --- -

- CI Tasks for Ory Hydra - - Go Report Card - PkgGoDev - CII Best Practices - - + + + +- [What is Ory Hydra?](#what-is-ory-hydra) + - [Why Ory Hydra](#why-ory-hydra) + - [OAuth2 and OpenID Connect: Open Standards](#oauth2-and-openid-connect-open-standards) + - [OpenID Connect Certified](#openid-connect-certified) +- [Deployment options](#deployment-options) + - [Use Ory Hydra on the Ory Network](#use-ory-hydra-on-the-ory-network) + - [Self-host Ory Hydra](#self-host-ory-hydra) +- [Quickstart](#quickstart) +- [Who is using Ory Hydra](#who-is-using-ory-hydra) +- [Ecosystem](#ecosystem) + - [Ory Kratos: Identity and User Infrastructure and Management](#ory-kratos-identity-and-user-infrastructure-and-management) + - [Ory Hydra: OAuth2 & OpenID Connect Server](#ory-hydra-oauth2--openid-connect-server) + - [Ory Oathkeeper: Identity & Access Proxy](#ory-oathkeeper-identity--access-proxy) + - [Ory Keto: Access Control Policies as a Server](#ory-keto-access-control-policies-as-a-server) +- [Documentation](#documentation) +- [Developing Ory Hydra](#developing-ory-hydra) +- [Security](#security) + - [Disclosing vulnerabilities](#disclosing-vulnerabilities) +- [Telemetry](#telemetry) +- [Libraries and third-party projects](#libraries-and-third-party-projects) + + + +## What is Ory Hydra? + +Ory Hydra is a server implementation of the OAuth 2.0 authorization framework +and the OpenID Connect Core 1.0. It follows +[cloud architecture best practices](https://www.ory.sh/docs/ecosystem/software-architecture-philosophy) +and focuses on: + +- OAuth 2.0 and OpenID Connect flows +- Token issuance and validation +- Client management +- Consent and login flow orchestration +- JWKS management +- Low latency and high throughput + +We recommend starting with the +[Ory Hydra introduction docs](https://www.ory.sh/docs/hydra) to learn more about +its architecture, feature set, and how it compares to other systems. 
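+
+For a taste of that standards-based surface, the sketch below fetches the
+standard OpenID Connect discovery document from a self-hosted instance. It
+assumes only the default public port 4444 used by the Docker quickstart and
+nothing else about the deployment:
+
+```go
+package main
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+)
+
+func main() {
+	// The discovery endpoint is defined by OpenID Connect Discovery 1.0 and is
+	// served on Hydra's public endpoint (port 4444 in the default quickstart).
+	resp, err := http.Get("http://127.0.0.1:4444/.well-known/openid-configuration")
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println(string(body))
+}
+```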
+ +### Why Ory Hydra + +Ory Hydra is designed to: + +- Be a standalone OAuth 2.0 and OpenID Connect server without user management +- Connect to any existing identity provider through a login and consent app +- Give you absolute control over the user interface and experience flows +- Work with any authentication endpoint: + [Ory Kratos](https://github.com/ory/kratos), + [authboss](https://github.com/go-authboss/authboss), + [User Frosting](https://www.userfrosting.com/), or your proprietary system +- Scale to large numbers of clients and tokens +- Fit into modern cloud native environments such as Kubernetes and managed + platforms + +### OAuth2 and OpenID Connect: Open Standards + +Ory Hydra implements Open Standards set by the IETF: + +- [The OAuth 2.0 Authorization Framework](https://tools.ietf.org/html/rfc6749) +- [OAuth 2.0 Threat Model and Security Considerations](https://tools.ietf.org/html/rfc6819) +- [OAuth 2.0 Token Revocation](https://tools.ietf.org/html/rfc7009) +- [OAuth 2.0 Token Introspection](https://tools.ietf.org/html/rfc7662) +- [OAuth 2.0 for Native Apps](https://tools.ietf.org/html/draft-ietf-oauth-native-apps-10) +- [OAuth 2.0 Dynamic Client Registration Protocol](https://datatracker.ietf.org/doc/html/rfc7591) +- [OAuth 2.0 Dynamic Client Registration Management Protocol](https://datatracker.ietf.org/doc/html/rfc7592) +- [Proof Key for Code Exchange by OAuth Public Clients](https://tools.ietf.org/html/rfc7636) +- [JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants](https://tools.ietf.org/html/rfc7523) + +and the OpenID Foundation: + +- [OpenID Connect Core 1.0](http://openid.net/specs/openid-connect-core-1_0.html) +- [OpenID Connect Discovery 1.0](https://openid.net/specs/openid-connect-discovery-1_0.html) +- [OpenID Connect Dynamic Client Registration 1.0](https://openid.net/specs/openid-connect-registration-1_0.html) +- [OpenID Connect Front-Channel Logout 1.0](https://openid.net/specs/openid-connect-frontchannel-1_0.html) +- [OpenID Connect Back-Channel Logout 1.0](https://openid.net/specs/openid-connect-backchannel-1_0.html) + +### OpenID Connect Certified + +Ory Hydra is an OpenID Foundation +[certified OpenID Provider (OP)](http://openid.net/certification/#OPs). + +

+ Ory Hydra is a certified OpenID Providier

-Ory Hydra is a hardened, **OpenID Certified OAuth 2.0 Server and OpenID Connect -Provider** optimized for low-latency, high throughput, and low resource -consumption. Ory Hydra _is not_ an identity provider (user sign up, user login, -password reset flow), but connects to your existing identity provider through a -[login and consent app](https://www.ory.sh/docs/hydra/oauth2#authenticating-users-and-requesting-consent). -Implementing the login and consent app in a different language is easy, and -exemplary consent apps ([Node](https://github.com/ory/hydra-login-consent-node)) -and [SDKs](https://www.ory.sh/docs/kratos/sdk/index) for common languages are -provided. +The following OpenID profiles are certified: + +- [Basic OpenID Provider](http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth) + (response types `code`) +- [Implicit OpenID Provider](http://openid.net/specs/openid-connect-core-1_0.html#ImplicitFlowAuth) + (response types `id_token`, `id_token+token`) +- [Hybrid OpenID Provider](http://openid.net/specs/openid-connect-core-1_0.html#HybridFlowAuth) + (response types `code+id_token`, `code+id_token+token`, `code+token`) +- [OpenID Provider Publishing Configuration Information](https://openid.net/specs/openid-connect-discovery-1_0.html) +- [Dynamic OpenID Provider](https://openid.net/specs/openid-connect-registration-1_0.html) + +To obtain certification, we deployed the +[reference user login and consent app](https://github.com/ory/hydra-login-consent-node) +(unmodified) and Ory Hydra v1.0.0. -Ory Hydra can use [Ory Kratos](https://github.com/ory/kratos) as its identity -server. +## Deployment options + +You can run Ory Hydra in two main ways: + +- As a managed service on the Ory Network +- As a self hosted service under your own control, with or without the Ory + Enterprise License + +### Use Ory Hydra on the Ory Network + +The [Ory Network](https://www.ory.sh/cloud) is the fastest way to use Ory +services in production. **Ory OAuth2 & OpenID Connect** is powered by the open +source Ory Hydra server and is API compatible. + +The Ory Network provides: + +- OAuth2 and OpenID Connect for single sign on, API access, and machine to + machine authorization +- Identity and credential management that scales to billions of users and + devices +- Registration, login, and account management flows for passkeys, biometrics, + social login, SSO, and multi factor authentication +- Prebuilt login, registration, and account management pages and components +- Low latency permission checks based on the Zanzibar model with the Ory + Permission Language +- GDPR friendly storage with data locality and compliance in mind +- Web based Ory Console and Ory CLI for administration and operations +- Cloud native APIs compatible with the open source servers +- Fair, usage based [pricing](https://www.ory.sh/pricing) + +Sign up for a +[free developer account](https://console.ory.sh/registration?utm_source=github&utm_medium=banner&utm_campaign=hydra-readme) +to get started. + +### Self-host Ory Hydra + +You can run Ory Hydra yourself for full control over infrastructure, deployment, +and customization. 
+ +The [install guide](https://www.ory.sh/docs/hydra/install) explains how to: + +- Install Hydra on Linux, macOS, Windows, and Docker +- Configure databases such as PostgreSQL, MySQL, and CockroachDB +- Deploy to Kubernetes and other orchestration systems +- Build Hydra from source + +This guide uses the open source distribution to get you started without license +requirements. It is a great fit for individuals, researchers, hackers, and +companies that want to experiment, prototype, or run unimportant workloads +without SLAs. You get the full core engine, and you are free to inspect, extend, +and build it from source. + +If you run Hydra as part of a business-critical system, for example OAuth2 and +OpenID Connect for all your users, you should use a commercial agreement to +reduce operational and security risk. The **Ory Enterprise License (OEL)** +layers on top of self-hosted Hydra and provides: + +- Additional enterprise features that are not available in the open source + version +- Regular security releases, including CVE patches, with service level + agreements +- Support for advanced scaling, multi-tenancy, and complex deployments +- Premium support options with SLAs, direct access to engineers, and onboarding + help +- Access to a private Docker registry with frequent and vetted, up-to-date + enterprise builds + +For guaranteed CVE fixes, current enterprise builds, advanced features, and +support in production, you need a valid +[Ory Enterprise License](https://www.ory.com/ory-enterprise-license) and access +to the Ory Enterprise Docker registry. To learn more, +[contact the Ory team](https://www.ory.sh/contact/). -## Get Started +## Quickstart -You can use -[Docker to run Ory Hydra locally](https://www.ory.sh/docs/hydra/5min-tutorial) -or use the Ory CLI to try out Ory Hydra: +Install the [Ory CLI](https://www.ory.sh/docs/guides/cli/installation) and +create a new project to try Ory OAuth2 & OpenID Connect. -```shell -# This example works best in Bash +```bash +# Install the Ory CLI if you do not have it yet: bash <(curl https://raw.githubusercontent.com/ory/meta/master/install.sh) -b . ory sudo mv ./ory /usr/local/bin/ -# Or with Homebrew installed -brew install ory/tap/cli -``` - -create a new project (you may also use -[Docker](https://www.ory.sh/docs/hydra/5min-tutorial)) +# Sign in or sign up +ory auth -``` -ory create project --name "Ory Hydra 2.0 Example" -project_id="{set to the id from output}" +# Create a new project +ory create project --create-workspace "Ory Open Source" --name "GitHub Quickstart" --use-project ``` -and follow the quick & easy steps below. 
+Try out the OAuth 2.0 Client Credentials flow: -### OAuth 2.0 Client Credentials / Machine-to-Machine - -Create an OAuth 2.0 Client, and run the OAuth 2.0 Client Credentials flow: - -```shell -ory create oauth2-client --project $project_id \ +```bash +ory create oauth2-client \ --name "Client Credentials Demo" \ --grant-type client_credentials -client_id="{set to client id from output}" -client_secret="{set to client secret from output}" +# Note the client ID and secret from output -ory perform client-credentials --client-id=$client_id --client-secret=$client_secret --project $project_id -access_token="{set to access token from output}" +ory perform client-credentials \ + --client-id \ + --client-secret +# Note the access token from output -ory introspect token $access_token --project $project_id +ory introspect token ``` -### OAuth 2.0 Authorize Code + OpenID Connect - -Try out the OAuth 2.0 Authorize Code grant right away! +Try out the OAuth 2.0 Authorize Code + OpenID Connect flow: -By accepting permissions `openid` and `offline_access` at the consent screen, -Ory refreshes and OpenID Connect ID token, - -```shell -ory create oauth2-client --project $project_id \ +```bash +ory create oauth2-client \ --name "Authorize Code with OpenID Connect Demo" \ --grant-type authorization_code,refresh_token \ --response-type code \ --redirect-uri http://127.0.0.1:4446/callback -code_client_id="{set to client id from output}" -code_client_secret="{set to client secret from output}" ory perform authorization-code \ - --project $project_id \ - --client-id $code_client_id \ - --client-secret $code_client_secret -code_access_token="{set to access token from output}" - -ory introspect token $code_access_token --project $project_id + --client-id \ + --client-secret ``` ---- - - - - -- [What is Ory Hydra?](#what-is-ory-hydra) - - [Who's using it?](#whos-using-it) - - [OAuth2 and OpenID Connect: Open Standards!](#oauth2-and-openid-connect-open-standards) - - [OpenID Connect Certified](#openid-connect-certified) -- [Quickstart](#quickstart) - - [Installation](#installation) -- [Ecosystem](#ecosystem) - - [Ory Kratos: Identity and User Infrastructure and Management](#ory-kratos-identity-and-user-infrastructure-and-management) - - [Ory Hydra: OAuth2 & OpenID Connect Server](#ory-hydra-oauth2--openid-connect-server) - - [Ory Oathkeeper: Identity & Access Proxy](#ory-oathkeeper-identity--access-proxy) - - [Ory Keto: Access Control Policies as a Server](#ory-keto-access-control-policies-as-a-server) -- [Security](#security) - - [Disclosing vulnerabilities](#disclosing-vulnerabilities) -- [Benchmarks](#benchmarks) -- [Telemetry](#telemetry) -- [Documentation](#documentation) - - [Guide](#guide) - - [HTTP API documentation](#http-api-documentation) - - [Upgrading and Changelog](#upgrading-and-changelog) - - [Command line documentation](#command-line-documentation) - - [Develop](#develop) - - [Dependencies](#dependencies) - - [Formatting Code](#formatting-code) - - [Running Tests](#running-tests) - - [Short Tests](#short-tests) - - [Regular Tests](#regular-tests) - - [E2E Tests](#e2e-tests) - - [OpenID Connect Conformity Tests](#openid-connect-conformity-tests) - - [Build Docker](#build-docker) - - [Run the Docker Compose quickstarts](#run-the-docker-compose-quickstarts) - - [Add a new migration](#add-a-new-migration) -- [Libraries and third-party projects](#libraries-and-third-party-projects) - - - -## What is Ory Hydra? 
- -Ory Hydra is a server implementation of the OAuth 2.0 authorization framework -and the OpenID Connect Core 1.0. Existing OAuth2 implementations usually ship as -libraries or SDKs such as -[node-oauth2-server](https://github.com/oauthjs/node-oauth2-server) or -[Ory Fosite](https://github.com/ory/fosite/issues), or as fully featured -identity solutions with user management and user interfaces, such as -[Keycloak](https://www.keycloak.org). - -Implementing and using OAuth2 without understanding the whole specification is -challenging and prone to errors, even when SDKs are being used. The primary goal -of Ory Hydra is to make OAuth 2.0 and OpenID Connect 1.0 better accessible. - -Ory Hydra implements the flows described in OAuth2 and OpenID Connect 1.0 -without forcing you to use a "Hydra User Management" or some template engine or -a predefined front-end. Instead, it relies on HTTP redirection and cryptographic -methods to verify user consent allowing you to use Ory Hydra with any -authentication endpoint, be it [Ory Kratos](https://github.com/ory/kratos), -[authboss](https://github.com/go-authboss/authboss), -[User Frosting](https://www.userfrosting.com/) or your proprietary Java -authentication. - -### Who's using it? +## Who is using Ory Hydra The Ory community stands on the shoulders of individuals, companies, and -maintainers. We thank everyone involved - from submitting bug reports and -feature requests, to contributing patches, to sponsoring our work. Our community -is 1000+ strong and growing rapidly. The Ory stack protects 16.000.000.000+ API -requests every month with over 250.000+ active service nodes. We would have -never been able to achieve this without each and everyone of you! +maintainers. The Ory team thanks everyone involved - from submitting bug reports +and feature requests, to contributing patches and documentation. The Ory +community counts more than 50.000 members and is growing. The Ory stack protects +7.000.000.000+ API requests every day across thousands of companies. None of +this would have been possible without each and everyone of you! The following list represents companies that have accompanied us along the way and that have made outstanding contributions to our ecosystem. _If you think that your company deserves a spot here, reach out to -office-muc@ory.sh now_! - -**Please consider giving back by becoming a sponsor of our open source work on -Patreon or -Open Collective.** +office@ory.sh now_! - + - - + - + + - - - - - - - - + - + + - - + - + + - - + - + + - - + - + + - - + - + + - - + - + + - - + - + + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - - - - - - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - + - + - - - - - - - - - - - - - - - - - - - - - - + - + - + + + + + + + + + + + + + + + + + + + + + + + +
Type Name Logo WebsiteCase Study
SponsorRaspberry PI FoundationOpenAI - - Raspberry PI Foundation + + OpenAI raspberrypi.orgopenai.comOpenAI Case Study
ContributorKyma Project - - - Kyma Project - - kyma-project.io
SponsorTulipFandom - - Tulip Retail + + Fandom tulip.comfandom.comFandom Case Study
SponsorCashdeck / All My FundsLumin - - All My Funds + + Lumin cashdeck.com.auluminpdf.comLumin Case Study
ContributorHootsuiteSencrop - - Hootsuite + + Sencrop hootsuite.comsencrop.comSencrop Case Study
Adopter *SegmentOSINT Industries - - Segment + + OSINT Industries segment.comosint.industriesOSINT Industries Case Study
Adopter *ArduinoHGV - - Arduino + + HGV arduino.cchgv.itHGV Case Study
Adopter *DataDetectMaxroll - - Datadetect + + Maxroll unifiedglobalarchiving.com/data-detect/maxroll.ggMaxroll Case Study
Adopter *Sainsbury'sZezam - - Sainsbury's + + Zezam sainsburys.co.ukzezam.ioZezam Case Study
Adopter *ContrasteT.RowePrice - - Contraste + + T.RowePrice contraste.comtroweprice.com
Adopter *ReyahMistral - - Reyah + + Mistral reyah.eumistral.ai
Adopter *ZeroAxel Springer - - Project Zero by Commit + + Axel Springer getzero.devaxelspringer.com
Adopter *PadisHemnet - - Padis + + Hemnet padis.iohemnet.se
Adopter *CloudbearCisco - - Cloudbear + + Cisco cloudbear.eucisco.com
Adopter *Security Onion SolutionsPresidencia de la República Dominicana - - Security Onion Solutions + + Presidencia de la República Dominicana securityonionsolutions.compresidencia.gob.do
Adopter *FactlyMoonpig - - Factly + + Moonpig factlylabs.commoonpig.com
Adopter *NortalBooster - - Nortal + + Booster nortal.comchoosebooster.com
SponsorOrderMyGearZaptec - - OrderMyGear + + Zaptec ordermygear.comzaptec.com
SponsorSpiri.boKlarna - - Spiri.bo + + Klarna spiri.boklarna.com
SponsorStrivacity - - - Spiri.bo - - strivacity.com
Adopter *HankoRaspberry PI Foundation - - Hanko + + Raspberry PI Foundation hanko.ioraspberrypi.org
Adopter *RabbitTulip - - Rabbit + + Tulip Retail rabbit.co.thtulip.com
Adopter *inMusicHootsuite - - InMusic + + Hootsuite inmusicbrands.comhootsuite.com
Adopter *BuhtaSegment - - Buhta + + Segment buhta.comsegment.com
Adopter *ConnctdArduino - - Connctd + + Arduino connctd.comarduino.cc
Adopter *ParalusSainsbury's - - Paralus + + Sainsbury's paralus.iosainsburys.co.uk
Adopter *TIER IVContraste - - TIER IV + + Contraste tier4.jpcontraste.com
Adopter *R2DevopsinMusic - - R2Devops + + InMusic r2devops.ioinmusicbrands.com
Adopter *LunaSec - - - LunaSec - - lunasec.io
Adopter *Serlo - - - Serlo - - serlo.org
Adopter *dyrector.io - - - dyrector.io - - dyrector.io
Adopter *StackspinBuhta - - stackspin.net + + Buhta stackspin.netbuhta.com
Adopter * Amplitude @@ -562,88 +558,39 @@ that your company deserves a spot here, reach out to amplitude.com
TIER IVKyma ProjectSerloPadis
CloudbearSecurity Onion SolutionsFactlyAll My Funds
NortalOrderMyGearR2DevopsParalus
dyrector.iopinniped.devpvotal.tech
-We also want to thank all individual contributors +Many thanks to all individual contributors -as well as all of our backers - - - -and past & current supporters (in alphabetical order) on -[Patreon](https://www.patreon.com/_ory): Alexander Alimovs, Billy, Chancy -Kennedy, Drozzy, Edwin Trejos, Howard Edidin, Ken Adler Oz Haven, Stefan Hans, -TheCrealm. - -\* Uses one of Ory's major projects in production. - -### OAuth2 and OpenID Connect: Open Standards! - -Ory Hydra implements Open Standards set by the IETF: - -- [The OAuth 2.0 Authorization Framework](https://tools.ietf.org/html/rfc6749) -- [OAuth 2.0 Threat Model and Security Considerations](https://tools.ietf.org/html/rfc6819) -- [OAuth 2.0 Token Revocation](https://tools.ietf.org/html/rfc7009) -- [OAuth 2.0 Token Introspection](https://tools.ietf.org/html/rfc7662) -- [OAuth 2.0 for Native Apps](https://tools.ietf.org/html/draft-ietf-oauth-native-apps-10) -- [OAuth 2.0 Dynamic Client Registration Protocol](https://datatracker.ietf.org/doc/html/rfc7591) -- [OAuth 2.0 Dynamic Client Registration Management Protocol](https://datatracker.ietf.org/doc/html/rfc7592) -- [Proof Key for Code Exchange by OAuth Public Clients](https://tools.ietf.org/html/rfc7636) -- [JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants](https://tools.ietf.org/html/rfc7523) - -and the OpenID Foundation: - -- [OpenID Connect Core 1.0](http://openid.net/specs/openid-connect-core-1_0.html) -- [OpenID Connect Discovery 1.0](https://openid.net/specs/openid-connect-discovery-1_0.html) -- [OpenID Connect Dynamic Client Registration 1.0](https://openid.net/specs/openid-connect-registration-1_0.html) -- [OpenID Connect Front-Channel Logout 1.0](https://openid.net/specs/openid-connect-frontchannel-1_0.html) -- [OpenID Connect Back-Channel Logout 1.0](https://openid.net/specs/openid-connect-backchannel-1_0.html) - -### OpenID Connect Certified - -Ory Hydra is an OpenID Foundation -[certified OpenID Provider (OP)](http://openid.net/certification/#OPs). - -

- Ory Hydra is a certified OpenID Providier -

- -The following OpenID profiles are certified: - -- [Basic OpenID Provider](http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth) - (response types `code`) -- [Implicit OpenID Provider](http://openid.net/specs/openid-connect-core-1_0.html#ImplicitFlowAuth) - (response types `id_token`, `id_token+token`) -- [Hybrid OpenID Provider](http://openid.net/specs/openid-connect-core-1_0.html#HybridFlowAuth) - (response types `code+id_token`, `code+id_token+token`, `code+token`) -- [OpenID Provider Publishing Configuration Information](https://openid.net/specs/openid-connect-discovery-1_0.html) -- [Dynamic OpenID Provider](https://openid.net/specs/openid-connect-registration-1_0.html) - -To obtain certification, we deployed the -[reference user login and consent app](https://github.com/ory/hydra-login-consent-node) -(unmodified) and Ory Hydra v1.0.0. - -## Quickstart - -This section is a starter guide to working with Ory Hydra. In-depth docs are -available as well: - -- The documentation is available [here](https://www.ory.sh/docs/hydra). -- The REST API documentation is available - [here](https://www.ory.sh/docs/hydra/sdk/api). - -### Installation - -Head over to the -[Ory Developer Documentation](https://www.ory.sh/docs/hydra/install) to learn -how to install Ory Hydra on Linux, macOS, Windows, and Docker and how to build -Ory Hydra from source. - ## Ecosystem @@ -696,258 +643,52 @@ to perform a certain action on a resource. -## Security - -_Why should I use Ory Hydra? It's not that hard to implement two OAuth2 -endpoints and there are numerous SDKs out there!_ - -OAuth2 and OAuth2 related specifications are over 400 written pages. -Implementing OAuth2 is easy, getting it right is hard. Ory Hydra is trusted by -companies all around the world, has a vibrant community and faces millions of -requests in production each day. Of course, we also compiled a security guide -with more details on cryptography and security concepts. Read -[the security guide now](https://www.ory.sh/docs/hydra/security-architecture). - -### Disclosing vulnerabilities - -If you think you found a security vulnerability, please refrain from posting it -publicly on the forums, the chat, or GitHub and send us an email to -[hi@ory.am](mailto:hi@ory.sh) instead. - -## Benchmarks - -Our continuous integration runs a collection of benchmarks against Ory Hydra. -You can find the results [here](https://www.ory.sh/docs/performance/hydra). - -## Telemetry - -Our services collect summarized, anonymized data that can optionally be turned -off. Click [here](https://www.ory.sh/docs/ecosystem/sqa) to learn more. - ## Documentation -### Guide - -The full Ory Hydra documentation is available -[here](https://www.ory.sh/docs/hydra). - -### HTTP API documentation +The full Ory Hydra documentation is available at +[www.ory.sh/docs/hydra](https://www.ory.sh/docs/hydra), including: -The HTTP API is documented [here](https://www.ory.sh/docs/hydra/sdk/api). +- [Installation guides](https://www.ory.sh/docs/hydra/install) +- [Configuration reference](https://www.ory.sh/docs/hydra/reference/configuration) +- [HTTP API documentation](https://www.ory.sh/docs/hydra/sdk/api) +- [Security architecture](https://www.ory.sh/docs/hydra/security-architecture) +- [Performance benchmarks](https://www.ory.sh/docs/performance/hydra) -### Upgrading and Changelog - -New releases might introduce breaking changes. 
To help you identify and -incorporate those changes, we document these changes in +For upgrading and changelogs, check +[releases tab](https://github.com/ory/hydra/releases) and [CHANGELOG.md](./CHANGELOG.md). -### Command line documentation - -Run `hydra -h` or `hydra help`. - -### Develop - -We love all contributions! Please read our -[contribution guidelines](./CONTRIBUTING.md). - -#### Dependencies - -You need Go 1.13+ with `GO111MODULE=on` and (for the test suites): - -- Docker and Docker Compose -- Makefile -- NodeJS / npm - -It is possible to develop Ory Hydra on Windows, but please be aware that all -guides assume a Unix shell like bash or zsh. - -#### Formatting Code - -You can format all code using `make format`. Our CI checks if your code is -properly formatted. - -#### Running Tests +## Developing Ory Hydra -There are three types of tests you can run: +See [DEVELOP.md](./DEVELOP.md) for information on: -- Short tests (do not require a SQL database like PostgreSQL) -- Regular tests (do require PostgreSQL, MySQL, CockroachDB) -- End to end tests (do require databases and will use a test browser) +- Contribution guidelines +- Prerequisites +- Install from source +- Running tests +- Build Docker image +- Preview API documentation -All of the above tests can be run using the makefile. See the commands below. - -**Makefile commands** - -```shell -# quick tests -make quicktest - -# regular tests -make test -test-resetdb - -# end-to-end tests -make e2e -``` - -##### Short Tests - -It is recommended to use the make file to run your tests using `make quicktest` -, however, you can still use the `go test` command. - -**Please note**: - -All tests run against a sqlite in-memory database, thus it is required to use -the `-tags sqlite,json1` build tag. - -Short tests run fairly quickly. You can either test all of the code at once: - -```shell script -go test -v -failfast -short -tags sqlite,json1 ./... -``` - -or test just a specific module: - -```shell script -go test -v -failfast -short -tags sqlite,json1 ./client -``` - -or a specific test: - -```shell script -go test -v -failfast -short -tags sqlite,json1 -run ^TestName$ ./... -``` - -##### Regular Tests - -Regular tests require a database set up. Our test suite is able to work with -docker directly (using [ory/dockertest](https://github.com/ory/dockertest)) but -we encourage to use the Makefile instead. Using dockertest can bloat the number -of Docker Images on your system and are quite slow. Instead we recommend doing: - -```shell script -make test -``` - -Please be aware that `make test` recreates the databases every time you run -`make test`. This can be annoying if you are trying to fix something very -specific and need the database tests all the time. In that case we suggest that -you initialize the databases with: - -```shell script -make test-resetdb -export TEST_DATABASE_MYSQL='mysql://root:secret@(127.0.0.1:3444)/mysql?parseTime=true&multiStatements=true' -export TEST_DATABASE_POSTGRESQL='postgres://postgres:secret@127.0.0.1:3445/postgres?sslmode=disable' -export TEST_DATABASE_COCKROACHDB='cockroach://root@127.0.0.1:3446/defaultdb?sslmode=disable' -``` - -Then you can run `go test` as often as you'd like: - -```shell script -go test -p 1 ./... - -# or in a module: -cd client; go test . -``` - -#### E2E Tests - -The E2E tests use [Cypress](https://www.cypress.io) to run full browser tests. 
-You can execute these tests with: - -``` -make e2e -``` - -The runner will not show the Browser window, as it runs in the CI Mode -(background). That makes debugging these type of tests very difficult, but -thankfully you can run the e2e test in the browser which helps with debugging! -Just run: - -```shell script -./test/e2e/circle-ci.bash memory --watch - -# Or for the JSON Web Token Access Token strategy: -# ./test/e2e/circle-ci.bash memory-jwt --watch -``` - -or if you would like to test one of the databases: - -```shell script -make test-resetdb -export TEST_DATABASE_MYSQL='mysql://root:secret@(127.0.0.1:3444)/mysql?parseTime=true&multiStatements=true' -export TEST_DATABASE_POSTGRESQL='postgres://postgres:secret@127.0.0.1:3445/postgres?sslmode=disable' -export TEST_DATABASE_COCKROACHDB='cockroach://root@127.0.0.1:3446/defaultdb?sslmode=disable' - -# You can test against each individual database: -./test/e2e/circle-ci.bash postgres --watch -./test/e2e/circle-ci.bash memory --watch -./test/e2e/circle-ci.bash mysql --watch -# ... -``` - -Once you run the script, a Cypress window will appear. Hit the button "Run all -Specs"! - -The code for these tests is located in -[./cypress/integration](./cypress/integration) and -[./cypress/support](./cypress/support) and -[./cypress/helpers](./cypress/helpers). The website you're seeing is located in -[./test/e2e/oauth2-client](./test/e2e/oauth2-client). - -##### OpenID Connect Conformity Tests - -To run Ory Hydra against the OpenID Connect conformity suite, run - -```shell script -$ test/conformity/start.sh --build -``` - -and then in a separate shell - -```shell script -$ test/conformity/test.sh -``` - -Running these tests will take a significant amount of time which is why they are -not part of the CI pipeline. - -#### Build Docker - -You can build a development Docker Image using: +## Security -```shell script -make docker -``` +OAuth2 and OAuth2 related specifications are over 400 written pages. +Implementing OAuth2 is easy, getting it right is hard. Ory Hydra is trusted by +companies all around the world, has a vibrant community and faces millions of +requests in production each day. Read +[the security guide](https://www.ory.sh/docs/hydra/security-architecture) for +more details on cryptography and security concepts. -#### Run the Docker Compose quickstarts +### Disclosing vulnerabilities -If you wish to check your code changes against any of the docker-compose -quickstart files, run: +If you think you found a security vulnerability, please refrain from posting it +publicly on the forums, the chat, or GitHub. You can find all info for +responsible disclosure in our +[security.txt](https://www.ory.sh/.well-known/security.txt). -```shell script -make docker -docker compose -f quickstart.yml up # .... -``` +## Telemetry -#### Add a new migration - -1. `mkdir persistence/sql/src/YYYYMMDD000001_migration_name/` -2. Put the migration files into this directory, following the standard naming - conventions. If you wish to execute different parts of a migration in - separate transactions, add split marks (lines with the text `--split`) where - desired. Why this might be necessary is explained in - https://github.com/gobuffalo/fizz/issues/104. -3. Run `make persistence/sql/migrations/` to generate migration - fragments. -4. If an update causes the migration to have fewer fragments than the number - already generated, run - `make persistence/sql/migrations/-clean`. 
This is equivalent to - a `rm` command with the right parameters, but comes with better tab - completion. -5. Before committing generated migration fragments, run the above clean command - and generate a fresh copy of migration fragments to make sure the `sql/src` - and `sql/migrations` directories are consistent. +Our services collect summarized, anonymized data that can optionally be turned +off. Click [here](https://www.ory.sh/docs/ecosystem/sqa) to learn more. ## Libraries and third-party projects diff --git a/SECURITY.md b/SECURITY.md index 7a05c1cfc62..6104514805c 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,30 +1,56 @@ - - - -- [Security Policy](#security-policy) - - [Supported Versions](#supported-versions) - - [Reporting a Vulnerability](#reporting-a-vulnerability) - - - -# Security Policy - -## Supported Versions - -We release patches for security vulnerabilities. Which versions are eligible for -receiving such patches depends on the CVSS v3.0 Rating: - -| CVSS v3.0 | Supported Versions | -| --------- | ----------------------------------------- | -| 9.0-10.0 | Releases within the previous three months | -| 4.0-8.9 | Most recent release | +# Ory Security Policy + +This policy outlines Ory's security commitments and practices for users across +different licensing and deployment models. + +To learn more about Ory's security service level agreements (SLAs) and +processes, please [contact us](https://www.ory.sh/contact/). + +## Ory Network Users + +- **Security SLA:** Ory addresses vulnerabilities in the Ory Network according + to the following guidelines: + - Critical: Typically addressed within 14 days. + - High: Typically addressed within 30 days. + - Medium: Typically addressed within 90 days. + - Low: Typically addressed within 180 days. + - Informational: Addressed as necessary. + These timelines are targets and may vary based on specific circumstances. +- **Release Schedule:** Updates are deployed to the Ory Network as + vulnerabilities are resolved. +- **Version Support:** The Ory Network always runs the latest version, ensuring + up-to-date security fixes. + +## Ory Enterprise License Customers + +- **Security SLA:** Ory addresses vulnerabilities based on their severity: + - Critical: Typically addressed within 14 days. + - High: Typically addressed within 30 days. + - Medium: Typically addressed within 90 days. + - Low: Typically addressed within 180 days. + - Informational: Addressed as necessary. + These timelines are targets and may vary based on specific circumstances. +- **Release Schedule:** Updates are made available as vulnerabilities are + resolved. Ory works closely with enterprise customers to ensure timely updates + that align with their operational needs. +- **Version Support:** Ory may provide security support for multiple versions, + depending on the terms of the enterprise agreement. + +## Apache 2.0 License Users + +- **Security SLA:** Ory does not provide a formal SLA for security issues under + the Apache 2.0 License. +- **Release Schedule:** Releases prioritize new functionality and include fixes + for known security vulnerabilities at the time of release. While major + releases typically occur one to two times per year, Ory does not guarantee a + fixed release schedule. +- **Version Support:** Security patches are only provided for the latest release + version. ## Reporting a Vulnerability -Please report (suspected) security vulnerabilities to -**[security@ory.sh](mailto:security@ory.sh)**. You will receive a response from -us within 48 hours. 
If the issue is confirmed, we will release a patch as soon -as possible depending on complexity but historically within a few days. +For details on how to report security vulnerabilities, visit our +[security policy documentation](https://www.ory.sh/docs/ecosystem/security). diff --git a/aead/aead.go b/aead/aead.go new file mode 100644 index 00000000000..3258380d9de --- /dev/null +++ b/aead/aead.go @@ -0,0 +1,28 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package aead + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +// Cipher provides AEAD (authenticated encryption with associated data). The +// ciphertext is returned base64url-encoded. +type Cipher interface { + // Encrypt encrypts and encodes the given plaintext, optionally using + // additional data. + Encrypt(ctx context.Context, plaintext, additionalData []byte) (ciphertext string, err error) + + // Decrypt decodes, decrypts, and verifies the plaintext and additional data + // from the ciphertext. The ciphertext must be given in the form as returned + // by Encrypt. + Decrypt(ctx context.Context, ciphertext string, additionalData []byte) (plaintext []byte, err error) +} + +type Dependencies interface { + fosite.GlobalSecretProvider + fosite.RotatedGlobalSecretsProvider +} diff --git a/aead/aead_test.go b/aead/aead_test.go new file mode 100644 index 00000000000..41a075e6af4 --- /dev/null +++ b/aead/aead_test.go @@ -0,0 +1,150 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package aead_test + +import ( + "context" + "crypto/rand" + "fmt" + "io" + "testing" + + "github.com/pborman/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/x/configx" +) + +func secret(t *testing.T) string { + bytes := make([]byte, 32) + _, err := io.ReadFull(rand.Reader, bytes) + require.NoError(t, err) + return fmt.Sprintf("%X", bytes) +} + +func TestAEAD(t *testing.T) { + t.Parallel() + for _, tc := range []struct { + name string + new func(aead.Dependencies) aead.Cipher + }{ + {"AES-GCM", func(d aead.Dependencies) aead.Cipher { return aead.NewAESGCM(d) }}, + {"XChaChaPoly", func(d aead.Dependencies) aead.Cipher { return aead.NewXChaCha20Poly1305(d) }}, + } { + tc := tc + + t.Run("cipher="+tc.name, func(t *testing.T) { + NewCipher := tc.new + + t.Run("case=without-rotation", func(t *testing.T) { + t.Parallel() + ctx := context.Background() + c := testhelpers.NewConfigurationWithDefaults(t, configx.WithValue(config.KeyGetSystemSecret, []string{secret(t)})) + a := NewCipher(c) + + plain := []byte(uuid.New()) + ct, err := a.Encrypt(ctx, plain, nil) + assert.NoError(t, err) + + ct2, err := a.Encrypt(ctx, plain, nil) + assert.NoError(t, err) + assert.NotEqual(t, ct, ct2, "ciphertexts for the same plaintext must be different each time") + + res, err := a.Decrypt(ctx, ct, nil) + assert.NoError(t, err) + assert.Equal(t, plain, res) + }) + + t.Run("case=wrong-secret", func(t *testing.T) { + t.Parallel() + ctx := context.Background() + c := testhelpers.NewConfigurationWithDefaults(t, configx.WithValue(config.KeyGetSystemSecret, []string{secret(t)})) + a := NewCipher(c) + + ct, err := a.Encrypt(ctx, []byte(uuid.New()), nil) + require.NoError(t, err) + + c.MustSet(ctx, config.KeyGetSystemSecret, 
[]string{secret(t)}) + _, err = a.Decrypt(ctx, ct, nil) + require.Error(t, err) + }) + + t.Run("case=with-rotation", func(t *testing.T) { + t.Parallel() + ctx := context.Background() + old := secret(t) + c := testhelpers.NewConfigurationWithDefaults(t, configx.WithValue(config.KeyGetSystemSecret, []string{old})) + a := NewCipher(c) + + plain := []byte(uuid.New()) + ct, err := a.Encrypt(ctx, plain, nil) + require.NoError(t, err) + + // Sets the old secret as a rotated secret and creates a new one. + c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t), old}) + res, err := a.Decrypt(ctx, ct, nil) + require.NoError(t, err) + assert.Equal(t, plain, res) + + // THis should also work when we re-encrypt the same plain text. + ct2, err := a.Encrypt(ctx, plain, nil) + require.NoError(t, err) + assert.NotEqual(t, ct2, ct) + + res, err = a.Decrypt(ctx, ct, nil) + require.NoError(t, err) + assert.Equal(t, plain, res) + }) + + t.Run("case=with-rotation-wrong-secret", func(t *testing.T) { + t.Parallel() + ctx := context.Background() + c := testhelpers.NewConfigurationWithDefaults(t, configx.WithValue(config.KeyGetSystemSecret, []string{secret(t)})) + a := NewCipher(c) + + plain := []byte(uuid.New()) + ct, err := a.Encrypt(ctx, plain, nil) + require.NoError(t, err) + + // When the secrets do not match, an error should be thrown during decryption. + c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t), secret(t)}) + _, err = a.Decrypt(ctx, ct, nil) + require.Error(t, err) + }) + + t.Run("suite=with additional data", func(t *testing.T) { + t.Parallel() + ctx := context.Background() + c := testhelpers.NewConfigurationWithDefaults(t, configx.WithValue(config.KeyGetSystemSecret, []string{secret(t)})) + a := NewCipher(c) + + plain := []byte(uuid.New()) + ct, err := a.Encrypt(ctx, plain, []byte("additional data")) + assert.NoError(t, err) + + t.Run("case=additional data matches", func(t *testing.T) { + res, err := a.Decrypt(ctx, ct, []byte("additional data")) + assert.NoError(t, err) + assert.Equal(t, plain, res) + }) + + t.Run("case=additional data does not match", func(t *testing.T) { + res, err := a.Decrypt(ctx, ct, []byte("wrong data")) + assert.Error(t, err) + assert.Nil(t, res) + }) + + t.Run("case=missing additional data", func(t *testing.T) { + res, err := a.Decrypt(ctx, ct, nil) + assert.Error(t, err) + assert.Nil(t, res) + }) + }) + }) + } +} diff --git a/aead/aesgcm.go b/aead/aesgcm.go new file mode 100644 index 00000000000..fde0f60e6b3 --- /dev/null +++ b/aead/aesgcm.go @@ -0,0 +1,124 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package aead + +import ( + "context" + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "encoding/base64" + "io" + + "github.com/pkg/errors" +) + +type AESGCM struct { + c Dependencies +} + +func NewAESGCM(c Dependencies) *AESGCM { + return &AESGCM{c: c} +} + +func aeadKey(key []byte) *[32]byte { + var result [32]byte + copy(result[:], key[:32]) + return &result +} + +func (c *AESGCM) Encrypt(ctx context.Context, plaintext, additionalData []byte) (string, error) { + key, err := encryptionKey(ctx, c.c, 32) + if err != nil { + return "", err + } + + ciphertext, err := aesGCMEncrypt(plaintext, aeadKey(key), additionalData) + if err != nil { + return "", errors.WithStack(err) + } + + return base64.URLEncoding.EncodeToString(ciphertext), nil +} + +func (c *AESGCM) Decrypt(ctx context.Context, ciphertext string, aad []byte) (plaintext []byte, err error) { + msg, err := base64.URLEncoding.DecodeString(ciphertext) + if err 
!= nil { + return nil, errors.WithStack(err) + } + + keys, err := allKeys(ctx, c.c) + if err != nil { + return nil, errors.WithStack(err) + } + + for _, key := range keys { + if plaintext, err = c.decrypt(msg, key, aad); err == nil { + return plaintext, nil + } + } + + return nil, err +} + +func (*AESGCM) decrypt(ciphertext, key, additionalData []byte) ([]byte, error) { + if len(key) != 32 { + return nil, errors.Errorf("key must be exactly 32 long bytes, got %d bytes", len(key)) + } + + plaintext, err := aesGCMDecrypt(ciphertext, aeadKey(key), additionalData) + if err != nil { + return nil, errors.WithStack(err) + } + + return plaintext, nil +} + +// aesGCMEncrypt encrypts data using 256-bit AES-GCM. This both hides the content of +// the data and provides a check that it hasn't been altered. Output takes the +// form nonce|ciphertext|tag where '|' indicates concatenation. +func aesGCMEncrypt(plaintext []byte, key *[32]byte, additionalData []byte) (ciphertext []byte, err error) { + block, err := aes.NewCipher(key[:]) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + nonce := make([]byte, gcm.NonceSize()) + _, err = io.ReadFull(rand.Reader, nonce) + if err != nil { + return nil, err + } + + return gcm.Seal(nonce, nonce, plaintext, additionalData), nil +} + +// aesGCMDecrypt decrypts data using 256-bit AES-GCM. This both hides the content of +// the data and provides a check that it hasn't been altered. Expects input +// form nonce|ciphertext|tag where '|' indicates concatenation. +func aesGCMDecrypt(ciphertext []byte, key *[32]byte, additionalData []byte) (plaintext []byte, err error) { + block, err := aes.NewCipher(key[:]) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + if len(ciphertext) < gcm.NonceSize() { + return nil, errors.New("malformed ciphertext") + } + + return gcm.Open(nil, + ciphertext[:gcm.NonceSize()], + ciphertext[gcm.NonceSize():], + additionalData, + ) +} diff --git a/aead/helpers.go b/aead/helpers.go new file mode 100644 index 00000000000..7acd06c3a0d --- /dev/null +++ b/aead/helpers.go @@ -0,0 +1,41 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package aead + +import ( + "context" + "fmt" +) + +func encryptionKey(ctx context.Context, d Dependencies, keySize int) ([]byte, error) { + keys, err := allKeys(ctx, d) + if err != nil { + return nil, err + } + + key := keys[0] + if len(key) != keySize { + return nil, fmt.Errorf("key must be exactly %d bytes long, got %d bytes", keySize, len(key)) + } + + return key, nil +} + +func allKeys(ctx context.Context, d Dependencies) ([][]byte, error) { + global, err := d.GetGlobalSecret(ctx) + if err != nil { + return nil, err + } + + rotated, err := d.GetRotatedGlobalSecrets(ctx) + if err != nil { + return nil, err + } + + keys := append([][]byte{global}, rotated...) 
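+	// The current global secret is kept first: encryptionKey always encrypts
+	// with keys[0], while the Decrypt implementations fall back to the rotated
+	// secrets that follow when older ciphertexts need to be decrypted.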
+ if len(keys) == 0 { + return nil, fmt.Errorf("at least one encryption key must be defined but none were") + } + return keys, nil +} diff --git a/aead/xchacha20.go b/aead/xchacha20.go new file mode 100644 index 00000000000..12524d2063f --- /dev/null +++ b/aead/xchacha20.go @@ -0,0 +1,85 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package aead + +import ( + "context" + "crypto/cipher" + cryptorand "crypto/rand" + "encoding/base64" + "fmt" + "math" + + "github.com/pkg/errors" + "golang.org/x/crypto/chacha20poly1305" +) + +var _ Cipher = (*XChaCha20Poly1305)(nil) + +type ( + XChaCha20Poly1305 struct { + d Dependencies + } +) + +func NewXChaCha20Poly1305(d Dependencies) *XChaCha20Poly1305 { + return &XChaCha20Poly1305{d} +} + +func (x *XChaCha20Poly1305) Encrypt(ctx context.Context, plaintext, additionalData []byte) (string, error) { + key, err := encryptionKey(ctx, x.d, chacha20poly1305.KeySize) + if err != nil { + return "", err + } + + aead, err := chacha20poly1305.NewX(key) + if err != nil { + return "", errors.WithStack(err) + } + + // Make sure the size calculation does not overflow. + if len(plaintext) > math.MaxInt-aead.NonceSize()-aead.Overhead() { + return "", errors.WithStack(fmt.Errorf("plaintext too large")) + } + + nonce := make([]byte, aead.NonceSize(), aead.NonceSize()+len(plaintext)+aead.Overhead()) + _, err = cryptorand.Read(nonce) + if err != nil { + return "", errors.WithStack(err) + } + + ciphertext := aead.Seal(nonce, nonce, plaintext, additionalData) + return base64.URLEncoding.EncodeToString(ciphertext), nil +} + +func (x *XChaCha20Poly1305) Decrypt(ctx context.Context, ciphertext string, aad []byte) (plaintext []byte, err error) { + msg, err := base64.URLEncoding.DecodeString(ciphertext) + if err != nil { + return nil, errors.WithStack(err) + } + + if len(msg) < chacha20poly1305.NonceSizeX { + return nil, errors.WithStack(fmt.Errorf("malformed ciphertext: too short")) + } + nonce, ciphered := msg[:chacha20poly1305.NonceSizeX], msg[chacha20poly1305.NonceSizeX:] + + keys, err := allKeys(ctx, x.d) + if err != nil { + return nil, errors.WithStack(err) + } + + var aead cipher.AEAD + for _, key := range keys { + aead, err = chacha20poly1305.NewX(key) + if err != nil { + continue + } + plaintext, err = aead.Open(nil, nonce, ciphered, aad) + if err == nil { + return plaintext, nil + } + } + + return nil, errors.WithStack(err) +} diff --git a/client/.snapshots/TestClientSDK-case=id_can_not_be_set.json b/client/.snapshots/TestClientSDK-case=id_can_not_be_set.json deleted file mode 100644 index 01826781960..00000000000 --- a/client/.snapshots/TestClientSDK-case=id_can_not_be_set.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "error": "The request was malformed or contained invalid parameters", - "error_description": "It is no longer possible to set an OAuth2 Client ID as a user. The system will generate a unique ID for you." 
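The aead package above resolves decryption keys through allKeys, which returns the current global secret first and any rotated secrets after it; Decrypt then tries each key in turn and returns the first plaintext that authenticates. The following standalone sketch mirrors that trial-decryption loop using the same XChaCha20-Poly1305 primitives; it is an illustration of the behaviour, not part of the package.

package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/chacha20poly1305"
)

// seal encrypts plaintext under key and prepends the random nonce, matching
// the nonce|ciphertext|tag layout used by the package above.
func seal(key, plaintext, aad []byte) []byte {
	aead, err := chacha20poly1305.NewX(key)
	if err != nil {
		panic(err)
	}
	nonce := make([]byte, aead.NonceSize())
	if _, err := rand.Read(nonce); err != nil {
		panic(err)
	}
	return aead.Seal(nonce, nonce, plaintext, aad)
}

// openWithAny mirrors the rotation-aware Decrypt: try every configured key
// (current first, then rotated) and return the first plaintext that opens.
func openWithAny(keys [][]byte, msg, aad []byte) ([]byte, error) {
	var lastErr error
	for _, key := range keys {
		aead, err := chacha20poly1305.NewX(key)
		if err != nil {
			lastErr = err
			continue
		}
		nonce, ct := msg[:aead.NonceSize()], msg[aead.NonceSize():]
		pt, err := aead.Open(nil, nonce, ct, aad)
		if err == nil {
			return pt, nil
		}
		lastErr = err
	}
	return nil, lastErr
}

func main() {
	oldKey := make([]byte, chacha20poly1305.KeySize)
	newKey := make([]byte, chacha20poly1305.KeySize)
	_, _ = rand.Read(oldKey)
	_, _ = rand.Read(newKey)

	ct := seal(oldKey, []byte("hello"), nil)                  // sealed before the rotation
	pt, err := openWithAny([][]byte{newKey, oldKey}, ct, nil) // still opens via the rotated key
	fmt.Println(string(pt), err)
}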
-} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=0-description=basic_dynamic_client_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=0-description=basic_dynamic_client_registration.json index ddbf114aa9d..a9ac8197dff 100644 --- a/client/.snapshots/TestHandler-common-case=create_clients-case=0-description=basic_dynamic_client_registration.json +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=0-description=basic_dynamic_client_registration.json @@ -20,6 +20,8 @@ "token_endpoint_auth_method": "client_secret_basic", "userinfo_signed_response_alg": "none", "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -29,5 +31,8 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null } diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=1-description=basic_admin_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=1-description=basic_admin_registration.json index 06a5bf42d57..75972d053bb 100644 --- a/client/.snapshots/TestHandler-common-case=create_clients-case=1-description=basic_admin_registration.json +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=1-description=basic_admin_registration.json @@ -23,6 +23,8 @@ "metadata": { "foo": "bar" }, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -32,5 +34,8 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null } diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=10-description=setting_skip_logout_consent_succeeds_for_admin_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=10-description=setting_skip_logout_consent_succeeds_for_admin_registration.json new file mode 100644 index 00000000000..16fb5b31144 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=10-description=setting_skip_logout_consent_succeeds_for_admin_registration.json @@ -0,0 +1,39 @@ +{ + "client_name": "", + "client_secret": "2SKZkBf2P5g4toAXXnCrr~_sDM", + "redirect_uris": [ + "http://localhost:3000/cb" + ], + "grant_types": null, + "response_types": null, + "scope": "offline_access offline openid", + "audience": [], + "owner": "", + "policy_uri": "", + "allowed_cors_origins": [], + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks": {}, + "token_endpoint_auth_method": 
"client_secret_basic", + "userinfo_signed_response_alg": "none", + "metadata": {}, + "skip_consent": false, + "skip_logout_consent": true, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null +} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=6-description=basic_dynamic_client_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=11-description=basic_dynamic_client_registration.json similarity index 100% rename from client/.snapshots/TestHandler-common-case=create_clients-case=6-description=basic_dynamic_client_registration.json rename to client/.snapshots/TestHandler-common-case=create_clients-case=11-description=basic_dynamic_client_registration.json diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=7-description=empty_ID_succeeds.json b/client/.snapshots/TestHandler-common-case=create_clients-case=12-description=empty_ID_succeeds.json similarity index 78% rename from client/.snapshots/TestHandler-common-case=create_clients-case=7-description=empty_ID_succeeds.json rename to client/.snapshots/TestHandler-common-case=create_clients-case=12-description=empty_ID_succeeds.json index c21aa5b3710..69682c03242 100644 --- a/client/.snapshots/TestHandler-common-case=create_clients-case=7-description=empty_ID_succeeds.json +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=12-description=empty_ID_succeeds.json @@ -21,6 +21,8 @@ "token_endpoint_auth_method": "client_secret_basic", "userinfo_signed_response_alg": "none", "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -30,5 +32,8 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null } diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=2-description=metadata_fails_for_dynamic_client_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=2-description=metadata_fails_for_dynamic_client_registration.json index 378b2243d22..b0ec7b11720 100644 --- a/client/.snapshots/TestHandler-common-case=create_clients-case=2-description=metadata_fails_for_dynamic_client_registration.json +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=2-description=metadata_fails_for_dynamic_client_registration.json @@ -1,4 +1,4 @@ { "error": "invalid_client_metadata", - "error_description": "The value of one of the Client 
Metadata fields is invalid and the server has rejected this request. Note that an Authorization Server MAY choose to substitute a valid value for any requested parameter of a Client's Metadata. metadata cannot be set for dynamic client registration'" + "error_description": "The value of one of the Client Metadata fields is invalid and the server has rejected this request. Note that an Authorization Server MAY choose to substitute a valid value for any requested parameter of a Client's Metadata. 'metadata' cannot be set for dynamic client registration" } diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=4-description=non-uuid_fails.json b/client/.snapshots/TestHandler-common-case=create_clients-case=4-description=non-uuid_fails.json deleted file mode 100644 index 01826781960..00000000000 --- a/client/.snapshots/TestHandler-common-case=create_clients-case=4-description=non-uuid_fails.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "error": "The request was malformed or contained invalid parameters", - "error_description": "It is no longer possible to set an OAuth2 Client ID as a user. The system will generate a unique ID for you." -} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=4-description=non-uuid_works.json b/client/.snapshots/TestHandler-common-case=create_clients-case=4-description=non-uuid_works.json new file mode 100644 index 00000000000..25e7e615220 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=4-description=non-uuid_works.json @@ -0,0 +1,41 @@ +{ + "client_id": "not-a-uuid", + "client_name": "", + "client_secret": "averylongsecret", + "redirect_uris": [ + "http://localhost:3000/cb" + ], + "grant_types": null, + "response_types": null, + "scope": "offline_access offline openid", + "audience": [], + "owner": "", + "policy_uri": "", + "allowed_cors_origins": [], + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks": {}, + "token_endpoint_auth_method": "client_secret_basic", + "userinfo_signed_response_alg": "none", + "metadata": {}, + "registration_client_uri": "http://localhost:4444/oauth2/register/not-a-uuid", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null +} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=5-description=setting_client_id_as_uuid_works.json b/client/.snapshots/TestHandler-common-case=create_clients-case=5-description=setting_client_id_as_uuid_works.json new file mode 100644 index 00000000000..e88c1c9d9be --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=5-description=setting_client_id_as_uuid_works.json @@ -0,0 +1,41 @@ +{ + "client_id": "98941dac-f963-4468-8a23-9483b1e04e3c", + "client_name": "", + "client_secret": "not too short", + 
"redirect_uris": [ + "http://localhost:3000/cb" + ], + "grant_types": null, + "response_types": null, + "scope": "offline_access offline openid", + "audience": [], + "owner": "", + "policy_uri": "", + "allowed_cors_origins": [], + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks": {}, + "token_endpoint_auth_method": "client_secret_basic", + "userinfo_signed_response_alg": "none", + "metadata": {}, + "registration_client_uri": "http://localhost:4444/oauth2/register/98941dac-f963-4468-8a23-9483b1e04e3c", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null +} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=5-description=setting_client_id_fails.json b/client/.snapshots/TestHandler-common-case=create_clients-case=5-description=setting_client_id_fails.json deleted file mode 100644 index 01826781960..00000000000 --- a/client/.snapshots/TestHandler-common-case=create_clients-case=5-description=setting_client_id_fails.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "error": "The request was malformed or contained invalid parameters", - "error_description": "It is no longer possible to set an OAuth2 Client ID as a user. The system will generate a unique ID for you." -} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=6-description=setting_access_token_strategy_fails.json b/client/.snapshots/TestHandler-common-case=create_clients-case=6-description=setting_access_token_strategy_fails.json new file mode 100644 index 00000000000..a7db27c2f95 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=6-description=setting_access_token_strategy_fails.json @@ -0,0 +1,4 @@ +{ + "error": "The request was malformed or contained invalid parameters", + "error_description": "It is not allowed to choose your own access token strategy." 
+} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=7-description=setting_skip_consent_fails_for_dynamic_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=7-description=setting_skip_consent_fails_for_dynamic_registration.json new file mode 100644 index 00000000000..4b65ecee3f1 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=7-description=setting_skip_consent_fails_for_dynamic_registration.json @@ -0,0 +1,4 @@ +{ + "error": "invalid_request", + "error_description": "'skip_consent' cannot be set for dynamic client registration" +} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=8-description=setting_skip_consent_succeeds_for_admin_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=8-description=setting_skip_consent_succeeds_for_admin_registration.json new file mode 100644 index 00000000000..1191ae414eb --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=8-description=setting_skip_consent_succeeds_for_admin_registration.json @@ -0,0 +1,39 @@ +{ + "client_name": "", + "client_secret": "2SKZkBf2P5g4toAXXnCrr~_sDM", + "redirect_uris": [ + "http://localhost:3000/cb" + ], + "grant_types": null, + "response_types": null, + "scope": "offline_access offline openid", + "audience": [], + "owner": "", + "policy_uri": "", + "allowed_cors_origins": [], + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks": {}, + "token_endpoint_auth_method": "client_secret_basic", + "userinfo_signed_response_alg": "none", + "metadata": {}, + "skip_consent": true, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null +} diff --git a/client/.snapshots/TestHandler-common-case=create_clients-case=9-description=setting_skip_logout_consent_fails_for_dynamic_registration.json b/client/.snapshots/TestHandler-common-case=create_clients-case=9-description=setting_skip_logout_consent_fails_for_dynamic_registration.json new file mode 100644 index 00000000000..0d6da85531a --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=create_clients-case=9-description=setting_skip_logout_consent_fails_for_dynamic_registration.json @@ -0,0 +1,4 @@ +{ + "error": "invalid_request", + "error_description": "'skip_logout_consent' cannot be set for dynamic client registration" +} diff --git a/client/.snapshots/TestHandler-common-case=delete_non-existing_client-path=-admin-clients-foo.json b/client/.snapshots/TestHandler-common-case=delete_non-existing_client-path=-admin-clients-foo.json new file mode 100644 index 00000000000..5b1c8352a82 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=delete_non-existing_client-path=-admin-clients-foo.json @@ -0,0 +1,7 @@ +{ + "body": { + "error": "Unable to 
locate the resource", + "error_description": "" + }, + "status": 404 +} diff --git a/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=admin.json b/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=admin.json index 483ce3be627..9fc694022cd 100644 --- a/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=admin.json +++ b/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=admin.json @@ -21,6 +21,8 @@ "token_endpoint_auth_method": "client_secret_basic", "userinfo_signed_response_alg": "none", "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -30,7 +32,10 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null }, "status": 200 } diff --git a/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=selfservice.json b/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=selfservice.json index 2c35fefcccf..d6544830e52 100644 --- a/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=selfservice.json +++ b/client/.snapshots/TestHandler-common-case=fetching_existing_client-endpoint=selfservice.json @@ -20,6 +20,8 @@ "jwks": {}, "token_endpoint_auth_method": "client_secret_basic", "userinfo_signed_response_alg": "none", + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -29,7 +31,10 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null }, "status": 200 } diff --git a/client/.snapshots/TestHandler-common-case=fetching_non-existing_client-path=-admin-clients-foo.json b/client/.snapshots/TestHandler-common-case=fetching_non-existing_client-path=-admin-clients-foo.json new file mode 100644 index 00000000000..5b1c8352a82 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=fetching_non-existing_client-path=-admin-clients-foo.json @@ -0,0 +1,7 @@ +{ + "body": { + "error": "Unable to locate the resource", + "error_description": "" + }, + "status": 404 +} diff --git a/client/.snapshots/TestHandler-common-case=update_the_lifespans_of_an_OAuth2_client.json b/client/.snapshots/TestHandler-common-case=update_the_lifespans_of_an_OAuth2_client.json index 33549433840..aca2c7bbca9 100644 --- a/client/.snapshots/TestHandler-common-case=update_the_lifespans_of_an_OAuth2_client.json +++ b/client/.snapshots/TestHandler-common-case=update_the_lifespans_of_an_OAuth2_client.json @@ -21,6 +21,8 @@ "token_endpoint_auth_method": "client_secret_basic", 
"userinfo_signed_response_alg": "none", "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": "31h0m0s", "authorization_code_grant_id_token_lifespan": "32h0m0s", "authorization_code_grant_refresh_token_lifespan": "33h0m0s", @@ -30,7 +32,10 @@ "jwt_bearer_grant_access_token_lifespan": "37h0m0s", "refresh_token_grant_id_token_lifespan": "40h0m0s", "refresh_token_grant_access_token_lifespan": "41h0m0s", - "refresh_token_grant_refresh_token_lifespan": "42h0m0s" + "refresh_token_grant_refresh_token_lifespan": "42h0m0s", + "device_authorization_grant_id_token_lifespan": "45h0m0s", + "device_authorization_grant_access_token_lifespan": "46h0m0s", + "device_authorization_grant_refresh_token_lifespan": "47h0m0s" }, "status": 200 } diff --git a/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=admin.json b/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=admin.json index 070c4259ca5..4953cd54220 100644 --- a/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=admin.json +++ b/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=admin.json @@ -23,6 +23,8 @@ "token_endpoint_auth_method": "client_secret_basic", "userinfo_signed_response_alg": "none", "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -32,7 +34,10 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null }, "status": 200 } diff --git a/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=dynamic_client_registration.json b/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=dynamic_client_registration.json index 1b2a3fd88dd..5727960363b 100644 --- a/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=dynamic_client_registration.json +++ b/client/.snapshots/TestHandler-common-case=updating_existing_client-endpoint=dynamic_client_registration.json @@ -22,6 +22,8 @@ "token_endpoint_auth_method": "client_secret_basic", "userinfo_signed_response_alg": "none", "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -31,7 +33,10 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null }, "status": 200 } diff --git a/client/.snapshots/TestHandler-common-case=updating_existing_client_fails_with_metadata_on_self_service.json 
b/client/.snapshots/TestHandler-common-case=updating_existing_client_fails_with_metadata_on_self_service.json index 4564a266965..d227f6befa1 100644 --- a/client/.snapshots/TestHandler-common-case=updating_existing_client_fails_with_metadata_on_self_service.json +++ b/client/.snapshots/TestHandler-common-case=updating_existing_client_fails_with_metadata_on_self_service.json @@ -1,7 +1,7 @@ { "body": { "error": "invalid_client_metadata", - "error_description": "The value of one of the Client Metadata fields is invalid and the server has rejected this request. Note that an Authorization Server MAY choose to substitute a valid value for any requested parameter of a Client's Metadata. metadata cannot be set for dynamic client registration'" + "error_description": "The value of one of the Client Metadata fields is invalid and the server has rejected this request. Note that an Authorization Server MAY choose to substitute a valid value for any requested parameter of a Client's Metadata. 'metadata' cannot be set for dynamic client registration" }, "status": 400 } diff --git a/client/.snapshots/TestHandler-common-case=updating_non-existing_client-path=-admin-clients-foo.json b/client/.snapshots/TestHandler-common-case=updating_non-existing_client-path=-admin-clients-foo.json new file mode 100644 index 00000000000..5b1c8352a82 --- /dev/null +++ b/client/.snapshots/TestHandler-common-case=updating_non-existing_client-path=-admin-clients-foo.json @@ -0,0 +1,7 @@ +{ + "body": { + "error": "Unable to locate the resource", + "error_description": "" + }, + "status": 404 +} diff --git a/client/.snapshots/TestHandler-create_client_registration_tokens-case=0-dynamic=true.json b/client/.snapshots/TestHandler-create_client_registration_tokens-case=0-dynamic=true.json index eadba6cabf9..b161bf055fa 100644 --- a/client/.snapshots/TestHandler-create_client_registration_tokens-case=0-dynamic=true.json +++ b/client/.snapshots/TestHandler-create_client_registration_tokens-case=0-dynamic=true.json @@ -16,6 +16,8 @@ "subject_type": "", "jwks": {}, "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -25,5 +27,8 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null } diff --git a/client/.snapshots/TestHandler-create_client_registration_tokens-case=1-dynamic=false.json b/client/.snapshots/TestHandler-create_client_registration_tokens-case=1-dynamic=false.json index eadba6cabf9..b161bf055fa 100644 --- a/client/.snapshots/TestHandler-create_client_registration_tokens-case=1-dynamic=false.json +++ b/client/.snapshots/TestHandler-create_client_registration_tokens-case=1-dynamic=false.json @@ -16,6 +16,8 @@ "subject_type": "", "jwks": {}, "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -25,5 +27,8 @@ "jwt_bearer_grant_access_token_lifespan": null, 
"refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null } diff --git a/client/.snapshots/TestHandler-create_client_registration_tokens-case=2-dynamic=false.json b/client/.snapshots/TestHandler-create_client_registration_tokens-case=2-dynamic=false.json index ea1bf694195..aa0b8b3ae78 100644 --- a/client/.snapshots/TestHandler-create_client_registration_tokens-case=2-dynamic=false.json +++ b/client/.snapshots/TestHandler-create_client_registration_tokens-case=2-dynamic=false.json @@ -17,6 +17,8 @@ "subject_type": "", "jwks": {}, "metadata": {}, + "skip_consent": false, + "skip_logout_consent": null, "authorization_code_grant_access_token_lifespan": null, "authorization_code_grant_id_token_lifespan": null, "authorization_code_grant_refresh_token_lifespan": null, @@ -26,5 +28,8 @@ "jwt_bearer_grant_access_token_lifespan": null, "refresh_token_grant_id_token_lifespan": null, "refresh_token_grant_access_token_lifespan": null, - "refresh_token_grant_refresh_token_lifespan": null + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null } diff --git a/client/client.go b/client/client.go index 0a56836253c..4d04c899c58 100644 --- a/client/client.go +++ b/client/client.go @@ -4,18 +4,19 @@ package client import ( + "strconv" "strings" "time" - "github.com/ory/x/stringsx" - - "github.com/gobuffalo/pop/v6" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" + "github.com/twmb/murmur3" - jose "gopkg.in/square/go-jose.v2" // Naming the dependency jose is important for go-swagger to work, see https://github.com/go-swagger/go-swagger/issues/1587 + "github.com/ory/pop/v6" - "github.com/ory/fosite" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" "github.com/ory/x/sqlxx" ) @@ -31,17 +32,12 @@ var ( // // swagger:model oAuth2Client type Client struct { - ID uuid.UUID `json:"-" db:"pk"` NID uuid.UUID `db:"nid" faker:"-" json:"-"` // OAuth 2.0 Client ID // - // The ID is autogenerated and immutable. - LegacyClientID string `json:"client_id" db:"id"` - - // DEPRECATED: This field is deprecated and will be removed. It serves - // no purpose except the database not complaining. - PKDeprecated int64 `json:"-" db:"pk_deprecated"` + // The ID is immutable. If no ID is provided, a UUID4 will be generated. 
+ ID string `json:"client_id" db:"id"` // OAuth 2.0 Client Name // @@ -72,6 +68,7 @@ type Client struct { // - OpenID Connect Implicit Grant (deprecated!): `implicit` // - Refresh Token Grant: `refresh_token` // - OAuth 2.0 Token Exchange: `urn:ietf:params:oauth:grant-type:jwt-bearer` + // - OAuth 2.0 Device Code Grant: `urn:ietf:params:oauth:grant-type:device_code` GrantTypes sqlxx.StringSliceJSONFormat `json:"grant_types" db:"grant_types"` // OAuth 2.0 Client Response Types @@ -118,7 +115,7 @@ type Client struct { // OAuth 2.0 Client Allowed CORS Origins // // One or more URLs (scheme://host[:port]) which are allowed to make CORS requests - // to the /oauth/token endpoint. If this array is empty, the sever's CORS origin configuration (`CORS_ALLOWED_ORIGINS`) + // to the /oauth/token endpoint. If this array is empty, the server's CORS origin configuration (`CORS_ALLOWED_ORIGINS`) // will be used instead. If this array is set, the allowed origins are appended to the server's CORS origin configuration. // Be aware that environment variable `CORS_ENABLED` MUST be set to `true` for this to work. AllowedCORSOrigins sqlxx.StringSliceJSONFormat `json:"allowed_cors_origins" db:"allowed_cors_origins"` @@ -195,10 +192,12 @@ type Client struct { // // Requested Client Authentication method for the Token Endpoint. The options are: // - // - `client_secret_post`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. - // - `client_secret_basic`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. + // - `client_secret_basic`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. + // - `client_secret_post`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. // - `private_key_jwt`: Use JSON Web Tokens to authenticate the client. // - `none`: Used for public clients (native apps, mobile apps) which can not have secrets. + // + // default: client_secret_basic TokenEndpointAuthMethod string `json:"token_endpoint_auth_method,omitempty" db:"token_endpoint_auth_method" faker:"len=25"` // OAuth 2.0 Token Endpoint Signing Algorithm @@ -272,7 +271,7 @@ type Client struct { // OAuth 2.0 Client Metadata // - // Use this field to story arbitrary data about the OAuth 2.0 Client. Can not be modified using OpenID Connect Dynamic Client Registration protocol. + // Use this field to store arbitrary data about the OAuth 2.0 Client. Can not be modified using OpenID Connect Dynamic Client Registration protocol. Metadata sqlxx.JSONRawMessage `json:"metadata,omitempty" db:"metadata" faker:"-"` // OpenID Connect Dynamic Client Registration Access Token @@ -291,6 +290,21 @@ type Client struct { // RegistrationClientURI is the URL used to update, get, or delete the OAuth2 Client. RegistrationClientURI string `json:"registration_client_uri,omitempty" db:"-"` + // OAuth 2.0 Access Token Strategy + // + // AccessTokenStrategy is the strategy used to generate access tokens. + // Valid options are `jwt` and `opaque`. `jwt` is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token + // Setting the strategy here overrides the global setting in `strategies.access_token`. + AccessTokenStrategy string `json:"access_token_strategy,omitempty" db:"access_token_strategy" faker:"-"` + + // SkipConsent skips the consent screen for this client. This field can only + // be set from the admin API. 
+ SkipConsent bool `json:"skip_consent" db:"skip_consent" faker:"-"` + + // SkipLogoutConsent skips the logout consent screen for this client. This field can only + // be set from the admin API. + SkipLogoutConsent sqlxx.NullBool `json:"skip_logout_consent" db:"skip_logout_consent" faker:"-"` + Lifespans } @@ -355,6 +369,21 @@ type Lifespans struct { // // The lifespan of a refresh token issued by the OAuth2 2.0 Refresh Token Grant for this OAuth 2.0 Client. RefreshTokenGrantRefreshTokenLifespan x.NullDuration `json:"refresh_token_grant_refresh_token_lifespan,omitempty" db:"refresh_token_grant_refresh_token_lifespan"` + + // OAuth2 2.0 Device Authorization Grant ID Token Lifespan + // + // The lifespan of an ID token issued by the OAuth2 2.0 Device Authorization Grant for this OAuth 2.0 Client. + DeviceAuthorizationGrantIDTokenLifespan x.NullDuration `json:"device_authorization_grant_id_token_lifespan,omitempty" db:"device_authorization_grant_id_token_lifespan"` + + // OAuth2 2.0 Device Authorization Grant Access Token Lifespan + // + // The lifespan of an access token issued by the OAuth2 2.0 Device Authorization Grant for this OAuth 2.0 Client. + DeviceAuthorizationGrantAccessTokenLifespan x.NullDuration `json:"device_authorization_grant_access_token_lifespan,omitempty" db:"device_authorization_grant_access_token_lifespan"` + + // OAuth2 2.0 Device Authorization Grant Refresh Token Lifespan + // + // The lifespan of a refresh token issued by the OAuth2 2.0 Device Authorization Grant for this OAuth 2.0 Client. + DeviceAuthorizationGrantRefreshTokenLifespan x.NullDuration `json:"device_authorization_grant_refresh_token_lifespan,omitempty" db:"device_authorization_grant_refresh_token_lifespan"` } func (Client) TableName() string { @@ -392,7 +421,7 @@ func (c *Client) BeforeSave(_ *pop.Connection) error { } func (c *Client) GetID() string { - return stringsx.Coalesce(c.LegacyClientID, c.ID.String()) + return c.ID } func (c *Client) GetRedirectURIs() []string { @@ -404,7 +433,7 @@ func (c *Client) GetHashedSecret() []byte { } func (c *Client) GetScopes() fosite.Arguments { - return fosite.Arguments(strings.Fields(c.Scope)) + return strings.Fields(c.Scope) } func (c *Client) GetAudience() fosite.Arguments { @@ -525,6 +554,14 @@ func (c *Client) GetEffectiveLifespan(gt fosite.GrantType, tt fosite.TokenType, } else if tt == fosite.RefreshToken && c.RefreshTokenGrantRefreshTokenLifespan.Valid { cl = &c.RefreshTokenGrantRefreshTokenLifespan.Duration } + } else if gt == fosite.GrantTypeDeviceCode { + if tt == fosite.AccessToken && c.DeviceAuthorizationGrantAccessTokenLifespan.Valid { + cl = &c.DeviceAuthorizationGrantAccessTokenLifespan.Duration + } else if tt == fosite.IDToken && c.DeviceAuthorizationGrantIDTokenLifespan.Valid { + cl = &c.DeviceAuthorizationGrantIDTokenLifespan.Duration + } else if tt == fosite.RefreshToken && c.DeviceAuthorizationGrantRefreshTokenLifespan.Valid { + cl = &c.DeviceAuthorizationGrantRefreshTokenLifespan.Duration + } } if cl == nil { @@ -532,3 +569,27 @@ func (c *Client) GetEffectiveLifespan(gt fosite.GrantType, tt fosite.TokenType, } return *cl } + +func (c *Client) GetAccessTokenStrategy() config.AccessTokenStrategyType { + // We ignore the error here, because the empty string will default to + // the global access token strategy.
+ s, _ := config.ToAccessTokenStrategyType(c.AccessTokenStrategy) + return s +} + +func AccessTokenStrategySource(client fosite.Client) config.AccessTokenStrategySource { + if source, ok := client.(config.AccessTokenStrategySource); ok { + return source + } + return nil +} + +func (c *Client) CookieSuffix() string { + return CookieSuffix(c) +} + +type IDer interface{ GetID() string } + +func CookieSuffix(client IDer) string { + return strconv.Itoa(int(murmur3.Sum32([]byte(client.GetID())))) +} diff --git a/client/client_test.go b/client/client_test.go index b51527a050c..cee41d4ab20 100644 --- a/client/client_test.go +++ b/client/client_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" ) var _ fosite.OpenIDConnectClient = new(Client) @@ -16,7 +16,7 @@ var _ fosite.Client = new(Client) func TestClient(t *testing.T) { c := &Client{ - LegacyClientID: "foo", + ID: "foo", RedirectURIs: []string{"foo"}, Scope: "foo bar", TokenEndpointAuthMethod: "none", diff --git a/client/error.go b/client/error.go index 45fd03925cf..d11f2b5d63e 100644 --- a/client/error.go +++ b/client/error.go @@ -6,7 +6,7 @@ package client import ( "net/http" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" ) var ErrInvalidClientMetadata = &fosite.RFC6749Error{ @@ -20,3 +20,9 @@ var ErrInvalidRedirectURI = &fosite.RFC6749Error{ ErrorField: "invalid_redirect_uri", CodeField: http.StatusBadRequest, } + +var ErrInvalidRequest = &fosite.RFC6749Error{ + DescriptionField: "The request is missing a required parameter, includes an unsupported parameter value (other than grant type), repeats a parameter, includes multiple credentials, utilizes more than one mechanism for authenticating the client, or is otherwise malformed.", + ErrorField: "invalid_request", + CodeField: http.StatusBadRequest, +} diff --git a/client/handler.go b/client/handler.go index 5f020f23b4e..2de4afb768f 100644 --- a/client/handler.go +++ b/client/handler.go @@ -9,29 +9,21 @@ import ( "encoding/json" "io" "net/http" + "net/url" "strings" "time" - "github.com/ory/x/pagination/tokenpagination" + "github.com/pkg/errors" + "github.com/ory/herodot" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" "github.com/ory/x/httprouterx" - - "github.com/ory/x/openapix" - - "github.com/ory/x/uuidx" - "github.com/ory/x/jsonx" + "github.com/ory/x/openapix" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" "github.com/ory/x/urlx" - - "github.com/ory/fosite" - - "github.com/ory/x/errorsx" - - "github.com/ory/herodot" - "github.com/ory/hydra/x" - - "github.com/julienschmidt/httprouter" - "github.com/pkg/errors" + "github.com/ory/x/uuidx" ) type Handler struct { @@ -44,29 +36,31 @@ const ( ) func NewHandler(r InternalRegistry) *Handler { - return &Handler{ - r: r, - } + return &Handler{r: r} +} + +func (h *Handler) SetAdminRoutes(r *httprouterx.RouterAdmin) { + r.GET(ClientsHandlerPath, h.listOAuth2Clients) + r.POST(ClientsHandlerPath, h.createOAuth2Client) + r.GET(ClientsHandlerPath+"/{id}", h.Get) + r.PUT(ClientsHandlerPath+"/{id}", h.setOAuth2Client) + r.PATCH(ClientsHandlerPath+"/{id}", h.patchOAuth2Client) + 
r.DELETE(ClientsHandlerPath+"/{id}", h.deleteOAuth2Client) + r.PUT(ClientsHandlerPath+"/{id}/lifespans", h.setOAuth2ClientLifespans) } -func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx.RouterPublic) { - admin.GET(ClientsHandlerPath, h.listOAuth2Clients) - admin.POST(ClientsHandlerPath, h.createOAuth2Client) - admin.GET(ClientsHandlerPath+"/:id", h.Get) - admin.PUT(ClientsHandlerPath+"/:id", h.setOAuth2Client) - admin.PATCH(ClientsHandlerPath+"/:id", h.patchOAuth2Client) - admin.DELETE(ClientsHandlerPath+"/:id", h.deleteOAuth2Client) - admin.PUT(ClientsHandlerPath+"/:id/lifespans", h.setOAuth2ClientLifespans) - - public.POST(DynClientsHandlerPath, h.createOidcDynamicClient) - public.GET(DynClientsHandlerPath+"/:id", h.getOidcDynamicClient) - public.PUT(DynClientsHandlerPath+"/:id", h.setOidcDynamicClient) - public.DELETE(DynClientsHandlerPath+"/:id", h.deleteOidcDynamicClient) +func (h *Handler) SetPublicRoutes(r *httprouterx.RouterPublic) { + r.POST(DynClientsHandlerPath, h.createOidcDynamicClient) + r.GET(DynClientsHandlerPath+"/{id}", h.getOidcDynamicClient) + r.PUT(DynClientsHandlerPath+"/{id}", h.setOidcDynamicClient) + r.DELETE(DynClientsHandlerPath+"/{id}", h.deleteOidcDynamicClient) } // OAuth 2.0 Client Creation Parameters // // swagger:parameters createOAuth2Client +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type createOAuth2Client struct { // OAuth 2.0 Client Request Body // @@ -94,19 +88,21 @@ type createOAuth2Client struct { // 201: oAuth2Client // 400: errorOAuth2BadRequest // default: errorOAuth2Default -func (h *Handler) createOAuth2Client(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { +func (h *Handler) createOAuth2Client(w http.ResponseWriter, r *http.Request) { c, err := h.CreateClient(r, h.r.ClientValidator().Validate, false) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().WriteCreated(w, r, "/admin"+ClientsHandlerPath+"/"+c.GetID(), &c) + h.r.Writer().WriteCreated(w, r, urlx.MustJoin("/admin", ClientsHandlerPath, url.PathEscape(c.GetID())), &c) } // OpenID Connect Dynamic Client Registration Parameters // // swagger:parameters createOidcDynamicClient +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type createOidcDynamicClient struct { // Dynamic Client Registration Request Body // @@ -143,39 +139,34 @@ type createOidcDynamicClient struct { // 201: oAuth2Client // 400: errorOAuth2BadRequest // default: errorOAuth2Default -func (h *Handler) createOidcDynamicClient(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) createOidcDynamicClient(w http.ResponseWriter, r *http.Request) { if err := h.requireDynamicAuth(r); err != nil { h.r.Writer().WriteError(w, r, err) return } c, err := h.CreateClient(r, h.r.ClientValidator().ValidateDynamicRegistration, true) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, errors.WithStack(err)) return } - h.r.Writer().WriteCreated(w, r, "/admin"+ClientsHandlerPath+"/"+c.GetID(), &c) + h.r.Writer().WriteCreated(w, r, urlx.MustJoin("admin", ClientsHandlerPath, url.PathEscape(c.GetID())), &c) } func (h *Handler) CreateClient(r *http.Request, validator func(context.Context, *Client) error, isDynamic bool) (*Client, error) { var c Client if err := json.NewDecoder(r.Body).Decode(&c); err != nil { - return nil, err + return nil, 
errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err)) } if isDynamic { if c.Secret != "" { - return nil, errorsx.WithStack(herodot.ErrBadRequest.WithReasonf("It is not allowed to choose your own OAuth2 Client secret.")) + return nil, errors.WithStack(herodot.ErrBadRequest.WithReasonf("It is not allowed to choose your own OAuth2 Client secret.")) } + // We do not allow to set the client ID for dynamic clients. + c.ID = uuidx.NewV4().String() } - if len(c.LegacyClientID) > 0 { - return nil, errorsx.WithStack(herodot.ErrBadRequest.WithReason("It is no longer possible to set an OAuth2 Client ID as a user. The system will generate a unique ID for you.")) - } - - c.ID = uuidx.NewV4() - c.LegacyClientID = c.ID.String() - if len(c.Secret) == 0 { secretb, err := x.GenerateSecret(26) if err != nil { @@ -199,7 +190,7 @@ func (h *Handler) CreateClient(r *http.Request, validator func(context.Context, c.RegistrationAccessToken = token c.RegistrationAccessTokenSignature = signature - c.RegistrationClientURI = urlx.AppendPaths(h.r.Config().PublicURL(r.Context()), DynClientsHandlerPath+"/"+c.GetID()).String() + c.RegistrationClientURI = urlx.AppendPaths(h.r.Config().PublicURL(r.Context()), DynClientsHandlerPath, url.PathEscape(c.GetID())).String() if err := h.r.ClientManager().CreateClient(r.Context(), &c); err != nil { return nil, err @@ -214,6 +205,8 @@ func (h *Handler) CreateClient(r *http.Request, validator func(context.Context, // Set OAuth 2.0 Client Parameters // // swagger:parameters setOAuth2Client +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type setOAuth2Client struct { // OAuth 2.0 Client ID // @@ -253,14 +246,14 @@ type setOAuth2Client struct { // 400: errorOAuth2BadRequest // 404: errorOAuth2NotFound // default: errorOAuth2Default -func (h *Handler) setOAuth2Client(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) setOAuth2Client(w http.ResponseWriter, r *http.Request) { var c Client if err := json.NewDecoder(r.Body).Decode(&c); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) return } - c.LegacyClientID = ps.ByName("id") + c.ID = r.PathValue("id") if err := h.updateClient(r.Context(), &c, h.r.ClientValidator().Validate); err != nil { h.r.Writer().WriteError(w, r, err) return @@ -290,6 +283,8 @@ func (h *Handler) updateClient(ctx context.Context, c *Client, validator func(co // Set Dynamic Client Parameters // // swagger:parameters setOidcDynamicClient +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type setOidcDynamicClient struct { // OAuth 2.0 Client ID // @@ -339,13 +334,13 @@ type setOidcDynamicClient struct { // 200: oAuth2Client // 404: errorOAuth2NotFound // default: errorOAuth2Default -func (h *Handler) setOidcDynamicClient(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) setOidcDynamicClient(w http.ResponseWriter, r *http.Request) { if err := h.requireDynamicAuth(r); err != nil { h.r.Writer().WriteError(w, r, err) return } - client, err := h.ValidDynamicAuth(r, ps) + client, err := h.ValidDynamicAuth(r, r.PathValue("id")) if err != nil { h.r.Writer().WriteError(w, r, err) return @@ -353,12 +348,12 @@ func (h *Handler) setOidcDynamicClient(w http.ResponseWriter, r *http.Request, p var c Client if 
err := json.NewDecoder(r.Body).Decode(&c); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body. Is it valid JSON?").WithDebug(err.Error()))) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body. Is it valid JSON?").WithDebug(err.Error()))) return } if c.Secret != "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(herodot.ErrForbidden.WithReasonf("It is not allowed to choose your own OAuth2 Client secret."))) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrForbidden.WithReasonf("It is not allowed to choose your own OAuth2 Client secret."))) return } @@ -371,7 +366,7 @@ func (h *Handler) setOidcDynamicClient(w http.ResponseWriter, r *http.Request, p c.RegistrationAccessToken = token c.RegistrationAccessTokenSignature = signature - c.LegacyClientID = client.GetID() + c.ID = client.GetID() if err := h.updateClient(r.Context(), &c, h.r.ClientValidator().ValidateDynamicRegistration); err != nil { h.r.Writer().WriteError(w, r, err) return @@ -383,6 +378,8 @@ func (h *Handler) setOidcDynamicClient(w http.ResponseWriter, r *http.Request, p // Patch OAuth 2.0 Client Parameters // // swagger:parameters patchOAuth2Client +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type patchOAuth2Client struct { // The id of the OAuth 2.0 Client. // @@ -420,23 +417,24 @@ type patchOAuth2Client struct { // 200: oAuth2Client // 404: errorOAuth2NotFound // default: errorOAuth2Default -func (h *Handler) patchOAuth2Client(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) patchOAuth2Client(w http.ResponseWriter, r *http.Request) { patchJSON, err := io.ReadAll(r.Body) if err != nil { h.r.Writer().WriteError(w, r, err) return } - id := ps.ByName("id") - c, err := h.r.ClientManager().GetConcreteClient(r.Context(), id) + id := r.PathValue("id") + client, err := h.r.ClientManager().GetConcreteClient(r.Context(), id) if err != nil { h.r.Writer().WriteError(w, r, err) return } - oldSecret := c.Secret + oldSecret := client.Secret - if err := jsonx.ApplyJSONPatch(patchJSON, c, "/id"); err != nil { + client, err = jsonx.ApplyJSONPatch(patchJSON, client, "/id") + if err != nil { h.r.Writer().WriteError(w, r, err) return } @@ -445,23 +443,25 @@ func (h *Handler) patchOAuth2Client(w http.ResponseWriter, r *http.Request, ps h // GetConcreteClient returns a client with the hashed secret, however updateClient expects // an empty secret if the secret hasn't changed. 
As such we need to check if the patch has // updated the secret or not - if oldSecret == c.Secret { - c.Secret = "" + if oldSecret == client.Secret { + client.Secret = "" } - if err := h.updateClient(r.Context(), c, h.r.ClientValidator().Validate); err != nil { + if err := h.updateClient(r.Context(), client, h.r.ClientValidator().Validate); err != nil { h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().Write(w, r, c) + h.r.Writer().Write(w, r, client) } // Paginated OAuth2 Client List Response // // swagger:response listOAuth2Clients +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type listOAuth2ClientsResponse struct { - tokenpagination.ResponseHeaders + keysetpagination.ResponseHeaders // List of OAuth 2.0 Clients // @@ -472,8 +472,10 @@ type listOAuth2ClientsResponse struct { // Paginated OAuth2 Client List Parameters // // swagger:parameters listOAuth2Clients +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type listOAuth2ClientsParameters struct { - tokenpagination.RequestParameters + keysetpagination.RequestParameters // The name of the clients to filter by. // @@ -504,16 +506,20 @@ type listOAuth2ClientsParameters struct { // Responses: // 200: listOAuth2Clients // default: errorOAuth2Default -func (h *Handler) listOAuth2Clients(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - page, itemsPerPage := x.ParsePagination(r) +func (h *Handler) listOAuth2Clients(w http.ResponseWriter, r *http.Request) { + pageKeys := h.r.Config().GetPaginationEncryptionKeys(r.Context()) + pagination, err := keysetpagination.ParseQueryParams(pageKeys, r.URL.Query()) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to parse pagination parameters: %s", err))) + return + } filters := Filter{ - Limit: itemsPerPage, - Offset: page * itemsPerPage, - Name: r.URL.Query().Get("client_name"), - Owner: r.URL.Query().Get("owner"), + PageOpts: pagination, + Name: r.URL.Query().Get("client_name"), + Owner: r.URL.Query().Get("owner"), } - c, err := h.r.ClientManager().GetClients(r.Context(), filters) + c, nextPage, err := h.r.ClientManager().GetClients(r.Context(), filters) if err != nil { h.r.Writer().WriteError(w, r, err) return @@ -527,19 +533,15 @@ func (h *Handler) listOAuth2Clients(w http.ResponseWriter, r *http.Request, ps h c[k].Secret = "" } - total, err := h.r.ClientManager().CountClients(r.Context()) - if err != nil { - h.r.Writer().WriteError(w, r, err) - return - } - - x.PaginationHeader(w, r.URL, int64(total), page, itemsPerPage) + keysetpagination.SetLinkHeader(w, pageKeys, r.URL, nextPage) h.r.Writer().Write(w, r, c) } // Get OAuth2 Client Parameters // // swagger:parameters getOAuth2Client +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type adminGetOAuth2Client struct { // The id of the OAuth 2.0 Client. 
// @@ -568,8 +570,8 @@ type adminGetOAuth2Client struct { // Responses: // 200: oAuth2Client // default: errorOAuth2Default -func (h *Handler) Get(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var id = ps.ByName("id") +func (h *Handler) Get(w http.ResponseWriter, r *http.Request) { + id := r.PathValue("id") c, err := h.r.ClientManager().GetConcreteClient(r.Context(), id) if err != nil { h.r.Writer().WriteError(w, r, err) @@ -583,6 +585,8 @@ func (h *Handler) Get(w http.ResponseWriter, r *http.Request, ps httprouter.Para // Get OpenID Connect Dynamic Client Parameters // // swagger:parameters getOidcDynamicClient +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type getOidcDynamicClient struct { // The id of the OAuth 2.0 Client. // @@ -617,13 +621,13 @@ type getOidcDynamicClient struct { // Responses: // 200: oAuth2Client // default: errorOAuth2Default -func (h *Handler) getOidcDynamicClient(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) getOidcDynamicClient(w http.ResponseWriter, r *http.Request) { if err := h.requireDynamicAuth(r); err != nil { h.r.Writer().WriteError(w, r, err) return } - client, err := h.ValidDynamicAuth(r, ps) + client, err := h.ValidDynamicAuth(r, r.PathValue("id")) if err != nil { h.r.Writer().WriteError(w, r, err) return @@ -644,6 +648,8 @@ func (h *Handler) getOidcDynamicClient(w http.ResponseWriter, r *http.Request, p // Delete OAuth2 Client Parameters // // swagger:parameters deleteOAuth2Client +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type deleteOAuth2Client struct { // The id of the OAuth 2.0 Client. // @@ -674,8 +680,8 @@ type deleteOAuth2Client struct { // Responses: // 204: emptyResponse // default: genericError -func (h *Handler) deleteOAuth2Client(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var id = ps.ByName("id") +func (h *Handler) deleteOAuth2Client(w http.ResponseWriter, r *http.Request) { + id := r.PathValue("id") if err := h.r.ClientManager().DeleteClient(r.Context(), id); err != nil { h.r.Writer().WriteError(w, r, err) return @@ -687,6 +693,8 @@ func (h *Handler) deleteOAuth2Client(w http.ResponseWriter, r *http.Request, ps // Set OAuth 2.0 Client Token Lifespans // // swagger:parameters setOAuth2ClientLifespans +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type setOAuth2ClientLifespans struct { // OAuth 2.0 Client ID // @@ -712,8 +720,8 @@ type setOAuth2ClientLifespans struct { // Responses: // 200: oAuth2Client // default: genericError -func (h *Handler) setOAuth2ClientLifespans(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var id = ps.ByName("id") +func (h *Handler) setOAuth2ClientLifespans(w http.ResponseWriter, r *http.Request) { + id := r.PathValue("id") c, err := h.r.ClientManager().GetConcreteClient(r.Context(), id) if err != nil { h.r.Writer().WriteError(w, r, err) @@ -722,7 +730,7 @@ func (h *Handler) setOAuth2ClientLifespans(w http.ResponseWriter, r *http.Reques var ls Lifespans if err := json.NewDecoder(r.Body).Decode(&ls); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) return } @@ -738,6 +746,8 @@ func (h *Handler) setOAuth2ClientLifespans(w http.ResponseWriter, r *http.Reques } // swagger:parameters deleteOidcDynamicClient +// 
+//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type dynamicClientRegistrationDeleteOAuth2Client struct { // The id of the OAuth 2.0 Client. // @@ -773,12 +783,12 @@ type dynamicClientRegistrationDeleteOAuth2Client struct { // Responses: // 204: emptyResponse // default: genericError -func (h *Handler) deleteOidcDynamicClient(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) deleteOidcDynamicClient(w http.ResponseWriter, r *http.Request) { if err := h.requireDynamicAuth(r); err != nil { h.r.Writer().WriteError(w, r, err) return } - client, err := h.ValidDynamicAuth(r, ps) + client, err := h.ValidDynamicAuth(r, r.PathValue("id")) if err != nil { h.r.Writer().WriteError(w, r, err) return @@ -792,8 +802,8 @@ func (h *Handler) deleteOidcDynamicClient(w http.ResponseWriter, r *http.Request w.WriteHeader(http.StatusNoContent) } -func (h *Handler) ValidDynamicAuth(r *http.Request, ps httprouter.Params) (fosite.Client, error) { - c, err := h.r.ClientManager().GetConcreteClient(r.Context(), ps.ByName("id")) +func (h *Handler) ValidDynamicAuth(r *http.Request, id string) (fosite.Client, error) { + c, err := h.r.ClientManager().GetConcreteClient(r.Context(), id) if err != nil { return nil, herodot.ErrUnauthorized. WithTrace(err). @@ -806,13 +816,26 @@ func (h *Handler) ValidDynamicAuth(r *http.Request, ps httprouter.Params) (fosit } token := strings.TrimPrefix(fosite.AccessTokenFromRequest(r), "ory_at_") - if err := h.r.OAuth2HMACStrategy().Enigma.Validate(r.Context(), token); err != nil { + if err := h.r.OAuth2HMACStrategy().ValidateAccessToken( + r.Context(), + // The strategy checks the expiry time of the token. Registration tokens don't expire (we don't have a way of + // rotating them) so we set the expiry time to a time in the future. + &fosite.Request{ + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.AccessToken: time.Now().Add(time.Hour), + }, + }, + RequestedAt: time.Now(), + }, + token, + ); err != nil { return nil, herodot.ErrUnauthorized. WithTrace(err). WithReason("The requested OAuth 2.0 client does not exist or you provided incorrect credentials.").WithDebug(err.Error()) } - signature := h.r.OAuth2HMACStrategy().Enigma.Signature(token) + signature := h.r.OAuth2EnigmaStrategy().Signature(token) if subtle.ConstantTimeCompare([]byte(c.RegistrationAccessTokenSignature), []byte(signature)) == 0 { return nil, errors.WithStack(herodot.ErrUnauthorized. 
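ValidDynamicAuth above validates the registration access token through the OAuth2 HMAC strategy (with a synthetic, never-expiring session) and then compares the stored signature in constant time. The sketch below keeps only the comparison step; the HMAC-SHA256 signer is a stand-in for the enigma strategy's Signature method and is an assumption for illustration only.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"crypto/subtle"
	"encoding/base64"
	"fmt"
)

// signature is an illustrative stand-in for the enigma strategy's Signature
// method: HMAC-SHA256 over the token, base64url-encoded.
func signature(token string, key []byte) string {
	mac := hmac.New(sha256.New, key)
	mac.Write([]byte(token))
	return base64.RawURLEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
	key := []byte("not-a-real-key")
	stored := signature("ory_at_example-registration-token", key)
	presented := signature("ory_at_example-registration-token", key)

	// Constant-time comparison avoids leaking how many leading bytes match,
	// mirroring the subtle.ConstantTimeCompare call in ValidDynamicAuth.
	if subtle.ConstantTimeCompare([]byte(stored), []byte(presented)) == 1 {
		fmt.Println("registration access token accepted")
	} else {
		fmt.Println("registration access token rejected")
	}
}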
WithReason("The requested OAuth 2.0 client does not exist or you provided incorrect credentials.").WithDebug("Registration access tokens do not match.")) diff --git a/client/handler_test.go b/client/handler_test.go index e8d060495ed..9a3d763176a 100644 --- a/client/handler_test.go +++ b/client/handler_test.go @@ -11,29 +11,33 @@ import ( "io" "net/http" "net/http/httptest" + "net/url" + "strings" "testing" + "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/urfave/negroni" + "github.com/ory/x/httprouterx" + "github.com/ory/x/prometheusx" + "github.com/ory/x/sqlxx" + "github.com/ory/x/urlx" "github.com/tidwall/sjson" "github.com/gofrs/uuid" "github.com/tidwall/gjson" - "github.com/ory/hydra/internal/testhelpers" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/x" - "github.com/ory/hydra/driver/config" - "github.com/ory/x/contextx" + "github.com/ory/hydra/v2/driver/config" - "github.com/julienschmidt/httprouter" "github.com/stretchr/testify/assert" - - "github.com/ory/x/snapshotx" - "github.com/stretchr/testify/require" - "github.com/ory/hydra/client" - "github.com/ory/hydra/internal" + "github.com/ory/hydra/v2/client" + "github.com/ory/x/snapshotx" ) type responseSnapshot struct { @@ -54,9 +58,8 @@ func getClientID(body string) string { func TestHandler(t *testing.T) { ctx := context.Background() - reg := internal.NewMockedRegistry(t, &contextx.Default{}) + reg := testhelpers.NewRegistryMemory(t) h := client.NewHandler(reg) - reg.WithContextualizer(&contextx.TestContextualizer{}) t.Run("create client registration tokens", func(t *testing.T) { for k, tc := range []struct { @@ -94,7 +97,7 @@ func TestHandler(t *testing.T) { except = append(except, "client_id", "client_secret", "registration_client_uri") } - snapshotx.SnapshotTExcept(t, c, except) + snapshotx.SnapshotT(t, c, snapshotx.ExceptPaths(except...)) }) } }) @@ -108,42 +111,62 @@ func TestHandler(t *testing.T) { require.NoError(t, err) t.Run("valid auth", func(t *testing.T) { - actual, err := h.ValidDynamicAuth(&http.Request{Header: http.Header{"Authorization": {"Bearer " + expected.RegistrationAccessToken}}}, httprouter.Params{ - httprouter.Param{Key: "id", Value: expected.GetID()}, - }) + actual, err := h.ValidDynamicAuth(&http.Request{Header: http.Header{"Authorization": {"Bearer " + expected.RegistrationAccessToken}}}, expected.ID) require.NoError(t, err, "authentication with registration access token works") assert.EqualValues(t, expected.GetID(), actual.GetID()) }) t.Run("missing auth", func(t *testing.T) { - _, err := h.ValidDynamicAuth(&http.Request{}, httprouter.Params{ - httprouter.Param{Key: "id", Value: expected.GetID()}, - }) + _, err = h.ValidDynamicAuth(&http.Request{}, expected.ID) require.Error(t, err, "authentication without registration access token fails") }) t.Run("incorrect auth", func(t *testing.T) { - _, err := h.ValidDynamicAuth(&http.Request{Header: http.Header{"Authorization": {"Bearer invalid"}}}, httprouter.Params{ - httprouter.Param{Key: "id", Value: expected.GetID()}, - }) + _, err = h.ValidDynamicAuth(&http.Request{Header: http.Header{"Authorization": {"Bearer invalid"}}}, expected.ID) require.Error(t, err, "authentication with invalid 
registration access token fails") }) }) - newServer := func(t *testing.T, dynamicEnabled bool) (*httptest.Server, *http.Client) { + newServer := func(t *testing.T, dynamicEnabled bool) (adminTs *httptest.Server, publicTs *httptest.Server) { require.NoError(t, reg.Config().Set(ctx, config.KeyPublicAllowDynamicRegistration, dynamicEnabled)) - router := httprouter.New() - h.SetRoutes(httprouterx.NewRouterAdminWithPrefixAndRouter(router, "/admin", reg.Config().AdminURL), &httprouterx.RouterPublic{Router: router}) - ts := httptest.NewServer(router) - t.Cleanup(ts.Close) - reg.Config().MustSet(ctx, config.KeyAdminURL, ts.URL) - return ts, ts.Client() + + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + { + n := negroni.New() + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + n.UseFunc(httprouterx.AddAdminPrefixIfNotPresentNegroni) + n.Use(metrics) + + router := x.NewRouterAdmin(metrics) + h.SetAdminRoutes(router) + router.Handler("GET", prometheusx.MetricsPrometheusPath, promhttp.Handler()) + n.UseHandler(router) + + adminTs = httptest.NewServer(n) + t.Cleanup(adminTs.Close) + reg.Config().MustSet(ctx, config.KeyAdminURL, adminTs.URL) + } + { + n := negroni.New() + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + + router := x.NewRouterPublic(metrics) + h.SetPublicRoutes(router) + n.UseHandler(router) + + publicTs = httptest.NewServer(n) + t.Cleanup(publicTs.Close) + reg.Config().MustSet(ctx, config.KeyAdminURL, publicTs.URL) + } + return } fetch := func(t *testing.T, url string) (string, *http.Response) { res, err := http.Get(url) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck body, err := io.ReadAll(res.Body) require.NoError(t, err) return string(body), res @@ -153,9 +176,10 @@ func TestHandler(t *testing.T) { r, err := http.NewRequest(method, url, body) require.NoError(t, err) r.Header.Set("Authorization", "Bearer "+token) + r.Header.Set("Accept", "application/json") res, err := http.DefaultClient.Do(r) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck out, err := io.ReadAll(res.Body) require.NoError(t, err) return string(out), res @@ -169,7 +193,7 @@ func TestHandler(t *testing.T) { r.Header.Set("Content-Type", "application/json") res, err := ts.Client().Do(r) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck rb, err := io.ReadAll(res.Body) require.NoError(t, err) return string(rb), res @@ -182,26 +206,26 @@ func TestHandler(t *testing.T) { } t.Run("selfservice disabled", func(t *testing.T) { - ts, hc := newServer(t, false) + adminTs, publicTs := newServer(t, false) trap := &client.Client{} - actual := createClient(t, trap, ts, client.ClientsHandlerPath) + actual := createClient(t, trap, adminTs, client.ClientsHandlerPath) actualID := getClientID(actual) for _, tc := range []struct { method string path string }{ - {method: "GET", path: ts.URL + client.DynClientsHandlerPath + "/" + actualID}, - {method: "POST", path: ts.URL + client.DynClientsHandlerPath}, - {method: "PUT", path: ts.URL + client.DynClientsHandlerPath + "/" + actualID}, - {method: "DELETE", path: ts.URL + client.DynClientsHandlerPath + "/" + actualID}, + {method: "GET", path: urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(actualID))}, + {method: "POST", path: urlx.MustJoin(publicTs.URL, 
client.DynClientsHandlerPath)}, + {method: "PUT", path: urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(actualID))}, + {method: "DELETE", path: urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(actualID))}, } { t.Run("method="+tc.method, func(t *testing.T) { req, err := http.NewRequest(tc.method, tc.path, nil) require.NoError(t, err) - res, err := hc.Do(req) + res, err := publicTs.Client().Do(req) require.NoError(t, err) require.Equal(t, http.StatusNotFound, res.StatusCode) }) @@ -209,48 +233,48 @@ func TestHandler(t *testing.T) { }) t.Run("case=selfservice with incorrect or missing auth", func(t *testing.T) { - ts, hc := newServer(t, true) + adminTs, publicTs := newServer(t, true) expectedFirst := createClient(t, &client.Client{ Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) expectedFirstID := getClientID(expectedFirst) // Create the second client expectedSecond := createClient(t, &client.Client{ Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) expectedSecondID := getClientID(expectedSecond) t.Run("endpoint=selfservice", func(t *testing.T) { for _, method := range []string{"GET", "DELETE", "PUT"} { t.Run("method="+method, func(t *testing.T) { t.Run("without auth", func(t *testing.T) { - req, err := http.NewRequest(method, ts.URL+client.DynClientsHandlerPath+"/"+expectedFirstID, nil) + req, err := http.NewRequest(method, urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedFirstID)), nil) require.NoError(t, err) - res, err := hc.Do(req) + res, err := publicTs.Client().Do(req) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck body, err := io.ReadAll(res.Body) require.NoError(t, err) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(string(body), res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(string(body), res)) }) t.Run("without incorrect auth", func(t *testing.T) { - body, res := fetchWithBearerAuth(t, method, ts.URL+client.DynClientsHandlerPath+"/"+expectedFirstID, "incorrect", nil) + body, res := fetchWithBearerAuth(t, method, urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedFirstID)), "incorrect", nil) assert.Equal(t, http.StatusUnauthorized, res.StatusCode) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) t.Run("with a different client auth", func(t *testing.T) { - body, res := fetchWithBearerAuth(t, method, ts.URL+client.DynClientsHandlerPath+"/"+expectedFirstID, expectedSecondID, nil) + body, res := fetchWithBearerAuth(t, method, urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedFirstID)), expectedSecondID, nil) assert.Equal(t, http.StatusUnauthorized, res.StatusCode) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) }) } @@ -258,12 +282,12 @@ func TestHandler(t *testing.T) { }) t.Run("common", func(t *testing.T) { - ts, _ := newServer(t, true) + adminTs, publicTs := newServer(t, true) expected := createClient(t, &client.Client{ Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, 
client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) t.Run("case=create clients", func(t *testing.T) { for k, tc := range []struct { @@ -309,31 +333,78 @@ func TestHandler(t *testing.T) { statusCode: http.StatusBadRequest, }, { - d: "non-uuid fails", + d: "non-uuid works", payload: &client.Client{ - LegacyClientID: "not-a-uuid", - Secret: "averylongsecret", - RedirectURIs: []string{"http://localhost:3000/cb"}, + ID: "not-a-uuid", + Secret: "averylongsecret", + RedirectURIs: []string{"http://localhost:3000/cb"}, }, path: client.ClientsHandlerPath, + statusCode: http.StatusCreated, + }, + { + d: "setting client id as uuid works", + payload: &client.Client{ + ID: "98941dac-f963-4468-8a23-9483b1e04e3c", + Secret: "not too short", + RedirectURIs: []string{"http://localhost:3000/cb"}, + }, + path: client.ClientsHandlerPath, + statusCode: http.StatusCreated, + }, + { + d: "setting access token strategy fails", + payload: &client.Client{ + RedirectURIs: []string{"http://localhost:3000/cb"}, + AccessTokenStrategy: "jwt", + }, + path: client.DynClientsHandlerPath, + statusCode: http.StatusBadRequest, + }, + { + d: "setting skip_consent fails for dynamic registration", + payload: &client.Client{ + RedirectURIs: []string{"http://localhost:3000/cb"}, + SkipConsent: true, + }, + path: client.DynClientsHandlerPath, statusCode: http.StatusBadRequest, }, { - d: "setting client id fails", + d: "setting skip_consent succeeds for admin registration", payload: &client.Client{ - LegacyClientID: "98941dac-f963-4468-8a23-9483b1e04e3c", - Secret: "short", - RedirectURIs: []string{"http://localhost:3000/cb"}, + RedirectURIs: []string{"http://localhost:3000/cb"}, + Secret: "2SKZkBf2P5g4toAXXnCrr~_sDM", + SkipConsent: true, }, path: client.ClientsHandlerPath, + statusCode: http.StatusCreated, + }, + { + d: "setting skip_logout_consent fails for dynamic registration", + payload: &client.Client{ + RedirectURIs: []string{"http://localhost:3000/cb"}, + SkipLogoutConsent: sqlxx.NullBool{Bool: true, Valid: true}, + }, + path: client.DynClientsHandlerPath, statusCode: http.StatusBadRequest, }, + { + d: "setting skip_logout_consent succeeds for admin registration", + payload: &client.Client{ + RedirectURIs: []string{"http://localhost:3000/cb"}, + SkipLogoutConsent: sqlxx.NullBool{Bool: true, Valid: true}, + Secret: "2SKZkBf2P5g4toAXXnCrr~_sDM", + }, + path: client.ClientsHandlerPath, + statusCode: http.StatusCreated, + }, { d: "basic dynamic client registration", payload: &client.Client{ - LegacyClientID: "ead800c5-a316-4d0c-bf00-d25666ba72cf", - Secret: "averylongsecret", - RedirectURIs: []string{"http://localhost:3000/cb"}, + ID: "ead800c5-a316-4d0c-bf00-d25666ba72cf", + Secret: "averylongsecret", + RedirectURIs: []string{"http://localhost:3000/cb"}, }, path: client.DynClientsHandlerPath, statusCode: http.StatusBadRequest, @@ -349,13 +420,19 @@ func TestHandler(t *testing.T) { }, } { t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { + var ts *httptest.Server + if strings.HasPrefix(tc.path, client.DynClientsHandlerPath) { + ts = publicTs + } else { + ts = adminTs + } body, res := makeJSON(t, ts, "POST", tc.path, tc.payload) require.Equal(t, tc.statusCode, res.StatusCode, body) exclude := []string{"updated_at", "created_at", "registration_access_token"} if tc.path == client.DynClientsHandlerPath { exclude = append(exclude, "client_id", "client_secret", "registration_client_uri") } - if tc.payload.LegacyClientID == "" { + if tc.payload.ID == "" { exclude = append(exclude, "client_id", 
"registration_client_uri") assert.NotEqual(t, uuid.Nil.String(), gjson.Get(body, "client_id").String(), body) } @@ -364,73 +441,91 @@ func TestHandler(t *testing.T) { assert.NotEmpty(t, gjson.Get(body, key).String(), "%s in %s", key, body) } } - snapshotx.SnapshotTExcept(t, json.RawMessage(body), exclude) + snapshotx.SnapshotT(t, json.RawMessage(body), snapshotx.ExceptPaths(exclude...)) }) } }) t.Run("case=fetching non-existing client", func(t *testing.T) { for _, path := range []string{ - client.DynClientsHandlerPath + "/foo", - client.ClientsHandlerPath + "/foo", + urlx.MustJoin(client.DynClientsHandlerPath, "foo"), + urlx.MustJoin(client.ClientsHandlerPath, "foo"), } { t.Run("path="+path, func(t *testing.T) { + var ts *httptest.Server + if strings.HasPrefix(path, client.DynClientsHandlerPath) { + ts = publicTs + } else { + ts = adminTs + } body, res := fetchWithBearerAuth(t, "GET", ts.URL+path, gjson.Get(expected, "registration_access_token").String(), nil) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) } }) t.Run("case=updating non-existing client", func(t *testing.T) { for _, path := range []string{ - client.DynClientsHandlerPath + "/foo", - client.ClientsHandlerPath + "/foo", + urlx.MustJoin(client.DynClientsHandlerPath, "foo"), + urlx.MustJoin(client.ClientsHandlerPath, "foo"), } { t.Run("path="+path, func(t *testing.T) { + var ts *httptest.Server + if strings.HasPrefix(path, client.DynClientsHandlerPath) { + ts = publicTs + } else { + ts = adminTs + } body, res := fetchWithBearerAuth(t, "PUT", ts.URL+path, "invalid", bytes.NewBufferString("{}")) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) } }) t.Run("case=delete non-existing client", func(t *testing.T) { for _, path := range []string{ - client.DynClientsHandlerPath + "/foo", - client.ClientsHandlerPath + "/foo", + urlx.MustJoin(client.DynClientsHandlerPath, "foo"), + urlx.MustJoin(client.ClientsHandlerPath, "foo"), } { + var ts *httptest.Server + if strings.HasPrefix(path, client.DynClientsHandlerPath) { + ts = publicTs + } else { + ts = adminTs + } t.Run("path="+path, func(t *testing.T) { body, res := fetchWithBearerAuth(t, "DELETE", ts.URL+path, "invalid", nil) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) } }) t.Run("case=patching non-existing client", func(t *testing.T) { - body, res := fetchWithBearerAuth(t, "PATCH", ts.URL+client.ClientsHandlerPath+"/foo", "", nil) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + body, res := fetchWithBearerAuth(t, "PATCH", urlx.MustJoin(adminTs.URL, client.ClientsHandlerPath, "foo"), "", nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) t.Run("case=fetching existing client", func(t *testing.T) { expected := createClient(t, &client.Client{ Secret: "rdetzfuzgihojuzgtfrdes", RedirectURIs: []string{"http://localhost:3000/cb"}, - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) id := gjson.Get(expected, "client_id").String() rat := gjson.Get(expected, "registration_access_token").String() t.Run("endpoint=admin", func(t *testing.T) { - body, res := fetch(t, ts.URL+client.ClientsHandlerPath+"/"+id) + body, res := fetch(t, urlx.MustJoin(adminTs.URL, client.ClientsHandlerPath, url.PathEscape(id))) assert.Equal(t, http.StatusOK, res.StatusCode) assert.Equal(t, id, gjson.Get(body, 
"client_id").String()) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), []string{"body.client_id", "body.created_at", "body.updated_at"}) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res), snapshotx.ExceptPaths("body.client_id", "body.created_at", "body.updated_at")) }) t.Run("endpoint=selfservice", func(t *testing.T) { - body, res := fetchWithBearerAuth(t, "GET", ts.URL+client.DynClientsHandlerPath+"/"+id, rat, nil) + body, res := fetchWithBearerAuth(t, "GET", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(id)), rat, nil) assert.Equal(t, http.StatusOK, res.StatusCode) assert.Equal(t, id, gjson.Get(body, "client_id").String()) assert.False(t, gjson.Get(body, "metadata").Bool()) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), []string{"body.client_id", "body.created_at", "body.updated_at"}) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res), snapshotx.ExceptPaths("body.client_id", "body.created_at", "body.updated_at")) }) }) @@ -439,7 +534,7 @@ func TestHandler(t *testing.T) { Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) id := gjson.Get(expected, "client_id").String() // Possible to update the secret @@ -449,9 +544,9 @@ func TestHandler(t *testing.T) { payload, err = sjson.Set(payload, "client_secret", "") require.NoError(t, err) - body, res := fetchWithBearerAuth(t, "PUT", ts.URL+client.DynClientsHandlerPath+"/"+id, gjson.Get(expected, "registration_access_token").String(), bytes.NewBufferString(payload)) + body, res := fetchWithBearerAuth(t, "PUT", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(id)), gjson.Get(expected, "registration_access_token").String(), bytes.NewBufferString(payload)) assert.Equal(t, http.StatusBadRequest, res.StatusCode, "%s\n%s", body, payload) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) t.Run("case=updating existing client", func(t *testing.T) { @@ -460,13 +555,13 @@ func TestHandler(t *testing.T) { Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) expectedID := getClientID(expected) payload, _ := sjson.Set(expected, "redirect_uris", []string{"http://localhost:3000/cb", "https://foobar.com"}) - body, res := makeJSON(t, ts, "PUT", client.ClientsHandlerPath+"/"+expectedID, json.RawMessage(payload)) + body, res := makeJSON(t, adminTs, "PUT", urlx.MustJoin(client.ClientsHandlerPath, url.PathEscape(expectedID)), json.RawMessage(payload)) assert.Equal(t, http.StatusOK, res.StatusCode) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), []string{"body.created_at", "body.updated_at", "body.client_id", "body.registration_client_uri", "body.registration_access_token"}) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res), snapshotx.ExceptPaths("body.created_at", "body.updated_at", "body.client_id", "body.registration_client_uri", "body.registration_access_token")) }) t.Run("endpoint=dynamic client registration", func(t *testing.T) { @@ -474,7 +569,7 @@ func TestHandler(t *testing.T) { Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, 
client.ClientsHandlerPath) expectedID := getClientID(expected) // Possible to update the secret @@ -483,16 +578,16 @@ func TestHandler(t *testing.T) { payload, _ = sjson.Delete(payload, "metadata") originalRAT := gjson.Get(expected, "registration_access_token").String() - body, res := fetchWithBearerAuth(t, "PUT", ts.URL+client.DynClientsHandlerPath+"/"+expectedID, originalRAT, bytes.NewBufferString(payload)) + body, res := fetchWithBearerAuth(t, "PUT", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedID)), originalRAT, bytes.NewBufferString(payload)) assert.Equal(t, http.StatusOK, res.StatusCode, "%s\n%s", body, payload) newToken := gjson.Get(body, "registration_access_token").String() assert.NotEmpty(t, newToken) require.NotEqual(t, originalRAT, newToken, "the new token should be different from the old token") - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), []string{"body.created_at", "body.updated_at", "body.registration_access_token", "body.client_id", "body.registration_client_uri"}) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res), snapshotx.ExceptPaths("body.created_at", "body.updated_at", "body.registration_access_token", "body.client_id", "body.registration_client_uri")) - _, res = fetchWithBearerAuth(t, "GET", ts.URL+client.DynClientsHandlerPath+"/"+expectedID, originalRAT, bytes.NewBufferString(payload)) + _, res = fetchWithBearerAuth(t, "GET", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedID)), originalRAT, bytes.NewBufferString(payload)) assert.Equal(t, http.StatusUnauthorized, res.StatusCode) - body, res = fetchWithBearerAuth(t, "GET", ts.URL+client.DynClientsHandlerPath+"/"+expectedID, newToken, bytes.NewBufferString(payload)) + body, res = fetchWithBearerAuth(t, "GET", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedID)), newToken, bytes.NewBufferString(payload)) assert.Equal(t, http.StatusOK, res.StatusCode) assert.Empty(t, gjson.Get(body, "registration_access_token").String()) }) @@ -501,7 +596,7 @@ func TestHandler(t *testing.T) { expected := createClient(t, &client.Client{ RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) expectedID := getClientID(expected) // Possible to update the secret @@ -509,19 +604,19 @@ func TestHandler(t *testing.T) { payload, _ = sjson.Set(payload, "secret", "") originalRAT := gjson.Get(expected, "registration_access_token").String() - body, res := fetchWithBearerAuth(t, "PUT", ts.URL+client.DynClientsHandlerPath+"/"+expectedID, originalRAT, bytes.NewBufferString(payload)) + body, res := fetchWithBearerAuth(t, "PUT", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedID)), originalRAT, bytes.NewBufferString(payload)) assert.Equal(t, http.StatusForbidden, res.StatusCode) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) }) t.Run("case=creating a client dynamically does not allow setting the secret", func(t *testing.T) { - body, res := makeJSON(t, ts, "POST", client.DynClientsHandlerPath, &client.Client{ + body, res := makeJSON(t, publicTs, "POST", client.DynClientsHandlerPath, &client.Client{ TokenEndpointAuthMethod: "client_secret_basic", Secret: "foobarbaz", }) require.Equal(t, http.StatusBadRequest, res.StatusCode, body) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), nil) + 
snapshotx.SnapshotT(t, newResponseSnapshot(body, res)) }) t.Run("case=update the lifespans of an OAuth2 client", func(t *testing.T) { @@ -531,12 +626,24 @@ func TestHandler(t *testing.T) { RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", } - body, res := makeJSON(t, ts, "POST", client.ClientsHandlerPath, expected) + body, res := makeJSON(t, adminTs, "POST", client.ClientsHandlerPath, expected) require.Equal(t, http.StatusCreated, res.StatusCode, body) - body, res = makeJSON(t, ts, "PUT", client.ClientsHandlerPath+"/"+gjson.Get(body, "client_id").String()+"/lifespans", testhelpers.TestLifespans) + body, res = makeJSON(t, adminTs, "PUT", urlx.MustJoin(client.ClientsHandlerPath, url.PathEscape(gjson.Get(body, "client_id").String()), "lifespans"), testhelpers.TestLifespans) require.Equal(t, http.StatusOK, res.StatusCode, body) - snapshotx.SnapshotTExcept(t, newResponseSnapshot(body, res), []string{"body.client_id", "body.created_at", "body.updated_at"}) + snapshotx.SnapshotT(t, newResponseSnapshot(body, res), snapshotx.ExceptPaths("body.client_id", "body.created_at", "body.updated_at")) + // Check metrics. + { + req, _ := http.NewRequest("GET", adminTs.URL+"/admin"+prometheusx.MetricsPrometheusPath, nil) + res, err := adminTs.Client().Do(req) + require.NoError(t, err) + require.EqualValues(t, http.StatusOK, res.StatusCode) + + respBody, err := io.ReadAll(res.Body) + require.NoError(t, err) + require.NotEmpty(t, respBody) + } + }) t.Run("case=delete existing client", func(t *testing.T) { @@ -544,10 +651,10 @@ func TestHandler(t *testing.T) { expected := createClient(t, &client.Client{ RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) expectedID := getClientID(expected) - _, res := makeJSON(t, ts, "DELETE", client.ClientsHandlerPath+"/"+expectedID, nil) + _, res := makeJSON(t, adminTs, "DELETE", urlx.MustJoin(client.ClientsHandlerPath, url.PathEscape(expectedID)), nil) assert.Equal(t, http.StatusNoContent, res.StatusCode) }) @@ -556,12 +663,13 @@ func TestHandler(t *testing.T) { Secret: "averylongsecret", RedirectURIs: []string{"http://localhost:3000/cb"}, TokenEndpointAuthMethod: "client_secret_basic", - }, ts, client.ClientsHandlerPath) + }, adminTs, client.ClientsHandlerPath) expectedID := getClientID(expected) originalRAT := gjson.Get(expected, "registration_access_token").String() - _, res := fetchWithBearerAuth(t, "DELETE", ts.URL+client.DynClientsHandlerPath+"/"+expectedID, originalRAT, nil) + _, res := fetchWithBearerAuth(t, "DELETE", urlx.MustJoin(publicTs.URL, client.DynClientsHandlerPath, url.PathEscape(expectedID)), originalRAT, nil) assert.Equal(t, http.StatusNoContent, res.StatusCode) + }) }) }) diff --git a/client/manager.go b/client/manager.go index ad8cca7df51..b616797fcfe 100644 --- a/client/manager.go +++ b/client/manager.go @@ -6,36 +6,26 @@ package client import ( "context" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" ) // swagger:ignore type Filter struct { - // The maximum amount of clients to returned, upper bound is 500 clients. - // in: query - Limit int `json:"limit"` - - // The offset from where to start looking. - // in: query - Offset int `json:"offset"` - - // The name of the clients to filter by. 
- // in: query - Name string `json:"client_name"` - - // The owner of the clients to filter by. - // in: query - Owner string `json:"owner"` + PageOpts []keysetpagination.Option + Name string + Owner string + IDs []string } type Manager interface { Storage - Authenticate(ctx context.Context, id string, secret []byte) (*Client, error) + AuthenticateClient(ctx context.Context, id string, secret []byte) (*Client, error) } type Storage interface { - GetClient(ctx context.Context, id string) (fosite.Client, error) + fosite.ClientManager CreateClient(ctx context.Context, c *Client) error @@ -43,9 +33,11 @@ type Storage interface { DeleteClient(ctx context.Context, id string) error - GetClients(ctx context.Context, filters Filter) ([]Client, error) - - CountClients(ctx context.Context) (int, error) + GetClients(ctx context.Context, filters Filter) ([]Client, *keysetpagination.Paginator, error) GetConcreteClient(ctx context.Context, id string) (*Client, error) } + +type ManagerProvider interface { + ClientManager() Manager +} diff --git a/client/manager_test_helpers.go b/client/manager_test_helpers.go index c3394ecf2d0..88c7b2cea51 100644 --- a/client/manager_test_helpers.go +++ b/client/manager_test_helpers.go @@ -10,25 +10,24 @@ import ( "testing" "time" - "github.com/gobuffalo/pop/v6" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/x/assertx" - "github.com/ory/x/contextx" - "github.com/ory/x/sqlcon" - - "github.com/bxcodec/faker/v3" + "github.com/go-faker/faker/v4" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/fosite" + "github.com/ory/x/uuidx" - testhelpersuuid "github.com/ory/hydra/internal/testhelpers/uuid" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/fosite" + testhelpersuuid "github.com/ory/hydra/v2/internal/testhelpers/uuid" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/assertx" + "github.com/ory/x/contextx" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" + "github.com/ory/x/sqlcon" ) -func TestHelperClientAutoGenerateKey(k string, m Storage) func(t *testing.T) { +func TestHelperClientAutoGenerateKey(m Storage) func(t *testing.T) { return func(t *testing.T) { ctx := context.TODO() c := &Client{ @@ -36,35 +35,36 @@ func TestHelperClientAutoGenerateKey(k string, m Storage) func(t *testing.T) { RedirectURIs: []string{"http://redirect"}, TermsOfServiceURI: "foo", } - assert.NoError(t, m.CreateClient(ctx, c)) + require.NoError(t, m.CreateClient(ctx, c)) dbClient, err := m.GetClient(ctx, c.GetID()) - assert.NoError(t, err) + require.NoError(t, err) dbClientConcrete, ok := dbClient.(*Client) - assert.True(t, ok) - testhelpersuuid.AssertUUID(t, &dbClientConcrete.ID) + require.True(t, ok) + testhelpersuuid.AssertUUID(t, dbClientConcrete.ID) assert.NoError(t, m.DeleteClient(ctx, c.GetID())) } } -func TestHelperClientAuthenticate(k string, m Manager) func(t *testing.T) { +func TestHelperClientAuthenticate(m Manager) func(t *testing.T) { return func(t *testing.T) { ctx := context.TODO() require.NoError(t, m.CreateClient(ctx, &Client{ - LegacyClientID: "1234321", - Secret: "secret", - RedirectURIs: []string{"http://redirect"}, + ID: "1234321", + Secret: "secret", + 
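GetClients now returns a *keysetpagination.Paginator next to the result slice, and CountClients is gone entirely. A hedged sketch of draining every page through the new signature, assuming the paginator behaves as the tests below use it (WithSize, IsLast, ToOptions) and that the code sits in package client next to the Storage and Filter types from this diff:

package client

import (
	"context"

	keysetpagination "github.com/ory/x/pagination/keysetpagination_v2"
)

// allClients drains every page of the client list (sketch only).
func allClients(ctx context.Context, s Storage) ([]Client, error) {
	filter := Filter{PageOpts: []keysetpagination.Option{keysetpagination.WithSize(100)}}
	var out []Client
	for {
		page, next, err := s.GetClients(ctx, filter)
		if err != nil {
			return nil, err
		}
		out = append(out, page...)
		if next.IsLast() {
			return out, nil
		}
		// The returned paginator carries the cursor for the following page.
		filter.PageOpts = next.ToOptions()
	}
}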
RedirectURIs: []string{"http://redirect"}, })) - c, err := m.Authenticate(ctx, "1234321", []byte("secret1")) - require.NotNil(t, err) + c, err := m.AuthenticateClient(ctx, "1234321", []byte("secret1")) + require.Error(t, err) + require.Nil(t, c) - c, err = m.Authenticate(ctx, "1234321", []byte("secret")) + c, err = m.AuthenticateClient(ctx, "1234321", []byte("secret")) require.NoError(t, err) assert.Equal(t, "1234321", c.GetID()) } } -func TestHelperUpdateTwoClients(_ string, m Manager) func(t *testing.T) { +func TestHelperUpdateTwoClients(m Manager) func(t *testing.T) { return func(t *testing.T) { c1, c2 := &Client{Name: "test client 1"}, &Client{Name: "test client 2"} @@ -78,30 +78,27 @@ func TestHelperUpdateTwoClients(_ string, m Manager) func(t *testing.T) { } } -func testHelperUpdateClient(t *testing.T, ctx context.Context, network Storage, k string) { - d, err := network.GetClient(ctx, "1234") - assert.NoError(t, err) - err = network.UpdateClient(ctx, &Client{ - LegacyClientID: "2-1234", +func testHelperUpdateClient(t *testing.T, ctx context.Context, store Storage, toUpdate *Client) { + require.NoError(t, store.UpdateClient(ctx, &Client{ + ID: toUpdate.ID, Name: "name-new", Secret: "secret-new", RedirectURIs: []string{"http://redirect/new"}, TermsOfServiceURI: "bar", JSONWebKeys: new(x.JoseJSONWebKeySet), - }) - require.NoError(t, err) + })) - nc, err := network.GetConcreteClient(ctx, "2-1234") + nc, err := store.GetConcreteClient(ctx, toUpdate.ID) require.NoError(t, err) - if k != "http" { - // http always returns an empty secret - assert.NotEqual(t, d.GetHashedSecret(), nc.GetHashedSecret()) - } + require.NotZero(t, toUpdate.GetHashedSecret()) + require.NotZero(t, nc.GetHashedSecret()) + assert.NotEqual(t, toUpdate.GetHashedSecret(), nc.GetHashedSecret(), "ensure that the secret hash was updated") + assert.Equal(t, "bar", nc.TermsOfServiceURI) assert.Equal(t, "name-new", nc.Name) - assert.EqualValues(t, []string{"http://redirect/new"}, nc.GetRedirectURIs()) - assert.Zero(t, len(nc.Contacts)) + assert.Equal(t, []string{"http://redirect/new"}, nc.GetRedirectURIs()) + assert.Len(t, nc.Contacts, 0) } func TestHelperCreateGetUpdateDeleteClientNext(t *testing.T, m Storage, networks []uuid.UUID) { @@ -119,7 +116,7 @@ func TestHelperCreateGetUpdateDeleteClientNext(t *testing.T, m Storage, networks client.CreatedAt = time.Now().Truncate(time.Second).UTC() t.Run("lifecycle=does not exist", func(t *testing.T) { - _, err := m.GetClient(ctx, "1234") + _, err := m.GetClient(ctx, uuidx.NewV4().String()) require.Error(t, err) }) @@ -133,11 +130,7 @@ func TestHelperCreateGetUpdateDeleteClientNext(t *testing.T, m Storage, networks "updated_at", }) - n, err := m.CountClients(ctx) - assert.NoError(t, err) - assert.Equal(t, 1, n) - copy := client - require.Error(t, m.CreateClient(ctx, ©)) + require.ErrorIs(t, m.CreateClient(ctx, &client), sqlcon.ErrUniqueViolation) }) t.Run("lifecycle=update", func(t *testing.T) { @@ -166,7 +159,7 @@ func TestHelperCreateGetUpdateDeleteClientNext(t *testing.T, m Storage, networks for _, expected := range clients { c, err := m.GetClient(ctx, expected.GetID()) if check != original { - t.Run(fmt.Sprintf("case=must not find client %s", expected.ID), func(t *testing.T) { + t.Run(fmt.Sprintf("case=must not find client %s", expected.GetID()), func(t *testing.T) { require.ErrorIs(t, err, sqlcon.ErrNoRows) }) } else { @@ -191,25 +184,21 @@ func TestHelperCreateGetUpdateDeleteClientNext(t *testing.T, m Storage, networks _, err := m.GetClient(ctx, client.GetID()) 
assert.ErrorIs(t, err, sqlcon.ErrNoRows) - n, err := m.CountClients(ctx) - assert.NoError(t, err) - assert.Equal(t, 0, n) - assert.Error(t, m.DeleteClient(ctx, client.GetID())) + assert.ErrorIs(t, m.DeleteClient(ctx, client.GetID()), sqlcon.ErrNoRows) }) } }) } } -func TestHelperCreateGetUpdateDeleteClient(k string, connection *pop.Connection, t1 Storage, t2 Storage) func(t *testing.T) { +func TestHelperCreateGetUpdateDeleteClient(t1, t2 Storage) func(t *testing.T) { return func(t *testing.T) { ctx := context.Background() _, err := t1.GetClient(ctx, "1234") require.Error(t, err) t1c1 := &Client{ - ID: uuid.FromStringOrNil("96bfe52e-af88-4cba-ab00-ae7a8b082228"), - LegacyClientID: "1234", + ID: uuidx.NewV4().String(), Name: "name", Secret: "secret", RedirectURIs: []string{"http://redirect", "http://redirect1"}, @@ -242,29 +231,20 @@ func TestHelperCreateGetUpdateDeleteClient(k string, connection *pop.Connection, } require.NoError(t, t1.CreateClient(ctx, t1c1)) - { - t2c1 := *t1c1 - require.Error(t, connection.Create(&t2c1), "should not be able to create the same client in other manager/network; are they backed by the same database?") - t2c1.ID = uuid.Nil - require.NoError(t, t2.CreateClient(ctx, &t2c1), "we should be able to create a client with the same GetID() but different ID in other network") - } + assert.NotEmpty(t, t1c1.GetHashedSecret()) + + t2c1 := *t1c1 + require.NoError(t, t2.CreateClient(ctx, &t2c1), "we should be able to create a client with the same ID in other network") t2c3 := *t1c1 { - pk, _ := uuid.NewV4() - t2c3.ID = pk - t2c3.LegacyClientID = "t2c2-1234" + t2c3.ID = uuidx.NewV4().String() require.NoError(t, t2.CreateClient(ctx, &t2c3)) - require.Error(t, t2.CreateClient(ctx, &t2c3)) - } - assert.Equal(t, t1c1.GetID(), "1234") - if k != "http" { - assert.NotEmpty(t, t1c1.GetHashedSecret()) + require.ErrorIs(t, t2.CreateClient(ctx, &t2c3), sqlcon.ErrUniqueViolation) } c2Template := &Client{ - ID: uuid.FromStringOrNil("a6bfe52e-af88-4cba-ab00-ae7a8b082228"), - LegacyClientID: "2-1234", + ID: uuidx.NewV4().String(), Name: "name2", Secret: "secret", RedirectURIs: []string{"http://redirect"}, @@ -272,72 +252,106 @@ func TestHelperCreateGetUpdateDeleteClient(k string, connection *pop.Connection, SecretExpiresAt: 1, } assert.NoError(t, t1.CreateClient(ctx, c2Template)) - c2Template.ID = uuid.Nil assert.NoError(t, t2.CreateClient(ctx, c2Template)) - d, err := t1.GetClient(ctx, "1234") + d, err := t1.GetClient(ctx, t1c1.ID) require.NoError(t, err) cc := d.(*Client) - testhelpersuuid.AssertUUID(t, &cc.NID) - - compare(t, t1c1, d, k) - - ds, err := t1.GetClients(ctx, Filter{Limit: 100, Offset: 0}) - assert.NoError(t, err) - assert.Len(t, ds, 2) - assert.NotEqual(t, ds[0].GetID(), ds[1].GetID()) - assert.NotEqual(t, ds[0].GetID(), ds[1].GetID()) - // test if SecretExpiresAt was set properly - assert.Equal(t, ds[0].SecretExpiresAt, 0) - assert.Equal(t, ds[1].SecretExpiresAt, 1) - - ds, err = t1.GetClients(ctx, Filter{Limit: 1, Offset: 0}) - assert.NoError(t, err) - assert.Len(t, ds, 1) - - ds, err = t1.GetClients(ctx, Filter{Limit: 100, Offset: 100}) - assert.NoError(t, err) - - // get by name - ds, err = t1.GetClients(ctx, Filter{Limit: 100, Offset: 0, Name: "name"}) - assert.NoError(t, err) - assert.Len(t, ds, 1) - assert.Equal(t, ds[0].Name, "name") - - // get by name not exist - ds, err = t1.GetClients(ctx, Filter{Limit: 100, Offset: 0, Name: "bad name"}) - assert.NoError(t, err) - assert.Len(t, ds, 0) - - // get by owner - ds, err = t1.GetClients(ctx, Filter{Limit: 100, Offset: 
0, Owner: "aeneas"}) - assert.NoError(t, err) - assert.Len(t, ds, 1) - assert.Equal(t, ds[0].Owner, "aeneas") - - testHelperUpdateClient(t, ctx, t1, k) - testHelperUpdateClient(t, ctx, t2, k) - - err = t1.DeleteClient(ctx, "1234") - assert.NoError(t, err) - err = t1.DeleteClient(ctx, t2c3.GetID()) - assert.Error(t, err) - - _, err = t1.GetClient(ctx, "1234") - assert.NotNil(t, err) - - n, err := t1.CountClients(ctx) - assert.NoError(t, err) - assert.Equal(t, 1, n) + testhelpersuuid.AssertUUID(t, cc.NID) + + compare(t, t1c1, d) + + t.Run("list all", func(t *testing.T) { + cs, nextPage, err := t1.GetClients(ctx, Filter{}) + require.NoError(t, err) + require.Len(t, cs, 2) + assert.ElementsMatch(t, []string{t1c1.ID, c2Template.ID}, []string{cs[0].GetID(), cs[1].GetID()}) + // test if SecretExpiresAt was set properly + assert.ElementsMatch(t, []int{0, 1}, []int{cs[0].SecretExpiresAt, cs[1].SecretExpiresAt}) + assert.True(t, nextPage.IsLast()) + }) + + t.Run("pagination", func(t *testing.T) { + observedIDs := make([]string, 2) + + cs, nextPage, err := t1.GetClients(ctx, Filter{PageOpts: []keysetpagination.Option{keysetpagination.WithSize(1)}}) + require.NoError(t, err) + require.Len(t, cs, 1) + assert.False(t, nextPage.IsLast()) + observedIDs[0] = cs[0].ID + + cs, nextPage, err = t1.GetClients(ctx, Filter{PageOpts: nextPage.ToOptions()}) + require.NoError(t, err) + require.Len(t, cs, 1) + assert.True(t, nextPage.IsLast()) + observedIDs[1] = cs[0].ID + + assert.ElementsMatch(t, []string{t1c1.ID, c2Template.ID}, observedIDs) + }) + + t.Run("list by name", func(t *testing.T) { + cs, nextPage, err := t1.GetClients(ctx, Filter{Name: "name"}) + require.NoError(t, err) + require.Len(t, cs, 1) + assert.Equal(t, cs[0].Name, "name") + assert.True(t, nextPage.IsLast()) + }) + + t.Run("list by unknown name", func(t *testing.T) { + cs, nextPage, err := t1.GetClients(ctx, Filter{Name: "bad name"}) + require.NoError(t, err) + assert.Len(t, cs, 0) + assert.True(t, nextPage.IsLast()) + }) + + t.Run("list by owner", func(t *testing.T) { + cs, nextPage, err := t1.GetClients(ctx, Filter{Owner: "aeneas"}) + require.NoError(t, err) + require.Len(t, cs, 1) + assert.Equal(t, cs[0].Owner, "aeneas") + assert.True(t, nextPage.IsLast()) + }) + + t.Run("list by ids", func(t *testing.T) { + cs, nextPage, err := t1.GetClients(ctx, Filter{IDs: []string{t1c1.ID, c2Template.ID}}) + require.NoError(t, err) + require.Len(t, cs, 2) + assert.ElementsMatch(t, []string{t1c1.ID, c2Template.ID}, []string{cs[0].GetID(), cs[1].GetID()}) + assert.True(t, nextPage.IsLast()) + + cs, nextPage, err = t1.GetClients(ctx, Filter{IDs: []string{t1c1.ID}}) + require.NoError(t, err) + require.Len(t, cs, 1) + assert.Equal(t, t1c1.ID, cs[0].GetID()) + assert.True(t, nextPage.IsLast()) + + cs, nextPage, err = t1.GetClients(ctx, Filter{IDs: []string{c2Template.ID}}) + require.NoError(t, err) + require.Len(t, cs, 1) + assert.Equal(t, c2Template.ID, cs[0].GetID()) + assert.True(t, nextPage.IsLast()) + + cs, nextPage, err = t1.GetClients(ctx, Filter{IDs: []string{uuidx.NewV4().String()}}) + require.NoError(t, err) + require.Len(t, cs, 0) + assert.True(t, nextPage.IsLast()) + }) + + testHelperUpdateClient(t, ctx, t1, t1c1) + testHelperUpdateClient(t, ctx, t2, &t2c1) + + assert.ErrorIs(t, t1.DeleteClient(ctx, t2c3.ID), sqlcon.ErrNoRows) + + assert.NoError(t, t1.DeleteClient(ctx, t1c1.ID)) + _, err = t1.GetClient(ctx, t1c1.ID) + assert.ErrorIs(t, err, sqlcon.ErrNoRows) } } -func compare(t *testing.T, expected *Client, actual fosite.Client, k string) { +func 
compare(t *testing.T, expected *Client, actual fosite.Client) { assert.EqualValues(t, expected.GetID(), actual.GetID()) - if k != "http" { - assert.EqualValues(t, expected.GetHashedSecret(), actual.GetHashedSecret()) - } + assert.EqualValues(t, expected.GetHashedSecret(), actual.GetHashedSecret()) assert.EqualValues(t, expected.GetRedirectURIs(), actual.GetRedirectURIs()) assert.EqualValues(t, expected.GetGrantTypes(), actual.GetGrantTypes()) @@ -357,7 +371,7 @@ func compare(t *testing.T, expected *Client, actual fosite.Client, k string) { assert.EqualValues(t, expected.SectorIdentifierURI, actual.SectorIdentifierURI) assert.EqualValues(t, expected.UserinfoSignedResponseAlg, actual.UserinfoSignedResponseAlg) assert.EqualValues(t, expected.CreatedAt.UTC().Unix(), actual.CreatedAt.UTC().Unix()) - // these values are not the same because of https://github.com/gobuffalo/pop/issues/591 + // these values are not the same because of https://github.com/ory/pop/issues/591 //assert.EqualValues(t, expected.UpdatedAt.UTC().Unix(), actual.UpdatedAt.UTC().Unix(), "%s\n%s", expected.UpdatedAt.String(), actual.UpdatedAt.String()) assert.EqualValues(t, expected.FrontChannelLogoutURI, actual.FrontChannelLogoutURI) assert.EqualValues(t, expected.FrontChannelLogoutSessionRequired, actual.FrontChannelLogoutSessionRequired) diff --git a/client/registry.go b/client/registry.go index 707be5c9f7e..aa7b4abefe0 100644 --- a/client/registry.go +++ b/client/registry.go @@ -4,12 +4,14 @@ package client import ( - "github.com/ory/hydra/driver/config" + "github.com/ory/hydra/v2/driver/config" - "github.com/ory/fosite" - foauth2 "github.com/ory/fosite/handler/oauth2" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/fosite" + foauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + enigma "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" ) type InternalRegistry interface { @@ -21,7 +23,11 @@ type Registry interface { ClientValidator() *Validator ClientManager() Manager ClientHasher() fosite.Hasher - OpenIDJWTStrategy() jwk.JWTSigner - OAuth2HMACStrategy() *foauth2.HMACSHAStrategy + OpenIDJWTSigner() jwk.JWTSigner + OAuth2HMACStrategy() foauth2.CoreStrategy + OAuth2EnigmaStrategy() *enigma.HMACStrategy + rfc8628.DeviceRateLimitStrategyProvider + rfc8628.DeviceCodeStrategyProvider + rfc8628.UserCodeStrategyProvider config.Provider } diff --git a/client/sdk_test.go b/client/sdk_test.go index 1e0b5af28d7..547ce5242eb 100644 --- a/client/sdk_test.go +++ b/client/sdk_test.go @@ -5,60 +5,56 @@ package client_test import ( "context" - "encoding/json" - "io" + "net/http" "net/http/httptest" "strings" "testing" - "github.com/ory/x/assertx" - - "github.com/ory/x/ioutilx" - - "github.com/ory/x/snapshotx" - - "github.com/ory/x/uuidx" - "github.com/mohae/deepcopy" - - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - "github.com/ory/x/pointerx" - - "github.com/ory/hydra/driver/config" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/ory/hydra/internal" + goauth2 "golang.org/x/oauth2" + 
"golang.org/x/oauth2/clientcredentials" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/client" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/assertx" + "github.com/ory/x/configx" + "github.com/ory/x/ioutilx" + "github.com/ory/x/pointerx" + "github.com/ory/x/prometheusx" + "github.com/ory/x/uuidx" ) func createTestClient(prefix string) hydra.OAuth2Client { return hydra.OAuth2Client{ - ClientName: pointerx.String(prefix + "name"), - ClientSecret: pointerx.String(prefix + "secret"), - ClientUri: pointerx.String(prefix + "uri"), + ClientName: pointerx.Ptr(prefix + "name"), + ClientSecret: pointerx.Ptr(prefix + "secret"), + ClientUri: pointerx.Ptr("https://example.org/" + prefix + "uri"), Contacts: []string{prefix + "peter", prefix + "pan"}, GrantTypes: []string{prefix + "client_credentials", prefix + "authorize_code"}, - LogoUri: pointerx.String(prefix + "logo"), - Owner: pointerx.String(prefix + "an-owner"), - PolicyUri: pointerx.String(prefix + "policy-uri"), - Scope: pointerx.String(prefix + "foo bar baz"), - TosUri: pointerx.String(prefix + "tos-uri"), + LogoUri: pointerx.Ptr("https://example.org/" + prefix + "logo"), + Owner: pointerx.Ptr(prefix + "an-owner"), + PolicyUri: pointerx.Ptr("https://example.org/" + prefix + "policy-uri"), + Scope: pointerx.Ptr(prefix + "foo bar baz"), + TosUri: pointerx.Ptr("https://example.org/" + prefix + "tos"), ResponseTypes: []string{prefix + "id_token", prefix + "code"}, RedirectUris: []string{"https://" + prefix + "redirect-url", "https://" + prefix + "redirect-uri"}, - ClientSecretExpiresAt: pointerx.Int64(0), - TokenEndpointAuthMethod: pointerx.String("client_secret_basic"), - UserinfoSignedResponseAlg: pointerx.String("none"), - SubjectType: pointerx.String("public"), + ClientSecretExpiresAt: pointerx.Ptr[int64](0), + TokenEndpointAuthMethod: pointerx.Ptr("client_secret_basic"), + UserinfoSignedResponseAlg: pointerx.Ptr("none"), + SubjectType: pointerx.Ptr("public"), Metadata: map[string]interface{}{"foo": "bar"}, // because these values are not nullable in the SQL schema, we have to set them not nil AllowedCorsOrigins: []string{}, Audience: []string{}, - Jwks: map[string]interface{}{}, + Jwks: &hydra.JsonWebKeySet{}, + SkipConsent: pointerx.Ptr(false), } } @@ -66,28 +62,38 @@ var defaultIgnoreFields = []string{"client_id", "registration_access_token", "re func TestClientSDK(t *testing.T) { ctx := context.Background() - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(ctx, config.KeySubjectTypesSupported, []string{"public"}) - conf.MustSet(ctx, config.KeyDefaultClientScope, []string{"foo", "bar"}) - conf.MustSet(ctx, config.KeyPublicAllowDynamicRegistration, true) - r := internal.NewRegistryMemory(t, conf, &contextx.Static{C: conf.Source(ctx)}) - - routerAdmin := x.NewRouterAdmin(conf.AdminURL) - routerPublic := x.NewRouterPublic() - handler := client.NewHandler(r) - handler.SetRoutes(routerAdmin, routerPublic) + r := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeySubjectTypesSupported: []string{"public"}, + config.KeyDefaultClientScope: []string{"foo", "bar"}, + 
config.KeyPublicAllowDynamicRegistration: true, + }))) + + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + routerAdmin := x.NewRouterAdmin(metrics) + routerPublic := x.NewRouterPublic(metrics) + clHandler := client.NewHandler(r) + clHandler.SetPublicRoutes(routerPublic) + clHandler.SetAdminRoutes(routerAdmin) + o2Handler := oauth2.NewHandler(r) + o2Handler.SetPublicRoutes(routerPublic, func(h http.Handler) http.Handler { return h }) + o2Handler.SetAdminRoutes(routerAdmin) + server := httptest.NewServer(routerAdmin) - conf.MustSet(ctx, config.KeyAdminURL, server.URL) + t.Cleanup(server.Close) + publicServer := httptest.NewServer(routerPublic) + t.Cleanup(publicServer.Close) + r.Config().MustSet(ctx, config.KeyAdminURL, server.URL) + r.Config().MustSet(ctx, config.KeyOAuth2TokenURL, publicServer.URL+"/oauth2/token") c := hydra.NewAPIClient(hydra.NewConfiguration()) c.GetConfig().Servers = hydra.ServerConfigurations{{URL: server.URL}} t.Run("case=client default scopes are set", func(t *testing.T) { - result, _, err := c.OAuth2Api.CreateOAuth2Client(ctx).OAuth2Client(hydra.OAuth2Client{}).Execute() + result, _, err := c.OAuth2API.CreateOAuth2Client(ctx).OAuth2Client(hydra.OAuth2Client{}).Execute() require.NoError(t, err) - assert.EqualValues(t, conf.DefaultClientScope(ctx), strings.Split(*result.Scope, " ")) + assert.EqualValues(t, r.Config().DefaultClientScope(ctx), strings.Split(*result.Scope, " ")) - _, err = c.OAuth2Api.DeleteOAuth2Client(ctx, *result.ClientId).Execute() + _, err = c.OAuth2API.DeleteOAuth2Client(ctx, *result.ClientId).Execute() require.NoError(t, err) }) @@ -98,12 +104,15 @@ func TestClientSDK(t *testing.T) { // createClient.SecretExpiresAt = 10 // returned client is correct on Create - result, _, err := c.OAuth2Api.CreateOAuth2Client(ctx).OAuth2Client(createClient).Execute() - require.NoError(t, err) + result, res, err := c.OAuth2API.CreateOAuth2Client(ctx).OAuth2Client(createClient).Execute() + if !assert.NoError(t, err) { + t.Fatalf("error: %s", ioutilx.MustReadAll(res.Body)) + } assert.NotEmpty(t, result.UpdatedAt) assert.NotEmpty(t, result.CreatedAt) assert.NotEmpty(t, result.RegistrationAccessToken) assert.NotEmpty(t, result.RegistrationClientUri) + assert.NotEmpty(t, *result.TosUri) assert.NotEmpty(t, result.ClientId) createClient.ClientId = result.ClientId @@ -111,32 +120,32 @@ func TestClientSDK(t *testing.T) { assert.EqualValues(t, "bar", result.Metadata.(map[string]interface{})["foo"]) // secret is not returned on GetOAuth2Client - compareClient.ClientSecret = x.ToPointer("") - gresult, _, err := c.OAuth2Api.GetOAuth2Client(context.Background(), *createClient.ClientId).Execute() + compareClient.ClientSecret = pointerx.Ptr("") + gresult, _, err := c.OAuth2API.GetOAuth2Client(context.Background(), *createClient.ClientId).Execute() require.NoError(t, err) assertx.EqualAsJSONExcept(t, compareClient, gresult, append(defaultIgnoreFields, "client_secret")) // get client will return The request could not be authorized - gresult, _, err = c.OAuth2Api.GetOAuth2Client(context.Background(), "unknown").Execute() + gresult, _, err = c.OAuth2API.GetOAuth2Client(context.Background(), "unknown").Execute() require.Error(t, err) assert.Empty(t, gresult) assert.True(t, strings.Contains(err.Error(), "404"), err.Error()) // listing clients returns the only added one - results, _, err := c.OAuth2Api.ListOAuth2Clients(context.Background()).PageSize(100).Execute() + results, _, err := 
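The SDK test talks to the admin API through the generated client, note the OAuth2Api to OAuth2API rename throughout. A minimal standalone sketch of creating a client with that SDK, using only calls that appear in the test; the admin URL is a placeholder.

package main

import (
	"context"
	"fmt"

	hydra "github.com/ory/hydra-client-go/v2"
	"github.com/ory/x/pointerx"
)

func main() {
	// Point the generated SDK at the admin API (placeholder URL).
	c := hydra.NewAPIClient(hydra.NewConfiguration())
	c.GetConfig().Servers = hydra.ServerConfigurations{{URL: "http://127.0.0.1:4445"}}

	created, _, err := c.OAuth2API.CreateOAuth2Client(context.Background()).
		OAuth2Client(hydra.OAuth2Client{ClientName: pointerx.Ptr("example")}).
		Execute()
	if err != nil {
		fmt.Println("create failed:", err)
		return
	}
	fmt.Println("created client", pointerx.Deref(created.ClientId))
}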
c.OAuth2API.ListOAuth2Clients(context.Background()).PageSize(100).Execute() require.NoError(t, err) assert.Len(t, results, 1) assertx.EqualAsJSONExcept(t, compareClient, results[0], append(defaultIgnoreFields, "client_secret")) // SecretExpiresAt gets overwritten with 0 on Update compareClient.ClientSecret = createClient.ClientSecret - uresult, _, err := c.OAuth2Api.SetOAuth2Client(context.Background(), *createClient.ClientId).OAuth2Client(createClient).Execute() + uresult, _, err := c.OAuth2API.SetOAuth2Client(context.Background(), *createClient.ClientId).OAuth2Client(createClient).Execute() require.NoError(t, err) assertx.EqualAsJSONExcept(t, compareClient, uresult, append(defaultIgnoreFields, "client_secret")) // create another client updateClient := createTestClient("foo") - uresult, _, err = c.OAuth2Api.SetOAuth2Client(context.Background(), *createClient.ClientId).OAuth2Client(updateClient).Execute() + uresult, _, err = c.OAuth2API.SetOAuth2Client(context.Background(), *createClient.ClientId).OAuth2Client(updateClient).Execute() require.NoError(t, err) assert.NotEqual(t, updateClient.ClientId, uresult.ClientId) updateClient.ClientId = uresult.ClientId @@ -144,39 +153,39 @@ func TestClientSDK(t *testing.T) { // again, test if secret is not returned on Get compareClient = updateClient - compareClient.ClientSecret = x.ToPointer("") - gresult, _, err = c.OAuth2Api.GetOAuth2Client(context.Background(), *updateClient.ClientId).Execute() + compareClient.ClientSecret = pointerx.Ptr("") + gresult, _, err = c.OAuth2API.GetOAuth2Client(context.Background(), *updateClient.ClientId).Execute() require.NoError(t, err) assertx.EqualAsJSONExcept(t, compareClient, gresult, append(defaultIgnoreFields, "client_secret")) // client can not be found after being deleted - _, err = c.OAuth2Api.DeleteOAuth2Client(context.Background(), *updateClient.ClientId).Execute() + _, err = c.OAuth2API.DeleteOAuth2Client(context.Background(), *updateClient.ClientId).Execute() require.NoError(t, err) - _, _, err = c.OAuth2Api.GetOAuth2Client(context.Background(), *updateClient.ClientId).Execute() + _, _, err = c.OAuth2API.GetOAuth2Client(context.Background(), *updateClient.ClientId).Execute() require.Error(t, err) }) t.Run("case=public client is transmitted without secret", func(t *testing.T) { - result, _, err := c.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(hydra.OAuth2Client{ - TokenEndpointAuthMethod: x.ToPointer("none"), + result, _, err := c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(hydra.OAuth2Client{ + TokenEndpointAuthMethod: pointerx.Ptr("none"), }).Execute() require.NoError(t, err) - assert.Equal(t, "", x.FromPointer[string](result.ClientSecret)) + assert.Equal(t, "", pointerx.Deref(result.ClientSecret)) - result, _, err = c.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(createTestClient("")).Execute() + result, _, err = c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(createTestClient("")).Execute() require.NoError(t, err) - assert.Equal(t, "secret", x.FromPointer[string](result.ClientSecret)) + assert.Equal(t, "secret", pointerx.Deref(result.ClientSecret)) }) - t.Run("case=id can not be set", func(t *testing.T) { - _, res, err := c.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(hydra.OAuth2Client{ClientId: x.ToPointer(uuidx.NewV4().String())}).Execute() - require.Error(t, err) - body, err := io.ReadAll(res.Body) + t.Run("case=id can be set", func(t *testing.T) { + id := uuidx.NewV4().String() + result, _, err := 
c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(hydra.OAuth2Client{ClientId: pointerx.Ptr(id)}).Execute() require.NoError(t, err) - snapshotx.SnapshotT(t, json.RawMessage(body)) + + assert.Equal(t, id, pointerx.Deref(result.ClientId)) }) t.Run("case=patch client legally", func(t *testing.T) { @@ -184,15 +193,15 @@ func TestClientSDK(t *testing.T) { path := "/redirect_uris/-" value := "http://foo.bar" - client := createTestClient("") - created, _, err := c.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(client).Execute() + cl := createTestClient("") + created, _, err := c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(cl).Execute() require.NoError(t, err) - client.ClientId = created.ClientId + cl.ClientId = created.ClientId - expected := deepcopy.Copy(client).(hydra.OAuth2Client) + expected := deepcopy.Copy(cl).(hydra.OAuth2Client) expected.RedirectUris = append(expected.RedirectUris, value) - result, _, err := c.OAuth2Api.PatchOAuth2Client(context.Background(), *client.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() + result, _, err := c.OAuth2API.PatchOAuth2Client(context.Background(), *cl.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() require.NoError(t, err) expected.CreatedAt = result.CreatedAt expected.UpdatedAt = result.UpdatedAt @@ -206,31 +215,69 @@ func TestClientSDK(t *testing.T) { path := "/id" value := "foo" - client := createTestClient("") - created, res, err := c.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(client).Execute() + cl := createTestClient("") + created, res, err := c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(cl).Execute() require.NoError(t, err, "%s", ioutilx.MustReadAll(res.Body)) - client.ClientId = created.ClientId + cl.ClientId = created.ClientId - _, _, err = c.OAuth2Api.PatchOAuth2Client(context.Background(), *client.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() + _, _, err = c.OAuth2API.PatchOAuth2Client(context.Background(), *cl.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() require.Error(t, err) }) t.Run("case=patch should not alter secret if not requested", func(t *testing.T) { - op := "replace" - path := "/client_uri" - value := "http://foo.bar" + created, _, err := c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(createTestClient("")).Execute() + require.NoError(t, err) + require.Equal(t, "secret", *created.ClientSecret) + + cc := clientcredentials.Config{ + ClientID: *created.ClientId, + ClientSecret: "secret", + TokenURL: r.Config().OAuth2TokenURL(t.Context()).String(), + AuthStyle: goauth2.AuthStyleInHeader, + } + token, err := cc.Token(t.Context()) + require.NoError(t, err) + require.NotZero(t, token.AccessToken) - client := createTestClient("") - created, _, err := c.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(client).Execute() + ignoreFields := []string{"registration_access_token", "registration_client_uri", "updated_at"} + + patchedURI, _, err := c.OAuth2API.PatchOAuth2Client(context.Background(), *created.ClientId).JsonPatch([]hydra.JsonPatch{{Op: "replace", Path: "/client_uri", Value: "http://foo.bar"}}).Execute() require.NoError(t, err) - client.ClientId = created.ClientId + require.Nil(t, patchedURI.ClientSecret, "client secret should not be returned in the response if it wasn't changed") + assertx.EqualAsJSONExcept(t, created, patchedURI, append(ignoreFields, 
"client_uri", "client_secret"), "client unchanged except client_uri; client_secret should not be returned") - result1, _, err := c.OAuth2Api.PatchOAuth2Client(context.Background(), *client.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() + token2, err := cc.Token(t.Context()) require.NoError(t, err) - result2, _, err := c.OAuth2Api.PatchOAuth2Client(context.Background(), *client.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() + require.NotZero(t, token2.AccessToken) + require.NotEqual(t, token.AccessToken, token2.AccessToken, "Got a new token after patching, with unchanged secret") + + patchedSecret, _, err := c.OAuth2API.PatchOAuth2Client(context.Background(), *created.ClientId).JsonPatch([]hydra.JsonPatch{{Op: "replace", Path: "/client_secret", Value: "newsecret"}}).Execute() require.NoError(t, err) + require.Equal(t, "newsecret", *patchedSecret.ClientSecret, "client secret should be returned if it was changed") + assertx.EqualAsJSONExcept(t, patchedURI, patchedSecret, append(ignoreFields, "client_secret"), "client unchanged except secret") - // secret hashes shouldn't change between these PUT calls - require.Equal(t, result1.ClientSecret, result2.ClientSecret) + _, err = cc.Token(t.Context()) + require.ErrorContains(t, err, "Client authentication failed", "should not be able to get a token with the old secret") + + cc.ClientSecret = "newsecret" + token3, err := cc.Token(t.Context()) + require.NoError(t, err, "should be able to get a token with the new secret") + require.NotZero(t, token3.AccessToken, "Got a new token after patching with new secret") + }) + + t.Run("case=patch client that has JSONWebKeysURI", func(t *testing.T) { + op := "replace" + path := "/client_name" + value := "test" + + cl := createTestClient("") + cl.SetJwksUri("https://example.org/.well-known/jwks.json") + created, _, err := c.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(cl).Execute() + require.NoError(t, err) + cl.ClientId = created.ClientId + + result, _, err := c.OAuth2API.PatchOAuth2Client(context.Background(), *cl.ClientId).JsonPatch([]hydra.JsonPatch{{Op: op, Path: path, Value: value}}).Execute() + require.NoError(t, err) + require.Equal(t, value, pointerx.Deref(result.ClientName)) }) } diff --git a/client/validator.go b/client/validator.go index a2d032cec1c..168461c93f5 100644 --- a/client/validator.go +++ b/client/validator.go @@ -7,31 +7,35 @@ import ( "context" "encoding/json" "fmt" + "io" "net/url" + "slices" "strings" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/x" - "github.com/ory/x/ipx" - - "github.com/ory/x/errorsx" + "github.com/hashicorp/go-retryablehttp" + "github.com/pkg/errors" - "github.com/ory/x/stringslice" + "github.com/ory/herodot" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/ipx" ) -var ( - supportedAuthTokenSigningAlgs = []string{ - "RS256", - "RS384", - "RS512", - "PS256", - "PS384", - "PS512", - "ES256", - "ES384", - "ES512", - } -) +var supportedAuthTokenSigningAlgs = []string{ + "RS256", + "RS384", + "RS512", + "PS256", + "PS384", + "PS512", + "ES256", + "ES384", + "ES512", +} + +func isSupportedAuthTokenSigningAlg(alg string) bool { + return slices.Contains(supportedAuthTokenSigningAlgs, alg) +} type validatorRegistry interface { x.HTTPClientProvider @@ -42,26 +46,32 @@ type 
Validator struct { r validatorRegistry } -func NewValidator(registry validatorRegistry) *Validator { - return &Validator{ - r: registry, - } +func NewValidator(r validatorRegistry) *Validator { + return &Validator{r: r} } func (v *Validator) Validate(ctx context.Context, c *Client) error { if c.TokenEndpointAuthMethod == "" { c.TokenEndpointAuthMethod = "client_secret_basic" } else if c.TokenEndpointAuthMethod == "private_key_jwt" { - if len(c.JSONWebKeysURI) == 0 && c.JSONWebKeys == nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHint("When token_endpoint_auth_method is 'private_key_jwt', either jwks or jwks_uri must be set.")) + if len(c.JSONWebKeysURI) == 0 && c.GetJSONWebKeys() == nil { + return errors.WithStack(ErrInvalidClientMetadata.WithHint("When token_endpoint_auth_method is 'private_key_jwt', either jwks or jwks_uri must be set.")) } if c.TokenEndpointAuthSigningAlgorithm != "" && !isSupportedAuthTokenSigningAlg(c.TokenEndpointAuthSigningAlgorithm) { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHint("Only RS256, RS384, RS512, PS256, PS384, PS512, ES256, ES384 and ES512 are supported as algorithms for private key authentication.")) + return errors.WithStack(ErrInvalidClientMetadata.WithHint("Only RS256, RS384, RS512, PS256, PS384, PS512, ES256, ES384 and ES512 are supported as algorithms for private key authentication.")) } } - if len(c.JSONWebKeysURI) > 0 && c.JSONWebKeys != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHint("Fields jwks and jwks_uri can not both be set, you must choose one.")) + if len(c.JSONWebKeysURI) > 0 && c.GetJSONWebKeys() != nil { + return errors.WithStack(ErrInvalidClientMetadata.WithHint("Fields jwks and jwks_uri can not both be set, you must choose one.")) + } + + if jsonWebKeys := c.GetJSONWebKeys(); jsonWebKeys != nil { + for _, k := range jsonWebKeys.Keys { + if !k.Valid() { + return errors.WithStack(ErrInvalidClientMetadata.WithHint("Invalid JSON web key in set.")) + } + } } if v.r.Config().ClientHTTPNoPrivateIPRanges() { @@ -75,12 +85,38 @@ func (v *Validator) Validate(ctx context.Context, c *Client) error { } if err := ipx.AreAllAssociatedIPsAllowed(values); err != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Client IP address is not allowed: %s", err)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Client IP address is not allowed: %s", err)) + } + } + + // TODO: For clients that support dynamic registration, validate that each of these URIs are part of the redirect_uris, as per the spec. + // The authorization server SHOULD check to see if the "logo_uri", "tos_uri", "client_uri", and "policy_uri" have the same host and scheme as the those defined in the array of "redirect_uris" and that all of these URIs resolve to valid web pages. + // https://datatracker.ietf.org/doc/html/rfc7591#section-5 + + // TODO: In addition, the logo_uri should be a valid image. + // The value of this field MUST point to a valid image file. 
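// --- Illustrative sketch, not part of the patch above ---
// The new per-key check added to Validate rejects any JSON Web Key for which
// go-jose's Valid() reports false, returning the "Invalid JSON web key in set."
// hint. A minimal, self-contained example (assuming github.com/go-jose/go-jose/v3)
// showing why a zero-value key trips that check:

package main

import (
	"fmt"

	jose "github.com/go-jose/go-jose/v3"
)

func main() {
	// A set containing one zero-value key: no key material, no key ID, no algorithm.
	set := jose.JSONWebKeySet{Keys: []jose.JSONWebKey{{}}}
	for _, k := range set.Keys {
		// Valid() reports false for a key without usable key material, which is
		// the condition the validator's new loop treats as invalid client metadata.
		fmt.Println("key valid:", k.Valid())
	}
}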
+ + for f, uri := range map[string]string{ + "tos_uri": c.TermsOfServiceURI, + "policy_uri": c.PolicyURI, + "logo_uri": c.LogoURI, + "client_uri": c.ClientURI, + } { + if uri == "" { + continue + } + u, err := url.ParseRequestURI(uri) + if err != nil { + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Field %s must be a valid URI.", f)) + } + + if u.Scheme != "https" && u.Scheme != "http" { + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("%s must use https:// or http:// as HTTP scheme for field %s.", uri, f)) } } if len(c.Secret) > 0 && len(c.Secret) < 6 { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHint("Field client_secret must contain a secret that is at least 6 characters long.")) + return errors.WithStack(ErrInvalidClientMetadata.WithHint("Field client_secret must contain a secret that is at least 6 characters long.")) } if len(c.Scope) == 0 { @@ -90,20 +126,20 @@ func (v *Validator) Validate(ctx context.Context, c *Client) error { for k, origin := range c.AllowedCORSOrigins { u, err := url.Parse(origin) if err != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s from allowed_cors_origins could not be parsed: %s", origin, err)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s from allowed_cors_origins could not be parsed: %s", origin, err)) } if u.Scheme != "https" && u.Scheme != "http" { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s must use https:// or http:// as HTTP scheme.", origin)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s must use https:// or http:// as HTTP scheme.", origin)) } if u.User != nil && len(u.User.String()) > 0 { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s has HTTP user and/or password set which is not allowed.", origin)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s has HTTP user and/or password set which is not allowed.", origin)) } u.Path = strings.TrimRight(u.Path, "/") if len(u.Path)+len(u.RawQuery)+len(u.Fragment) > 0 { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s must have an empty path, query, and fragment but one of the parts is not empty.", origin)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Origin URL %s must have an empty path, query, and fragment but one of the parts is not empty.", origin)) } c.AllowedCORSOrigins[k] = u.String() @@ -123,54 +159,57 @@ func (v *Validator) Validate(ctx context.Context, c *Client) error { } if c.UserinfoSignedResponseAlg != "none" && c.UserinfoSignedResponseAlg != "RS256" { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHint("Field userinfo_signed_response_alg can either be 'none' or 'RS256'.")) + return errors.WithStack(ErrInvalidClientMetadata.WithHint("Field userinfo_signed_response_alg can either be 'none' or 'RS256'.")) } - var redirs []url.URL - for _, r := range c.RedirectURIs { - u, err := url.ParseRequestURI(r) - if err != nil { - return errorsx.WithStack(ErrInvalidRedirectURI.WithHintf("Unable to parse redirect URL: %s", r)) - } - redirs = append(redirs, *u) - + redirs := make([]*url.URL, len(c.RedirectURIs)) + for i, r := range c.RedirectURIs { if strings.Contains(r, "#") { - return errorsx.WithStack(ErrInvalidRedirectURI.WithHint("Redirect URIs must not contain fragments (#).")) + return errors.WithStack(ErrInvalidRedirectURI.WithHint("Redirect URIs must not contain fragments (#).")) + } + var err error + redirs[i], err = 
url.ParseRequestURI(r) + if err != nil { + return errors.WithStack(ErrInvalidRedirectURI.WithHintf("Unable to parse redirect URL: %s", r)) } } if c.SubjectType != "" { - if !stringslice.Has(v.r.Config().SubjectTypesSupported(ctx), c.SubjectType) { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Subject type %s is not supported by server, only %v are allowed.", c.SubjectType, v.r.Config().SubjectTypesSupported(ctx))) + if !slices.Contains(v.r.Config().SubjectTypesSupported(ctx, c), c.SubjectType) { + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Subject type %s is not supported by server, only %v are allowed.", c.SubjectType, v.r.Config().SubjectTypesSupported(ctx, c))) } } else { - if stringslice.Has(v.r.Config().SubjectTypesSupported(ctx), "public") { + supportedTypes := v.r.Config().SubjectTypesSupported(ctx, c) + if slices.Contains(supportedTypes, "public") { c.SubjectType = "public" } else { - c.SubjectType = v.r.Config().SubjectTypesSupported(ctx)[0] + c.SubjectType = supportedTypes[0] } } for _, l := range c.PostLogoutRedirectURIs { u, err := url.ParseRequestURI(l) if err != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Unable to parse post_logout_redirect_uri: %s", l)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Unable to parse post_logout_redirect_uri: %s", l)) } - var found bool - for _, r := range redirs { - if r.Hostname() == u.Hostname() && - r.Port() == u.Port() && - r.Scheme == u.Scheme { - found = true - } + if !slices.ContainsFunc(redirs, func(r *url.URL) bool { + return r.Scheme == u.Scheme && r.Hostname() == u.Hostname() && r.Port() == u.Port() + }) { + return errors.WithStack(ErrInvalidClientMetadata. + WithHintf("post_logout_redirect_uri %q must match the domain, port, scheme of at least one of the registered redirect URIs but did not", l), + ) } + } - if !found { - return errorsx.WithStack(ErrInvalidClientMetadata. - WithHintf(`post_logout_redirect_uri "%s" must match the domain, port, scheme of at least one of the registered redirect URIs but did not'`, l), - ) + if c.AccessTokenStrategy != "" { + s, err := config.ToAccessTokenStrategyType(c.AccessTokenStrategy) + if err != nil { + return errors.WithStack(ErrInvalidClientMetadata. + WithHintf("invalid access token strategy: %v", err)) } + // Canonicalize, just in case. + c.AccessTokenStrategy = string(s) } return nil @@ -178,9 +217,16 @@ func (v *Validator) Validate(ctx context.Context, c *Client) error { func (v *Validator) ValidateDynamicRegistration(ctx context.Context, c *Client) error { if c.Metadata != nil { - return errorsx.WithStack(ErrInvalidClientMetadata. 
- WithHint(`metadata cannot be set for dynamic client registration'`), - ) + return errors.WithStack(ErrInvalidClientMetadata.WithHint(`"metadata" cannot be set for dynamic client registration`)) + } + if c.AccessTokenStrategy != "" { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf("It is not allowed to choose your own access token strategy.")) + } + if c.SkipConsent { + return errors.WithStack(ErrInvalidRequest.WithDescription(`"skip_consent" cannot be set for dynamic client registration`)) + } + if c.SkipLogoutConsent.Bool { + return errors.WithStack(ErrInvalidRequest.WithDescription(`"skip_logout_consent" cannot be set for dynamic client registration`)) } return v.Validate(ctx, c) @@ -189,42 +235,38 @@ func (v *Validator) ValidateDynamicRegistration(ctx context.Context, c *Client) func (v *Validator) ValidateSectorIdentifierURL(ctx context.Context, location string, redirectURIs []string) error { l, err := url.Parse(location) if err != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithHintf("Value of sector_identifier_uri could not be parsed because %s.", err)) + return errors.WithStack(ErrInvalidClientMetadata.WithHintf("Value of sector_identifier_uri could not be parsed because %s.", err)) } if l.Scheme != "https" { - return errorsx.WithStack(ErrInvalidClientMetadata.WithDebug("Value sector_identifier_uri must be an HTTPS URL but it is not.")) + return errors.WithStack(ErrInvalidClientMetadata.WithDebug("Value sector_identifier_uri must be an HTTPS URL but it is not.")) } - response, err := v.r.HTTPClient(ctx).Get(location) + req, err := retryablehttp.NewRequestWithContext(ctx, "GET", location, nil) + if err != nil { + return errors.WithStack(ErrInvalidClientMetadata.WithDebugf("Value sector_identifier_uri must be an HTTPS URL but it is not: %s", err.Error())) + } + response, err := v.r.HTTPClient(ctx).Do(req) if err != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithDebug(fmt.Sprintf("Unable to connect to URL set by sector_identifier_uri: %s", err))) + return errors.WithStack(ErrInvalidClientMetadata.WithDebug(fmt.Sprintf("Unable to connect to URL set by sector_identifier_uri: %s", err))) } - defer response.Body.Close() + defer response.Body.Close() //nolint:errcheck + response.Body = io.NopCloser(io.LimitReader(response.Body, 5<<20 /* 5 MiB */)) var urls []string if err := json.NewDecoder(response.Body).Decode(&urls); err != nil { - return errorsx.WithStack(ErrInvalidClientMetadata.WithDebug(fmt.Sprintf("Unable to decode values from sector_identifier_uri: %s", err))) + return errors.WithStack(ErrInvalidClientMetadata.WithDebug(fmt.Sprintf("Unable to decode values from sector_identifier_uri: %s", err))) } if len(urls) == 0 { - return errorsx.WithStack(ErrInvalidClientMetadata.WithDebug("Array from sector_identifier_uri contains no items")) + return errors.WithStack(ErrInvalidClientMetadata.WithDebug("Array from sector_identifier_uri contains no items")) } for _, r := range redirectURIs { - if !stringslice.Has(urls, r) { - return errorsx.WithStack(ErrInvalidClientMetadata.WithDebug(fmt.Sprintf("Redirect URL \"%s\" does not match values from sector_identifier_uri.", r))) + if !slices.Contains(urls, r) { + return errors.WithStack(ErrInvalidClientMetadata.WithDebug(fmt.Sprintf("Redirect URL \"%s\" does not match values from sector_identifier_uri.", r))) } } return nil } - -func isSupportedAuthTokenSigningAlg(alg string) bool { - for _, sAlg := range supportedAuthTokenSigningAlgs { - if alg == sAlg { - return true - } - } - return false -} diff --git 
a/client/validator_test.go b/client/validator_test.go index 98be4ab4f94..bd60978ef76 100644 --- a/client/validator_test.go +++ b/client/validator_test.go @@ -5,124 +5,209 @@ package client_test import ( "context" + "encoding/json" "fmt" "net/http" "net/http/httptest" + "strings" "testing" + "github.com/go-jose/go-jose/v3" "github.com/hashicorp/go-retryablehttp" - - "github.com/ory/hydra/driver" - "github.com/ory/x/httpx" - - "github.com/gofrs/uuid" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - jose "gopkg.in/square/go-jose.v2" - . "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" + . "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/httpx" ) func TestValidate(t *testing.T) { ctx := context.Background() - c := internal.NewConfigurationWithDefaults() - c.MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise", "public"}) - c.MustSet(ctx, config.KeyDefaultClientScope, []string{"openid"}) - reg := internal.NewRegistryMemory(t, c, &contextx.Static{C: c.Source(ctx)}) + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeySubjectTypesSupported: []string{"pairwise", "public"}, + config.KeyDefaultClientScope: []string{"openid"}, + }))) v := NewValidator(reg) - testCtx := context.TODO() + dec := json.NewDecoder(strings.NewReader(validJWKS)) + dec.DisallowUnknownFields() + var goodJWKS jose.JSONWebKeySet + require.NoError(t, dec.Decode(&goodJWKS)) for k, tc := range []struct { in *Client - check func(t *testing.T, c *Client) - expectErr bool - v func(t *testing.T) *Validator + check func(*testing.T, *Client) + assertErr func(t assert.TestingT, err error, msg ...interface{}) bool + v func(*testing.T) *Validator }{ { in: new(Client), check: func(t *testing.T, c *Client) { - assert.Equal(t, uuid.Nil.String(), c.GetID()) - assert.EqualValues(t, c.GetID(), c.ID.String()) - assert.Empty(t, c.LegacyClientID) + assert.Zero(t, c.GetID()) + assert.EqualValues(t, c.GetID(), c.ID) }, }, { - in: &Client{LegacyClientID: "foo"}, + in: &Client{ID: "foo"}, check: func(t *testing.T, c *Client) { - assert.EqualValues(t, c.GetID(), c.LegacyClientID) + assert.EqualValues(t, c.GetID(), c.ID) }, }, { - in: &Client{LegacyClientID: "foo"}, + in: &Client{ID: "foo"}, check: func(t *testing.T, c *Client) { - assert.EqualValues(t, c.GetID(), c.LegacyClientID) + assert.EqualValues(t, c.GetID(), c.ID) }, }, { - in: &Client{LegacyClientID: "foo", UserinfoSignedResponseAlg: "foo"}, - expectErr: true, + in: &Client{ID: "foo", UserinfoSignedResponseAlg: "foo"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", TokenEndpointAuthMethod: "private_key_jwt"}, - expectErr: true, + in: &Client{ID: "foo", TokenEndpointAuthMethod: "private_key_jwt"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: new(jose.JSONWebKeySet)}, JSONWebKeysURI: "asdf", 
TokenEndpointAuthMethod: "private_key_jwt"}, - expectErr: true, + in: &Client{ID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: new(jose.JSONWebKeySet)}, JSONWebKeysURI: "asdf", TokenEndpointAuthMethod: "private_key_jwt"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: new(jose.JSONWebKeySet)}, TokenEndpointAuthMethod: "private_key_jwt", TokenEndpointAuthSigningAlgorithm: "HS256"}, - expectErr: true, + in: &Client{ID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: new(jose.JSONWebKeySet)}, TokenEndpointAuthMethod: "private_key_jwt", TokenEndpointAuthSigningAlgorithm: "HS256"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", PostLogoutRedirectURIs: []string{"https://bar/"}, RedirectURIs: []string{"https://foo/"}}, - expectErr: true, + in: &Client{ID: "foo", TermsOfServiceURI: "file://i-am-a-file"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", PostLogoutRedirectURIs: []string{"http://foo/"}, RedirectURIs: []string{"https://foo/"}}, - expectErr: true, + in: &Client{ID: "foo", PolicyURI: "file://you-are-a-file"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", PostLogoutRedirectURIs: []string{"https://foo:1234/"}, RedirectURIs: []string{"https://foo/"}}, - expectErr: true, + in: &Client{ID: "foo", ClientURI: "file://i-am-a-file"}, + assertErr: assert.Error, + }, + { + in: &Client{ID: "foo", LogoURI: "file://you-are-a-file"}, + assertErr: assert.Error, }, { - in: &Client{LegacyClientID: "foo", PostLogoutRedirectURIs: []string{"https://foo/"}, RedirectURIs: []string{"https://foo/"}}, + in: &Client{ID: "foo", PolicyURI: "javascript://ory.com/?%0d\\u{61}\\u{6c}\\u{65}\\u{72}\\u{74}\\`\\u{31}\\`"}, + assertErr: assert.Error, + }, + { + in: &Client{ID: "foo", PolicyURI: "https://example.org/policy"}, + check: func(t *testing.T, c *Client) { + assert.Equal(t, "https://example.org/policy", c.PolicyURI) + }, + }, + { + in: &Client{ID: "foo", TermsOfServiceURI: "https://example.org/terms"}, + check: func(t *testing.T, c *Client) { + assert.Equal(t, "https://example.org/terms", c.TermsOfServiceURI) + }, + }, + { + in: &Client{ID: "foo", ClientURI: "https://example.org/client"}, + check: func(t *testing.T, c *Client) { + assert.Equal(t, "https://example.org/client", c.ClientURI) + }, + }, + { + in: &Client{ID: "foo", LogoURI: "https://example.org/logo.png"}, + check: func(t *testing.T, c *Client) { + assert.Equal(t, "https://example.org/logo.png", c.LogoURI) + }, + }, + { + in: &Client{ID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: new(jose.JSONWebKeySet)}, JSONWebKeysURI: "https://example.org/jwks.json"}, + assertErr: func(t assert.TestingT, err error, msg ...interface{}) bool { + e := new(fosite.RFC6749Error) + assert.ErrorAs(t, err, &e) + assert.Contains(t, e.HintField, "jwks and jwks_uri can not both be set") + return true + }, + }, + { + in: &Client{ID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: &goodJWKS}}, + check: func(t *testing.T, c *Client) { + assert.Len(t, c.JSONWebKeys.Keys, 2) + assert.Equal(t, c.JSONWebKeys.Keys[0].KeyID, "1") + assert.Equal(t, c.JSONWebKeys.Keys[1].KeyID, "2011-04-29") + }, + }, + { + in: &Client{ID: "foo", JSONWebKeys: &x.JoseJSONWebKeySet{JSONWebKeySet: &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{{}}}}}, + assertErr: func(t assert.TestingT, err error, msg ...interface{}) bool { + e := new(fosite.RFC6749Error) + assert.ErrorAs(t, err, &e) + assert.Contains(t, e.HintField, "Invalid JSON web 
key in set") + return true + }, + }, + { + in: &Client{ID: "foo", JSONWebKeys: new(x.JoseJSONWebKeySet), JSONWebKeysURI: "https://example.org/jwks.json"}, + check: func(t *testing.T, c *Client) { + assert.Nil(t, c.GetJSONWebKeys()) + }, + }, + { + in: &Client{ID: "foo", PostLogoutRedirectURIs: []string{"https://bar/"}, RedirectURIs: []string{"https://foo/"}}, + assertErr: assert.Error, + }, + { + in: &Client{ID: "foo", PostLogoutRedirectURIs: []string{"http://foo/"}, RedirectURIs: []string{"https://foo/"}}, + assertErr: assert.Error, + }, + { + in: &Client{ID: "foo", PostLogoutRedirectURIs: []string{"https://foo:1234/"}, RedirectURIs: []string{"https://foo/"}}, + assertErr: assert.Error, + }, + { + in: &Client{ID: "foo", PostLogoutRedirectURIs: []string{"https://foo/"}, RedirectURIs: []string{"https://foo/"}}, check: func(t *testing.T, c *Client) { assert.Equal(t, []string{"https://foo/"}, []string(c.PostLogoutRedirectURIs)) }, }, { - in: &Client{LegacyClientID: "foo"}, + in: &Client{ID: "foo", TermsOfServiceURI: "https://example.org"}, + assertErr: assert.NoError, + }, + { + in: &Client{ID: "foo", TermsOfServiceURI: "javascript:alert('XSS')"}, + assertErr: assert.Error, + }, + { + in: &Client{ID: "foo"}, check: func(t *testing.T, c *Client) { assert.Equal(t, "public", c.SubjectType) }, }, { v: func(t *testing.T) *Validator { - c.MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise"}) + reg.Config().MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise"}) return NewValidator(reg) }, - in: &Client{LegacyClientID: "foo"}, + in: &Client{ID: "foo"}, check: func(t *testing.T, c *Client) { assert.Equal(t, "pairwise", c.SubjectType) }, }, { - in: &Client{LegacyClientID: "foo", SubjectType: "pairwise"}, + in: &Client{ID: "foo", SubjectType: "pairwise"}, check: func(t *testing.T, c *Client) { assert.Equal(t, "pairwise", c.SubjectType) }, }, { - in: &Client{LegacyClientID: "foo", SubjectType: "foo"}, - expectErr: true, + in: &Client{ID: "foo", SubjectType: "foo"}, + assertErr: assert.Error, }, } { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { @@ -131,9 +216,9 @@ func TestValidate(t *testing.T) { return v } } - err := tc.v(t).Validate(testCtx, tc.in) - if tc.expectErr { - require.Error(t, err) + err := tc.v(t).Validate(ctx, tc.in) + if tc.assertErr != nil { + tc.assertErr(t, err) } else { require.NoError(t, err) tc.check(t, tc.in) @@ -143,25 +228,27 @@ func TestValidate(t *testing.T) { } type fakeHTTP struct { - driver.Registry + *driver.RegistrySQL c *http.Client } -func (f *fakeHTTP) HTTPClient(ctx context.Context, opts ...httpx.ResilientOptions) *retryablehttp.Client { - return httpx.NewResilientClient(httpx.ResilientClientWithClient(f.c)) +func (f *fakeHTTP) HTTPClient(_ context.Context, opts ...httpx.ResilientOptions) *retryablehttp.Client { + c := httpx.NewResilientClient(opts...) 
+ c.HTTPClient = f.c + return c } func TestValidateSectorIdentifierURL(t *testing.T) { - reg := internal.NewMockedRegistry(t, &contextx.Default{}) + reg := testhelpers.NewRegistryMemory(t) var payload string var h http.HandlerFunc = func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(payload)) + _, _ = w.Write([]byte(payload)) } ts := httptest.NewTLSServer(h) defer ts.Close() - v := NewValidator(&fakeHTTP{Registry: reg, c: ts.Client()}) + v := NewValidator(&fakeHTTP{RegistrySQL: reg, c: ts.Client()}) for k, tc := range []struct { p string r []string @@ -205,19 +292,45 @@ func TestValidateSectorIdentifierURL(t *testing.T) { } } +// from https://datatracker.ietf.org/doc/html/rfc7517#appendix-A.2 +const validJWKS = ` +{"keys": +[ + {"kty":"EC", + "crv":"P-256", + "x":"MKBCTNIcKUSDii11ySs3526iDZ8AiTo7Tu6KPAqv7D4", + "y":"4Etl6SRW2YiLUrN5vfvVHuhp7x8PxltmWWlbbM4IFyM", + "d":"870MB6gfuTJ4HtUnUvYMyJpr5eUZNP4Bk43bVdj3eAE", + "use":"enc", + "kid":"1"}, + + {"kty":"RSA", + "n":"0vx7agoebGcQSuuPiLJXZptN9nndrQmbXEps2aiAFbWhM78LhWx4cbbfAAtVT86zwu1RK7aPFFxuhDR1L6tSoc_BJECPebWKRXjBZCiFV4n3oknjhMstn64tZ_2W-5JsGY4Hc5n9yBXArwl93lqt7_RN5w6Cf0h4QyQ5v-65YGjQR0_FDW2QvzqY368QQMicAtaSqzs8KJZgnYb9c7d0zgdAZHzu6qMQvRL5hajrn1n91CbOpbISD08qNLyrdkt-bFTWhAI4vMQFh6WeZu0fM4lFd2NcRwr3XPksINHaQ-G_xBniIqbw0Ls1jF44-csFCur-kEgU8awapJzKnqDKgw", + "e":"AQAB", + "d":"X4cTteJY_gn4FYPsXB8rdXix5vwsg1FLN5E3EaG6RJoVH-HLLKD9M7dx5oo7GURknchnrRweUkC7hT5fJLM0WbFAKNLWY2vv7B6NqXSzUvxT0_YSfqijwp3RTzlBaCxWp4doFk5N2o8Gy_nHNKroADIkJ46pRUohsXywbReAdYaMwFs9tv8d_cPVY3i07a3t8MN6TNwm0dSawm9v47UiCl3Sk5ZiG7xojPLu4sbg1U2jx4IBTNBznbJSzFHK66jT8bgkuqsk0GjskDJk19Z4qwjwbsnn4j2WBii3RL-Us2lGVkY8fkFzme1z0HbIkfz0Y6mqnOYtqc0X4jfcKoAC8Q", + "p":"83i-7IvMGXoMXCskv73TKr8637FiO7Z27zv8oj6pbWUQyLPQBQxtPVnwD20R-60eTDmD2ujnMt5PoqMrm8RfmNhVWDtjjMmCMjOpSXicFHj7XOuVIYQyqVWlWEh6dN36GVZYk93N8Bc9vY41xy8B9RzzOGVQzXvNEvn7O0nVbfs", + "q":"3dfOR9cuYq-0S-mkFLzgItgMEfFzB2q3hWehMuG0oCuqnb3vobLyumqjVZQO1dIrdwgTnCdpYzBcOfW5r370AFXjiWft_NGEiovonizhKpo9VVS78TzFgxkIdrecRezsZ-1kYd_s1qDbxtkDEgfAITAG9LUnADun4vIcb6yelxk", + "dp":"G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0", + "dq":"s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk", + "qi":"GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU", + "alg":"RS256", + "kid":"2011-04-29"} +] +} +` + func TestValidateIPRanges(t *testing.T) { - ctx := context.Background() - c := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, c, &contextx.Static{C: c.Source(ctx)}) + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t) v := NewValidator(reg) - c.MustSet(ctx, config.ViperKeyClientHTTPNoPrivateIPRanges, true) + reg.Config().MustSet(t.Context(), config.KeyClientHTTPNoPrivateIPRanges, true) require.NoError(t, v.ValidateDynamicRegistration(ctx, &Client{})) require.ErrorContains(t, v.ValidateDynamicRegistration(ctx, &Client{JSONWebKeysURI: "https://localhost:1234"}), "invalid_client_metadata") require.ErrorContains(t, v.ValidateDynamicRegistration(ctx, &Client{BackChannelLogoutURI: "https://localhost:1234"}), "invalid_client_metadata") require.ErrorContains(t, v.ValidateDynamicRegistration(ctx, &Client{RequestURIs: []string{"https://google", 
"https://localhost:1234"}}), "invalid_client_metadata") - c.MustSet(ctx, config.ViperKeyClientHTTPNoPrivateIPRanges, false) + reg.Config().MustSet(t.Context(), config.KeyClientHTTPNoPrivateIPRanges, false) require.NoError(t, v.ValidateDynamicRegistration(ctx, &Client{})) require.NoError(t, v.ValidateDynamicRegistration(ctx, &Client{JSONWebKeysURI: "https://localhost:1234"})) require.NoError(t, v.ValidateDynamicRegistration(ctx, &Client{BackChannelLogoutURI: "https://localhost:1234"})) @@ -225,13 +338,11 @@ func TestValidateIPRanges(t *testing.T) { } func TestValidateDynamicRegistration(t *testing.T) { - ctx := context.Background() - c := internal.NewConfigurationWithDefaults() - c.MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise", "public"}) - c.MustSet(ctx, config.KeyDefaultClientScope, []string{"openid"}) - reg := internal.NewRegistryMemory(t, c, &contextx.Static{C: c.Source(ctx)}) + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeySubjectTypesSupported: []string{"pairwise", "public"}, + config.KeyDefaultClientScope: []string{"openid"}, + }))) - testCtx := context.TODO() v := NewValidator(reg) for k, tc := range []struct { in *Client @@ -241,39 +352,39 @@ func TestValidateDynamicRegistration(t *testing.T) { }{ { in: &Client{ - LegacyClientID: "foo", + ID: "foo", PostLogoutRedirectURIs: []string{"https://foo/"}, RedirectURIs: []string{"https://foo/"}, - Metadata: []byte("{\"access_token_ttl\":10}"), + Metadata: []byte(`{"access_token_ttl":10}`), }, expectErr: true, }, { in: &Client{ - LegacyClientID: "foo", + ID: "foo", PostLogoutRedirectURIs: []string{"https://foo/"}, RedirectURIs: []string{"https://foo/"}, - Metadata: []byte("{\"id_token_ttl\":10}"), + Metadata: []byte(`{"id_token_ttl":10}`), }, expectErr: true, }, { in: &Client{ - LegacyClientID: "foo", + ID: "foo", PostLogoutRedirectURIs: []string{"https://foo/"}, RedirectURIs: []string{"https://foo/"}, - Metadata: []byte("{\"anything\":10}"), + Metadata: []byte(`{"anything":10}`), }, expectErr: true, }, { in: &Client{ - LegacyClientID: "foo", + ID: "foo", PostLogoutRedirectURIs: []string{"https://foo/"}, RedirectURIs: []string{"https://foo/"}, }, check: func(t *testing.T, c *Client) { - assert.EqualValues(t, "foo", c.LegacyClientID) + assert.EqualValues(t, "foo", c.ID) }, }, } { @@ -283,7 +394,7 @@ func TestValidateDynamicRegistration(t *testing.T) { return v } } - err := tc.v(t).ValidateDynamicRegistration(testCtx, tc.in) + err := tc.v(t).ValidateDynamicRegistration(t.Context(), tc.in) if tc.expectErr { require.Error(t, err) } else { diff --git a/cmd/.snapshots/TestCreateClient-case=creates_successfully.json b/cmd/.snapshots/TestCreateClient-case=creates_successfully.json index 9281fa7d230..f20d01cd379 100644 --- a/cmd/.snapshots/TestCreateClient-case=creates_successfully.json +++ b/cmd/.snapshots/TestCreateClient-case=creates_successfully.json @@ -15,6 +15,8 @@ "code" ], "scope": "offline_access offline openid", + "skip_consent": false, + "skip_logout_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestCreateClient-case=supports_encryption.json b/cmd/.snapshots/TestCreateClient-case=supports_encryption.json index 4bef1689f7e..984cedb2f2c 100644 --- a/cmd/.snapshots/TestCreateClient-case=supports_encryption.json +++ b/cmd/.snapshots/TestCreateClient-case=supports_encryption.json @@ -21,6 +21,8 @@ "code" ], "scope": "offline_access offline openid", + "skip_consent": 
false, + "skip_logout_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestCreateClient-case=supports_setting_flags.json b/cmd/.snapshots/TestCreateClient-case=supports_setting_flags.json index 4bef1689f7e..984cedb2f2c 100644 --- a/cmd/.snapshots/TestCreateClient-case=supports_setting_flags.json +++ b/cmd/.snapshots/TestCreateClient-case=supports_setting_flags.json @@ -21,6 +21,8 @@ "code" ], "scope": "offline_access offline openid", + "skip_consent": false, + "skip_logout_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestGetClient-case=gets_client.json b/cmd/.snapshots/TestGetClient-case=gets_client.json index 07ec29f7642..5c482bdb99a 100644 --- a/cmd/.snapshots/TestGetClient-case=gets_client.json +++ b/cmd/.snapshots/TestGetClient-case=gets_client.json @@ -8,6 +8,7 @@ "owner": "", "policy_uri": "", "scope": "", + "skip_consent": false, "subject_type": "", "token_endpoint_auth_method": "client_secret_post", "tos_uri": "" diff --git a/cmd/.snapshots/TestGetClient-case=gets_multiple_clients.json b/cmd/.snapshots/TestGetClient-case=gets_multiple_clients.json index 971096952d4..1685a7ccfd8 100644 --- a/cmd/.snapshots/TestGetClient-case=gets_multiple_clients.json +++ b/cmd/.snapshots/TestGetClient-case=gets_multiple_clients.json @@ -15,6 +15,7 @@ "redirect_uris": [], "response_types": [], "scope": "", + "skip_consent": false, "subject_type": "", "token_endpoint_auth_method": "client_secret_post", "tos_uri": "" @@ -35,6 +36,7 @@ "redirect_uris": [], "response_types": [], "scope": "", + "skip_consent": false, "subject_type": "", "token_endpoint_auth_method": "client_secret_post", "tos_uri": "" diff --git a/cmd/.snapshots/TestImportClient-case=imports_clients_from_single_file.json b/cmd/.snapshots/TestImportClient-case=imports_clients_from_single_file.json index 432a5b8ee48..6ca11ca6230 100644 --- a/cmd/.snapshots/TestImportClient-case=imports_clients_from_single_file.json +++ b/cmd/.snapshots/TestImportClient-case=imports_clients_from_single_file.json @@ -11,6 +11,7 @@ "owner": "", "policy_uri": "", "scope": "foo", + "skip_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", @@ -28,6 +29,7 @@ "owner": "", "policy_uri": "", "scope": "bar", + "skip_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestImportClient-case=performs_appropriate_error_reporting.json b/cmd/.snapshots/TestImportClient-case=performs_appropriate_error_reporting.json index 432a5b8ee48..6ca11ca6230 100644 --- a/cmd/.snapshots/TestImportClient-case=performs_appropriate_error_reporting.json +++ b/cmd/.snapshots/TestImportClient-case=performs_appropriate_error_reporting.json @@ -11,6 +11,7 @@ "owner": "", "policy_uri": "", "scope": "foo", + "skip_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", @@ -28,6 +29,7 @@ "owner": "", "policy_uri": "", "scope": "bar", + "skip_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestUpdateClient-case=creates_successfully.json b/cmd/.snapshots/TestUpdateClient-case=creates_successfully.json index 368b9960eff..c62158c5d9d 100644 --- a/cmd/.snapshots/TestUpdateClient-case=creates_successfully.json +++ 
b/cmd/.snapshots/TestUpdateClient-case=creates_successfully.json @@ -15,6 +15,8 @@ "code" ], "scope": "offline_access offline openid", + "skip_consent": false, + "skip_logout_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestUpdateClient-case=supports_encryption.json b/cmd/.snapshots/TestUpdateClient-case=supports_encryption.json index 368b9960eff..c62158c5d9d 100644 --- a/cmd/.snapshots/TestUpdateClient-case=supports_encryption.json +++ b/cmd/.snapshots/TestUpdateClient-case=supports_encryption.json @@ -15,6 +15,8 @@ "code" ], "scope": "offline_access offline openid", + "skip_consent": false, + "skip_logout_consent": false, "subject_type": "public", "token_endpoint_auth_method": "client_secret_basic", "tos_uri": "", diff --git a/cmd/.snapshots/TestUpdateClient-case=updates_from_file-file=from_disk.json b/cmd/.snapshots/TestUpdateClient-case=updates_from_file-file=from_disk.json new file mode 100644 index 00000000000..e9a42532a2f --- /dev/null +++ b/cmd/.snapshots/TestUpdateClient-case=updates_from_file-file=from_disk.json @@ -0,0 +1,24 @@ +{ + "client_name": "updated through file from disk", + "client_secret_expires_at": 0, + "client_uri": "", + "grant_types": [ + "implicit" + ], + "jwks": {}, + "logo_uri": "", + "metadata": {}, + "owner": "", + "policy_uri": "", + "request_object_signing_alg": "RS256", + "response_types": [ + "code" + ], + "scope": "offline_access offline openid", + "skip_consent": false, + "skip_logout_consent": false, + "subject_type": "public", + "token_endpoint_auth_method": "client_secret_basic", + "tos_uri": "", + "userinfo_signed_response_alg": "none" +} diff --git a/cmd/.snapshots/TestUpdateClient-case=updates_from_file-file=stdin.json b/cmd/.snapshots/TestUpdateClient-case=updates_from_file-file=stdin.json new file mode 100644 index 00000000000..4491f0eed55 --- /dev/null +++ b/cmd/.snapshots/TestUpdateClient-case=updates_from_file-file=stdin.json @@ -0,0 +1,24 @@ +{ + "client_name": "updated through file stdin", + "client_secret_expires_at": 0, + "client_uri": "", + "grant_types": [ + "implicit" + ], + "jwks": {}, + "logo_uri": "", + "metadata": {}, + "owner": "", + "policy_uri": "", + "request_object_signing_alg": "RS256", + "response_types": [ + "code" + ], + "scope": "offline_access offline openid", + "skip_consent": false, + "skip_logout_consent": false, + "subject_type": "public", + "token_endpoint_auth_method": "client_secret_basic", + "tos_uri": "", + "userinfo_signed_response_alg": "none" +} diff --git a/cmd/cli/handler.go b/cmd/cli/handler.go index bc452ab15d6..69b332a73dd 100644 --- a/cmd/cli/handler.go +++ b/cmd/cli/handler.go @@ -4,9 +4,7 @@ package cli import ( - "github.com/ory/hydra/driver" - "github.com/ory/x/configx" - "github.com/ory/x/servicelocatorx" + "github.com/ory/hydra/v2/driver" ) type Handler struct { @@ -14,9 +12,9 @@ type Handler struct { Janitor *JanitorHandler } -func NewHandler(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *Handler { +func NewHandler(dOpts []driver.OptionsModifier) *Handler { return &Handler{ - Migration: newMigrateHandler(), - Janitor: NewJanitorHandler(slOpts, dOpts, cOpts), + Migration: newMigrateHandler(dOpts), + Janitor: newJanitorHandler(dOpts), } } diff --git a/cmd/cli/handler_janitor.go b/cmd/cli/handler_janitor.go index bec0401fe36..88a8b7f54de 100644 --- a/cmd/cli/handler_janitor.go +++ 
b/cmd/cli/handler_janitor.go @@ -6,22 +6,17 @@ package cli import ( "context" "fmt" + "io" "time" - "github.com/ory/x/servicelocatorx" - - "github.com/ory/hydra/persistence" - "github.com/pkg/errors" - - "github.com/ory/x/flagx" - "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/persistence" "github.com/ory/x/configx" - "github.com/ory/x/errorsx" + "github.com/ory/x/flagx" ) const ( @@ -39,25 +34,22 @@ const ( ) type JanitorHandler struct { - slOpts []servicelocatorx.Option - dOpts []driver.OptionsModifier - cOpts []configx.OptionModifier + dOpts []driver.OptionsModifier } -func NewJanitorHandler(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *JanitorHandler { +func newJanitorHandler(dOpts []driver.OptionsModifier) *JanitorHandler { return &JanitorHandler{ - slOpts: slOpts, - dOpts: dOpts, - cOpts: cOpts, + dOpts: dOpts, } } -func (_ *JanitorHandler) Args(cmd *cobra.Command, args []string) error { +func (*JanitorHandler) Args(cmd *cobra.Command, args []string) error { if len(args) == 0 && !flagx.MustGetBool(cmd, ReadFromEnv) && len(flagx.MustGetStringSlice(cmd, Config)) == 0 { fmt.Printf("%s\n", cmd.UsageString()) + //lint:ignore ST1005 formatted error string used in CLI output return fmt.Errorf("%s\n%s\n%s\n", "A DSN is required as a positional argument when not passing any of the following flags:", "- Using the environment variable with flag -e, --read-from-env", @@ -65,6 +57,7 @@ func (_ *JanitorHandler) Args(cmd *cobra.Command, args []string) error { } if !flagx.MustGetBool(cmd, OnlyTokens) && !flagx.MustGetBool(cmd, OnlyRequests) && !flagx.MustGetBool(cmd, OnlyGrants) { + //lint:ignore ST1005 formatted error string used in CLI output return fmt.Errorf("%s\n%s\n", cmd.UsageString(), "Janitor requires at least one of --tokens, --requests or --grants to be set") } @@ -72,10 +65,12 @@ func (_ *JanitorHandler) Args(cmd *cobra.Command, args []string) error { limit := flagx.MustGetInt(cmd, Limit) batchSize := flagx.MustGetInt(cmd, BatchSize) if limit <= 0 || batchSize <= 0 { + //lint:ignore ST1005 formatted error string used in CLI output return fmt.Errorf("%s\n%s\n", cmd.UsageString(), "Values for --limit and --batch-size should both be greater than 0") } if batchSize > limit { + //lint:ignore ST1005 formatted error string used in CLI output return fmt.Errorf("%s\n%s\n", cmd.UsageString(), "Value for --batch-size must not be greater than value for --limit") } @@ -84,12 +79,11 @@ func (_ *JanitorHandler) Args(cmd *cobra.Command, args []string) error { } func (j *JanitorHandler) RunE(cmd *cobra.Command, args []string) error { - return purge(cmd, args, servicelocatorx.NewOptions(j.slOpts...), j.dOpts) + return purge(cmd, args, j.dOpts) } -func purge(cmd *cobra.Command, args []string, sl *servicelocatorx.Options, dOpts []driver.OptionsModifier) error { +func purge(cmd *cobra.Command, args []string, dOpts []driver.OptionsModifier) error { ctx := cmd.Context() - var d driver.Registry co := []configx.OptionModifier{ configx.WithFlags(cmd.Flags()), @@ -121,15 +115,16 @@ func purge(cmd *cobra.Command, args []string, sl *servicelocatorx.Options, dOpts do := append(dOpts, driver.DisableValidation(), 
driver.DisablePreloading(), - driver.WithOptions(co...), + driver.WithConfigOptions(co...), ) - d, err := driver.New(ctx, sl, do) + d, err := driver.New(ctx, do...) if err != nil { return errors.Wrap(err, "Could not create driver") } if len(d.Config().DSN()) == 0 { + //lint:ignore ST1005 formatted error string used in CLI output return fmt.Errorf("%s\n%s\n%s\n", cmd.UsageString(), "When using flag -e, environment variable DSN must be set.", "When using flag -c, the dsn property should be set.") @@ -154,20 +149,20 @@ func purge(cmd *cobra.Command, args []string, sl *servicelocatorx.Options, dOpts routineFlags = append(routineFlags, OnlyGrants) } - return cleanupRun(cmd.Context(), notAfter, limit, batchSize, addRoutine(p, routineFlags...)...) + return cleanupRun(cmd.Context(), notAfter, limit, batchSize, addRoutine(cmd.OutOrStdout(), p, routineFlags...)...) } -func addRoutine(p persistence.Persister, names ...string) []cleanupRoutine { +func addRoutine(out io.Writer, p persistence.Persister, names ...string) []cleanupRoutine { var routines []cleanupRoutine for _, n := range names { switch n { case OnlyTokens: - routines = append(routines, cleanup(p.FlushInactiveAccessTokens, "access tokens")) - routines = append(routines, cleanup(p.FlushInactiveRefreshTokens, "refresh tokens")) + routines = append(routines, cleanup(out, p.FlushInactiveAccessTokens, "access tokens")) + routines = append(routines, cleanup(out, p.FlushInactiveRefreshTokens, "refresh tokens")) case OnlyRequests: - routines = append(routines, cleanup(p.FlushInactiveLoginConsentRequests, "login-consent requests")) + routines = append(routines, cleanup(out, p.FlushInactiveLoginConsentRequests, "login-consent requests")) case OnlyGrants: - routines = append(routines, cleanup(p.FlushInactiveGrants, "grants")) + routines = append(routines, cleanup(out, p.FlushInactiveGrants, "grants")) } } return routines @@ -175,12 +170,12 @@ func addRoutine(p persistence.Persister, names ...string) []cleanupRoutine { type cleanupRoutine func(ctx context.Context, notAfter time.Time, limit int, batchSize int) error -func cleanup(cr cleanupRoutine, routineName string) cleanupRoutine { +func cleanup(out io.Writer, cr cleanupRoutine, routineName string) cleanupRoutine { return func(ctx context.Context, notAfter time.Time, limit int, batchSize int) error { if err := cr(ctx, notAfter, limit, batchSize); err != nil { - return errors.Wrap(errorsx.WithStack(err), fmt.Sprintf("Could not cleanup inactive %s", routineName)) + return errors.Wrap(errors.WithStack(err), fmt.Sprintf("Could not cleanup inactive %s", routineName)) } - fmt.Printf("Successfully completed Janitor run on %s\n", routineName) + _, _ = fmt.Fprintf(out, "Successfully completed Janitor run on %s\n", routineName) return nil } } diff --git a/cmd/cli/handler_janitor_test.go b/cmd/cli/handler_janitor_test.go index c99440c23b1..a1d00a83347 100644 --- a/cmd/cli/handler_janitor_test.go +++ b/cmd/cli/handler_janitor_test.go @@ -9,32 +9,28 @@ import ( "testing" "time" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/spf13/cobra" "github.com/stretchr/testify/require" - "github.com/ory/hydra/cmd/cli" - "github.com/ory/hydra/internal/testhelpers" + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/internal/testhelpers" "github.com/ory/x/cmdx" ) func newJanitorCmd() *cobra.Command { - return cmd.NewRootCmd(nil, nil, nil) + return cmd.NewRootCmd() } 
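// --- Illustrative sketch, not part of the patch above ---
// The janitor's Args validation (handler_janitor.go above) and the
// TestJanitorHandler_Arguments cases further below enforce the same flag rules:
// --limit and --batch-size must both be positive, and --batch-size must not
// exceed --limit. A hypothetical stand-alone helper mirroring those checks:

package main

import "fmt"

// validateJanitorFlags is a hypothetical helper that mirrors the checks in
// JanitorHandler.Args; the messages match the CLI output asserted in the tests.
func validateJanitorFlags(limit, batchSize int) error {
	if limit <= 0 || batchSize <= 0 {
		return fmt.Errorf("Values for --limit and --batch-size should both be greater than 0")
	}
	if batchSize > limit {
		return fmt.Errorf("Value for --batch-size must not be greater than value for --limit")
	}
	return nil
}

func main() {
	fmt.Println(validateJanitorFlags(1000, 100)) // <nil>
	fmt.Println(validateJanitorFlags(100, 1000)) // batch size may not exceed limit
	fmt.Println(validateJanitorFlags(0, 100))    // both values must be greater than zero
}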
func TestJanitorHandler_PurgeTokenNotAfter(t *testing.T) { - ctx := context.Background() - testCycles := testhelpers.NewConsentJanitorTestHelper("").GetNotAfterTestCycles() + ctx := t.Context() - require.True(t, len(testCycles) > 0) - - for k, v := range testCycles { + for k, v := range testhelpers.NotAfterTestCycles { t.Run(fmt.Sprintf("case=%s", k), func(t *testing.T) { jt := testhelpers.NewConsentJanitorTestHelper(t.Name()) - reg, err := jt.GetRegistry(ctx, k) - require.NoError(t, err) + reg := jt.GetRegistry(t) // setup test t.Run("step=setup-access", jt.AccessTokenNotAfterSetup(ctx, reg.ClientManager(), reg.OAuth2Storage())) @@ -45,10 +41,10 @@ func TestJanitorHandler_PurgeTokenNotAfter(t *testing.T) { cmdx.ExecNoErr(t, newJanitorCmd(), "janitor", fmt.Sprintf("--%s=%s", cli.KeepIfYounger, v.String()), - fmt.Sprintf("--%s=%s", cli.AccessLifespan, jt.GetAccessTokenLifespan(ctx).String()), - fmt.Sprintf("--%s=%s", cli.RefreshLifespan, jt.GetRefreshTokenLifespan(ctx).String()), + fmt.Sprintf("--%s=%s", cli.AccessLifespan, jt.GetAccessTokenLifespan().String()), + fmt.Sprintf("--%s=%s", cli.RefreshLifespan, jt.GetRefreshTokenLifespan().String()), fmt.Sprintf("--%s", cli.OnlyTokens), - jt.GetDSN(ctx), + reg.Config().DSN(), ) }) @@ -60,163 +56,30 @@ func TestJanitorHandler_PurgeTokenNotAfter(t *testing.T) { } } -func TestJanitorHandler_PurgeLoginConsentNotAfter(t *testing.T) { - ctx := context.Background() - - testCycles := testhelpers.NewConsentJanitorTestHelper("").GetNotAfterTestCycles() - - for k, v := range testCycles { - jt := testhelpers.NewConsentJanitorTestHelper(k) - reg, err := jt.GetRegistry(ctx, k) - require.NoError(t, err) - - t.Run(fmt.Sprintf("case=%s", k), func(t *testing.T) { - // Setup the test - t.Run("step=setup", jt.LoginConsentNotAfterSetup(ctx, reg.ConsentManager(), reg.ClientManager())) - // Run the cleanup routine - t.Run("step=cleanup", func(t *testing.T) { - cmdx.ExecNoErr(t, newJanitorCmd(), - "janitor", - fmt.Sprintf("--%s=%s", cli.KeepIfYounger, v.String()), - fmt.Sprintf("--%s=%s", cli.ConsentRequestLifespan, jt.GetConsentRequestLifespan(ctx).String()), - fmt.Sprintf("--%s", cli.OnlyRequests), - jt.GetDSN(ctx), - ) - }) - - notAfter := time.Now().Round(time.Second).Add(-v) - consentLifespan := time.Now().Round(time.Second).Add(-jt.GetConsentRequestLifespan(ctx)) - t.Run("step=validate", jt.LoginConsentNotAfterValidate(ctx, notAfter, consentLifespan, reg.ConsentManager())) - }) - } - -} - -func TestJanitorHandler_PurgeLoginConsent(t *testing.T) { - /* - Login and Consent also needs to be purged on two conditions besides the KeyConsentRequestMaxAge and notAfter time - - when a login/consent request was never completed (timed out) - - when a login/consent request was rejected - */ - - t.Run("case=login-consent-timeout", func(t *testing.T) { - t.Run("case=login-timeout", func(t *testing.T) { - ctx := context.Background() - jt := testhelpers.NewConsentJanitorTestHelper(t.Name()) - reg, err := jt.GetRegistry(ctx, t.Name()) - require.NoError(t, err) - - // setup - t.Run("step=setup", jt.LoginTimeoutSetup(ctx, reg.ConsentManager(), reg.ClientManager())) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - cmdx.ExecNoErr(t, newJanitorCmd(), - "janitor", - fmt.Sprintf("--%s", cli.OnlyRequests), - jt.GetDSN(ctx), - ) - }) - - t.Run("step=validate", jt.LoginTimeoutValidate(ctx, reg.ConsentManager())) - - }) - - t.Run("case=consent-timeout", func(t *testing.T) { - ctx := context.Background() - jt := testhelpers.NewConsentJanitorTestHelper(t.Name()) - reg, err := 
jt.GetRegistry(ctx, t.Name()) - require.NoError(t, err) - - // setup - t.Run("step=setup", jt.ConsentTimeoutSetup(ctx, reg.ConsentManager(), reg.ClientManager())) - - // run cleanup - t.Run("step=cleanup", func(t *testing.T) { - cmdx.ExecNoErr(t, newJanitorCmd(), - "janitor", - fmt.Sprintf("--%s", cli.OnlyRequests), - jt.GetDSN(ctx), - ) - }) - - // validate - t.Run("step=validate", jt.ConsentTimeoutValidate(ctx, reg.ConsentManager())) - }) - - }) - - t.Run("case=login-consent-rejection", func(t *testing.T) { - ctx := context.Background() - - t.Run("case=login-rejection", func(t *testing.T) { - jt := testhelpers.NewConsentJanitorTestHelper(t.Name()) - reg, err := jt.GetRegistry(ctx, t.Name()) - require.NoError(t, err) - - // setup - t.Run("step=setup", jt.LoginRejectionSetup(ctx, reg.ConsentManager(), reg.ClientManager())) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - cmdx.ExecNoErr(t, newJanitorCmd(), - "janitor", - fmt.Sprintf("--%s", cli.OnlyRequests), - jt.GetDSN(ctx), - ) - }) - - // validate - t.Run("step=validate", jt.LoginRejectionValidate(ctx, reg.ConsentManager())) - }) - - t.Run("case=consent-rejection", func(t *testing.T) { - jt := testhelpers.NewConsentJanitorTestHelper(t.Name()) - reg, err := jt.GetRegistry(ctx, t.Name()) - require.NoError(t, err) - - // setup - t.Run("step=setup", jt.ConsentRejectionSetup(ctx, reg.ConsentManager(), reg.ClientManager())) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - cmdx.ExecNoErr(t, newJanitorCmd(), - "janitor", - fmt.Sprintf("--%s", cli.OnlyRequests), - jt.GetDSN(ctx), - ) - }) - - // validate - t.Run("step=validate", jt.ConsentRejectionValidate(ctx, reg.ConsentManager())) - }) - }) -} - func TestJanitorHandler_Arguments(t *testing.T) { - cmdx.ExecNoErr(t, cmd.NewRootCmd(nil, nil, nil), + cmdx.ExecNoErr(t, cmd.NewRootCmd(), "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), "memory", ) - cmdx.ExecNoErr(t, cmd.NewRootCmd(nil, nil, nil), + cmdx.ExecNoErr(t, cmd.NewRootCmd(), "janitor", fmt.Sprintf("--%s", cli.OnlyTokens), "memory", ) - cmdx.ExecNoErr(t, cmd.NewRootCmd(nil, nil, nil), + cmdx.ExecNoErr(t, cmd.NewRootCmd(), "janitor", fmt.Sprintf("--%s", cli.OnlyGrants), "memory", ) - _, _, err := cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(nil, nil, nil), nil, + _, _, err := cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(), nil, "janitor", "memory") require.Error(t, err) require.Contains(t, err.Error(), "Janitor requires at least one of --tokens, --requests or --grants to be set") - cmdx.ExecNoErr(t, cmd.NewRootCmd(nil, nil, nil), + cmdx.ExecNoErr(t, cmd.NewRootCmd(), "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), fmt.Sprintf("--%s=%s", cli.Limit, "1000"), @@ -224,7 +87,7 @@ func TestJanitorHandler_Arguments(t *testing.T) { "memory", ) - _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(nil, nil, nil), nil, + _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(), nil, "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), fmt.Sprintf("--%s=%s", cli.Limit, "0"), @@ -232,7 +95,7 @@ func TestJanitorHandler_Arguments(t *testing.T) { require.Error(t, err) require.Contains(t, err.Error(), "Values for --limit and --batch-size should both be greater than 0") - _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(nil, nil, nil), nil, + _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(), nil, "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), fmt.Sprintf("--%s=%s", cli.Limit, "-100"), @@ -240,7 +103,7 @@ func TestJanitorHandler_Arguments(t *testing.T) { require.Error(t, err) 
require.Contains(t, err.Error(), "Values for --limit and --batch-size should both be greater than 0") - _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(nil, nil, nil), nil, + _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(), nil, "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), fmt.Sprintf("--%s=%s", cli.BatchSize, "0"), @@ -248,7 +111,7 @@ func TestJanitorHandler_Arguments(t *testing.T) { require.Error(t, err) require.Contains(t, err.Error(), "Values for --limit and --batch-size should both be greater than 0") - _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(nil, nil, nil), nil, + _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(), nil, "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), fmt.Sprintf("--%s=%s", cli.BatchSize, "-100"), @@ -256,7 +119,7 @@ func TestJanitorHandler_Arguments(t *testing.T) { require.Error(t, err) require.Contains(t, err.Error(), "Values for --limit and --batch-size should both be greater than 0") - _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(nil, nil, nil), nil, + _, _, err = cmdx.ExecCtx(context.Background(), cmd.NewRootCmd(), nil, "janitor", fmt.Sprintf("--%s", cli.OnlyRequests), fmt.Sprintf("--%s=%s", cli.Limit, "100"), @@ -267,19 +130,15 @@ func TestJanitorHandler_Arguments(t *testing.T) { } func TestJanitorHandler_PurgeGrantNotAfter(t *testing.T) { - ctx := context.Background() - testCycles := testhelpers.NewConsentJanitorTestHelper("").GetNotAfterTestCycles() - - require.True(t, len(testCycles) > 0) + ctx := t.Context() - for k, v := range testCycles { + for k, v := range testhelpers.NotAfterTestCycles { t.Run(fmt.Sprintf("case=%s", k), func(t *testing.T) { jt := testhelpers.NewConsentJanitorTestHelper(t.Name()) - reg, err := jt.GetRegistry(ctx, k) - require.NoError(t, err) + reg := jt.GetRegistry(t) // setup test - t.Run("step=setup", jt.GrantNotAfterSetup(ctx, reg.ClientManager(), reg.GrantManager())) + t.Run("step=setup", jt.GrantNotAfterSetup(ctx, reg.GrantManager())) // run the cleanup routine t.Run("step=cleanup", func(t *testing.T) { @@ -287,7 +146,7 @@ func TestJanitorHandler_PurgeGrantNotAfter(t *testing.T) { "janitor", fmt.Sprintf("--%s=%s", cli.KeepIfYounger, v.String()), fmt.Sprintf("--%s", cli.OnlyGrants), - jt.GetDSN(ctx), + reg.Config().DSN(), ) }) diff --git a/cmd/cli/handler_jwk.go b/cmd/cli/handler_jwk.go index 0845fe5c1fe..aaecd433bea 100644 --- a/cmd/cli/handler_jwk.go +++ b/cmd/cli/handler_jwk.go @@ -4,7 +4,7 @@ package cli import ( - jose "gopkg.in/square/go-jose.v2" + jose "github.com/go-jose/go-jose/v3" ) func ToSDKFriendlyJSONWebKey(key interface{}, kid, use string) jose.JSONWebKey { diff --git a/cmd/cli/handler_migrate.go b/cmd/cli/handler_migrate.go index 23d3c8ad925..ca1346c46d3 100644 --- a/cmd/cli/handler_migrate.go +++ b/cmd/cli/handler_migrate.go @@ -4,348 +4,77 @@ package cli import ( - "bytes" - "context" "fmt" - "io" - "io/fs" - "os" - "path/filepath" - "regexp" - "strings" - - "github.com/ory/x/servicelocatorx" - - "github.com/pkg/errors" - - "github.com/ory/x/configx" - - "github.com/ory/x/errorsx" - - "github.com/ory/x/cmdx" "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/x/flagx" -) - -type MigrateHandler struct{} - -func newMigrateHandler() *MigrateHandler { - return &MigrateHandler{} -} - -const ( - genericDialectKey = "any" + 
"github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/x/cmdx" + "github.com/ory/x/configx" + "github.com/ory/x/popx" ) -var fragmentHeader = []byte(strings.TrimLeft(` --- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen -`, "\n")) - -var blankFragment = []byte(strings.TrimLeft(` --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen -`, "\n")) - -var mrx = regexp.MustCompile(`^(\d{14})000000_([^.]+)(\.[a-z0-9]+)?\.(up|down)\.sql$`) - -type migration struct { - Path string - ID string - Name string - Dialect string - Direction string -} - -type migrationGroup struct { - ID string - Name string - Children []*migration - fallbackUpMigration *migration - fallbackDownMigration *migration -} - -func (m *migration) ReadSource(fs fs.FS) ([]byte, error) { - f, err := fs.Open(m.Path) - if err != nil { - return nil, errors.WithStack(err) - } - defer f.Close() - return io.ReadAll(f) -} - -func (m migration) generateMigrationFragments(source []byte) ([][]byte, error) { - chunks := bytes.Split(source, []byte("--split")) - if len(chunks) < 1 { - return nil, errors.New("no migration chunks found") - } - for i := range chunks { - chunks[i] = append(fragmentHeader, chunks[i]...) - } - return chunks, nil -} - -func (mg migrationGroup) fragmentName(m *migration, i int) string { - if m.Dialect == genericDialectKey { - return fmt.Sprintf("%s%06d_%s.%s.sql", mg.ID, i, mg.Name, m.Direction) - } else { - return fmt.Sprintf("%s%06d_%s.%s.%s.sql", mg.ID, i, mg.Name, m.Dialect, m.Direction) - } -} - -// GenerateSQL splits the migration sources into chunks and writes them to the -// target directory. 
-func (mg migrationGroup) generateSQL(sourceFS fs.FS, target string) error { - ms := mg.Children - if mg.fallbackDownMigration != nil { - ms = append(ms, mg.fallbackDownMigration) - } - if mg.fallbackUpMigration != nil { - ms = append(ms, mg.fallbackUpMigration) - } - dialectFragmentCounts := map[string]int{} - maxFragmentCount := -1 - for _, m := range ms { - source, err := m.ReadSource(sourceFS) - if err != nil { - return errors.WithStack(err) - } - - fragments, err := m.generateMigrationFragments(source) - dialectFragmentCounts[m.Dialect] = len(fragments) - if maxFragmentCount < len(fragments) { - maxFragmentCount = len(fragments) - } - if err != nil { - return errors.Errorf("failed to process %s: %s", m.Path, err.Error()) - } - for i, fragment := range fragments { - dst := filepath.Join(target, mg.fragmentName(m, i)) - if err = os.WriteFile(dst, fragment, 0600); err != nil { - return errors.WithStack(errors.Errorf("failed to write file %s", dst)) - } - } - } - for _, m := range ms { - for i := dialectFragmentCounts[m.Dialect]; i < maxFragmentCount; i += 1 { - dst := filepath.Join(target, mg.fragmentName(m, i)) - if err := os.WriteFile(dst, blankFragment, 0600); err != nil { - return errors.WithStack(errors.Errorf("failed to write file %s", dst)) - } - } - } - return nil +type MigrateHandler struct { + dOpts []driver.OptionsModifier } -func parseMigration(filename string) (*migration, error) { - matches := mrx.FindAllStringSubmatch(filename, -1) - if matches == nil { - return nil, errors.Errorf("failed to parse migration filename %s; %s does not match pattern ", filename, mrx.String()) - } - if len(matches) != 1 && len(matches[0]) != 5 { - return nil, errors.Errorf("invalid migration %s; expected %s", filename, mrx.String()) +func newMigrateHandler(dOpts []driver.OptionsModifier) *MigrateHandler { + return &MigrateHandler{ + dOpts: dOpts, } - dialect := matches[0][3] - if dialect == "" { - dialect = genericDialectKey - } else { - dialect = dialect[1:] - } - return &migration{ - Path: filename, - ID: matches[0][1], - Name: matches[0][2], - Dialect: dialect, - Direction: matches[0][4], - }, nil } -func readMigrations(migrationSourceFS fs.FS, expectedDialects []string) (map[string]*migrationGroup, error) { - mgs := make(map[string]*migrationGroup) - err := fs.WalkDir(migrationSourceFS, ".", func(p string, d fs.DirEntry, err2 error) error { - if err2 != nil { - fmt.Println("Warning: unexpected error " + err2.Error()) - return nil - } - if d.IsDir() { - return nil - } - if p != filepath.Base(p) { - fmt.Println("Warning: ignoring nested file " + p) - return nil - } - - m, err := parseMigration(p) - if err != nil { - return err - } - - if _, ok := mgs[m.ID]; !ok { - mgs[m.ID] = &migrationGroup{ - ID: m.ID, - Name: m.Name, - Children: nil, - } - } - - if m.Dialect == genericDialectKey && m.Direction == "up" { - mgs[m.ID].fallbackUpMigration = m - } else if m.Dialect == genericDialectKey && m.Direction == "down" { - mgs[m.ID].fallbackDownMigration = m - } else { - mgs[m.ID].Children = append(mgs[m.ID].Children, m) - } - - return nil - }) +func (h *MigrateHandler) makeMigrationManager(cmd *cobra.Command, args []string) (*sql.MigrationManager, error) { + opts := append([]driver.OptionsModifier{ + driver.WithConfigOptions( + configx.SkipValidation(), + configx.WithFlags(cmd.Flags())), + driver.DisableValidation(), + driver.DisablePreloading(), + driver.SkipNetworkInit(), + }, h.dOpts...) 
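	// A minimal sketch, assuming the subcommand wiring lives elsewhere in this
	// change: the handlers below (MigrateSQLUp, MigrateSQLDown, MigrateStatus) all
	// build their manager through this function and then delegate to ory/x/popx,
	// so a registration would look roughly like
	//
	//   up := &cobra.Command{
	//       Use:  "up [database-url]",
	//       RunE: newMigrateHandler(nil).MigrateSQLUp,
	//   }
	//
	// The DSN comes either from the positional argument handled just below or from
	// the DSN environment variable picked up through the config options above.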
+ if len(args) > 0 { + opts = append(opts, driver.WithConfigOptions( + configx.WithValue(config.KeyDSN, args[0]), + )) + } + + d, err := driver.New( + cmd.Context(), + opts...) if err != nil { return nil, err } - - if len(expectedDialects) == 0 { - return mgs, nil - } - - eds := make(map[string]struct{}) - for i := range expectedDialects { - eds[expectedDialects[i]] = struct{}{} - } - for _, mg := range mgs { - expect := make(map[string]struct{}) - for _, m := range mg.Children { - if _, ok := eds[m.Dialect]; !ok { - return nil, errors.Errorf("unexpected dialect %s in filename %s", m.Dialect, m.Path) - } - - expect[m.Dialect+"."+m.Direction] = struct{}{} - } - for _, d := range expectedDialects { - if _, ok := expect[d+".up"]; !ok && mg.fallbackUpMigration == nil { - return nil, errors.Errorf("dialect %s not found for up migration %s; use --dialects=\"\" to disable dialect validation", d, mg.ID) - } - if _, ok := expect[d+".down"]; !ok && mg.fallbackDownMigration == nil { - return nil, errors.Errorf("dialect %s not found for down migration %s; use --dialects=\"\" to disable dialect validation", d, mg.ID) - } - } + if len(d.Config().DSN()) == 0 { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "No DSN provided. Please provide a DSN as the first argument or set the DSN environment variable.") + return nil, cmdx.FailSilently(cmd) } - return mgs, nil + return d.Migrator(), nil } -func (h *MigrateHandler) MigrateGen(cmd *cobra.Command, args []string) { - cmdx.ExactArgs(cmd, args, 2) - expectedDialects := flagx.MustGetStringSlice(cmd, "dialects") - - sourceDir := args[0] - targetDir := args[1] - sourceFS := os.DirFS(sourceDir) - mgs, err := readMigrations(sourceFS, expectedDialects) +func (h *MigrateHandler) MigrateSQLUp(cmd *cobra.Command, args []string) (err error) { + p, err := h.makeMigrationManager(cmd, args) if err != nil { - fmt.Println(err.Error()) - os.Exit(1) - } - for _, mg := range mgs { - err = mg.generateSQL(sourceFS, targetDir) - if err != nil { - fmt.Println(err.Error()) - os.Exit(1) - } + return err } - - os.Exit(0) + return popx.MigrateSQLUp(cmd, p) } -func (h *MigrateHandler) MigrateSQL(cmd *cobra.Command, args []string) (err error) { - var d driver.Registry - - if flagx.MustGetBool(cmd, "read-from-env") { - d, err = driver.New( - cmd.Context(), - servicelocatorx.NewOptions(), - []driver.OptionsModifier{ - driver.WithOptions( - configx.SkipValidation(), - configx.WithFlags(cmd.Flags())), - driver.DisableValidation(), - driver.DisablePreloading(), - driver.SkipNetworkInit(), - }) - if err != nil { - return err - } - if len(d.Config().DSN()) == 0 { - _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "When using flag -e, environment variable DSN must be set.") - return cmdx.FailSilently(cmd) - } - } else { - if len(args) != 1 { - _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Please provide the database URL.") - return cmdx.FailSilently(cmd) - } - d, err = driver.New( - cmd.Context(), - servicelocatorx.NewOptions(), - []driver.OptionsModifier{ - driver.WithOptions( - configx.WithFlags(cmd.Flags()), - configx.SkipValidation(), - configx.WithValue(config.KeyDSN, args[0]), - ), - driver.DisableValidation(), - driver.DisablePreloading(), - driver.SkipNetworkInit(), - }) - if err != nil { - return err - } - } - - p := d.Persister() - conn := p.Connection(context.Background()) - if conn == nil { - _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Migrations can only be executed against a SQL-compatible driver but DSN is not a SQL source.") - return cmdx.FailSilently(cmd) - } - - if err := conn.Open(); err != nil { - _, 
_ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not open the database connection:\n%+v\n", err) - return cmdx.FailSilently(cmd) - } - - // convert migration tables - if err := p.PrepareMigration(context.Background()); err != nil { - _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not convert the migration table:\n%+v\n", err) - return cmdx.FailSilently(cmd) - } - - // print migration status - _, _ = fmt.Fprintln(cmd.OutOrStdout(), "The following migration is planned:") - - status, err := p.MigrationStatus(context.Background()) +func (h *MigrateHandler) MigrateSQLDown(cmd *cobra.Command, args []string) (err error) { + p, err := h.makeMigrationManager(cmd, args) if err != nil { - fmt.Fprintf(cmd.ErrOrStderr(), "Could not get the migration status:\n%+v\n", errorsx.WithStack(err)) - return cmdx.FailSilently(cmd) - } - _ = status.Write(os.Stdout) - - if !flagx.MustGetBool(cmd, "yes") { - _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "To skip the next question use flag --yes (at your own risk).") - if !cmdx.AskForConfirmation("Do you wish to execute this migration plan?", nil, nil) { - _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Migration aborted.") - return nil - } + return err } + return popx.MigrateSQLDown(cmd, p) +} - // apply migrations - if err := p.MigrateUp(context.Background()); err != nil { - _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not apply migrations:\n%+v\n", errorsx.WithStack(err)) - return cmdx.FailSilently(cmd) +func (h *MigrateHandler) MigrateStatus(cmd *cobra.Command, args []string) error { + p, err := h.makeMigrationManager(cmd, args) + if err != nil { + return err } - - _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Successfully applied migrations!") - return nil + return popx.MigrateStatus(cmd, p) } diff --git a/cmd/clidoc/main.go b/cmd/clidoc/main.go index 4987677e34d..4939dec34b7 100644 --- a/cmd/clidoc/main.go +++ b/cmd/clidoc/main.go @@ -9,11 +9,11 @@ import ( "github.com/ory/x/clidoc" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" ) func main() { - if err := clidoc.Generate(cmd.NewRootCmd(nil, nil, nil), os.Args[1:]); err != nil { + if err := clidoc.Generate(cmd.NewRootCmd(), os.Args[1:]); err != nil { _, _ = fmt.Fprintf(os.Stderr, "%+v", err) os.Exit(1) } diff --git a/cmd/cmd_create_client.go b/cmd/cmd_create_client.go index 84a1a61c6c0..0e9683843cc 100644 --- a/cmd/cmd_create_client.go +++ b/cmd/cmd_create_client.go @@ -8,18 +8,22 @@ import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" "github.com/ory/x/flagx" "github.com/ory/x/pointerx" - "github.com/ory/hydra/cmd/cli" + "github.com/ory/hydra/v2/cmd/cli" ) const ( + flagFile = "file" + + flagClientAccessTokenStrategy = "access-token-strategy" flagClientAllowedCORSOrigin = "allowed-cors-origin" flagClientAudience = "audience" flagClientBackchannelLogoutCallback = "backchannel-logout-callback" + flagClientId = "id" flagClientName = "name" flagClientClientURI = "client-uri" flagClientContact = "contact" @@ -38,6 +42,8 @@ const ( flagClientResponseType = "response-type" flagClientScope = "scope" flagClientSectorIdentifierURI = "sector-identifier-uri" + flagClientSkipConsent = "skip-consent" + flagClientLogoutSkipConsent = "skip-logout-consent" flagClientSubjectType = "subject-type" flagClientTokenEndpointAuthMethod = "token-endpoint-auth-method" flagClientSecret = "secret" @@ -51,9 +57,9 @@ 
func NewCreateClientsCommand() *cobra.Command { Short: "Create an OAuth 2.0 Client", Aliases: []string{"client"}, Args: cobra.NoArgs, - Example: `{{ .CommandPath }} -n "my app" -c http://localhost/cb -g authorization_code -r code -a core,foobar + Example: `{{ .CommandPath }} --name "my app" --redirect-uri http://localhost/cb --grant-type authorization_code --response-type code --scope core,foobar -Use the tool jq (or any other JSON tool) to get the OAuth2 Client ID and and Secret: +Use the tool jq (or any other JSON tool) to get the OAuth2 Client ID and Secret: client=$({{ .CommandPath }} \ --format json \ @@ -68,7 +74,7 @@ the Authorize Code, Implicit, Refresh flow. This command allows settings all fie To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + `, ` + "`--pgp-key-url`" + ` or ` + "`--keybase`" + ` flag, for example: - {{ .CommandPath }} -n "my app" -g client_credentials -r token -a core,foobar --keybase keybase_username + {{ .CommandPath }} --name "my app" --grant-type client_credentials --response-type token --scope core,foobar --keybase keybase_username `, RunE: func(cmd *cobra.Command, args []string) error { m, _, err := cliclient.NewClient(cmd) @@ -83,14 +89,20 @@ To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + } secret := flagx.MustGetString(cmd, flagClientSecret) + cl, err := clientFromFlags(cmd) + if err != nil { + return err + } + cl.ClientId = pointerx.Ptr(flagx.MustGetString(cmd, flagClientId)) + //nolint:bodyclose - client, _, err := m.OAuth2Api.CreateOAuth2Client(cmd.Context()).OAuth2Client(clientFromFlags(cmd)).Execute() + client, _, err := m.OAuth2API.CreateOAuth2Client(cmd.Context()).OAuth2Client(cl).Execute() if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } if client.ClientSecret == nil && len(secret) > 0 { - client.ClientSecret = pointerx.String(secret) + client.ClientSecret = pointerx.Ptr(secret) } if encryptSecret && client.ClientSecret != nil { @@ -100,7 +112,7 @@ To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + return cmdx.FailSilently(cmd) } - client.ClientSecret = pointerx.String(enc.Base64Encode()) + client.ClientSecret = pointerx.Ptr(enc.Base64Encode()) } cmdx.PrintRow(cmd, (*outputOAuth2Client)(client)) @@ -108,5 +120,6 @@ To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + }, } registerClientFlags(cmd.Flags()) + cmd.Flags().String(flagClientId, "", "Provide the client's id.") return cmd } diff --git a/cmd/cmd_create_client_test.go b/cmd/cmd_create_client_test.go index 8e21f4f221d..9511b52b7ff 100644 --- a/cmd/cmd_create_client_test.go +++ b/cmd/cmd_create_client_test.go @@ -4,7 +4,6 @@ package cmd_test import ( - "context" "encoding/json" "testing" @@ -12,13 +11,14 @@ import ( "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" "github.com/ory/x/snapshotx" ) func TestCreateClient(t *testing.T) { - ctx := context.Background() + t.Parallel() + c := cmd.NewCreateClientsCommand() reg := setup(t, c) @@ -27,7 +27,7 @@ func TestCreateClient(t *testing.T) { assert.NotEmpty(t, actual.Get("client_id").String()) assert.NotEmpty(t, actual.Get("client_secret").String()) - expected, err := reg.ClientManager().GetClient(ctx, actual.Get("client_id").String()) + expected, err := reg.ClientManager().GetClient(t.Context(), actual.Get("client_id").String()) 
require.NoError(t, err) assert.Equal(t, expected.GetID(), actual.Get("client_id").String()) diff --git a/cmd/cmd_create_jwks.go b/cmd/cmd_create_jwks.go index ab30a1d53bc..6358cf05e7d 100644 --- a/cmd/cmd_create_jwks.go +++ b/cmd/cmd_create_jwks.go @@ -6,10 +6,12 @@ package cmd import ( "context" + "github.com/ory/hydra/v2/jwk" + "github.com/spf13/cobra" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" "github.com/ory/x/flagx" ) @@ -37,7 +39,7 @@ func NewCreateJWKSCmd() *cobra.Command { } //nolint:bodyclose - jwks, _, err := m.JwkApi.CreateJsonWebKeySet(context.Background(), args[0]).CreateJsonWebKeySet(hydra.CreateJsonWebKeySet{ + jwks, _, err := m.JwkAPI.CreateJsonWebKeySet(context.Background(), args[0]).CreateJsonWebKeySet(hydra.CreateJsonWebKeySet{ Alg: flagx.MustGetString(cmd, alg), Kid: kid, Use: flagx.MustGetString(cmd, use), @@ -46,12 +48,20 @@ func NewCreateJWKSCmd() *cobra.Command { return cmdx.PrintOpenAPIError(cmd, err) } + if flagx.MustGetBool(cmd, "public") { + jwks.Keys, err = jwk.OnlyPublicSDKKeys(jwks.Keys) + if err != nil { + return err + } + } + cmdx.PrintTable(cmd, &outputJSONWebKeyCollection{Keys: jwks.Keys, Set: args[0]}) return nil }, } - cmd.Root().Name() + cmd.Flags().String(alg, "RS256", "The algorithm to be used to generated they key. Supports: RS256, RS512, ES256, ES512, EdDSA") cmd.Flags().String(use, "sig", "The intended use of this key. Supports: sig, enc") + cmd.Flags().Bool("public", false, "Only return public keys") return cmd } diff --git a/cmd/cmd_create_jwks_test.go b/cmd/cmd_create_jwks_test.go index d9e07d51d24..086c34ac9e2 100644 --- a/cmd/cmd_create_jwks_test.go +++ b/cmd/cmd_create_jwks_test.go @@ -4,7 +4,6 @@ package cmd_test import ( - "context" "testing" "github.com/gofrs/uuid" @@ -13,12 +12,13 @@ import ( "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" ) func TestCreateJWKS(t *testing.T) { - ctx := context.Background() + t.Parallel() + c := cmd.NewCreateJWKSCmd() reg := setup(t, c) @@ -29,8 +29,16 @@ func TestCreateJWKS(t *testing.T) { assert.NotEmpty(t, actual.Get("keys.0.kid").Array(), "%s", actual.Raw) assert.Equal(t, "ES256", actual.Get("keys.0.alg").String(), "%s", actual.Raw) - expected, err := reg.KeyManager().GetKeySet(ctx, set) + expected, err := reg.KeyManager().GetKeySet(t.Context(), set) require.NoError(t, err) assert.Equal(t, expected.Keys[0].KeyID, actual.Get("keys.0.kid").String()) }) + + t.Run("case=gets jwks public", func(t *testing.T) { + set := uuid.Must(uuid.NewV4()).String() + actual := gjson.Parse(cmdx.ExecNoErr(t, c, set, "--use", "enc", "--alg", "RS256", "--public")) + + assert.NotEmptyf(t, actual.Get("keys.0.kid").String(), "Expected kid to be set but got: %s", actual.Raw) + assert.Empty(t, actual.Get("keys.0.p").String(), "public key should not contain private key components: %s", actual.Raw) + }) } diff --git a/cmd/cmd_delete_client.go b/cmd/cmd_delete_client.go index ad129c74eb6..3eae17bda76 100644 --- a/cmd/cmd_delete_client.go +++ b/cmd/cmd_delete_client.go @@ -6,7 +6,7 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" 
"github.com/ory/x/cmdx" ) @@ -34,7 +34,7 @@ To delete OAuth 2.0 Clients with the owner of "foo@bar.com", run: ) for _, c := range args { - _, err := m.OAuth2Api.DeleteOAuth2Client(cmd.Context(), c).Execute() //nolint:bodyclose + _, err := m.OAuth2API.DeleteOAuth2Client(cmd.Context(), c).Execute() //nolint:bodyclose if err != nil { failed[c] = cmdx.PrintOpenAPIError(cmd, err) continue diff --git a/cmd/cmd_delete_client_test.go b/cmd/cmd_delete_client_test.go index 4acc8f850a5..4b00dab337e 100644 --- a/cmd/cmd_delete_client_test.go +++ b/cmd/cmd_delete_client_test.go @@ -4,7 +4,6 @@ package cmd_test import ( - "context" "encoding/json" "fmt" "strings" @@ -13,7 +12,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/assertx" "github.com/ory/x/cmdx" "github.com/ory/x/snapshotx" @@ -21,7 +20,8 @@ import ( ) func TestDeleteClient(t *testing.T) { - ctx := context.Background() + t.Parallel() + c := cmd.NewDeleteClientCmd() reg := setup(t, c) @@ -30,7 +30,7 @@ func TestDeleteClient(t *testing.T) { stdout := cmdx.ExecNoErr(t, c, expected.GetID()) assert.Equal(t, fmt.Sprintf(`"%s"`, expected.GetID()), strings.TrimSpace(stdout)) - _, err := reg.ClientManager().GetClient(ctx, expected.GetID()) + _, err := reg.ClientManager().GetClient(t.Context(), expected.GetID()) assert.ErrorIs(t, err, sqlcon.ErrNoRows) }) @@ -39,10 +39,10 @@ func TestDeleteClient(t *testing.T) { expected2 := createClient(t, reg, nil) assertx.EqualAsJSON(t, []string{expected1.GetID(), expected2.GetID()}, json.RawMessage(cmdx.ExecNoErr(t, c, expected1.GetID(), expected2.GetID()))) - _, err := reg.ClientManager().GetClient(ctx, expected1.GetID()) + _, err := reg.ClientManager().GetClient(t.Context(), expected1.GetID()) assert.ErrorIs(t, err, sqlcon.ErrNoRows) - _, err = reg.ClientManager().GetClient(ctx, expected2.GetID()) + _, err = reg.ClientManager().GetClient(t.Context(), expected2.GetID()) assert.ErrorIs(t, err, sqlcon.ErrNoRows) }) @@ -53,7 +53,7 @@ func TestDeleteClient(t *testing.T) { assert.Equal(t, fmt.Sprintf(`"%s"`, expected.GetID()), strings.TrimSpace(stdout)) snapshotx.SnapshotT(t, stderr) - _, err = reg.ClientManager().GetClient(ctx, expected.GetID()) + _, err = reg.ClientManager().GetClient(t.Context(), expected.GetID()) assert.ErrorIs(t, err, sqlcon.ErrNoRows) }) } diff --git a/cmd/cmd_delete_jwks.go b/cmd/cmd_delete_jwks.go index d9165422103..22bba778481 100644 --- a/cmd/cmd_delete_jwks.go +++ b/cmd/cmd_delete_jwks.go @@ -8,7 +8,7 @@ import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" ) @@ -32,7 +32,7 @@ func NewDeleteJWKSCommand() *cobra.Command { ) for _, c := range args { - _, err = m.JwkApi.DeleteJsonWebKeySet(context.Background(), c).Execute() //nolint:bodyclose + _, err = m.JwkAPI.DeleteJsonWebKeySet(context.Background(), c).Execute() //nolint:bodyclose if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } diff --git a/cmd/cmd_delete_jwks_test.go b/cmd/cmd_delete_jwks_test.go index 91fe3feee4a..7562cd11c03 100644 --- a/cmd/cmd_delete_jwks_test.go +++ b/cmd/cmd_delete_jwks_test.go @@ -4,7 +4,6 @@ package cmd_test import ( - "context" "encoding/json" "fmt" "strings" @@ -12,17 +11,18 @@ import ( "github.com/gofrs/uuid" - 
"github.com/ory/hydra/x" + "github.com/ory/hydra/v2/x" "github.com/stretchr/testify/assert" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/assertx" "github.com/ory/x/cmdx" ) func TestDeleteJwks(t *testing.T) { - ctx := context.Background() + t.Parallel() + c := cmd.NewDeleteJWKSCommand() reg := setup(t, c) @@ -32,7 +32,7 @@ func TestDeleteJwks(t *testing.T) { stdout := cmdx.ExecNoErr(t, c, set) assert.Equal(t, fmt.Sprintf(`"%s"`, set), strings.TrimSpace(stdout)) - _, err := reg.KeyManager().GetKeySet(ctx, set) + _, err := reg.KeyManager().GetKeySet(t.Context(), set) assert.ErrorIs(t, err, x.ErrNotFound) }) @@ -43,10 +43,10 @@ func TestDeleteJwks(t *testing.T) { _ = createJWK(t, reg, set2, "RS256") assertx.EqualAsJSON(t, []string{set1, set2}, json.RawMessage(cmdx.ExecNoErr(t, c, set1, set2))) - _, err := reg.KeyManager().GetKeySet(ctx, set1) + _, err := reg.KeyManager().GetKeySet(t.Context(), set1) assert.ErrorIs(t, err, x.ErrNotFound) - _, err = reg.KeyManager().GetKeySet(ctx, set2) + _, err = reg.KeyManager().GetKeySet(t.Context(), set2) assert.ErrorIs(t, err, x.ErrNotFound) }) } diff --git a/cmd/cmd_delete_tokens.go b/cmd/cmd_delete_tokens.go index 59c9d917fc2..f8add90c7ce 100644 --- a/cmd/cmd_delete_tokens.go +++ b/cmd/cmd_delete_tokens.go @@ -6,7 +6,7 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" ) @@ -23,7 +23,7 @@ func NewDeleteAccessTokensCmd() *cobra.Command { } clientID := args[0] - _, err = client.OAuth2Api.DeleteOAuth2Token(cmd.Context()).ClientId(clientID).Execute() //nolint:bodyclose + _, err = client.OAuth2API.DeleteOAuth2Token(cmd.Context()).ClientId(clientID).Execute() //nolint:bodyclose if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } diff --git a/cmd/cmd_delete_tokens_test.go b/cmd/cmd_delete_tokens_test.go index f965aa7cbf5..3cf2f4237c2 100644 --- a/cmd/cmd_delete_tokens_test.go +++ b/cmd/cmd_delete_tokens_test.go @@ -10,11 +10,13 @@ import ( "github.com/stretchr/testify/assert" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" ) func TestDeleteAccessTokensCmd(t *testing.T) { + t.Parallel() + c := cmd.NewDeleteAccessTokensCmd() reg := setup(t, c) diff --git a/cmd/cmd_get_client.go b/cmd/cmd_get_client.go index c5588b165d1..0a7841a327e 100644 --- a/cmd/cmd_get_client.go +++ b/cmd/cmd_get_client.go @@ -7,7 +7,7 @@ import ( "github.com/spf13/cobra" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" ) @@ -18,9 +18,9 @@ func NewGetClientsCmd() *cobra.Command { Args: cobra.MinimumNArgs(1), Short: "Get one or more OAuth 2.0 Clients by their ID(s)", Long: `This command gets all the details about an OAuth 2.0 Client. 
You can use this command in combination with jq.`, - Example: `To get the OAuth 2.0 Client's secret, run: + Example: `To get the OAuth 2.0 Client's name, run: - {{ .CommandPath }} --json | jq -r '.client_secret'`, + {{ .CommandPath }} --format json | jq -r '.client_name'`, RunE: func(cmd *cobra.Command, args []string) error { m, _, err := cliclient.NewClient(cmd) if err != nil { @@ -29,7 +29,7 @@ func NewGetClientsCmd() *cobra.Command { clients := make([]hydra.OAuth2Client, 0, len(args)) for _, id := range args { - client, _, err := m.OAuth2Api.GetOAuth2Client(cmd.Context(), id).Execute() //nolint:bodyclose + client, _, err := m.OAuth2API.GetOAuth2Client(cmd.Context(), id).Execute() //nolint:bodyclose if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } diff --git a/cmd/cmd_get_client_test.go b/cmd/cmd_get_client_test.go index 05b0686cfc0..e3c4bdf19af 100644 --- a/cmd/cmd_get_client_test.go +++ b/cmd/cmd_get_client_test.go @@ -4,7 +4,6 @@ package cmd_test import ( - "context" "encoding/json" "testing" @@ -12,23 +11,24 @@ import ( "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" "github.com/ory/x/snapshotx" ) func TestGetClient(t *testing.T) { - ctx := context.Background() + t.Parallel() + c := cmd.NewGetClientsCmd() reg := setup(t, c) expected := createClient(t, reg, nil) t.Run("case=gets client", func(t *testing.T) { - actual := gjson.Parse(cmdx.ExecNoErr(t, c, expected.ID.String())) + actual := gjson.Parse(cmdx.ExecNoErr(t, c, expected.GetID())) assert.NotEmpty(t, actual.Get("client_id").String()) assert.Empty(t, actual.Get("client_secret").String()) - expected, err := reg.ClientManager().GetClient(ctx, actual.Get("client_id").String()) + expected, err := reg.ClientManager().GetClient(t.Context(), actual.Get("client_id").String()) require.NoError(t, err) assert.Equal(t, expected.GetID(), actual.Get("client_id").String()) @@ -36,7 +36,7 @@ func TestGetClient(t *testing.T) { }) t.Run("case=gets multiple clients", func(t *testing.T) { - actual := gjson.Parse(cmdx.ExecNoErr(t, c, expected.ID.String(), expected.ID.String())) + actual := gjson.Parse(cmdx.ExecNoErr(t, c, expected.GetID(), expected.ID)) snapshotx.SnapshotT(t, json.RawMessage(actual.Raw), snapshotExcludedClientFields...) }) } diff --git a/cmd/cmd_get_jwks.go b/cmd/cmd_get_jwks.go index 327fc8420c9..f9d1a754885 100644 --- a/cmd/cmd_get_jwks.go +++ b/cmd/cmd_get_jwks.go @@ -6,20 +6,28 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/x/flagx" + + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" ) func NewGetJWKSCmd() *cobra.Command { - return &cobra.Command{ + cmd := &cobra.Command{ Use: "jwk set-1 [set-2] ...", Aliases: []string{"jwks"}, Args: cobra.MinimumNArgs(1), Short: "Get one or more JSON Web Key Set by its ID(s)", Long: `This command gets all the details about an JSON Web Key. 
You can use this command in combination with jq.`, - Example: `To get the JSON Web Key Set's secret, run: + Example: `To get the JSON Web Key Set's use, run: + + {{ .CommandPath }} | jq -r '.[].use' - {{ .CommandPath }} | jq -r '.[].use'`, +To get the JSON Web Key Set as only public keys: + + {{ .CommandPath }} --public ' +`, RunE: func(cmd *cobra.Command, args []string) error { m, _, err := cliclient.NewClient(cmd) if err != nil { @@ -28,7 +36,7 @@ func NewGetJWKSCmd() *cobra.Command { var sets outputJSONWebKeyCollection for _, set := range args { - key, _, err := m.JwkApi.GetJsonWebKeySet(cmd.Context(), set).Execute() //nolint:bodyclose + key, _, err := m.JwkAPI.GetJsonWebKeySet(cmd.Context(), set).Execute() //nolint:bodyclose if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } @@ -36,6 +44,13 @@ func NewGetJWKSCmd() *cobra.Command { sets.Keys = append(sets.Keys, key.Keys...) } + if flagx.MustGetBool(cmd, "public") { + sets.Keys, err = jwk.OnlyPublicSDKKeys(sets.Keys) + if err != nil { + return err + } + } + if len(sets.Keys) == 1 { cmdx.PrintRow(cmd, outputJsonWebKey{Set: args[0], JsonWebKey: sets.Keys[0]}) } else if len(sets.Keys) > 1 { @@ -45,4 +60,6 @@ func NewGetJWKSCmd() *cobra.Command { return nil }, } + cmd.Flags().Bool("public", false, "Only return public keys") + return cmd } diff --git a/cmd/cmd_get_jwks_test.go b/cmd/cmd_get_jwks_test.go index 8b37e61ecd9..0d7cb4cc6eb 100644 --- a/cmd/cmd_get_jwks_test.go +++ b/cmd/cmd_get_jwks_test.go @@ -4,7 +4,6 @@ package cmd_test import ( - "context" "testing" "github.com/gofrs/uuid" @@ -13,12 +12,13 @@ import ( "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" ) -func TestGetJwks(t *testing.T) { - ctx := context.Background() +func TestGetJWKS(t *testing.T) { + t.Parallel() + c := cmd.NewGetJWKSCmd() reg := setup(t, c) @@ -29,9 +29,21 @@ func TestGetJwks(t *testing.T) { actual := gjson.Parse(cmdx.ExecNoErr(t, c, set)) assert.NotEmpty(t, actual.Get("kid").String(), actual.Raw) - expected, err := reg.KeyManager().GetKeySet(ctx, set) + expected, err := reg.KeyManager().GetKeySet(t.Context(), set) + require.NoError(t, err) + + assert.Equal(t, expected.Keys[0].KeyID, actual.Get("kid").String()) + }) + + t.Run("case=gets jwks public", func(t *testing.T) { + actual := gjson.Parse(cmdx.ExecNoErr(t, c, set, "--public")) + + expected, err := reg.KeyManager().GetKeySet(t.Context(), set) require.NoError(t, err) assert.Equal(t, expected.Keys[0].KeyID, actual.Get("kid").String()) + + assert.NotEmptyf(t, actual.Get("kid").String(), "Expected kid to be set but got: %s", actual.Raw) + assert.Empty(t, actual.Get("p").String(), "public key should not contain private key components: %s", actual.Raw) }) } diff --git a/cmd/cmd_helper_client.go b/cmd/cmd_helper_client.go index 0248c621d94..2ca93e4a171 100644 --- a/cmd/cmd_helper_client.go +++ b/cmd/cmd_helper_client.go @@ -5,46 +5,68 @@ package cmd import ( "encoding/json" + "fmt" + "os" "strings" "github.com/spf13/cobra" "github.com/spf13/pflag" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd/cli" + "github.com/ory/hydra/v2/cmd/cli" "github.com/ory/x/flagx" "github.com/ory/x/pointerx" ) -func clientFromFlags(cmd *cobra.Command) hydra.OAuth2Client { +func clientFromFlags(cmd *cobra.Command) 
(hydra.OAuth2Client, error) { + if filename := flagx.MustGetString(cmd, flagFile); filename != "" { + src := cmd.InOrStdin() + if filename != "-" { + f, err := os.Open(filename) // #nosec G304 + if err != nil { + return hydra.OAuth2Client{}, fmt.Errorf("unable to open file %q: %w", filename, err) + } + defer f.Close() //nolint:errcheck + src = f + } + client := hydra.OAuth2Client{} + if err := json.NewDecoder(src).Decode(&client); err != nil { + return hydra.OAuth2Client{}, fmt.Errorf("unable to decode JSON: %w", err) + } + return client, nil + } + return hydra.OAuth2Client{ + AccessTokenStrategy: pointerx.Ptr(flagx.MustGetString(cmd, flagClientAccessTokenStrategy)), AllowedCorsOrigins: flagx.MustGetStringSlice(cmd, flagClientAllowedCORSOrigin), Audience: flagx.MustGetStringSlice(cmd, flagClientAudience), - BackchannelLogoutSessionRequired: pointerx.Bool(flagx.MustGetBool(cmd, flagClientBackChannelLogoutSessionRequired)), - BackchannelLogoutUri: pointerx.String(flagx.MustGetString(cmd, flagClientBackchannelLogoutCallback)), - ClientName: pointerx.String(flagx.MustGetString(cmd, flagClientName)), - ClientSecret: pointerx.String(flagx.MustGetString(cmd, flagClientSecret)), - ClientUri: pointerx.String(flagx.MustGetString(cmd, flagClientClientURI)), + BackchannelLogoutSessionRequired: pointerx.Ptr(flagx.MustGetBool(cmd, flagClientBackChannelLogoutSessionRequired)), + BackchannelLogoutUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientBackchannelLogoutCallback)), + ClientName: pointerx.Ptr(flagx.MustGetString(cmd, flagClientName)), + ClientSecret: pointerx.Ptr(flagx.MustGetString(cmd, flagClientSecret)), + ClientUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientClientURI)), Contacts: flagx.MustGetStringSlice(cmd, flagClientContact), - FrontchannelLogoutSessionRequired: pointerx.Bool(flagx.MustGetBool(cmd, flagClientFrontChannelLogoutSessionRequired)), - FrontchannelLogoutUri: pointerx.String(flagx.MustGetString(cmd, flagClientFrontChannelLogoutCallback)), + FrontchannelLogoutSessionRequired: pointerx.Ptr(flagx.MustGetBool(cmd, flagClientFrontChannelLogoutSessionRequired)), + FrontchannelLogoutUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientFrontChannelLogoutCallback)), GrantTypes: flagx.MustGetStringSlice(cmd, flagClientGrantType), - JwksUri: pointerx.String(flagx.MustGetString(cmd, flagClientJWKSURI)), - LogoUri: pointerx.String(flagx.MustGetString(cmd, flagClientLogoURI)), + JwksUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientJWKSURI)), + LogoUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientLogoURI)), Metadata: json.RawMessage(flagx.MustGetString(cmd, flagClientMetadata)), - Owner: pointerx.String(flagx.MustGetString(cmd, flagClientOwner)), - PolicyUri: pointerx.String(flagx.MustGetString(cmd, flagClientPolicyURI)), + Owner: pointerx.Ptr(flagx.MustGetString(cmd, flagClientOwner)), + PolicyUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientPolicyURI)), PostLogoutRedirectUris: flagx.MustGetStringSlice(cmd, flagClientPostLogoutCallback), RedirectUris: flagx.MustGetStringSlice(cmd, flagClientRedirectURI), - RequestObjectSigningAlg: pointerx.String(flagx.MustGetString(cmd, flagClientRequestObjectSigningAlg)), + RequestObjectSigningAlg: pointerx.Ptr(flagx.MustGetString(cmd, flagClientRequestObjectSigningAlg)), RequestUris: flagx.MustGetStringSlice(cmd, flagClientRequestURI), ResponseTypes: flagx.MustGetStringSlice(cmd, flagClientResponseType), - Scope: pointerx.String(strings.Join(flagx.MustGetStringSlice(cmd, flagClientScope), " ")), - SectorIdentifierUri: 
pointerx.String(flagx.MustGetString(cmd, flagClientSectorIdentifierURI)), - SubjectType: pointerx.String(flagx.MustGetString(cmd, flagClientSubjectType)), - TokenEndpointAuthMethod: pointerx.String(flagx.MustGetString(cmd, flagClientTokenEndpointAuthMethod)), - TosUri: pointerx.String(flagx.MustGetString(cmd, flagClientTOSURI)), - } + Scope: pointerx.Ptr(strings.Join(flagx.MustGetStringSlice(cmd, flagClientScope), " ")), + SkipConsent: pointerx.Ptr(flagx.MustGetBool(cmd, flagClientSkipConsent)), + SkipLogoutConsent: pointerx.Ptr(flagx.MustGetBool(cmd, flagClientLogoutSkipConsent)), + SectorIdentifierUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientSectorIdentifierURI)), + SubjectType: pointerx.Ptr(flagx.MustGetString(cmd, flagClientSubjectType)), + TokenEndpointAuthMethod: pointerx.Ptr(flagx.MustGetString(cmd, flagClientTokenEndpointAuthMethod)), + TosUri: pointerx.Ptr(flagx.MustGetString(cmd, flagClientTOSURI)), + }, nil } func registerEncryptFlags(flags *pflag.FlagSet) { @@ -55,6 +77,8 @@ func registerEncryptFlags(flags *pflag.FlagSet) { } func registerClientFlags(flags *pflag.FlagSet) { + flags.String(flagFile, "", "Read a JSON file representing a client from this location. If set, the other client flags are ignored.") + flags.String(flagClientMetadata, "{}", "Metadata is an arbitrary JSON String of your choosing.") flags.String(flagClientOwner, "", "The owner of this client, typically email addresses or a user ID.") flags.StringSlice(flagClientContact, nil, "A list representing ways to contact people responsible for this client, typically email addresses.") @@ -77,6 +101,9 @@ func registerClientFlags(flags *pflag.FlagSet) { flags.String(flagClientSecret, "", "Provide the client's secret.") flags.String(flagClientName, "", "The client's name.") flags.StringSlice(flagClientPostLogoutCallback, []string{}, "List of allowed URLs to be redirected to after a logout.") + flags.Bool(flagClientSkipConsent, false, "Boolean flag specifying whether to skip the consent screen for this client. If omitted, the default value is false.") + flags.Bool(flagClientLogoutSkipConsent, false, "Boolean flag specifying whether to skip the logout consent screen for this client. If omitted, the default value is false.") + flags.String(flagClientAccessTokenStrategy, "", "The strategy used to generate access tokens. Valid options are `opaque` and `jwt`.") // back-channel logout options flags.Bool(flagClientBackChannelLogoutSessionRequired, false, "Boolean flag specifying whether the client requires that a sid (session ID) Claim be included in the Logout Token to identify the client session with the OP when the backchannel-logout-callback is used. 
If omitted, the default value is false.") diff --git a/cmd/cmd_helper_test.go b/cmd/cmd_helper_test.go index bec0041e023..4b030b78cb8 100644 --- a/cmd/cmd_helper_test.go +++ b/cmd/cmd_helper_test.go @@ -10,19 +10,15 @@ import ( "os" "testing" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" - - "gopkg.in/square/go-jose.v2" - "github.com/spf13/cobra" "github.com/stretchr/testify/require" - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/internal/testhelpers" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/internal/testhelpers" "github.com/ory/x/cmdx" - "github.com/ory/x/contextx" "github.com/ory/x/snapshotx" ) @@ -36,11 +32,11 @@ func base64EncodedPGPPublicKey(t *testing.T) string { return base64.StdEncoding.EncodeToString(gpgPublicKey) } -func setupRoutes(t *testing.T, cmd *cobra.Command) (*httptest.Server, *httptest.Server, driver.Registry) { +func setupRoutes(t *testing.T, cmd *cobra.Command) (*httptest.Server, *httptest.Server, *driver.RegistrySQL) { ctx, cancel := context.WithCancel(context.Background()) t.Cleanup(cancel) - reg := internal.NewMockedRegistry(t, &contextx.Default{}) + reg := testhelpers.NewRegistryMemory(t) public, admin := testhelpers.NewOAuth2Server(ctx, t, reg) cmdx.RegisterHTTPClientFlags(cmd.Flags()) @@ -49,14 +45,14 @@ func setupRoutes(t *testing.T, cmd *cobra.Command) (*httptest.Server, *httptest. return public, admin, reg } -func setup(t *testing.T, cmd *cobra.Command) driver.Registry { +func setup(t *testing.T, cmd *cobra.Command) *driver.RegistrySQL { _, admin, reg := setupRoutes(t, cmd) require.NoError(t, cmd.Flags().Set(cmdx.FlagEndpoint, admin.URL)) require.NoError(t, cmd.Flags().Set(cmdx.FlagFormat, string(cmdx.FormatJSON))) return reg } -var snapshotExcludedClientFields = []snapshotx.ExceptOpt{ +var snapshotExcludedClientFields = []snapshotx.Opt{ snapshotx.ExceptNestedKeys("client_id"), snapshotx.ExceptNestedKeys("registration_access_token"), snapshotx.ExceptNestedKeys("registration_client_uri"), @@ -65,7 +61,7 @@ var snapshotExcludedClientFields = []snapshotx.ExceptOpt{ snapshotx.ExceptNestedKeys("updated_at"), } -func createClientCredentialsClient(t *testing.T, reg driver.Registry) *client.Client { +func createClientCredentialsClient(t *testing.T, reg *driver.RegistrySQL) *client.Client { return createClient(t, reg, &client.Client{ GrantTypes: []string{"client_credentials"}, TokenEndpointAuthMethod: "client_secret_basic", @@ -73,18 +69,18 @@ func createClientCredentialsClient(t *testing.T, reg driver.Registry) *client.Cl }) } -func createClient(t *testing.T, reg driver.Registry, c *client.Client) *client.Client { +func createClient(t *testing.T, reg *driver.RegistrySQL, c *client.Client) *client.Client { if c == nil { c = &client.Client{TokenEndpointAuthMethod: "client_secret_post", Secret: uuid.Must(uuid.NewV4()).String()} } secret := c.Secret - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), c)) + require.NoError(t, reg.ClientManager().CreateClient(t.Context(), c)) c.Secret = secret return c } -func createJWK(t *testing.T, reg driver.Registry, set string, alg string) jose.JSONWebKey { - c, err := reg.KeyManager().GenerateAndPersistKeySet(context.Background(), set, "", alg, "sig") +func createJWK(t 
*testing.T, reg *driver.RegistrySQL, set string, alg string) jose.JSONWebKey { + c, err := reg.KeyManager().GenerateAndPersistKeySet(t.Context(), set, "", alg, "sig") require.NoError(t, err) return c.Keys[0] } diff --git a/cmd/cmd_import_client.go b/cmd/cmd_import_client.go index 5b3279d42e0..f6bd63f0623 100644 --- a/cmd/cmd_import_client.go +++ b/cmd/cmd_import_client.go @@ -14,8 +14,8 @@ import ( "github.com/spf13/cobra" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd/cli" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" "github.com/ory/x/pointerx" ) @@ -47,7 +47,7 @@ Alternatively: To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + `, ` + "`--pgp-key-url`" + ` or ` + "`--keybase`" + ` flag, for example: - {{ .CommandPath }} -n "my app" -g client_credentials -r token -a core,foobar --keybase keybase_username + {{ .CommandPath }} --name "my app" --grant-type client_credentials --response-type token --scope core,foobar --keybase keybase_username `, Long: `This command reads in each listed JSON file and imports their contents as a list of OAuth 2.0 Clients. @@ -75,7 +75,7 @@ Please be aware that this command does not update existing clients. If the clien streams := map[string]io.Reader{"STDIN": cmd.InOrStdin()} for _, path := range args { - contents, err := os.ReadFile(path) + contents, err := os.ReadFile(path) // #nosec G304 if err != nil { _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not open file %s: %s", path, err) return cmdx.FailSilently(cmd) @@ -101,7 +101,7 @@ Please be aware that this command does not update existing clients. 
If the clien for src, cc := range clients { for _, c := range cc { - result, _, err := m.OAuth2Api.CreateOAuth2Client(cmd.Context()).OAuth2Client(c).Execute() //nolint:bodyclose + result, _, err := m.OAuth2API.CreateOAuth2Client(cmd.Context()).OAuth2Client(c).Execute() //nolint:bodyclose if err != nil { failed[src] = cmdx.PrintOpenAPIError(cmd, err) continue diff --git a/cmd/cmd_import_client_test.go b/cmd/cmd_import_client_test.go index 6728c7d4c12..eac23b7c356 100644 --- a/cmd/cmd_import_client_test.go +++ b/cmd/cmd_import_client_test.go @@ -5,9 +5,9 @@ package cmd_test import ( "bytes" - "context" "encoding/json" "os" + "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -15,7 +15,7 @@ import ( "github.com/tidwall/gjson" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" "github.com/ory/x/pointerx" "github.com/ory/x/snapshotx" @@ -23,23 +23,22 @@ import ( func writeTempFile(t *testing.T, contents interface{}) string { t.Helper() - ij, err := json.Marshal(contents) - require.NoError(t, err) - f, err := os.CreateTemp(t.TempDir(), "") - require.NoError(t, err) - _, err = f.Write(ij) + fn := filepath.Join(t.TempDir(), "content.json") + f, err := os.Create(fn) require.NoError(t, err) + require.NoError(t, json.NewEncoder(f).Encode(contents)) require.NoError(t, f.Close()) - return f.Name() + return fn } func TestImportClient(t *testing.T) { - ctx := context.Background() + t.Context() + c := cmd.NewImportClientCmd() reg := setup(t, c) - file1 := writeTempFile(t, []hydra.OAuth2Client{{Scope: pointerx.String("foo")}, {Scope: pointerx.String("bar"), ClientSecret: pointerx.String("some-secret")}}) - file2 := writeTempFile(t, []hydra.OAuth2Client{{Scope: pointerx.String("baz")}, {Scope: pointerx.String("zab"), ClientSecret: pointerx.String("some-secret")}}) + file1 := writeTempFile(t, []hydra.OAuth2Client{{Scope: pointerx.Ptr("foo")}, {Scope: pointerx.Ptr("bar"), ClientSecret: pointerx.Ptr("some-secret")}}) + file2 := writeTempFile(t, []hydra.OAuth2Client{{Scope: pointerx.Ptr("baz")}, {Scope: pointerx.Ptr("zab"), ClientSecret: pointerx.Ptr("some-secret")}}) t.Run("case=imports clients from single file", func(t *testing.T) { actual := gjson.Parse(cmdx.ExecNoErr(t, c, file1)) @@ -48,10 +47,10 @@ func TestImportClient(t *testing.T) { assert.NotEmpty(t, actual.Get("0.client_secret").String()) assert.Equal(t, "some-secret", actual.Get("1.client_secret").String()) - _, err := reg.ClientManager().GetClient(ctx, actual.Get("0.client_id").String()) + _, err := reg.ClientManager().GetClient(t.Context(), actual.Get("0.client_id").String()) require.NoError(t, err) - _, err = reg.ClientManager().GetClient(ctx, actual.Get("1.client_id").String()) + _, err = reg.ClientManager().GetClient(t.Context(), actual.Get("1.client_id").String()) require.NoError(t, err) snapshotx.SnapshotT(t, json.RawMessage(actual.Raw), snapshotExcludedClientFields...) 
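For context, a minimal sketch of the JSON these import tests (and `hydra import client`) consume: a plain array of OAuth 2.0 Clients. The file name and field values are illustrative; only the types and helpers come from this patch.

package main

import (
	"encoding/json"
	"os"

	hydra "github.com/ory/hydra-client-go/v2"
	"github.com/ory/x/pointerx"
)

func main() {
	// Two clients, mirroring the fixtures written by writeTempFile above.
	clients := []hydra.OAuth2Client{
		{Scope: pointerx.Ptr("foo")},
		{Scope: pointerx.Ptr("bar"), ClientSecret: pointerx.Ptr("some-secret")},
	}

	f, err := os.Create("clients.json") // illustrative path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// `hydra import client clients.json` reads one or more such files (or STDIN)
	// and creates each entry via the admin API.
	if err := json.NewEncoder(f).Encode(clients); err != nil {
		panic(err)
	}
}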
@@ -77,7 +76,7 @@ func TestImportClient(t *testing.T) { t.Run("case=imports clients from multiple files and stdin", func(t *testing.T) { var stdin bytes.Buffer - require.NoError(t, json.NewEncoder(&stdin).Encode([]hydra.OAuth2Client{{Scope: pointerx.String("oof")}, {Scope: pointerx.String("rab"), ClientSecret: pointerx.String("some-secret")}})) + require.NoError(t, json.NewEncoder(&stdin).Encode([]hydra.OAuth2Client{{Scope: pointerx.Ptr("oof")}, {Scope: pointerx.Ptr("rab"), ClientSecret: pointerx.Ptr("some-secret")}})) stdout, _, err := cmdx.Exec(t, c, &stdin, file1, file2) require.NoError(t, err) @@ -93,7 +92,7 @@ func TestImportClient(t *testing.T) { }) t.Run("case=performs appropriate error reporting", func(t *testing.T) { - file3 := writeTempFile(t, []hydra.OAuth2Client{{ClientSecret: pointerx.String("short")}}) + file3 := writeTempFile(t, []hydra.OAuth2Client{{ClientSecret: pointerx.Ptr("short")}}) stdout, stderr, err := cmdx.Exec(t, c, nil, file1, file3) require.Error(t, err) actual := gjson.Parse(stdout) diff --git a/cmd/cmd_import_jwk.go b/cmd/cmd_import_jwk.go index e73dda2ed7c..1654892d1d5 100644 --- a/cmd/cmd_import_jwk.go +++ b/cmd/cmd_import_jwk.go @@ -14,8 +14,8 @@ import ( "github.com/spf13/cobra" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd/cli" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" "github.com/ory/x/flagx" "github.com/ory/x/josex" @@ -45,7 +45,7 @@ the imported keys will be added to that set. Otherwise, a new set will be create streams["STDIN"] = cmd.InOrStdin() } else { for _, path := range args[1:] { - contents, err := os.ReadFile(path) + contents, err := os.ReadFile(path) // #nosec G304 if err != nil { _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not open file %s: %s", path, err) return cmdx.FailSilently(cmd) @@ -73,15 +73,18 @@ the imported keys will be added to that set. Otherwise, a new set will be create key = cli.ToSDKFriendlyJSONWebKey(key, "", "") - var buf bytes.Buffer - var jsonWebKey hydra.JsonWebKey + type jwk hydra.JsonWebKey // opt out of OpenAPI-generated UnmarshalJSON + var ( + buf bytes.Buffer + jsonWebKey jwk + ) if err := json.NewEncoder(&buf).Encode(key); err != nil { _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not encode key from `%s` to JSON: %s", src, err) return cmdx.FailSilently(cmd) } if err := json.NewDecoder(&buf).Decode(&jsonWebKey); err != nil { - _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not decode key from `%s` to JSON: %s", src, err) + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not decode key from `%s` from JSON: %s", src, err) return cmdx.FailSilently(cmd) } @@ -107,14 +110,14 @@ the imported keys will be added to that set. 
Otherwise, a new set will be create return cmdx.FailSilently(cmd) } - keys[src] = append(keys[src], jsonWebKey) + keys[src] = append(keys[src], hydra.JsonWebKey(jsonWebKey)) } imported := make([]hydra.JsonWebKey, 0, len(keys)) failed := make(map[string]error) for src, kk := range keys { for _, k := range kk { - result, _, err := m.JwkApi.SetJsonWebKey(cmd.Context(), set, k.Kid).JsonWebKey(k).Execute() //nolint:bodyclose + result, _, err := m.JwkAPI.SetJsonWebKey(cmd.Context(), set, k.Kid).JsonWebKey(k).Execute() //nolint:bodyclose if err != nil { failed[src] = cmdx.PrintOpenAPIError(cmd, err) continue diff --git a/cmd/cmd_import_jwk_test.go b/cmd/cmd_import_jwk_test.go index c7584489dd8..ef39480f2ef 100644 --- a/cmd/cmd_import_jwk_test.go +++ b/cmd/cmd_import_jwk_test.go @@ -5,28 +5,27 @@ package cmd_test import ( "bytes" + "cmp" + _ "embed" "encoding/json" "testing" "github.com/gofrs/uuid" - "github.com/stretchr/testify/require" - - "github.com/ory/x/snapshotx" - "github.com/ory/x/stringsx" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - _ "embed" - - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" + "github.com/ory/x/snapshotx" ) //go:embed stub/jwk.json var stubJsonWebKeySet []byte func TestImportJWKS(t *testing.T) { + t.Parallel() + c := cmd.NewKeysImportCmd() _ = setup(t, c) @@ -53,8 +52,8 @@ func TestImportJWKS(t *testing.T) { actual := gjson.Parse(cmdx.ExecNoErr(t, c, args...)) assert.Len(t, actual.Get("keys.0").Array(), 1, "%s", actual.Raw) assert.NotEmpty(t, actual.Get("keys.0.kid").String(), "%s", actual.Raw) - assert.NotEmpty(t, stringsx.Coalesce(actual.Get("keys.0.x").String(), actual.Get("keys.0.n").String()), "%s", actual.Raw) - assert.Equal(t, stringsx.Coalesce(tc[0], "RS256"), actual.Get("keys.0.alg").String(), "%s", actual.Raw) + assert.NotEmpty(t, cmp.Or(actual.Get("keys.0.x").String(), actual.Get("keys.0.n").String()), "%s", actual.Raw) + assert.Equal(t, cmp.Or(tc[0], "RS256"), actual.Get("keys.0.alg").String(), "%s", actual.Raw) }) } diff --git a/cmd/cmd_introspect_token.go b/cmd/cmd_introspect_token.go index c8024af960e..151424d33d9 100644 --- a/cmd/cmd_introspect_token.go +++ b/cmd/cmd_introspect_token.go @@ -6,14 +6,14 @@ package cmd import ( "strings" - "github.com/ory/hydra/cmd/cliclient" - "github.com/ory/x/cmdx" - "github.com/ory/x/flagx" - "github.com/spf13/cobra" + + "github.com/ory/hydra/v2/cmd/cliclient" + "github.com/ory/x/cmdx" ) func NewIntrospectTokenCmd() *cobra.Command { + var scope []string cmd := &cobra.Command{ Use: "token the-token", Args: cobra.ExactArgs(1), @@ -25,9 +25,9 @@ func NewIntrospectTokenCmd() *cobra.Command { return err } - result, _, err := client.OAuth2Api.IntrospectOAuth2Token(cmd.Context()). + result, _, err := client.OAuth2API.IntrospectOAuth2Token(cmd.Context()). Token(args[0]). 
- Scope(strings.Join(flagx.MustGetStringSlice(cmd, "scope"), " ")).Execute() //nolint:bodyclose + Scope(strings.Join(scope, " ")).Execute() //nolint:bodyclose if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } @@ -36,6 +36,6 @@ func NewIntrospectTokenCmd() *cobra.Command { return nil }, } - cmd.Flags().StringSlice("scope", []string{}, "Additionally check if the scope was granted.") + cmd.Flags().StringSliceVar(&scope, "scope", []string{}, "Additionally check if the scope was granted.") return cmd } diff --git a/cmd/cmd_introspect_token_test.go b/cmd/cmd_introspect_token_test.go index 77f32275caa..53b198be2f8 100644 --- a/cmd/cmd_introspect_token_test.go +++ b/cmd/cmd_introspect_token_test.go @@ -13,11 +13,13 @@ import ( "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" ) func TestIntrospectToken(t *testing.T) { + t.Parallel() + c := cmd.NewIntrospectTokenCmd() public, admin, reg := setupRoutes(t, c) require.NoError(t, c.Flags().Set(cmdx.FlagEndpoint, admin.URL)) diff --git a/cmd/cmd_list_clients.go b/cmd/cmd_list_clients.go index f848a5c1dfd..66b11915a34 100644 --- a/cmd/cmd_list_clients.go +++ b/cmd/cmd_list_clients.go @@ -8,7 +8,7 @@ import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" ) @@ -31,15 +31,15 @@ func NewListClientsCmd() *cobra.Command { return err } - list, resp, err := m.OAuth2Api.ListOAuth2Clients(cmd.Context()).PageSize(int64(pageSize)).PageToken(pageToken).Execute() + // nolint:bodyclose + list, resp, err := m.OAuth2API.ListOAuth2Clients(cmd.Context()).PageSize(int64(pageSize)).PageToken(pageToken).Execute() if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } + defer resp.Body.Close() //nolint:errcheck var collection outputOAuth2ClientCollection - for k := range list { - collection.clients = append(collection.clients, list[k]) - } + collection.clients = append(collection.clients, list...) 
interfaceList := make([]interface{}, len(list)) for k := range collection.clients { diff --git a/cmd/cmd_list_clients_test.go b/cmd/cmd_list_clients_test.go index 87f875d7a3f..a981025b7c7 100644 --- a/cmd/cmd_list_clients_test.go +++ b/cmd/cmd_list_clients_test.go @@ -8,16 +8,18 @@ import ( "github.com/stretchr/testify/require" - "github.com/ory/hydra/client" + "github.com/ory/hydra/v2/client" "github.com/stretchr/testify/assert" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" ) func TestListClient(t *testing.T) { + t.Parallel() + c := cmd.NewListClientsCmd() reg := setup(t, c) diff --git a/cmd/cmd_perform_authorization_code.go b/cmd/cmd_perform_authorization_code.go index f077efda32c..dfc17b91cf9 100644 --- a/cmd/cmd_perform_authorization_code.go +++ b/cmd/cmd_perform_authorization_code.go @@ -4,39 +4,105 @@ package cmd import ( + "bytes" "context" "crypto/rand" "crypto/rsa" "crypto/tls" + "encoding/json" "fmt" "html/template" + "io" "net/http" + "net/url" "os" "strconv" "strings" "time" - "github.com/ory/hydra/cmd/cliclient" - "github.com/pkg/errors" - - "github.com/ory/graceful" - - "github.com/julienschmidt/httprouter" "github.com/spf13/cobra" "github.com/toqueteos/webbrowser" "golang.org/x/oauth2" + "github.com/ory/graceful" + openapi "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" "github.com/ory/x/flagx" + "github.com/ory/x/pointerx" "github.com/ory/x/randx" "github.com/ory/x/tlsx" "github.com/ory/x/urlx" ) +var tokenUserLogin = template.Must(template.New("").Parse(` + +

Login step

+
+ + + Remember login
+ Revoke previous consents
+ + +
+{{ if .Skip }}
+ User authenticated, could skip login UI.
+{{ else }}
+ User unknown.
+{{ end }}
+

Complete login request

+
{{ .Raw }}
+
+`))
+
+var tokenUserConsent = template.Must(template.New("").Parse(`
+
+

Consent step

+
+
+ {{ if not .Audiences }}
+ No token audiences requested.
+ {{ else }}

Requested audiences:

+
    + {{ range .Audiences }} +
  • {{ . }}
  • + {{ end }} +
+ {{ end }}
+ {{ if not .Scopes }}
+ No scopes requested.
+ {{ else }}

Requested scopes:

+ {{ range .Scopes }}
+ {{ . }}
+ {{ end }}
+ {{ end }}
+ Remember consent
+ + +
+{{ if .Skip }}
+ Consent established, could skip consent UI.
+{{ else }}
+ No previous matching consent found, or client has requested re-consent.
+{{ end }}
+

Previous consents for this login session ({{ .SessionID }})

+
{{ .PreviousConsents }}
+
+

Complete consent request

+
{{ .Raw }}
+
+`))
+
var tokenUserWelcome = template.Must(template.New("").Parse(`
- Welcome to the exemplary OAuth 2.0 Consumer!
+ Welcome to the example OAuth 2.0 Consumer!

This is an example app which emulates an OAuth 2.0 consumer application. Usually, this would be your web or mobile application and would use an OAuth 2.0 or OpenID Connect library.

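// Note: the login, consent, and welcome pages above are package-level html/template
// values, parsed once via template.Must and executed per request with an anonymous
// struct (as the handlers below do with tokenUserLogin.Execute(w, struct{ ... }{ ... })).
// A minimal, self-contained sketch of that pattern; the template text and field names
// here are illustrative examples, not taken from Hydra:
package main

import (
	"html/template"
	"os"
)

var page = template.Must(template.New("").Parse(`<h1>{{ .Title }}</h1>
<pre>{{ .Raw }}</pre>`))

func main() {
	// Render with an anonymous struct; Execute's error is ignored here just as the
	// handlers below ignore it with `_ =`.
	_ = page.Execute(os.Stdout, struct {
		Title string
		Raw   string
	}{
		Title: "Login step",
		Raw:   `{"challenge": "..."}`,
	})
}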
@@ -63,7 +129,7 @@ var tokenUserResult = template.Must(template.New("").Parse(`
  • Access Token: {{ .AccessToken }}
  • Refresh Token: {{ .RefreshToken }}
- • Expires in: {{ .Expiry }}
+ • Expires at: {{ .Expiry }}
  • ID Token: {{ .IDToken }}
{{ if .DisplayBackButton }} @@ -76,8 +142,8 @@ func NewPerformAuthorizationCodeCmd() *cobra.Command { cmd := &cobra.Command{ Use: "authorization-code", Example: "{{ .CommandPath }} --client-id ... --client-secret ...", - Short: "An exemplary OAuth 2.0 Client performing the OAuth 2.0 Authorize Code Flow", - Long: `Starts an exemplary web server that acts as an OAuth 2.0 Client performing the Authorize Code Flow. + Short: "Example OAuth 2.0 Client performing the OAuth 2.0 Authorize Code Flow", + Long: `Starts an example web server that acts as an OAuth 2.0 Client performing the Authorize Code Flow. This command will help you to see if Ory Hydra has been configured properly. This command must not be used for anything else than manual testing or demo purposes. The server will terminate on error @@ -90,15 +156,18 @@ and success, unless if the --no-shutdown flag is provided.`, endpoint = cliclient.GetOAuth2URLOverride(cmd, endpoint) - ctx := context.WithValue(cmd.Context(), oauth2.HTTPClient, client) isSSL := flagx.MustGetBool(cmd, "https") port := flagx.MustGetInt(cmd, "port") scopes := flagx.MustGetStringSlice(cmd, "scope") prompt := flagx.MustGetStringSlice(cmd, "prompt") maxAge := flagx.MustGetInt(cmd, "max-age") redirectUrl := flagx.MustGetString(cmd, "redirect") + authUrl := flagx.MustGetString(cmd, "auth-url") + tokenUrl := flagx.MustGetString(cmd, "token-url") audience := flagx.MustGetStringSlice(cmd, "audience") noShutdown := flagx.MustGetBool(cmd, "no-shutdown") + skip := flagx.MustGetBool(cmd, "skip") + responseMode := flagx.MustGetString(cmd, "response-mode") clientID := flagx.MustGetString(cmd, "client-id") if clientID == "" { @@ -118,53 +187,61 @@ and success, unless if the --no-shutdown flag is provided.`, redirectUrl = serverLocation + "callback" } - if err != nil { - return err + if authUrl == "" { + authUrl = urlx.AppendPaths(endpoint, "/oauth2/auth").String() + } + + if tokenUrl == "" { + tokenUrl = urlx.AppendPaths(endpoint, "/oauth2/token").String() } + conf := oauth2.Config{ ClientID: clientID, ClientSecret: clientSecret, Endpoint: oauth2.Endpoint{ - TokenURL: urlx.AppendPaths(endpoint, "/oauth2/token").String(), - AuthURL: urlx.AppendPaths(endpoint, "/oauth2/auth").String(), + AuthURL: authUrl, + TokenURL: tokenUrl, }, RedirectURL: redirectUrl, Scopes: scopes, } - var generateAuthCodeURL = func() (string, []rune) { - state, err := randx.RuneSequence(24, randx.AlphaLower) - cmdx.Must(err, "Could not generate random state: %s", err) + var generateAuthCodeURL = func() (string, string) { + state := flagx.MustGetString(cmd, "state") + if len(state) == 0 { + generatedState, err := randx.RuneSequence(24, randx.AlphaLower) + cmdx.Must(err, "Could not generate random state: %s", err) + state = string(generatedState) + } nonce, err := randx.RuneSequence(24, randx.AlphaLower) cmdx.Must(err, "Could not generate random state: %s", err) - authCodeURL := conf.AuthCodeURL( - string(state), - oauth2.SetAuthURLParam("audience", strings.Join(audience, "+")), - oauth2.SetAuthURLParam("nonce", string(nonce)), - oauth2.SetAuthURLParam("prompt", strings.Join(prompt, "+")), - oauth2.SetAuthURLParam("max_age", strconv.Itoa(maxAge)), - ) + opts := []oauth2.AuthCodeOption{oauth2.SetAuthURLParam("nonce", string(nonce))} + if len(audience) > 0 { + opts = append(opts, oauth2.SetAuthURLParam("audience", strings.Join(audience, " "))) + } + if len(prompt) > 0 { + opts = append(opts, oauth2.SetAuthURLParam("prompt", strings.Join(prompt, " "))) + } + if maxAge >= 0 { + opts = append(opts, 
oauth2.SetAuthURLParam("max_age", strconv.Itoa(maxAge))) + } + if responseMode != "" { + opts = append(opts, oauth2.SetAuthURLParam("response_mode", responseMode)) + } + + authCodeURL := conf.AuthCodeURL(state, opts...) return authCodeURL, state } authCodeURL, state := generateAuthCodeURL() - if !flagx.MustGetBool(cmd, "no-open") { - _ = webbrowser.Open(serverLocation) // ignore errors - } - - _, _ = fmt.Fprintln(os.Stderr, "Setting up home route on "+serverLocation) - _, _ = fmt.Fprintln(os.Stderr, "Setting up callback listener on "+serverLocation+"callback") - _, _ = fmt.Fprintln(os.Stderr, "Press ctrl + c on Linux / Windows or cmd + c on OSX to end the process.") - _, _ = fmt.Fprintf(os.Stderr, "If your browser does not open automatically, navigate to:\n\n\t%s\n\n", serverLocation) - - r := httprouter.New() + r := http.NewServeMux() var tlsc *tls.Config if isSSL { key, err := rsa.GenerateKey(rand.Reader, 2048) if err != nil { - _, _ = fmt.Fprintf(os.Stderr, "Unable to generate RSA key pair: %s", err) + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Unable to generate RSA key pair: %s", err) return cmdx.FailSilently(cmd) } @@ -179,91 +256,54 @@ and success, unless if the --no-shutdown flag is provided.`, Handler: r, TLSConfig: tlsc, ReadHeaderTimeout: time.Second * 5, }) - var shutdown = func() { + shutdown := func() { time.Sleep(time.Second * 1) ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) defer cancel() _ = server.Shutdown(ctx) } - var onDone = func() { - if !noShutdown { - go shutdown() - } else { - // regenerate because we don't want to shutdown and we don't want to reuse nonce & state - authCodeURL, state = generateAuthCodeURL() - } - } - r.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + r.Handle("GET /", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { _ = tokenUserWelcome.Execute(w, &struct{ URL string }{URL: authCodeURL}) - }) - - type ed struct { - Name string - Description string - Hint string - Debug string + })) + + r.Handle("GET /perform-flow", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.Redirect(w, r, authCodeURL, http.StatusFound) + })) + + rt := router{ + cl: client, + skip: skip, + cmd: cmd, + state: &state, + conf: &conf, + onDone: func() { + if !noShutdown { + go shutdown() + } else { + // regenerate because we don't want to shutdown and we don't want to reuse nonce & state + authCodeURL, state = generateAuthCodeURL() + } + }, + serverLocation: serverLocation, + noShutdown: noShutdown, } - r.GET("/callback", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { - if len(r.URL.Query().Get("error")) > 0 { - _, _ = fmt.Fprintf(os.Stderr, "Got error: %s\n", r.URL.Query().Get("error_description")) - - w.WriteHeader(http.StatusInternalServerError) - _ = tokenUserError.Execute(w, &ed{ - Name: r.URL.Query().Get("error"), - Description: r.URL.Query().Get("error_description"), - Hint: r.URL.Query().Get("error_hint"), - Debug: r.URL.Query().Get("error_debug"), - }) + r.Handle("GET /login", http.HandlerFunc(rt.loginGET)) + r.Handle("POST /login", http.HandlerFunc(rt.loginPOST)) + r.Handle("GET /consent", http.HandlerFunc(rt.consentGET)) + r.Handle("POST /consent", http.HandlerFunc(rt.consentPOST)) + r.Handle("GET /callback", http.HandlerFunc(rt.callback)) + r.Handle("POST /callback", http.HandlerFunc(rt.callbackPOSTForm)) - onDone() - return - } - - if r.URL.Query().Get("state") != string(state) { - _, _ = fmt.Fprintf(os.Stderr, "States do not match. 
Expected %s, got %s\n", string(state), r.URL.Query().Get("state")) - - w.WriteHeader(http.StatusInternalServerError) - _ = tokenUserError.Execute(w, &ed{ - Name: "States do not match", - Description: "Expected state " + string(state) + " but got " + r.URL.Query().Get("state"), - }) - onDone() - return - } - - code := r.URL.Query().Get("code") - token, err := conf.Exchange(ctx, code) - if err != nil { - _, _ = fmt.Fprintf(os.Stderr, "Unable to exchange code for token: %s\n", err) - - w.WriteHeader(http.StatusInternalServerError) - _ = tokenUserError.Execute(w, &ed{ - Name: err.Error(), - }) - onDone() - return - } + if !flagx.MustGetBool(cmd, "no-open") { + _ = webbrowser.Open(serverLocation) // ignore errors + } - cmdx.PrintRow(cmd, outputOAuth2Token(*token)) - _ = tokenUserResult.Execute(w, struct { - AccessToken string - RefreshToken string - Expiry string - IDToken string - BackURL string - DisplayBackButton bool - }{ - AccessToken: token.AccessToken, - RefreshToken: token.RefreshToken, - Expiry: token.Expiry.Format(time.RFC1123), - IDToken: fmt.Sprintf("%s", token.Extra("id_token")), - BackURL: serverLocation, - DisplayBackButton: noShutdown, - }) - onDone() - }) + _, _ = fmt.Fprintln(rt.cmd.ErrOrStderr(), "Setting up home route on "+serverLocation) + _, _ = fmt.Fprintln(rt.cmd.ErrOrStderr(), "Setting up callback listener on "+serverLocation+"callback") + _, _ = fmt.Fprintln(rt.cmd.ErrOrStderr(), "Press ctrl + c on Linux / Windows or cmd + c on OSX to end the process.") + _, _ = fmt.Fprintf(rt.cmd.ErrOrStderr(), "If your browser does not open automatically, navigate to:\n\n\t%s\n\n", serverLocation) if isSSL { err = server.ListenAndServeTLS("", "") @@ -285,17 +325,335 @@ and success, unless if the --no-shutdown flag is provided.`, cmd.Flags().IntP("port", "p", 4446, "The port on which the server should run") cmd.Flags().StringSlice("scope", []string{"offline", "openid"}, "Request OAuth2 scope") cmd.Flags().StringSlice("prompt", []string{}, "Set the OpenID Connect prompt parameter") - cmd.Flags().Int("max-age", 0, "Set the OpenID Connect max_age parameter") + cmd.Flags().Int("max-age", -1, "Set the OpenID Connect max_age parameter. -1 means no max_age parameter will be used.") cmd.Flags().Bool("no-shutdown", false, "Do not terminate on success/error. State and nonce will be regenerated when auth flow has completed (either due to an error or success).") cmd.Flags().String("client-id", os.Getenv("OAUTH2_CLIENT_ID"), "Use the provided OAuth 2.0 Client ID, defaults to environment variable OAUTH2_CLIENT_ID") cmd.Flags().String("client-secret", os.Getenv("OAUTH2_CLIENT_SECRET"), "Use the provided OAuth 2.0 Client Secret, defaults to environment variable OAUTH2_CLIENT_SECRET") + cmd.Flags().String("state", "", "Force a state value (insecure)") cmd.Flags().String("redirect", "", "Force a redirect url") cmd.Flags().StringSlice("audience", []string{}, "Request a specific OAuth 2.0 Access Token Audience") cmd.Flags().String("auth-url", "", "Usually it is enough to specify the `endpoint` flag, but if you want to force the authorization url, use this flag") cmd.Flags().String("token-url", "", "Usually it is enough to specify the `endpoint` flag, but if you want to force the token url, use this flag") cmd.Flags().Bool("https", false, "Sets up HTTPS for the endpoint using a self-signed certificate which is re-generated every time you start this command") + cmd.Flags().Bool("skip", false, "Skip login and/or consent steps if possible. 
Only effective if you have configured the Login and Consent UI URLs to point to this server.") + cmd.Flags().String("response-mode", "", "Set the response mode. Can be query (default) or form_post.") return cmd } + +type router struct { + cl *openapi.APIClient + skip bool + cmd *cobra.Command + state *string + conf *oauth2.Config + onDone func() + serverLocation string + noShutdown bool +} + +func (rt *router) loginGET(w http.ResponseWriter, r *http.Request) { + req, raw, err := rt.cl.OAuth2API.GetOAuth2LoginRequest(r.Context()). + LoginChallenge(r.URL.Query().Get("login_challenge")). + Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer raw.Body.Close() //nolint:errcheck + + if rt.skip && req.GetSkip() { + req, res, err := rt.cl.OAuth2API.AcceptOAuth2LoginRequest(r.Context()). + LoginChallenge(req.Challenge). + AcceptOAuth2LoginRequest(openapi.AcceptOAuth2LoginRequest{Subject: req.Subject}). + Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer res.Body.Close() //nolint:errcheck + http.Redirect(w, r, req.RedirectTo, http.StatusFound) + return + } + + pretty, err := prettyJSON(raw.Body) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + _ = tokenUserLogin.Execute(w, struct { + LoginChallenge string + Skip bool + SessionID string + Raw string + }{ + LoginChallenge: req.Challenge, + Skip: req.GetSkip(), + SessionID: req.GetSessionId(), + Raw: pretty, + }) +} + +func (rt *router) loginPOST(w http.ResponseWriter, r *http.Request) { + if err := r.ParseForm(); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + if r.FormValue("revoke-consents") == "on" { + res, err := rt.cl.OAuth2API.RevokeOAuth2ConsentSessions(r.Context()). + Subject(r.FormValue("username")). + All(true). + Execute() + if err != nil { + _, _ = fmt.Fprintln(rt.cmd.ErrOrStderr(), "Error revoking previous consents:", err) + } else { + _, _ = fmt.Fprintln(rt.cmd.ErrOrStderr(), "Revoked all previous consents") + } + defer res.Body.Close() //nolint:errcheck + } + switch r.FormValue("action") { + case "accept": + + req, res, err := rt.cl.OAuth2API.AcceptOAuth2LoginRequest(r.Context()). + LoginChallenge(r.FormValue("ls")). + AcceptOAuth2LoginRequest(openapi.AcceptOAuth2LoginRequest{ + Subject: r.FormValue("username"), + Remember: pointerx.Ptr(r.FormValue("remember") == "on"), + RememberFor: pointerx.Int64(3600), + Context: map[string]string{ + "context from": "login step", + }, + }).Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer res.Body.Close() //nolint:errcheck + http.Redirect(w, r, req.RedirectTo, http.StatusFound) + + case "deny": + req, res, err := rt.cl.OAuth2API.RejectOAuth2LoginRequest(r.Context()).LoginChallenge(r.FormValue("ls")).Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer res.Body.Close() //nolint:errcheck + http.Redirect(w, r, req.RedirectTo, http.StatusFound) + + default: + http.Error(w, "Invalid action", http.StatusBadRequest) + } +} + +func (rt *router) consentGET(w http.ResponseWriter, r *http.Request) { + req, raw, err := rt.cl.OAuth2API.GetOAuth2ConsentRequest(r.Context()). + ConsentChallenge(r.URL.Query().Get("consent_challenge")). 
+ Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer raw.Body.Close() //nolint:errcheck + + if rt.skip && req.GetSkip() { + req, res, err := rt.cl.OAuth2API.AcceptOAuth2ConsentRequest(r.Context()). + ConsentChallenge(req.Challenge). + AcceptOAuth2ConsentRequest(openapi.AcceptOAuth2ConsentRequest{ + GrantScope: req.GetRequestedScope(), + GrantAccessTokenAudience: req.GetRequestedAccessTokenAudience(), + Remember: pointerx.Ptr(true), + RememberFor: pointerx.Int64(3600), + Session: &openapi.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]string{ + "foo": "bar", + }, + IdToken: map[string]string{ + "baz": "bar", + }, + }, + }).Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer res.Body.Close() //nolint:errcheck + http.Redirect(w, r, req.RedirectTo, http.StatusFound) + return + } + + pretty, err := prettyJSON(raw.Body) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + _, raw, err = rt.cl.OAuth2API.ListOAuth2ConsentSessions(r.Context()). + Subject(req.GetSubject()). + LoginSessionId(req.GetLoginSessionId()). + Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer raw.Body.Close() //nolint:errcheck + prettyPrevConsent, err := prettyJSON(raw.Body) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + _ = tokenUserConsent.Execute(w, struct { + ConsentChallenge string + Audiences []string + Scopes []string + Skip bool + SessionID string + PreviousConsents string + Raw string + }{ + ConsentChallenge: req.Challenge, + Audiences: req.RequestedAccessTokenAudience, + Scopes: req.RequestedScope, + Skip: req.GetSkip(), + SessionID: req.GetLoginSessionId(), + PreviousConsents: prettyPrevConsent, + Raw: pretty, + }) +} + +func (rt *router) consentPOST(w http.ResponseWriter, r *http.Request) { + if err := r.ParseForm(); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + switch r.FormValue("action") { + case "accept": + req, res, err := rt.cl.OAuth2API.AcceptOAuth2ConsentRequest(r.Context()). + ConsentChallenge(r.FormValue("cs")). + AcceptOAuth2ConsentRequest(openapi.AcceptOAuth2ConsentRequest{ + GrantScope: r.Form["scope"], + GrantAccessTokenAudience: r.Form["audience"], + Remember: pointerx.Ptr(r.FormValue("remember") == "on"), + RememberFor: pointerx.Int64(3600), + Session: &openapi.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]string{ + "foo": "bar", + }, + IdToken: map[string]string{ + "baz": "bar", + }, + }, + }).Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer res.Body.Close() //nolint:errcheck + http.Redirect(w, r, req.RedirectTo, http.StatusFound) + + case "deny": + req, res, err := rt.cl.OAuth2API.RejectOAuth2ConsentRequest(r.Context()). + ConsentChallenge(r.FormValue("cs")). 
+ Execute() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer res.Body.Close() //nolint:errcheck + http.Redirect(w, r, req.RedirectTo, http.StatusFound) + + default: + http.Error(w, "Invalid action", http.StatusBadRequest) + } +} + +func (rt *router) callback(w http.ResponseWriter, r *http.Request) { + defer rt.onDone() + + if len(r.URL.Query().Get("error")) > 0 { + _, _ = fmt.Fprintf(rt.cmd.ErrOrStderr(), "Got error: %s\n", r.URL.Query().Get("error_description")) + + w.WriteHeader(http.StatusInternalServerError) + _ = tokenUserError.Execute(w, &ed{ + Name: r.URL.Query().Get("error"), + Description: r.URL.Query().Get("error_description"), + Hint: r.URL.Query().Get("error_hint"), + Debug: r.URL.Query().Get("error_debug"), + }) + return + } + + if r.URL.Query().Get("state") != *rt.state { + descr := fmt.Sprintf("States do not match. Expected %q, got %q.", *rt.state, r.URL.Query().Get("state")) + _, _ = fmt.Fprintln(rt.cmd.ErrOrStderr(), descr) + + w.WriteHeader(http.StatusInternalServerError) + _ = tokenUserError.Execute(w, &ed{ + Name: "States do not match", + Description: descr, + }) + return + } + + code := r.URL.Query().Get("code") + ctx := context.WithValue(rt.cmd.Context(), oauth2.HTTPClient, rt.cl.GetConfig().HTTPClient) + token, err := rt.conf.Exchange(ctx, code) + if err != nil { + _, _ = fmt.Fprintf(rt.cmd.ErrOrStderr(), "Unable to exchange code for token: %s\n", err) + + w.WriteHeader(http.StatusInternalServerError) + _ = tokenUserError.Execute(w, &ed{ + Name: err.Error(), + }) + return + } + + cmdx.PrintRow(rt.cmd, outputOAuth2Token(*token)) + _ = tokenUserResult.Execute(w, struct { + AccessToken string + RefreshToken string + Expiry string + IDToken string + BackURL string + DisplayBackButton bool + }{ + AccessToken: token.AccessToken, + RefreshToken: token.RefreshToken, + Expiry: token.Expiry.Format(time.RFC1123), + IDToken: fmt.Sprintf("%s", token.Extra("id_token")), + BackURL: rt.serverLocation, + DisplayBackButton: rt.noShutdown, + }) +} + +func (rt *router) callbackPOSTForm(w http.ResponseWriter, r *http.Request) { + if err := r.ParseForm(); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + u := url.URL{ + Path: r.URL.Path, + RawQuery: r.PostForm.Encode(), + } + http.Redirect(w, r, u.String(), http.StatusFound) +} + +type ed struct { + Name string + Description string + Hint string + Debug string +} + +func prettyJSON(r io.Reader) (string, error) { + contentsRaw, err := io.ReadAll(r) + if err != nil { + return "", err + } + var buf bytes.Buffer + if err := json.Indent(&buf, contentsRaw, "", " "); err != nil { + return "", err + } + return buf.String(), nil +} diff --git a/cmd/cmd_perform_client_credentials.go b/cmd/cmd_perform_client_credentials.go index ac645b30532..5868ee3757f 100644 --- a/cmd/cmd_perform_client_credentials.go +++ b/cmd/cmd_perform_client_credentials.go @@ -10,7 +10,7 @@ import ( "os" "strings" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/spf13/cobra" "golang.org/x/oauth2" @@ -35,7 +35,7 @@ using the CLI.`, return err } - ctx := context.WithValue(cmd.Context(), oauth2.HTTPClient, hc) + ctx := context.WithValue(cmd.Context(), oauth2.HTTPClient, hc.GetConfig().HTTPClient) scopes := flagx.MustGetStringSlice(cmd, "scope") audience := flagx.MustGetStringSlice(cmd, "audience") diff --git a/cmd/cmd_perform_client_credentials_test.go b/cmd/cmd_perform_client_credentials_test.go index 
6026e352944..7a4adeff7ad 100644 --- a/cmd/cmd_perform_client_credentials_test.go +++ b/cmd/cmd_perform_client_credentials_test.go @@ -11,18 +11,20 @@ import ( "github.com/stretchr/testify/assert" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" ) func TestPerformClientCredentialsGrant(t *testing.T) { + t.Parallel() + c := cmd.NewPerformClientCredentialsCmd() public, _, reg := setupRoutes(t, c) require.NoError(t, c.Flags().Set(cmdx.FlagEndpoint, public.URL)) expected := createClientCredentialsClient(t, reg) t.Run("case=exchanges for access token", func(t *testing.T) { - result := cmdx.ExecNoErr(t, c, "--client-id", expected.ID.String(), "--client-secret", expected.Secret) + result := cmdx.ExecNoErr(t, c, "--client-id", expected.GetID(), "--client-secret", expected.Secret) actual := gjson.Parse(result) assert.Equal(t, "bearer", actual.Get("token_type").String(), result) assert.NotEmpty(t, actual.Get("access_token").String(), result) diff --git a/cmd/cmd_perform_device_flow.go b/cmd/cmd_perform_device_flow.go new file mode 100644 index 00000000000..f697722e65f --- /dev/null +++ b/cmd/cmd_perform_device_flow.go @@ -0,0 +1,115 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmd + +import ( + "context" + "fmt" + "os" + "strings" + + "github.com/ory/hydra/v2/cmd/cliclient" + + "github.com/spf13/cobra" + "golang.org/x/oauth2" + + "github.com/ory/x/cmdx" + "github.com/ory/x/flagx" + "github.com/ory/x/urlx" +) + +func NewPerformDeviceCodeCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "device-code", + Example: "{{ .CommandPath }} --client-id ...", + Short: "An exemplary OAuth 2.0 Client performing the OAuth 2.0 Device Code Flow", + Long: `Performs the device code flow. Useful for getting an access token and an ID token in machines without a browser. 
+The client that will be used MUST use the "none" or "client_secret_post" token-endpoint-auth-method.`, + RunE: func(cmd *cobra.Command, args []string) error { + client, endpoint, err := cliclient.NewClient(cmd) + if err != nil { + return err + } + + endpoint = cliclient.GetOAuth2URLOverride(cmd, endpoint) + + ctx := context.WithValue(cmd.Context(), oauth2.HTTPClient, client) + scopes := flagx.MustGetStringSlice(cmd, "scope") + deviceAuthUrl := flagx.MustGetString(cmd, "device-auth-url") + tokenUrl := flagx.MustGetString(cmd, "token-url") + audience := flagx.MustGetStringSlice(cmd, "audience") + + clientID := flagx.MustGetString(cmd, "client-id") + if clientID == "" { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), cmd.UsageString()) + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Please provide a Client ID using --client-id flag, or OAUTH2_CLIENT_ID environment variable.") + return cmdx.FailSilently(cmd) + } + + clientSecret := flagx.MustGetString(cmd, "client-secret") + + if deviceAuthUrl == "" { + deviceAuthUrl = urlx.AppendPaths(endpoint, "/oauth2/device/auth").String() + } + + if tokenUrl == "" { + tokenUrl = urlx.AppendPaths(endpoint, "/oauth2/token").String() + } + + conf := oauth2.Config{ + ClientID: clientID, + ClientSecret: clientSecret, + Endpoint: oauth2.Endpoint{ + DeviceAuthURL: deviceAuthUrl, + TokenURL: tokenUrl, + }, + Scopes: scopes, + } + + params := []oauth2.AuthCodeOption{oauth2.SetAuthURLParam("audience", strings.Join(audience, "+"))} + if clientSecret != "" { + params = append(params, oauth2.SetAuthURLParam("client_secret", clientSecret)) + } + + deviceAuthResponse, err := conf.DeviceAuth( + ctx, + params..., + ) + if err != nil { + _, _ = fmt.Fprintf( + cmd.ErrOrStderr(), "Failed to perform the device authorization request: %s\n", err) + return cmdx.FailSilently(cmd) + } + + _, _ = fmt.Fprintln( + cmd.ErrOrStderr(), + "To login please go to:\n\t", + deviceAuthResponse.VerificationURIComplete, + ) + + token, err := conf.DeviceAccessToken(ctx, deviceAuthResponse) + if err != nil { + _, _ = fmt.Fprintf( + cmd.ErrOrStderr(), "Failed to perform the device token request: %s\n", err) + return cmdx.FailSilently(cmd) + } + + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Successfully signed in!") + + cmdx.PrintRow(cmd, outputOAuth2Token(*token)) + return nil + }, + } + + cmd.Flags().StringSlice("scope", []string{"offline", "openid"}, "Request OAuth2 scope") + + cmd.Flags().String("client-id", os.Getenv("OAUTH2_CLIENT_ID"), "Use the provided OAuth 2.0 Client ID, defaults to environment variable OAUTH2_CLIENT_ID") + cmd.Flags().String("client-secret", os.Getenv("OAUTH2_CLIENT_SECRET"), "Use the provided OAuth 2.0 Client Secret, defaults to environment variable OAUTH2_CLIENT_SECRET") + + cmd.Flags().StringSlice("audience", []string{}, "Request a specific OAuth 2.0 Access Token Audience") + cmd.Flags().String("device-auth-url", "", "Usually it is enough to specify the `endpoint` flag, but if you want to force the device authorization url, use this flag") + cmd.Flags().String("token-url", "", "Usually it is enough to specify the `endpoint` flag, but if you want to force the token url, use this flag") + + return cmd +} diff --git a/cmd/cmd_revoke_token.go b/cmd/cmd_revoke_token.go index d637a9bfcf5..7e0e5f8831e 100644 --- a/cmd/cmd_revoke_token.go +++ b/cmd/cmd_revoke_token.go @@ -9,7 +9,7 @@ import ( "os" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cliclient" 
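// Aside: the new "hydra perform device-code" command above is a thin wrapper around
// the Device Authorization Grant support in golang.org/x/oauth2 (Config.DeviceAuth and
// Config.DeviceAccessToken, as used in the command). A minimal sketch of the same flow;
// the endpoint URLs, client ID, and scopes below are placeholders, not values from this
// repository:
package main

import (
	"context"
	"fmt"

	"golang.org/x/oauth2"
)

func main() {
	conf := oauth2.Config{
		ClientID: "my-client",
		Endpoint: oauth2.Endpoint{
			DeviceAuthURL: "https://hydra.example.org/oauth2/device/auth",
			TokenURL:      "https://hydra.example.org/oauth2/token",
		},
		Scopes: []string{"openid", "offline"},
	}

	ctx := context.Background()

	// Request a device + user code and show the verification URL to the user.
	da, err := conf.DeviceAuth(ctx)
	if err != nil {
		panic(err)
	}
	fmt.Println("To log in, visit:", da.VerificationURIComplete)

	// Poll the token endpoint until the user approves (or the code expires).
	tok, err := conf.DeviceAccessToken(ctx, da)
	if err != nil {
		panic(err)
	}
	fmt.Println("access token:", tok.AccessToken)
}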
"github.com/ory/x/cmdx" "github.com/ory/x/flagx" @@ -38,7 +38,7 @@ Please provide a Client ID and Client Secret using flags --client-id and --clien } token := args[0] - _, err = client.OAuth2Api.RevokeOAuth2Token( + _, err = client.OAuth2API.RevokeOAuth2Token( context.WithValue(cmd.Context(), hydra.ContextBasicAuth, hydra.BasicAuth{ UserName: clientID, Password: clientSecret, diff --git a/cmd/cmd_revoke_token_test.go b/cmd/cmd_revoke_token_test.go index fec8078386e..e82a9c54d12 100644 --- a/cmd/cmd_revoke_token_test.go +++ b/cmd/cmd_revoke_token_test.go @@ -12,12 +12,14 @@ import ( "github.com/tidwall/gjson" "golang.org/x/oauth2/clientcredentials" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" "github.com/ory/x/snapshotx" ) func TestRevokeToken(t *testing.T) { + t.Parallel() + c := cmd.NewRevokeTokenCmd() public, _, reg := setupRoutes(t, c) require.NoError(t, c.Flags().Set(cmdx.FlagEndpoint, public.URL)) diff --git a/cmd/cmd_update_client.go b/cmd/cmd_update_client.go index 1067b861b1d..426355c6048 100644 --- a/cmd/cmd_update_client.go +++ b/cmd/cmd_update_client.go @@ -9,8 +9,8 @@ import ( "github.com/spf13/cobra" - "github.com/ory/hydra/cmd/cli" - "github.com/ory/hydra/cmd/cliclient" + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/cmd/cliclient" "github.com/ory/x/cmdx" "github.com/ory/x/flagx" "github.com/ory/x/pointerx" @@ -22,11 +22,11 @@ func NewUpdateClientCmd() *cobra.Command { Aliases: []string{"client"}, Short: "Update an OAuth 2.0 Client", Args: cobra.ExactArgs(1), - Example: `{{ .CommandPath }} -c http://localhost/cb -g authorization_code -r code -a core,foobar + Example: `{{ .CommandPath }} --redirect-uri http://localhost/cb --grant-type authorization_code --response-type code --scope core,foobar To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + `, ` + "`--pgp-key-url`" + ` or ` + "`--keybase`" + ` flag, for example: - {{ .CommandPath }} e6e96aa5-9cd2-4a70-bf56-ad6434c8aaa2 -n "my app" -g client_credentials -r token -a core,foobar --keybase keybase_username + {{ .CommandPath }} e6e96aa5-9cd2-4a70-bf56-ad6434c8aaa2 --name "my app" --grant-type client_credentials --response-type token --scope core,foobar --keybase keybase_username `, Long: `This command replaces an OAuth 2.0 Client by its ID. Please be aware that this command replaces the entire client. 
If only the name flag (-n "my updated app") is provided, the all other fields are updated to their default values.`, RunE: func(cmd *cobra.Command, args []string) error { @@ -42,15 +42,18 @@ To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + } id := args[0] - cc := clientFromFlags(cmd) + cc, err := clientFromFlags(cmd) + if err != nil { + return err + } - client, _, err := m.OAuth2Api.SetOAuth2Client(context.Background(), id).OAuth2Client(cc).Execute() //nolint:bodyclose + client, _, err := m.OAuth2API.SetOAuth2Client(context.Background(), id).OAuth2Client(cc).Execute() //nolint:bodyclose if err != nil { return cmdx.PrintOpenAPIError(cmd, err) } if client.ClientSecret == nil && len(secret) > 0 { - client.ClientSecret = pointerx.String(secret) + client.ClientSecret = pointerx.Ptr(secret) } if encryptSecret && client.ClientSecret != nil { @@ -60,7 +63,7 @@ To encrypt an auto-generated OAuth2 Client Secret, use flags ` + "`--pgp-key`" + return cmdx.FailSilently(cmd) } - client.ClientSecret = pointerx.String(enc.Base64Encode()) + client.ClientSecret = pointerx.Ptr(enc.Base64Encode()) } cmdx.PrintRow(cmd, (*outputOAuth2Client)(client)) diff --git a/cmd/cmd_update_client_test.go b/cmd/cmd_update_client_test.go index e52638ace73..edc71c3d409 100644 --- a/cmd/cmd_update_client_test.go +++ b/cmd/cmd_update_client_test.go @@ -4,44 +4,86 @@ package cmd_test import ( - "context" + "bytes" "encoding/json" "testing" + "github.com/tidwall/sjson" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/cmdx" "github.com/ory/x/snapshotx" ) func TestUpdateClient(t *testing.T) { - ctx := context.Background() + t.Parallel() + c := cmd.NewUpdateClientCmd() reg := setup(t, c) original := createClient(t, reg, nil) t.Run("case=creates successfully", func(t *testing.T) { - actual := gjson.Parse(cmdx.ExecNoErr(t, c, "--grant-type", "implicit", original.ID.String())) - expected, err := reg.ClientManager().GetClient(ctx, actual.Get("client_id").String()) + actual := gjson.Parse(cmdx.ExecNoErr(t, c, "--grant-type", "implicit", original.GetID())) + expected, err := reg.ClientManager().GetClient(t.Context(), actual.Get("client_id").Str) require.NoError(t, err) - assert.Equal(t, expected.GetID(), actual.Get("client_id").String()) - assert.Equal(t, "implicit", actual.Get("grant_types").Array()[0].String()) + assert.Equal(t, expected.GetID(), actual.Get("client_id").Str) + assert.Equal(t, "implicit", actual.Get("grant_types").Array()[0].Str) snapshotx.SnapshotT(t, json.RawMessage(actual.Raw), snapshotExcludedClientFields...) }) t.Run("case=supports encryption", func(t *testing.T) { actual := gjson.Parse(cmdx.ExecNoErr(t, c, - original.ID.String(), + original.GetID(), "--secret", "some-userset-secret", "--pgp-key", base64EncodedPGPPublicKey(t), )) - assert.NotEmpty(t, actual.Get("client_id").String()) - assert.NotEmpty(t, actual.Get("client_secret").String()) + assert.Equal(t, original.ID, actual.Get("client_id").Str) + assert.NotEmpty(t, actual.Get("client_secret").Str) + assert.NotEqual(t, original.Secret, actual.Get("client_secret").Str) snapshotx.SnapshotT(t, json.RawMessage(actual.Raw), snapshotExcludedClientFields...) 
}) + + t.Run("case=updates from file", func(t *testing.T) { + original, err := reg.ClientManager().GetConcreteClient(t.Context(), original.GetID()) + require.NoError(t, err) + + raw, err := json.Marshal(original) + require.NoError(t, err) + + t.Run("file=stdin", func(t *testing.T) { + raw, err = sjson.SetBytes(raw, "client_name", "updated through file stdin") + require.NoError(t, err) + + stdout, stderr, err := cmdx.Exec(t, c, bytes.NewReader(raw), original.GetID(), "--file", "-") + require.NoError(t, err, stderr) + + actual := gjson.Parse(stdout) + assert.Equal(t, original.ID, actual.Get("client_id").Str) + assert.Equal(t, "updated through file stdin", actual.Get("client_name").Str) + + snapshotx.SnapshotT(t, json.RawMessage(actual.Raw), snapshotExcludedClientFields...) + }) + + t.Run("file=from disk", func(t *testing.T) { + raw, err = sjson.SetBytes(raw, "client_name", "updated through file from disk") + require.NoError(t, err) + + fn := writeTempFile(t, json.RawMessage(raw)) + + stdout, stderr, err := cmdx.Exec(t, c, nil, original.GetID(), "--file", fn) + require.NoError(t, err, stderr) + + actual := gjson.Parse(stdout) + assert.Equal(t, original.ID, actual.Get("client_id").Str) + assert.Equal(t, "updated through file from disk", actual.Get("client_name").Str) + + snapshotx.SnapshotT(t, json.RawMessage(actual.Raw), snapshotExcludedClientFields...) + }) + }) } diff --git a/cmd/helper.go b/cmd/helper.go index 1a0520c19e6..9862d370e78 100644 --- a/cmd/helper.go +++ b/cmd/helper.go @@ -4,21 +4,12 @@ package cmd import ( - "fmt" "net/http" "net/url" - "os" "github.com/tomnomnom/linkheader" ) -var osExit = os.Exit - -func fatal(message string, args ...interface{}) { - fmt.Printf(message+"\n", args...) - osExit(1) -} - func getPageToken(resp *http.Response) string { for _, link := range linkheader.Parse(resp.Header.Get("Link")) { if link.Rel != "next" { diff --git a/cmd/helper_test.go b/cmd/helper_test.go index d19624e0c48..bf35b3f7ffa 100644 --- a/cmd/helper_test.go +++ b/cmd/helper_test.go @@ -13,22 +13,6 @@ import ( "github.com/ory/x/pagination/tokenpagination" ) -func TestFatal(t *testing.T) { - oldOsExit := osExit - defer func() { osExit = oldOsExit }() - - var got int - myExit := func(code int) { - got = code - } - - osExit = myExit - fatal("Fatal message") - if exp := 1; got != exp { - t.Errorf("Expected exit code: %d, got: %d", exp, got) - } -} - func TestGetPageToken(t *testing.T) { u, _ := url.Parse("https://example.com/foobar") rec := httptest.NewRecorder() diff --git a/cmd/janitor.go b/cmd/janitor.go index 31ffd4e7a63..003e5e49ea4 100644 --- a/cmd/janitor.go +++ b/cmd/janitor.go @@ -6,18 +6,16 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/x/servicelocatorx" - - "github.com/ory/hydra/cmd/cli" + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/driver" "github.com/ory/x/configx" ) -func NewJanitorCmd(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *cobra.Command { +func NewJanitorCmd(dOpts []driver.OptionsModifier) *cobra.Command { cmd := &cobra.Command{ - Use: "janitor []", + Use: "janitor [[database_url]]", Short: "This command cleans up stale database rows.", - Example: `hydra janitor --keep-if-younger 23h --access-lifespan 1h --refresh-lifespan 40h --consent-request-lifespan 10m `, + Example: `hydra janitor --keep-if-younger 23h 
--access-lifespan 1h --refresh-lifespan 40h --consent-request-lifespan 10m [database_url]`, Long: `This command cleans up stale database rows. This will select records to delete with a limit and delete records in batch to ensure that no table locking issues arise in big production databases. @@ -62,8 +60,8 @@ Janitor can be used in several ways. hydra janitor --tokens --requests --grants {database-url} `, - RunE: cli.NewHandler(slOpts, dOpts, cOpts).Janitor.RunE, - Args: cli.NewHandler(slOpts, dOpts, cOpts).Janitor.Args, + RunE: cli.NewHandler(dOpts).Janitor.RunE, + Args: cli.NewHandler(dOpts).Janitor.Args, } cmd.Flags().Int(cli.Limit, 10000, "Limit the number of records retrieved from database for deletion.") cmd.Flags().Int(cli.BatchSize, 100, "Define how many records are deleted with each iteration.") diff --git a/cmd/migrate_gen.go b/cmd/migrate_gen.go deleted file mode 100644 index 05a46fa6c87..00000000000 --- a/cmd/migrate_gen.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package cmd - -import ( - "github.com/spf13/cobra" - - "github.com/ory/hydra/cmd/cli" -) - -func NewMigrateGenCmd() *cobra.Command { - cmd := &cobra.Command{ - Use: "gen ", - Short: "Generate migration files from migration templates", - Run: cli.NewHandler(nil, nil, nil).Migration.MigrateGen, - } - cmd.Flags().StringSlice("dialects", []string{"sqlite", "cockroach", "mysql", "postgres"}, "Expect migrations for these dialects and no others to be either explicitly defined, or to have a generic fallback. \"\" disables dialect validation.") - return cmd -} diff --git a/cmd/migrate_sql.go b/cmd/migrate_sql.go index 4b03588cf04..f6bfd179208 100644 --- a/cmd/migrate_sql.go +++ b/cmd/migrate_sql.go @@ -6,17 +6,17 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/x/configx" - "github.com/ory/x/servicelocatorx" + "github.com/ory/x/popx" - "github.com/ory/hydra/cmd/cli" + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/driver" ) -func NewMigrateSqlCmd(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *cobra.Command { +func NewMigrateSQLCmd(dOpts []driver.OptionsModifier) *cobra.Command { cmd := &cobra.Command{ - Use: "sql ", - Short: "Create SQL schemas and apply migration plans", + Use: "sql [database_url]", + Deprecated: "Please use `hydra migrate sql up` instead.", + Short: "Perform SQL migrations", Long: `Run this command on a fresh SQL installation and when you upgrade Hydra to a new minor version. For example, upgrading Hydra 0.7.0 to 0.8.0 requires running this command. @@ -25,16 +25,32 @@ This decreases risk of failure and decreases time required. You can read in the database URL using the -e flag, for example: export DSN=... 
- hydra migrate sql -e + hydra migrate sql up -e ### WARNING ### Before running this command on an existing database, create a back up!`, - RunE: cli.NewHandler(slOpts, dOpts, cOpts).Migration.MigrateSQL, + RunE: cli.NewHandler(dOpts).Migration.MigrateSQLUp, } - cmd.Flags().BoolP("read-from-env", "e", false, "If set, reads the database connection string from the environment variable DSN or config file key dsn.") cmd.Flags().BoolP("yes", "y", false, "If set all confirmation requests are accepted without user interaction.") + cmd.PersistentFlags().BoolP("read-from-env", "e", false, "If set, reads the database connection string from the environment variable DSN or config file key dsn.") + + cmd.AddCommand(newMigrateSQLDownCmd(dOpts)) + cmd.AddCommand(newMigrateSQLUpCmd(dOpts)) + cmd.AddCommand(newMigrateSQLStatusCmd(dOpts)) return cmd } + +func newMigrateSQLDownCmd(dOpts []driver.OptionsModifier) *cobra.Command { + return popx.NewMigrateSQLDownCmd(cli.NewHandler(dOpts).Migration.MigrateSQLDown) +} + +func newMigrateSQLStatusCmd(dOpts []driver.OptionsModifier) *cobra.Command { + return popx.NewMigrateSQLStatusCmd(cli.NewHandler(dOpts).Migration.MigrateStatus) +} + +func newMigrateSQLUpCmd(dOpts []driver.OptionsModifier) *cobra.Command { + return popx.NewMigrateSQLUpCmd(cli.NewHandler(dOpts).Migration.MigrateSQLUp) +} diff --git a/cmd/migrate_status.go b/cmd/migrate_status.go new file mode 100644 index 00000000000..9828b473cf4 --- /dev/null +++ b/cmd/migrate_status.go @@ -0,0 +1,24 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmd + +import ( + "github.com/spf13/cobra" + + "github.com/ory/x/popx" + + "github.com/ory/hydra/v2/cmd/cli" + "github.com/ory/hydra/v2/driver" +) + +func NewMigrateStatusCmd(dOpts []driver.OptionsModifier) *cobra.Command { + cmd := popx.RegisterMigrateStatusFlags(&cobra.Command{ + Use: "status", + Deprecated: "Please use `hydra migrate sql status` instead.", + Short: "Get the current migration status", + RunE: cli.NewHandler(dOpts).Migration.MigrateStatus, + }) + cmd.PersistentFlags().BoolP("read-from-env", "e", false, "If set, reads the database connection string from the environment variable DSN or config file key dsn.") + return cmd +} diff --git a/cmd/output_client.go b/cmd/output_client.go index 1b052c56967..3f060f281df 100644 --- a/cmd/output_client.go +++ b/cmd/output_client.go @@ -19,7 +19,7 @@ type ( } ) -func (_ outputOAuth2Client) Header() []string { +func (outputOAuth2Client) Header() []string { return []string{"CLIENT ID", "CLIENT SECRET", "GRANT TYPES", "RESPONSE TYPES", "SCOPE", "AUDIENCE", "REDIRECT URIS"} } @@ -40,7 +40,7 @@ func (i outputOAuth2Client) Interface() interface{} { return i } -func (_ outputOAuth2ClientCollection) Header() []string { +func (outputOAuth2ClientCollection) Header() []string { return outputOAuth2Client{}.Header() } diff --git a/cmd/output_introspection.go b/cmd/output_introspection.go index e3aa576421d..1f89f016530 100644 --- a/cmd/output_introspection.go +++ b/cmd/output_introspection.go @@ -16,7 +16,7 @@ type ( outputOAuth2TokenIntrospection hydra.IntrospectedOAuth2Token ) -func (_ outputOAuth2TokenIntrospection) Header() []string { +func (outputOAuth2TokenIntrospection) Header() []string { return []string{"ACTIVE", "SUBJECT", "CLIENT ID", "SCOPE", "EXPIRY", "TOKEN USE"} } diff --git a/cmd/output_jwks.go b/cmd/output_jwks.go index 3b42af3b113..207e33a9d1f 100644 --- a/cmd/output_jwks.go +++ b/cmd/output_jwks.go @@ -20,7 +20,7 @@ type 
( } ) -func (_ outputJsonWebKey) Header() []string { +func (outputJsonWebKey) Header() []string { return []string{"SET ID", "KEY ID", "ALGORITHM", "USE"} } @@ -38,7 +38,7 @@ func (i outputJsonWebKey) Interface() interface{} { return i } -func (_ outputJSONWebKeyCollection) Header() []string { +func (outputJSONWebKeyCollection) Header() []string { return outputJsonWebKey{}.Header() } diff --git a/cmd/output_token.go b/cmd/output_token.go index c91add12cb5..70841edde03 100644 --- a/cmd/output_token.go +++ b/cmd/output_token.go @@ -4,11 +4,10 @@ package cmd import ( + "cmp" "fmt" "time" - "github.com/ory/x/stringsx" - "golang.org/x/oauth2" ) @@ -16,7 +15,7 @@ type ( outputOAuth2Token oauth2.Token ) -func (_ outputOAuth2Token) Header() []string { +func (outputOAuth2Token) Header() []string { return []string{"ACCESS TOKEN", "REFRESH TOKEN", "ID TOKEN", "EXPIRY"} } @@ -29,7 +28,7 @@ func (i outputOAuth2Token) Columns() []string { return []string{ i.AccessToken, - stringsx.Coalesce(i.RefreshToken, ""), + cmp.Or(i.RefreshToken, ""), printIDToken, i.Expiry.Round(time.Second).String(), } diff --git a/cmd/root.go b/cmd/root.go index 082e9553877..e77e9b89f8d 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -7,26 +7,26 @@ import ( "fmt" "os" - "github.com/ory/x/cmdx" + "github.com/pkg/errors" - "github.com/ory/hydra/driver" - "github.com/ory/x/configx" - "github.com/ory/x/servicelocatorx" + "github.com/ory/x/cmdx" "github.com/spf13/cobra" + + "github.com/ory/hydra/v2/driver" ) -func NewRootCmd(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *cobra.Command { +func NewRootCmd(opts ...driver.OptionsModifier) *cobra.Command { cmd := &cobra.Command{ Use: "hydra", Short: "Run and manage Ory Hydra", } cmdx.EnableUsageTemplating(cmd) - RegisterCommandRecursive(cmd, slOpts, dOpts, cOpts) + RegisterCommandRecursive(cmd, opts...) 
return cmd } -func RegisterCommandRecursive(parent *cobra.Command, slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) { +func RegisterCommandRecursive(parent *cobra.Command, opts ...driver.OptionsModifier) { createCmd := NewCreateCmd() createCmd.AddCommand( NewCreateClientsCommand(), @@ -62,6 +62,7 @@ func RegisterCommandRecursive(parent *cobra.Command, slOpts []servicelocatorx.Op performCmd.AddCommand( NewPerformClientCredentialsCmd(), NewPerformAuthorizationCodeCmd(), + NewPerformDeviceCodeCmd(), ) revokeCmd := NewRevokeCmd() @@ -71,13 +72,13 @@ func RegisterCommandRecursive(parent *cobra.Command, slOpts []servicelocatorx.Op introspectCmd.AddCommand(NewIntrospectTokenCmd()) migrateCmd := NewMigrateCmd() - migrateCmd.AddCommand(NewMigrateGenCmd()) - migrateCmd.AddCommand(NewMigrateSqlCmd(slOpts, dOpts, cOpts)) + migrateCmd.AddCommand(NewMigrateSQLCmd(opts)) + migrateCmd.AddCommand(NewMigrateStatusCmd(opts)) serveCmd := NewServeCmd() - serveCmd.AddCommand(NewServeAdminCmd(slOpts, dOpts, cOpts)) - serveCmd.AddCommand(NewServePublicCmd(slOpts, dOpts, cOpts)) - serveCmd.AddCommand(NewServeAllCmd(slOpts, dOpts, cOpts)) + serveCmd.AddCommand(NewServeAdminCmd(opts)) + serveCmd.AddCommand(NewServePublicCmd(opts)) + serveCmd.AddCommand(NewServeAllCmd(opts)) parent.AddCommand( createCmd, @@ -91,15 +92,18 @@ func RegisterCommandRecursive(parent *cobra.Command, slOpts []servicelocatorx.Op revokeCmd, migrateCmd, serveCmd, - NewJanitorCmd(slOpts, dOpts, cOpts), + NewJanitorCmd(opts), NewVersionCmd(), ) } // Execute adds all child commands to the root command sets flags appropriately. func Execute() { - if err := NewRootCmd(nil, nil, nil).Execute(); err != nil { - fmt.Println(err) - os.Exit(-1) + c := NewRootCmd() + if err := c.Execute(); err != nil { + if !errors.Is(err, cmdx.ErrNoPrintButFail) { + _, _ = fmt.Fprintln(c.ErrOrStderr(), err) + } + os.Exit(1) } } diff --git a/cmd/root_test.go b/cmd/root_test.go index 3ff077493d9..62b6efd716e 100644 --- a/cmd/root_test.go +++ b/cmd/root_test.go @@ -10,5 +10,5 @@ import ( ) func TestUsageStrings(t *testing.T) { - cmdx.AssertUsageTemplates(t, NewRootCmd(nil, nil, nil)) + cmdx.AssertUsageTemplates(t, NewRootCmd(nil, nil)) } diff --git a/cmd/serve_admin.go b/cmd/serve_admin.go index 28de0576ac7..c78d61169a7 100644 --- a/cmd/serve_admin.go +++ b/cmd/serve_admin.go @@ -6,15 +6,12 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/x/configx" - "github.com/ory/x/servicelocatorx" - - "github.com/ory/hydra/cmd/server" + "github.com/ory/hydra/v2/cmd/server" + "github.com/ory/hydra/v2/driver" ) -// adminCmd represents the admin command -func NewServeAdminCmd(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *cobra.Command { +// NewServeAdminCmd returns a new admin serve command +func NewServeAdminCmd(dOpts []driver.OptionsModifier) *cobra.Command { return &cobra.Command{ Use: "admin", Short: "Serves Administrative HTTP/2 APIs", @@ -30,6 +27,6 @@ This command does not work with the "memory" database. Both services (administra connection to be able to synchronize. 
` + serveControls, - RunE: server.RunServeAdmin(slOpts, dOpts, cOpts), + RunE: server.RunServeAdmin(dOpts), } } diff --git a/cmd/serve_all.go b/cmd/serve_all.go index 229c919d7b3..bccaae5ed5a 100644 --- a/cmd/serve_all.go +++ b/cmd/serve_all.go @@ -6,15 +6,11 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/x/configx" - "github.com/ory/x/servicelocatorx" - - "github.com/ory/hydra/cmd/server" + "github.com/ory/hydra/v2/cmd/server" + "github.com/ory/hydra/v2/driver" ) -// allCmd represents the all command -func NewServeAllCmd(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *cobra.Command { +func NewServeAllCmd(dOpts []driver.OptionsModifier) *cobra.Command { return &cobra.Command{ Use: "all", Short: "Serves both public and administrative HTTP/2 APIs", @@ -31,6 +27,6 @@ All possible controls are listed below. This command exposes exposes command lin the controls section. ` + serveControls, - RunE: server.RunServeAll(slOpts, dOpts, cOpts), + RunE: server.RunServeAll(dOpts), } } diff --git a/cmd/serve_public.go b/cmd/serve_public.go index e09311b16fe..12fbdfc7e59 100644 --- a/cmd/serve_public.go +++ b/cmd/serve_public.go @@ -6,15 +6,11 @@ package cmd import ( "github.com/spf13/cobra" - "github.com/ory/hydra/driver" - "github.com/ory/x/configx" - "github.com/ory/x/servicelocatorx" - - "github.com/ory/hydra/cmd/server" + "github.com/ory/hydra/v2/cmd/server" + "github.com/ory/hydra/v2/driver" ) -// servePublicCmd represents the public command -func NewServePublicCmd(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) *cobra.Command { +func NewServePublicCmd(dOpts []driver.OptionsModifier) *cobra.Command { return &cobra.Command{ Use: "public", Short: "Serves Public HTTP/2 APIs", @@ -31,6 +27,6 @@ This command does not work with the "memory" database. Both services (privileged connection to be able to synchronize. ` + serveControls, - RunE: server.RunServePublic(slOpts, dOpts, cOpts), + RunE: server.RunServePublic(dOpts), } } diff --git a/cmd/server/banner.go b/cmd/server/banner.go index 230420b798c..26721272c9c 100644 --- a/cmd/server/banner.go +++ b/cmd/server/banner.go @@ -8,5 +8,5 @@ func banner(version string) string { Take security seriously and subscribe to the Ory Security Newsletter. Stay on top of new patches and security insights. 
->> Subscribe now: http://eepurl.com/di390P <<` +>> Subscribe now: https://www.ory.sh/l/sign-up-newsletter <<` } diff --git a/cmd/server/handler.go b/cmd/server/handler.go index 79d25bab550..4f017ac2af7 100644 --- a/cmd/server/handler.go +++ b/cmd/server/handler.go @@ -9,225 +9,240 @@ import ( "fmt" "net/http" "strings" - "sync" "time" - "github.com/ory/x/servicelocatorx" - - "github.com/ory/x/corsx" - "github.com/ory/x/httprouterx" - - analytics "github.com/ory/analytics-go/v4" - "github.com/ory/x/configx" - - "github.com/ory/x/reqlog" - - "github.com/julienschmidt/httprouter" "github.com/rs/cors" "github.com/spf13/cobra" "github.com/urfave/negroni" - "go.uber.org/automaxprocs/maxprocs" + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" + "golang.org/x/sync/errgroup" + "github.com/ory/analytics-go/v5" "github.com/ory/graceful" + "github.com/ory/x/configx" + "github.com/ory/x/contextx" "github.com/ory/x/healthx" + "github.com/ory/x/httprouterx" "github.com/ory/x/metricsx" "github.com/ory/x/networkx" "github.com/ory/x/otelx" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" - prometheus "github.com/ory/x/prometheusx" + "github.com/ory/x/otelx/semconv" + "github.com/ory/x/prometheusx" + "github.com/ory/x/reqlog" + "github.com/ory/x/tlsx" + "github.com/ory/x/urlx" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" ) -var _ = &consent.Handler{} - -func EnhanceMiddleware(ctx context.Context, sl *servicelocatorx.Options, d driver.Registry, n *negroni.Negroni, address string, router *httprouter.Router, enableCORS bool, iface config.ServeInterface) http.Handler { - if !networkx.AddressIsUnixSocket(address) { - n.UseFunc(x.RejectInsecureRequests(d, d.Config().TLS(ctx, iface))) - } - - for _, mw := range sl.HTTPMiddlewares() { - n.UseFunc(mw) - } - - n.UseHandler(router) - corsx.ContextualizedMiddleware(func(ctx context.Context) (opts cors.Options, enabled bool) { - return d.Config().CORS(ctx, iface) - }) - - return n -} - -func isDSNAllowed(ctx context.Context, r driver.Registry) { +func ensureNoMemoryDSN(r *driver.RegistrySQL) { if r.Config().DSN() == "memory" { r.Logger().Fatalf(`When using "hydra serve admin" or "hydra serve public" the DSN can not be set to "memory".`) } } -func RunServeAdmin(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) func(cmd *cobra.Command, args []string) error { +func RunServeAdmin(dOpts []driver.OptionsModifier) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { + fmt.Println(banner(config.Version)) + ctx := cmd.Context() - sl := 
servicelocatorx.NewOptions(slOpts...) - d, err := driver.New(cmd.Context(), sl, append(dOpts, driver.WithOptions(configx.WithFlags(cmd.Flags())))) + d, err := driver.New(ctx, append(dOpts, driver.WithConfigOptions(configx.WithFlags(cmd.Flags())))...) if err != nil { return err } - isDSNAllowed(ctx, d) - - admin, _, adminmw, _ := setup(ctx, d, cmd) - d.PrometheusManager().RegisterRouter(admin.Router) - - var wg sync.WaitGroup - wg.Add(1) - - go serve( - ctx, - d, - cmd, - &wg, - config.AdminInterface, - EnhanceMiddleware(ctx, sl, d, adminmw, d.Config().ListenOn(config.AdminInterface), admin.Router, true, config.AdminInterface), - d.Config().ListenOn(config.AdminInterface), - d.Config().SocketPermission(config.AdminInterface), - ) + ensureNoMemoryDSN(d) - wg.Wait() - return nil + srv, err := adminServer(ctx, d, sqa(ctx, d, cmd)) + if err != nil { + return err + } + return srv() } } -func RunServePublic(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) func(cmd *cobra.Command, args []string) error { +func RunServePublic(dOpts []driver.OptionsModifier) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { + fmt.Println(banner(config.Version)) + ctx := cmd.Context() - sl := servicelocatorx.NewOptions(slOpts...) - d, err := driver.New(cmd.Context(), sl, append(dOpts, driver.WithOptions(configx.WithFlags(cmd.Flags())))) + d, err := driver.New(ctx, append(dOpts, driver.WithConfigOptions(configx.WithFlags(cmd.Flags())))...) if err != nil { return err } - isDSNAllowed(ctx, d) - - _, public, _, publicmw := setup(ctx, d, cmd) - d.PrometheusManager().RegisterRouter(public.Router) - - var wg sync.WaitGroup - wg.Add(1) - - go serve( - ctx, - d, - cmd, - &wg, - config.PublicInterface, - EnhanceMiddleware(ctx, sl, d, publicmw, d.Config().ListenOn(config.PublicInterface), public.Router, false, config.PublicInterface), - d.Config().ListenOn(config.PublicInterface), - d.Config().SocketPermission(config.PublicInterface), - ) + ensureNoMemoryDSN(d) - wg.Wait() - return nil + srv, err := publicServer(ctx, d, sqa(ctx, d, cmd)) + if err != nil { + return err + } + return srv() } } -func RunServeAll(slOpts []servicelocatorx.Option, dOpts []driver.OptionsModifier, cOpts []configx.OptionModifier) func(cmd *cobra.Command, args []string) error { +func RunServeAll(dOpts []driver.OptionsModifier) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { + fmt.Println(banner(config.Version)) + ctx := cmd.Context() - sl := servicelocatorx.NewOptions(slOpts...) - d, err := driver.New(cmd.Context(), sl, append(dOpts, driver.WithOptions(configx.WithFlags(cmd.Flags())))) + d, err := driver.New(ctx, append(dOpts, driver.WithConfigOptions(configx.WithFlags(cmd.Flags())))...) 
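RunServeAll just below ties the public and admin listeners together with an errgroup, so a failure in either one cancels the shared context and brings the other down as well. A minimal, self-contained sketch of that pattern; the ports 4444/4445 match the documented defaults, everything else (the run helper, timeouts) is illustrative rather than the project's actual wiring:

package main

import (
	"context"
	"errors"
	"log"
	"net/http"
	"time"

	"golang.org/x/sync/errgroup"
)

// run blocks until the server fails or ctx is cancelled, then shuts down gracefully.
func run(ctx context.Context, srv *http.Server) error {
	errc := make(chan error, 1)
	go func() { errc <- srv.ListenAndServe() }()
	select {
	case err := <-errc:
		return err
	case <-ctx.Done():
		shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()
		return srv.Shutdown(shutdownCtx)
	}
}

func main() {
	eg, ctx := errgroup.WithContext(context.Background())
	public := &http.Server{Addr: ":4444", ReadHeaderTimeout: 5 * time.Second}
	admin := &http.Server{Addr: ":4445", ReadHeaderTimeout: 5 * time.Second}

	// If either listener returns an error, ctx is cancelled and the other one shuts down too.
	eg.Go(func() error { return run(ctx, public) })
	eg.Go(func() error { return run(ctx, admin) })

	if err := eg.Wait(); err != nil && !errors.Is(err, http.ErrServerClosed) {
		log.Fatal(err)
	}
}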
if err != nil { return err } - admin, public, adminmw, publicmw := setup(ctx, d, cmd) + eg, ctx := errgroup.WithContext(ctx) + ms := sqa(ctx, d, cmd) - d.PrometheusManager().RegisterRouter(admin.Router) - d.PrometheusManager().RegisterRouter(public.Router) + srvAdmin, err := adminServer(ctx, d, ms) + if err != nil { + return err + } + srvPublic, err := publicServer(ctx, d, ms) + if err != nil { + return err + } - var wg sync.WaitGroup - wg.Add(2) + eg.Go(srvAdmin) + eg.Go(srvPublic) + return eg.Wait() + } +} - go serve( - ctx, - d, - cmd, - &wg, - config.PublicInterface, - EnhanceMiddleware(ctx, sl, d, publicmw, d.Config().ListenOn(config.PublicInterface), public.Router, false, config.PublicInterface), - d.Config().ListenOn(config.PublicInterface), - d.Config().SocketPermission(config.PublicInterface), - ) +var prometheusManager = prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) - go serve( - ctx, - d, - cmd, - &wg, - config.AdminInterface, - EnhanceMiddleware(ctx, sl, d, adminmw, d.Config().ListenOn(config.AdminInterface), admin.Router, true, config.AdminInterface), - d.Config().ListenOn(config.AdminInterface), - d.Config().SocketPermission(config.AdminInterface), - ) +func adminServer(ctx context.Context, d *driver.RegistrySQL, sqaMetrics *metricsx.Service) (func() error, error) { + cfg := d.Config().ServeAdmin(contextx.RootContext) - wg.Wait() - return nil - } -} + n := negroni.New() -func setup(ctx context.Context, d driver.Registry, cmd *cobra.Command) (admin *httprouterx.RouterAdmin, public *httprouterx.RouterPublic, adminmw, publicmw *negroni.Negroni) { - fmt.Println(banner(config.Version)) + logger := reqlog. + NewMiddlewareFromLogger(d.Logger(), + fmt.Sprintf("hydra/admin: %s", d.Config().IssuerURL(ctx).String())) + if cfg.RequestLog.DisableHealth { + logger.ExcludePaths(healthx.AliveCheckPath, healthx.ReadyCheckPath, "/admin"+prometheusx.MetricsPrometheusPath) + } - if d.Config().CGroupsV1AutoMaxProcsEnabled() { - _, err := maxprocs.Set(maxprocs.Logger(d.Logger().Infof)) + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + n.UseFunc(httprouterx.AddAdminPrefixIfNotPresentNegroni) + n.UseFunc(semconv.Middleware) + n.Use(logger) + if cfg.TLS.Enabled && !networkx.AddressIsUnixSocket(cfg.Host) { + mw, err := tlsx.EnforceTLSRequests(d, cfg.TLS.AllowTerminationFrom) if err != nil { - d.Logger().WithError(err).Fatal("Couldn't set GOMAXPROCS") + return nil, err } + n.Use(mw) } - adminmw = negroni.New() - publicmw = negroni.New() + for _, mw := range d.HTTPMiddlewares() { + n.Use(mw) + } + n.UseFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + cfg, enabled := d.Config().CORSAdmin(r.Context()) + if !enabled { + next(w, r) + return + } + cors.New(cfg).ServeHTTP(w, r, next) + }) + n.Use(sqaMetrics) + + router := httprouterx.NewRouterAdminWithPrefix(prometheusManager) + d.RegisterAdminRoutes(router) - admin = x.NewRouterAdmin(d.Config().AdminURL) - public = x.NewRouterPublic() + n.UseHandler(router) - adminLogger := reqlog. 
- NewMiddlewareFromLogger(d.Logger(), - fmt.Sprintf("hydra/admin: %s", d.Config().IssuerURL(ctx).String())) - if d.Config().DisableHealthAccessLog(config.AdminInterface) { - adminLogger = adminLogger.ExcludePaths("/admin"+healthx.AliveCheckPath, "/admin"+healthx.ReadyCheckPath) - } + n.UseFunc(otelx.SpanNameRecorderNegroniFunc) + return func() error { + return serve(ctx, d, cfg, n, "admin") + }, nil +} + +func publicServer(ctx context.Context, d *driver.RegistrySQL, sqaMetrics *metricsx.Service) (func() error, error) { + cfg := d.Config().ServePublic(contextx.RootContext) - adminmw.Use(adminLogger) - adminmw.Use(d.PrometheusManager()) + n := negroni.New() - publicLogger := reqlog.NewMiddlewareFromLogger( + logger := reqlog.NewMiddlewareFromLogger( d.Logger(), fmt.Sprintf("hydra/public: %s", d.Config().IssuerURL(ctx).String()), ) - if d.Config().DisableHealthAccessLog(config.PublicInterface) { - publicLogger.ExcludePaths(healthx.AliveCheckPath, healthx.ReadyCheckPath) + if cfg.RequestLog.DisableHealth { + logger.ExcludePaths(healthx.AliveCheckPath, healthx.ReadyCheckPath) } - publicmw.Use(publicLogger) - publicmw.Use(d.PrometheusManager()) + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + n.UseFunc(semconv.Middleware) + n.Use(logger) + if cfg.TLS.Enabled && !networkx.AddressIsUnixSocket(cfg.Host) { + mw, err := tlsx.EnforceTLSRequests(d, cfg.TLS.AllowTerminationFrom) + if err != nil { + return nil, err + } + n.Use(mw) + } - metrics := metricsx.New( + for _, mw := range d.HTTPMiddlewares() { + n.Use(mw) + } + n.UseFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + cfg, enabled := d.Config().CORSPublic(r.Context()) + if !enabled { + next(w, r) + return + } + cors.New(cfg).ServeHTTP(w, r, next) + }) + n.Use(sqaMetrics) + + router := x.NewRouterPublic(prometheusManager) + d.RegisterPublicRoutes(ctx, router) + + n.UseHandler(router) + n.UseFunc(otelx.SpanNameRecorderNegroniFunc) + return func() error { + return serve(ctx, d, cfg, n, "public") + }, nil +} + +func sqa(ctx context.Context, d *driver.RegistrySQL, cmd *cobra.Command) *metricsx.Service { + urls := []string{ + d.Config().IssuerURL(ctx).Host, + d.Config().PublicURL(ctx).Host, + d.Config().AdminURL(ctx).Host, + d.Config().ServePublic(ctx).BaseURL.Host, + d.Config().ServeAdmin(ctx).BaseURL.Host, + d.Config().LoginURL(ctx).Host, + d.Config().LogoutURL(ctx).Host, + d.Config().ConsentURL(ctx).Host, + d.Config().RegistrationURL(ctx).Host, + } + if c, y := d.Config().CORSPublic(ctx); y { + urls = append(urls, c.AllowedOrigins...) + } + if c, y := d.Config().CORSAdmin(ctx); y { + urls = append(urls, c.AllowedOrigins...) + } + host := urlx.ExtractPublicAddress(urls...) 
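Both server chains above resolve their CORS settings per request inside a negroni UseFunc instead of installing a static handler, so configuration changes take effect without rebuilding the middleware stack. A minimal sketch of that pattern with negroni and rs/cors; corsOptionsFor is a hypothetical stand-in for the registry's CORSAdmin/CORSPublic lookups:

package main

import (
	"net/http"

	"github.com/rs/cors"
	"github.com/urfave/negroni"
)

// corsOptionsFor stands in for a per-request configuration lookup.
func corsOptionsFor(r *http.Request) (cors.Options, bool) {
	return cors.Options{AllowedOrigins: []string{"https://login.example.com"}}, true
}

func main() {
	n := negroni.New()
	n.UseFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) {
		opts, enabled := corsOptionsFor(r)
		if !enabled {
			next(w, r) // CORS disabled: pass the request straight through.
			return
		}
		cors.New(opts).ServeHTTP(w, r, next)
	})
	n.UseHandler(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusNoContent)
	}))
	_ = http.ListenAndServe(":8080", n)
}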
+ + return metricsx.New( cmd, d.Logger(), d.Config().Source(ctx), &metricsx.Options{ - Service: "ory-hydra", - ClusterID: metricsx.Hash(fmt.Sprintf("%s|%s", - d.Config().IssuerURL(ctx).String(), - d.Config().DSN(), - )), + Service: "hydra", + DeploymentId: metricsx.Hash(d.Persister().NetworkID(ctx).String()), IsDevelopment: d.Config().DSN() == "memory" || d.Config().IssuerURL(ctx).String() == "" || strings.Contains(d.Config().IssuerURL(ctx).String(), "localhost"), @@ -236,7 +251,7 @@ func setup(ctx context.Context, d driver.Registry, cmd *cobra.Command) (admin *h "/admin" + jwk.KeyHandlerPath, jwk.WellKnownKeysPath, - "/admin" + client.ClientsHandlerPath, + urlx.MustJoin("/admin", client.ClientsHandlerPath), client.DynClientsHandlerPath, oauth2.DefaultConsentPath, @@ -272,8 +287,8 @@ func setup(ctx context.Context, d driver.Registry, cmd *cobra.Command) (admin *h "/admin" + healthx.ReadyCheckPath, healthx.VersionPath, "/admin" + healthx.VersionPath, - prometheus.MetricsPrometheusPath, - "/admin" + prometheus.MetricsPrometheusPath, + prometheusx.MetricsPrometheusPath, + "/admin" + prometheusx.MetricsPrometheusPath, "/", }, BuildVersion: config.Version, @@ -283,57 +298,48 @@ func setup(ctx context.Context, d driver.Registry, cmd *cobra.Command) (admin *h Endpoint: "https://sqa.ory.sh", GzipCompressionLevel: 6, BatchMaxSize: 500 * 1000, - BatchSize: 250, - Interval: time.Hour * 24, + BatchSize: 1000, + Interval: time.Hour * 6, }, + Hostname: host, }, ) - - adminmw.Use(metrics) - publicmw.Use(metrics) - - d.RegisterRoutes(ctx, admin, public) - - return } func serve( ctx context.Context, - d driver.Registry, - cmd *cobra.Command, - wg *sync.WaitGroup, - iface config.ServeInterface, + d *driver.RegistrySQL, + cfg *configx.Serve, handler http.Handler, - address string, - permission *configx.UnixPermission, -) { - defer wg.Done() - - if tracer := d.Tracer(cmd.Context()); tracer.IsLoaded() { - handler = otelx.TraceHandler(handler) + ifaceName string, +) error { + if tracer := d.Tracer(ctx); tracer.IsLoaded() { + handler = otelx.NewMiddleware(handler, ifaceName, + otelhttp.WithTracerProvider(tracer.Provider()), + ) } var tlsConfig *tls.Config - stopReload := make(chan struct{}) - if tc := d.Config().TLS(ctx, iface); tc.Enabled() { + if cfg.TLS.Enabled { // #nosec G402 - This is a false positive because we use graceful.WithDefaults which sets the correct TLS settings. - tlsConfig = &tls.Config{GetCertificate: GetOrCreateTLSCertificate(ctx, d, iface, stopReload)} + tlsConfig = &tls.Config{GetCertificate: GetOrCreateTLSCertificate(ctx, d, cfg.TLS, ifaceName)} } - var srv = graceful.WithDefaults(&http.Server{ + srv := graceful.WithDefaults(&http.Server{ Handler: handler, TLSConfig: tlsConfig, ReadHeaderTimeout: time.Second * 5, }) - if err := graceful.Graceful(func() error { - d.Logger().Infof("Setting up http server on %s", address) - listener, err := networkx.MakeListener(address, permission) + addr := configx.GetAddress(cfg.Host, cfg.Port) + return graceful.Graceful(func() error { + d.Logger().Infof("Setting up http server on %s", addr) + listener, err := networkx.MakeListener(addr, &cfg.Socket) if err != nil { return err } - if networkx.AddressIsUnixSocket(address) { + if networkx.AddressIsUnixSocket(addr) { return srv.Serve(listener) } @@ -341,15 +347,8 @@ func serve( return srv.ServeTLS(listener, "", "") } - if iface == config.PublicInterface { - d.Logger().Warnln("HTTPS is disabled. 
Please ensure that your proxy is configured to provide HTTPS, and that it redirects HTTP to HTTPS.") - } + d.Logger().Warnln("HTTPS is disabled. Please ensure that your proxy is configured to provide HTTPS, and that it redirects HTTP to HTTPS.") return srv.Serve(listener) - }, func(ctx context.Context) error { - close(stopReload) - return srv.Shutdown(ctx) - }); err != nil { - d.Logger().WithError(err).Fatal("Could not gracefully run server") - } + }, srv.Shutdown) } diff --git a/cmd/server/helper_cert.go b/cmd/server/helper_cert.go index e85f27ea296..f5cdd497ae1 100644 --- a/cmd/server/helper_cert.go +++ b/cmd/server/helper_cert.go @@ -12,18 +12,12 @@ import ( "encoding/pem" "sync" - "github.com/gofrs/uuid" - - "gopkg.in/square/go-jose.v2" - - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - - "github.com/pkg/errors" + "github.com/go-jose/go-jose/v3" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/x/configx" "github.com/ory/x/tlsx" - - "github.com/ory/hydra/jwk" ) const ( @@ -44,21 +38,22 @@ var lock sync.Mutex // GetOrCreateTLSCertificate returns a function for use with // "net/tls".Config.GetCertificate. If the certificate and key are read from // disk, they will be automatically reloaded until stopReload is close()'d. -func GetOrCreateTLSCertificate(ctx context.Context, d driver.Registry, iface config.ServeInterface, stopReload <-chan struct{}) func(*tls.ClientHelloInfo) (*tls.Certificate, error) { +func GetOrCreateTLSCertificate(ctx context.Context, d *driver.RegistrySQL, tlsConfig configx.TLS, ifaceName string) func(*tls.ClientHelloInfo) (*tls.Certificate, error) { lock.Lock() defer lock.Unlock() // check if certificates are configured - certFunc, err := d.Config().TLS(ctx, iface).GetCertificateFunc(stopReload, d.Logger()) - if err == nil { - return certFunc - } else if !errors.Is(err, tlsx.ErrNoCertificatesConfigured) { + if certFunc, err := tlsConfig.GetCertFunc(ctx, d.Logger(), ifaceName); err != nil { d.Logger().WithError(err).Fatal("Unable to load HTTPS TLS Certificate") return nil // in case Fatal is hooked + } else if certFunc != nil { + return certFunc } + d.Logger().Infof("No certificate found for %s, generating a self-signed certificate.", ifaceName) + // no certificates configured: self-sign a new cert - priv, err := jwk.GetOrGenerateKeys(ctx, d, d.SoftwareKeyManager(), TlsKeyName, uuid.Must(uuid.NewV4()).String(), "RS256") + priv, err := jwk.GetOrGenerateKeys(ctx, d, TlsKeyName, "RS256") if err != nil { d.Logger().WithError(err).Fatal("Unable to fetch or generate HTTPS TLS key pair") return nil // in case Fatal is hooked @@ -72,12 +67,12 @@ func GetOrCreateTLSCertificate(ctx context.Context, d driver.Registry, iface con } AttachCertificate(priv, cert) - if err := d.SoftwareKeyManager().DeleteKey(ctx, TlsKeyName, priv.KeyID); err != nil { + if err := d.KeyManager().DeleteKey(ctx, TlsKeyName, priv.KeyID); err != nil { d.Logger().WithError(err).Fatal(`Could not update (delete) the self signed TLS certificate`) return nil // in case Fatal is hooked } - if err := d.SoftwareKeyManager().AddKey(ctx, TlsKeyName, priv); err != nil { + if err := d.KeyManager().AddKey(ctx, TlsKeyName, priv); err != nil { d.Logger().WithError(err).Fatalf(`Could not update (add) the self signed TLS certificate: %s %x %d`, cert.SignatureAlgorithm, cert.Signature, len(cert.Signature)) return nil // in case 
Fatalf is hooked } diff --git a/cmd/server/helper_cert_test.go b/cmd/server/helper_cert_test.go index c9e2a24fd0d..844cc0ec34d 100644 --- a/cmd/server/helper_cert_test.go +++ b/cmd/server/helper_cert_test.go @@ -5,7 +5,6 @@ package server_test import ( "bytes" - "context" "crypto/x509" "encoding/base64" "encoding/json" @@ -13,40 +12,38 @@ import ( "testing" "time" - "github.com/google/uuid" + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/require" - "gopkg.in/square/go-jose.v2" "github.com/ory/x/configx" "github.com/ory/x/logrusx" + "github.com/ory/x/servicelocatorx" "github.com/ory/x/tlsx" - "github.com/ory/hydra/cmd/server" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/hydra/jwk" + "github.com/ory/hydra/v2/cmd/server" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" ) func TestGetOrCreateTLSCertificate(t *testing.T) { certPath, keyPath, cert, priv := testhelpers.GenerateTLSCertificateFilesForTests(t) logger := logrusx.New("", "") logger.Logger.ExitFunc = func(code int) { t.Fatalf("Logger called os.Exit(%v)", code) } - hook := test.NewLocal(logger.Logger) - cfg := config.MustNew( - context.Background(), - logger, - configx.WithValues(map[string]interface{}{ + d, err := driver.New(t.Context(), + driver.WithConfigOptions(configx.WithValues(map[string]interface{}{ "dsn": config.DSNMemory, "serve.tls.enabled": true, "serve.tls.cert.path": certPath, "serve.tls.key.path": keyPath, - }), + })), + driver.WithServiceLocatorOptions(servicelocatorx.WithLogger(logger)), ) - d, err := driver.NewRegistryWithoutInit(cfg, logger) require.NoError(t, err) - getCert := server.GetOrCreateTLSCertificate(context.Background(), d, config.AdminInterface, nil) + getCert := server.GetOrCreateTLSCertificate(t.Context(), d, d.Config().ServeAdmin(t.Context()).TLS, "admin") require.NotNil(t, getCert) tlsCert, err := getCert(nil) require.NoError(t, err) @@ -65,6 +62,8 @@ func TestGetOrCreateTLSCertificate(t *testing.T) { require.NotEqual(t, certPath, newCertPath) require.NotEqual(t, keyPath, newKeyPath) + hook := test.NewLocal(logger.Logger) + // move them into place require.NoError(t, os.Rename(newKeyPath, keyPath)) require.NoError(t, os.Rename(newCertPath, certPath)) @@ -96,7 +95,7 @@ func TestGetOrCreateTLSCertificate(t *testing.T) { default: } } - require.Contains(t, hook.LastEntry().Message, "Failed to reload TLS certificates. 
Using the previously loaded certificates.") + require.Contains(t, hook.LastEntry().Message, "Failed to reload TLS certificates, using previous certificates") } func TestGetOrCreateTLSCertificateBase64(t *testing.T) { @@ -108,23 +107,14 @@ func TestGetOrCreateTLSCertificateBase64(t *testing.T) { require.NoError(t, err) keyBase64 := base64.StdEncoding.EncodeToString(keyPEM) - logger := logrusx.New("", "") - logger.Logger.ExitFunc = func(code int) { t.Fatalf("Logger called os.Exit(%v)", code) } - hook := test.NewLocal(logger.Logger) - _ = hook - cfg := config.MustNew( - context.Background(), - logger, - configx.WithValues(map[string]interface{}{ - "dsn": config.DSNMemory, - "serve.tls.enabled": true, - "serve.tls.cert.base64": certBase64, - "serve.tls.key.base64": keyBase64, - }), - ) - d, err := driver.NewRegistryWithoutInit(cfg, logger) + d, err := driver.New(t.Context(), driver.WithConfigOptions(configx.WithValues(map[string]interface{}{ + "dsn": config.DSNMemory, + "serve.tls.enabled": true, + "serve.tls.cert.base64": certBase64, + "serve.tls.key.base64": keyBase64, + }))) require.NoError(t, err) - getCert := server.GetOrCreateTLSCertificate(context.Background(), d, config.AdminInterface, nil) + getCert := server.GetOrCreateTLSCertificate(t.Context(), d, d.Config().ServeAdmin(t.Context()).TLS, "admin") require.NotNil(t, getCert) tlsCert, err := getCert(nil) require.NoError(t, err) @@ -138,7 +128,7 @@ func TestGetOrCreateTLSCertificateBase64(t *testing.T) { } func TestCreateSelfSignedCertificate(t *testing.T) { - keys, err := jwk.GenerateJWK(context.Background(), jose.RS256, uuid.New().String(), "sig") + keys, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") require.NoError(t, err) private := keys.Keys[0] diff --git a/cmd/version.go b/cmd/version.go index 0caf3a315dd..88c0da2ee0b 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -6,7 +6,7 @@ package cmd import ( "fmt" - "github.com/ory/hydra/driver/config" + "github.com/ory/hydra/v2/driver/config" "github.com/spf13/cobra" ) diff --git a/consent/csrf.go b/consent/csrf.go new file mode 100644 index 00000000000..168a35e1b89 --- /dev/null +++ b/consent/csrf.go @@ -0,0 +1,71 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package consent + +import ( + "context" + "net/http" + "strings" + "time" + + "github.com/gorilla/sessions" + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/mapx" +) + +func setCSRFCookie(ctx context.Context, w http.ResponseWriter, r *http.Request, conf x.CookieConfigProvider, store sessions.Store, name, csrfValue string, maxAge time.Duration) error { + // Errors can be ignored here, because we always get a session back. Error typically means that the + // session doesn't exist yet. 
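The base64 test above feeds the TLS material in through the serve.tls.cert.base64 and serve.tls.key.base64 configuration values instead of file paths. A small sketch of producing such values from PEM files; the file names are placeholders:

package main

import (
	"encoding/base64"
	"fmt"
	"os"
)

func main() {
	// tls.crt / tls.key are placeholder paths to PEM-encoded certificate and key.
	for _, name := range []string{"tls.crt", "tls.key"} {
		pemBytes, err := os.ReadFile(name)
		if err != nil {
			panic(err)
		}
		// The output can be used for serve.tls.cert.base64 / serve.tls.key.base64.
		fmt.Printf("%s => %s\n", name, base64.StdEncoding.EncodeToString(pemBytes))
	}
}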
+ session, _ := store.Get(r, name) + + sameSite := conf.CookieSameSiteMode(ctx) + if isLegacyCSRFCookieName(name) { + sameSite = 0 + } + + session.Values["csrf"] = csrfValue + session.Options.HttpOnly = true + session.Options.Secure = conf.CookieSecure(ctx) + session.Options.SameSite = sameSite + session.Options.Domain = conf.CookieDomain(ctx) + session.Options.MaxAge = int(maxAge.Seconds()) + if err := session.Save(r, w); err != nil { + return errors.WithStack(err) + } + + if sameSite == http.SameSiteNoneMode && conf.CookieSameSiteLegacyWorkaround(ctx) { + return setCSRFCookie(ctx, w, r, conf, store, legacyCSRFCookieName(name), csrfValue, maxAge) + } + + return nil +} + +func validateCSRFCookie(ctx context.Context, r *http.Request, conf x.CookieConfigProvider, store sessions.Store, name, expectedCSRF string) error { + if cookie, err := getCSRFCookie(ctx, r, store, conf, name); err != nil { + return errors.WithStack(fosite.ErrRequestForbidden.WithHint("CSRF session cookie could not be decoded.")) + } else if csrf, err := mapx.GetString(cookie.Values, "csrf"); err != nil { + return errors.WithStack(fosite.ErrRequestForbidden.WithHint("No CSRF value available in the session cookie.")) + } else if csrf != expectedCSRF { + return errors.WithStack(fosite.ErrRequestForbidden.WithHint("The CSRF value from the token does not match the CSRF value from the data store.")) + } + + return nil +} + +func getCSRFCookie(ctx context.Context, r *http.Request, store sessions.Store, conf x.CookieConfigProvider, name string) (*sessions.Session, error) { + cookie, err := store.Get(r, name) + if !isLegacyCSRFCookieName(name) && + conf.CookieSameSiteMode(ctx) == http.SameSiteNoneMode && + conf.CookieSameSiteLegacyWorkaround(ctx) && + (err != nil || len(cookie.Values) == 0) { + return store.Get(r, legacyCSRFCookieName(name)) + } + return cookie, err +} + +func legacyCSRFCookieName(name string) string { return name + "_legacy" } +func isLegacyCSRFCookieName(name string) bool { return strings.HasSuffix(name, "_legacy") } diff --git a/consent/handler.go b/consent/handler.go index 99078be690d..ee17592fe54 100644 --- a/consent/handler.go +++ b/consent/handler.go @@ -4,47 +4,41 @@ package consent import ( + "cmp" "encoding/json" "net/http" "net/url" "time" - "github.com/ory/x/pagination/tokenpagination" - - "github.com/ory/x/httprouterx" - - "github.com/julienschmidt/httprouter" "github.com/pkg/errors" - "github.com/ory/fosite" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/x" - "github.com/ory/x/errorsx" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" + "github.com/ory/hydra/v2/x/events" + "github.com/ory/x/httprouterx" + "github.com/ory/x/otelx" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" + "github.com/ory/x/pagination/tokenpagination" "github.com/ory/x/sqlxx" - "github.com/ory/x/stringsx" "github.com/ory/x/urlx" ) type Handler struct { r InternalRegistry - c *config.DefaultProvider } const ( LoginPath = "/oauth2/auth/requests/login" + DevicePath = "/oauth2/auth/requests/device" ConsentPath = "/oauth2/auth/requests/consent" LogoutPath = "/oauth2/auth/requests/logout" SessionsPath = "/oauth2/auth/sessions" ) -func 
NewHandler( - r InternalRegistry, - c *config.DefaultProvider, -) *Handler { - return &Handler{ - c: c, - r: r, - } +func NewHandler(r InternalRegistry) *Handler { + return &Handler{r: r} } func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin) { @@ -63,18 +57,19 @@ func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin) { admin.GET(LogoutPath, h.getOAuth2LogoutRequest) admin.PUT(LogoutPath+"/accept", h.acceptOAuth2LogoutRequest) admin.PUT(LogoutPath+"/reject", h.rejectOAuth2LogoutRequest) + + admin.PUT(DevicePath+"/accept", h.acceptUserCodeRequest) } // Revoke OAuth 2.0 Consent Session Parameters // // swagger:parameters revokeOAuth2ConsentSessions -type revokeOAuth2ConsentSessions struct { +type _ struct { // OAuth 2.0 Consent Subject // // The subject whose consent sessions should be deleted. // // in: query - // required: true Subject string `json:"subject"` // OAuth 2.0 Client ID @@ -84,6 +79,13 @@ type revokeOAuth2ConsentSessions struct { // in: query Client string `json:"client"` + // Consent Request ID + // + // If set, revoke all token chains derived from this particular consent request ID. + // + // in: query + ConsentRequestID string `json:"consent_request_id"` + // Revoke All Consent Sessions // // If set to `true` deletes all consent sessions by the Subject that have been granted. @@ -110,28 +112,38 @@ type revokeOAuth2ConsentSessions struct { // Responses: // 204: emptyResponse // default: errorOAuth2 -func (h *Handler) revokeOAuth2ConsentSessions(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - subject := r.URL.Query().Get("subject") - client := r.URL.Query().Get("client") - allClients := r.URL.Query().Get("all") == "true" - if subject == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'subject' is not defined but should have been.`))) - return - } +func (h *Handler) revokeOAuth2ConsentSessions(w http.ResponseWriter, r *http.Request) { + var ( + subject = r.URL.Query().Get("subject") + clientID = r.URL.Query().Get("client") + consentRequestID = r.URL.Query().Get("consent_request_id") + allClients = r.URL.Query().Get("all") == "true" + ) switch { - case len(client) > 0: - if err := h.r.ConsentManager().RevokeSubjectClientConsentSession(r.Context(), subject, client); err != nil && !errors.Is(err, x.ErrNotFound) { + case consentRequestID != "" && subject == "" && clientID == "": + if err := h.r.ConsentManager().RevokeConsentSessionByID(r.Context(), consentRequestID); err != nil && !errors.Is(err, x.ErrNotFound) { + h.r.Writer().WriteError(w, r, err) + return + } + events.Trace(r.Context(), events.ConsentRevoked, events.WithConsentRequestID(consentRequestID)) + + case consentRequestID == "" && subject != "" && clientID != "" && !allClients: + if err := h.r.ConsentManager().RevokeSubjectClientConsentSession(r.Context(), subject, clientID); err != nil && !errors.Is(err, x.ErrNotFound) { h.r.Writer().WriteError(w, r, err) return } - case allClients: + events.Trace(r.Context(), events.ConsentRevoked, events.WithSubject(subject), events.WithClientID(clientID)) + + case consentRequestID == "" && subject != "" && clientID == "" && allClients: if err := h.r.ConsentManager().RevokeSubjectConsentSession(r.Context(), subject); err != nil && !errors.Is(err, x.ErrNotFound) { h.r.Writer().WriteError(w, r, err) return } + events.Trace(r.Context(), events.ConsentRevoked, events.WithSubject(subject)) + default: - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 
both 'client' and 'all' is not defined but one of them should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Invalid combination of query parameters."))) return } @@ -141,7 +153,7 @@ func (h *Handler) revokeOAuth2ConsentSessions(w http.ResponseWriter, r *http.Req // List OAuth 2.0 Consent Session Parameters // // swagger:parameters listOAuth2ConsentSessions -type listOAuth2ConsentSessions struct { +type _ struct { tokenpagination.RequestParameters // The subject to list the consent sessions for. @@ -149,6 +161,7 @@ type listOAuth2ConsentSessions struct { // in: query // required: true Subject string `json:"subject"` + // The login session id to list the consent sessions for. // // in: query @@ -175,71 +188,78 @@ type listOAuth2ConsentSessions struct { // Responses: // 200: oAuth2ConsentSessions // default: errorOAuth2 -func (h *Handler) listOAuth2ConsentSessions(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) listOAuth2ConsentSessions(w http.ResponseWriter, r *http.Request) { subject := r.URL.Query().Get("subject") if subject == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'subject' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'subject' is not defined but should have been.`))) return } - loginSessionId := r.URL.Query().Get("login_session_id") - page, itemsPerPage := x.ParsePagination(r) + pageKeys := h.r.Config().GetPaginationEncryptionKeys(r.Context()) + pageOpts, err := keysetpagination.ParseQueryParams(pageKeys, r.URL.Query()) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHintf("Unable to parse pagination parameters: %s", err))) + return + } - var s []AcceptOAuth2ConsentRequest - var err error - if len(loginSessionId) == 0 { - s, err = h.r.ConsentManager().FindSubjectsGrantedConsentRequests(r.Context(), subject, itemsPerPage, itemsPerPage*page) + var requests []flow.Flow + var nextPage *keysetpagination.Paginator + if loginSessionID := r.URL.Query().Get("login_session_id"); len(loginSessionID) == 0 { + requests, nextPage, err = h.r.ConsentManager().FindSubjectsGrantedConsentRequests(r.Context(), subject, pageOpts...) } else { - s, err = h.r.ConsentManager().FindSubjectsSessionGrantedConsentRequests(r.Context(), subject, loginSessionId, itemsPerPage, itemsPerPage*page) + requests, nextPage, err = h.r.ConsentManager().FindSubjectsSessionGrantedConsentRequests(r.Context(), subject, loginSessionID, pageOpts...) } if errors.Is(err, ErrNoPreviousConsentFound) { - h.r.Writer().Write(w, r, []OAuth2ConsentSession{}) + h.r.Writer().Write(w, r, []flow.OAuth2ConsentSession{}) return } else if err != nil { h.r.Writer().WriteError(w, r, err) return } - var a []OAuth2ConsentSession - for _, session := range s { - session.ConsentRequest.Client = sanitizeClient(session.ConsentRequest.Client) - a = append(a, OAuth2ConsentSession(session)) - } - - if len(a) == 0 { - a = []OAuth2ConsentSession{} + // For legacy reasons, this API returns the format like below. Internally, we keep a different format. 
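Listing consent sessions now uses keyset page tokens, and the next page is advertised through a Link header (see keysetpagination.SetLinkHeader below) rather than offset-based pagination headers. A rough client-side sketch of following rel="next" links; the admin base URL, path, and query are assumptions about a typical deployment:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// nextPageURL extracts the rel="next" target from an RFC 8288 Link header.
func nextPageURL(h http.Header) string {
	for _, header := range h.Values("Link") {
		for _, link := range strings.Split(header, ",") {
			parts := strings.Split(link, ";")
			if len(parts) < 2 {
				continue
			}
			target := strings.Trim(strings.TrimSpace(parts[0]), "<>")
			for _, param := range parts[1:] {
				if strings.TrimSpace(param) == `rel="next"` {
					return target
				}
			}
		}
	}
	return ""
}

func main() {
	// Base URL, path and query parameters are assumptions.
	url := "http://127.0.0.1:4445/admin/oauth2/auth/sessions/consent?subject=user%3A12345"
	for page := 1; url != ""; page++ {
		resp, err := http.Get(url)
		if err != nil {
			panic(err)
		}
		resp.Body.Close()
		fmt.Printf("fetched page %d (%s)\n", page, resp.Status)
		url = nextPageURL(resp.Header)
	}
}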
+ sessions := make([]*flow.OAuth2ConsentSession, len(requests)) + for i, f := range requests { + sessions[i] = f.ToListConsentSessionResponse() } - n, err := h.r.ConsentManager().CountSubjectsGrantedConsentRequests(r.Context(), subject) - if err != nil { - h.r.Writer().WriteError(w, r, err) - return - } - - x.PaginationHeader(w, r.URL, int64(n), itemsPerPage, itemsPerPage*page) - h.r.Writer().Write(w, r, a) + keysetpagination.SetLinkHeader(w, pageKeys, r.URL, nextPage) + h.r.Writer().Write(w, r, sessions) } // Revoke OAuth 2.0 Consent Login Sessions Parameters // // swagger:parameters revokeOAuth2LoginSessions -type revokeOAuth2LoginSessions struct { +type _ struct { // OAuth 2.0 Subject // // The subject to revoke authentication sessions for. // // in: query - // required: true Subject string `json:"subject"` + + // Login Session ID + // + // The login session to revoke. + // + // in: query + SessionID string `json:"sid"` } // swagger:route DELETE /admin/oauth2/auth/sessions/login oAuth2 revokeOAuth2LoginSessions // -// # Revokes All OAuth 2.0 Login Sessions of a Subject +// # Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID +// +// This endpoint invalidates authentication sessions. After revoking the authentication session(s), the subject +// has to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens. +// +// If you send the subject in a query param, all authentication sessions that belong to that subject are revoked. +// No OpenID Connect Front- or Back-channel logout is performed in this case. // -// This endpoint invalidates a subject's authentication session. After revoking the authentication session, the subject -// has to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens and -// does not work with OpenID Connect Front- or Back-channel logout. +// Alternatively, you can send a SessionID via `sid` query param, in which case, only the session that is connected +// to that SessionID is revoked. OpenID Connect Back-channel logout is performed in this case. +// +// When using Ory for the identity provider, the login provider will also invalidate the session cookie. 
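As described above, the endpoint now accepts either a subject (revoke every authentication session of that subject, without front- or back-channel logout) or a sid (revoke one session, with back-channel logout). A short sketch of the sid variant over plain HTTP; the admin base URL and the session ID are placeholders:

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{"sid": {"ses_1234"}} // hypothetical login session ID
	req, err := http.NewRequest(http.MethodDelete,
		"http://127.0.0.1:4445/admin/oauth2/auth/sessions/login?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect 204 No Content on success
}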
// // Consumes: // - application/json @@ -252,14 +272,26 @@ type revokeOAuth2LoginSessions struct { // Responses: // 204: emptyResponse // default: errorOAuth2 -func (h *Handler) revokeOAuth2LoginSessions(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) revokeOAuth2LoginSessions(w http.ResponseWriter, r *http.Request) { + sid := r.URL.Query().Get("sid") subject := r.URL.Query().Get("subject") - if subject == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'subject' is not defined but should have been.`))) + + if sid == "" && subject == "" { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Either 'subject' or 'sid' query parameters need to be defined.`))) return } - if err := h.r.ConsentManager().RevokeSubjectLoginSession(r.Context(), subject); err != nil { + if sid != "" { + if err := h.r.ConsentStrategy().HandleHeadlessLogout(r.Context(), w, r, sid); err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + + w.WriteHeader(http.StatusNoContent) + return + } + + if err := h.r.LoginManager().RevokeSubjectLoginSession(r.Context(), subject); err != nil { h.r.Writer().WriteError(w, r, err) return } @@ -270,7 +302,7 @@ func (h *Handler) revokeOAuth2LoginSessions(w http.ResponseWriter, r *http.Reque // Get OAuth 2.0 Login Request // // swagger:parameters getOAuth2LoginRequest -type getOAuth2LoginRequest struct { +type _ struct { // OAuth 2.0 Login Request Challenge // // in: query @@ -304,37 +336,45 @@ type getOAuth2LoginRequest struct { // 200: oAuth2LoginRequest // 410: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) getOAuth2LoginRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) getOAuth2LoginRequest(w http.ResponseWriter, r *http.Request) { + var err error + ctx, span := h.r.Tracer(r.Context()).Tracer().Start(r.Context(), "consent.getOAuth2LoginRequest") + defer otelx.End(span, &err) + + challenge := cmp.Or( r.URL.Query().Get("login_challenge"), r.URL.Query().Get("challenge"), ) if challenge == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) return } - request, err := h.r.ConsentManager().GetLoginRequest(r.Context(), challenge) + f, err := flow.DecodeFromLoginChallenge(ctx, h.r, challenge) if err != nil { h.r.Writer().WriteError(w, r, err) return } - if request.WasHandled { - h.r.Writer().WriteCode(w, r, http.StatusGone, &OAuth2RedirectTo{ - RedirectTo: request.RequestURL, + + if f.State.LoginWasUsed() { + h.r.Writer().WriteCode(w, r, http.StatusGone, &flow.OAuth2RedirectTo{ + RedirectTo: f.RequestURL, }) return } - request.Client = sanitizeClient(request.Client) - h.r.Writer().Write(w, r, request) + // Keep compatibility with the old / existing login request format. + lr := f.GetLoginRequest() + lr.ID = challenge // The ID of the login request is the AEAD challenge. 
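From a login UI's point of view, the handler above means a GET with the login_challenge returns either the login request or, once the challenge has already been used, a 410 with a redirect_to target to send the browser back to. A rough sketch under the assumption of a local admin endpoint and the usual JSON field names (skip, subject, redirect_to):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

const adminURL = "http://127.0.0.1:4445" // assumption: local admin API

func main() {
	challenge := "example-login-challenge" // hypothetical value from ?login_challenge=
	resp, err := http.Get(adminURL + "/admin/oauth2/auth/requests/login?login_challenge=" + challenge)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode == http.StatusGone {
		var gone struct {
			RedirectTo string `json:"redirect_to"`
		}
		_ = json.NewDecoder(resp.Body).Decode(&gone)
		fmt.Println("challenge already used, send the browser to:", gone.RedirectTo)
		return
	}

	var lr struct {
		Skip    bool   `json:"skip"`
		Subject string `json:"subject"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&lr); err != nil {
		panic(err)
	}
	fmt.Printf("skip=%v subject=%q\n", lr.Skip, lr.Subject)
}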
+ lr.Client.Secret = "" + h.r.Writer().Write(w, r, lr) } // Accept OAuth 2.0 Login Request // // swagger:parameters acceptOAuth2LoginRequest -type acceptOAuth2LoginRequest struct { +type _ struct { // OAuth 2.0 Login Request Challenge // // in: query @@ -342,7 +382,7 @@ type acceptOAuth2LoginRequest struct { Challenge string `json:"login_challenge"` // in: body - Body HandledLoginRequest + Body flow.HandledLoginRequest } // swagger:route PUT /admin/oauth2/auth/requests/login/accept oAuth2 acceptOAuth2LoginRequest @@ -372,71 +412,86 @@ type acceptOAuth2LoginRequest struct { // Responses: // 200: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) acceptOAuth2LoginRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) acceptOAuth2LoginRequest(w http.ResponseWriter, r *http.Request) { + var err error + ctx, span := h.r.Tracer(r.Context()).Tracer().Start(r.Context(), "consent.acceptOAuth2LoginRequest") + defer otelx.End(span, &err) + + challenge := cmp.Or( r.URL.Query().Get("login_challenge"), r.URL.Query().Get("challenge"), ) if challenge == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) return } - var p HandledLoginRequest - d := json.NewDecoder(r.Body) - d.DisallowUnknownFields() - if err := d.Decode(&p); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHintf("Unable to decode body because: %s", err))) + var payload flow.HandledLoginRequest + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHintf("Unable to decode body because: %s", err))) return } - if p.Subject == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Field 'subject' must not be empty."))) + if payload.Subject == "" { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Field 'subject' must not be empty."))) return } - p.ID = challenge - ar, err := h.r.ConsentManager().GetLoginRequest(r.Context(), challenge) + f, err := flow.DecodeFromLoginChallenge(ctx, h.r, challenge) if err != nil { h.r.Writer().WriteError(w, r, err) return - } else if ar.Subject != "" && p.Subject != ar.Subject { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Field 'subject' does not match subject from previous authentication."))) + } else if f.Subject != "" && payload.Subject != f.Subject { + // The subject that was confirmed by the login screen does not match what we + // remembered in the session cookie. We handle this gracefully by redirecting the + // original authorization request URL, but attaching "prompt=login" to the query. + // This forces the user to log in again. + requestURL, err := url.Parse(f.RequestURL) + if err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(requestURL, url.Values{"prompt": {"login"}}).String(), + }) return } - if ar.Skip { - p.Remember = true // If skip is true remember is also true to allow consecutive calls as the same user! 
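Once the user has authenticated, the login UI completes the flow by PUTting the accept payload to the same challenge and sending the browser to the returned redirect_to, which now carries the login_verifier. A minimal sketch; the admin base URL, the subject value, and the choice of fields are assumptions, only subject and remember are taken from the handler shown here:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	challenge := "example-login-challenge" // hypothetical value from ?login_challenge=
	body, _ := json.Marshal(map[string]any{
		"subject":  "user:12345",
		"remember": true,
	})
	req, err := http.NewRequest(http.MethodPut,
		"http://127.0.0.1:4445/admin/oauth2/auth/requests/login/accept?login_challenge="+challenge,
		bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		RedirectTo string `json:"redirect_to"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println("send the browser to:", out.RedirectTo) // contains the login_verifier
}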
- p.AuthenticatedAt = ar.AuthenticatedAt + if f.LoginSkip { + payload.Remember = true // If skip is true remember is also true to allow consecutive calls as the same user! } else { - p.AuthenticatedAt = sqlxx.NullTime(time.Now().UTC(). + f.LoginAuthenticatedAt = sqlxx.NullTime(time.Now().UTC(). // Rounding is important to avoid SQL time synchronization issues in e.g. MySQL! Truncate(time.Second)) - ar.AuthenticatedAt = p.AuthenticatedAt } - p.RequestedAt = ar.RequestedAt - request, err := h.r.ConsentManager().HandleLoginRequest(r.Context(), challenge, &p) + if err := f.HandleLoginRequest(&payload); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + + ru, err := url.Parse(f.RequestURL) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, err) return } - ru, err := url.Parse(request.RequestURL) + verifier, err := f.ToLoginVerifier(ctx, h.r) if err != nil { h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().Write(w, r, &OAuth2RedirectTo{ - RedirectTo: urlx.SetQuery(ru, url.Values{"login_verifier": {request.Verifier}}).String(), + events.Trace(ctx, events.LoginAccepted, events.WithClientID(f.Client.GetID()), events.WithSubject(payload.Subject)) + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(ru, url.Values{"login_verifier": {verifier}}).String(), }) } // Reject OAuth 2.0 Login Request // // swagger:parameters rejectOAuth2LoginRequest -type rejectOAuth2LoginRequest struct { +type _ struct { // OAuth 2.0 Login Request Challenge // // in: query @@ -444,7 +499,7 @@ type rejectOAuth2LoginRequest struct { Challenge string `json:"login_challenge"` // in: body - Body RequestDeniedError + Body flow.RequestDeniedError } // swagger:route PUT /admin/oauth2/auth/requests/login/reject oAuth2 rejectOAuth2LoginRequest @@ -473,57 +528,63 @@ type rejectOAuth2LoginRequest struct { // Responses: // 200: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) rejectOAuth2LoginRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) rejectOAuth2LoginRequest(w http.ResponseWriter, r *http.Request) { + var err error + ctx, span := h.r.Tracer(r.Context()).Tracer().Start(r.Context(), "consent.rejectOAuth2LoginRequest") + defer otelx.End(span, &err) + + challenge := cmp.Or( r.URL.Query().Get("login_challenge"), r.URL.Query().Get("challenge"), ) + if challenge == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) return } - var p RequestDeniedError - d := json.NewDecoder(r.Body) - d.DisallowUnknownFields() - if err := d.Decode(&p); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHintf("Unable to decode body because: %s", err))) + var payload flow.RequestDeniedError + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHintf("Unable to decode body because: %s", err))) return } - p.valid = true - p.SetDefaults(loginRequestDeniedErrorName) - ar, err := h.r.ConsentManager().GetLoginRequest(r.Context(), challenge) + payload.Valid = true + payload.SetDefaults(flow.LoginRequestDeniedErrorName) + f, err := 
flow.DecodeFromLoginChallenge(ctx, h.r, challenge) if err != nil { h.r.Writer().WriteError(w, r, err) return } - request, err := h.r.ConsentManager().HandleLoginRequest(r.Context(), challenge, &HandledLoginRequest{ - Error: &p, - ID: challenge, - RequestedAt: ar.RequestedAt, - }) + if err := f.HandleLoginError(&payload); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + + verifier, err := f.ToLoginVerifier(ctx, h.r) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, err) return } - ru, err := url.Parse(request.RequestURL) + ru, err := url.Parse(f.RequestURL) if err != nil { h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().Write(w, r, &OAuth2RedirectTo{ - RedirectTo: urlx.SetQuery(ru, url.Values{"login_verifier": {request.Verifier}}).String(), + events.Trace(ctx, events.LoginRejected, events.WithClientID(f.Client.GetID()), events.WithSubject(f.Subject)) + + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(ru, url.Values{"login_verifier": {verifier}}).String(), }) } // Get OAuth 2.0 Consent Request // // swagger:parameters getOAuth2ConsentRequest -type getOAuth2ConsentRequest struct { +type _ struct { // OAuth 2.0 Consent Request Challenge // // in: query @@ -558,44 +619,43 @@ type getOAuth2ConsentRequest struct { // 200: oAuth2ConsentRequest // 410: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) getOAuth2ConsentRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) getOAuth2ConsentRequest(w http.ResponseWriter, r *http.Request) { + var err error + ctx, span := h.r.Tracer(r.Context()).Tracer().Start(r.Context(), "consent.getOAuth2ConsentRequest") + defer otelx.End(span, &err) + + challenge := cmp.Or( r.URL.Query().Get("consent_challenge"), r.URL.Query().Get("challenge"), ) if challenge == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) return } - request, err := h.r.ConsentManager().GetConsentRequest(r.Context(), challenge) + f, err := flow.DecodeFromConsentChallenge(ctx, h.r, challenge) if err != nil { h.r.Writer().WriteError(w, r, err) return } - if request.WasHandled { - h.r.Writer().WriteCode(w, r, http.StatusGone, &OAuth2RedirectTo{ - RedirectTo: request.RequestURL, + + if f.State.ConsentWasUsed() { + h.r.Writer().WriteCode(w, r, http.StatusGone, &flow.OAuth2RedirectTo{ + RedirectTo: f.RequestURL, }) return } - if request.RequestedScope == nil { - request.RequestedScope = []string{} - } - - if request.RequestedAudience == nil { - request.RequestedAudience = []string{} - } - - request.Client = sanitizeClient(request.Client) - h.r.Writer().Write(w, r, request) + // Transform flow to the existing API format. 
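Throughout these handlers the old stringsx.Coalesce call is replaced with cmp.Or from the standard library (Go 1.22+), which returns its first non-zero argument and is used to accept both the new and the legacy challenge parameter names. A tiny illustration:

package main

import (
	"cmp"
	"fmt"
	"net/url"
)

func main() {
	q, _ := url.ParseQuery("challenge=legacy-value")
	// Prefer consent_challenge, fall back to the legacy challenge parameter.
	challenge := cmp.Or(q.Get("consent_challenge"), q.Get("challenge"))
	fmt.Println(challenge) // prints: legacy-value
}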
+ req := f.GetConsentRequest(challenge) + req.Client.Secret = "" + h.r.Writer().Write(w, r, req) } // Accept OAuth 2.0 Consent Request // // swagger:parameters acceptOAuth2ConsentRequest -type acceptOAuth2ConsentRequest struct { +type _ struct { // OAuth 2.0 Consent Request Challenge // // in: query @@ -603,7 +663,7 @@ type acceptOAuth2ConsentRequest struct { Challenge string `json:"consent_challenge"` // in: body - Body AcceptOAuth2ConsentRequest + Body flow.AcceptOAuth2ConsentRequest } // swagger:route PUT /admin/oauth2/auth/requests/consent/accept oAuth2 acceptOAuth2ConsentRequest @@ -638,57 +698,59 @@ type acceptOAuth2ConsentRequest struct { // Responses: // 200: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) acceptOAuth2ConsentRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) acceptOAuth2ConsentRequest(w http.ResponseWriter, r *http.Request) { + var err error + ctx, span := h.r.Tracer(r.Context()).Tracer().Start(r.Context(), "consent.acceptOAuth2ConsentRequest") + defer otelx.End(span, &err) + + challenge := cmp.Or( r.URL.Query().Get("consent_challenge"), r.URL.Query().Get("challenge"), ) if challenge == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) return } - var p AcceptOAuth2ConsentRequest - d := json.NewDecoder(r.Body) - d.DisallowUnknownFields() - if err := d.Decode(&p); err != nil { - h.r.Writer().WriteErrorCode(w, r, http.StatusBadRequest, errorsx.WithStack(err)) + var payload flow.AcceptOAuth2ConsentRequest + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + h.r.Writer().WriteErrorCode(w, r, http.StatusBadRequest, errors.WithStack(err)) return } - cr, err := h.r.ConsentManager().GetConsentRequest(r.Context(), challenge) + f, err := flow.DecodeFromConsentChallenge(ctx, h.r, challenge) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, errors.WithStack(err)) return } - p.ID = challenge - p.RequestedAt = cr.RequestedAt - p.HandledAt = sqlxx.NullTime(time.Now().UTC()) + if err := f.HandleConsentRequest(&payload); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } - hr, err := h.r.ConsentManager().HandleConsentRequest(r.Context(), &p) + ru, err := url.Parse(f.RequestURL) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, err) return - } else if hr.Skip { - p.Remember = false } - ru, err := url.Parse(hr.RequestURL) + verifier, err := f.ToConsentVerifier(ctx, h.r) if err != nil { h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().Write(w, r, &OAuth2RedirectTo{ - RedirectTo: urlx.SetQuery(ru, url.Values{"consent_verifier": {hr.Verifier}}).String(), + events.Trace(ctx, events.ConsentAccepted, events.WithClientID(f.Client.GetID()), events.WithSubject(f.Subject)) + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(ru, url.Values{"consent_verifier": {verifier}}).String(), }) } // Reject OAuth 2.0 Consent Request // // swagger:parameters rejectOAuth2ConsentRequest -type adminRejectOAuth2ConsentRequest struct { +type _ struct { // OAuth 2.0 Consent Request Challenge // // in: query @@ -696,7 +758,7 @@ type adminRejectOAuth2ConsentRequest struct { Challenge 
string `json:"consent_challenge"` // in: body - Body RequestDeniedError + Body flow.RequestDeniedError } // swagger:route PUT /admin/oauth2/auth/requests/consent/reject oAuth2 rejectOAuth2ConsentRequest @@ -730,58 +792,60 @@ type adminRejectOAuth2ConsentRequest struct { // Responses: // 200: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) rejectOAuth2ConsentRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) rejectOAuth2ConsentRequest(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + challenge := cmp.Or( r.URL.Query().Get("consent_challenge"), r.URL.Query().Get("challenge"), ) if challenge == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'challenge' is not defined but should have been.`))) return } - var p RequestDeniedError - d := json.NewDecoder(r.Body) - d.DisallowUnknownFields() - if err := d.Decode(&p); err != nil { - h.r.Writer().WriteErrorCode(w, r, http.StatusBadRequest, errorsx.WithStack(err)) + var payload flow.RequestDeniedError + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + h.r.Writer().WriteErrorCode(w, r, http.StatusBadRequest, errors.WithStack(err)) return } - p.valid = true - p.SetDefaults(consentRequestDeniedErrorName) - hr, err := h.r.ConsentManager().GetConsentRequest(r.Context(), challenge) + payload.Valid = true + payload.SetDefaults(flow.ConsentRequestDeniedErrorName) + f, err := flow.DecodeFromConsentChallenge(ctx, h.r, challenge) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, errors.WithStack(err)) return } - request, err := h.r.ConsentManager().HandleConsentRequest(r.Context(), &AcceptOAuth2ConsentRequest{ - Error: &p, - ID: challenge, - RequestedAt: hr.RequestedAt, - HandledAt: sqlxx.NullTime(time.Now().UTC()), - }) + if err := f.HandleConsentError(&payload); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + + ru, err := url.Parse(f.RequestURL) if err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, err) return } - ru, err := url.Parse(request.RequestURL) + verifier, err := f.ToConsentVerifier(ctx, h.r) if err != nil { h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().Write(w, r, &OAuth2RedirectTo{ - RedirectTo: urlx.SetQuery(ru, url.Values{"consent_verifier": {request.Verifier}}).String(), + events.Trace(ctx, events.ConsentRejected, events.WithClientID(f.Client.GetID()), events.WithSubject(f.Subject)) + + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(ru, url.Values{"consent_verifier": {verifier}}).String(), }) } // Accept OAuth 2.0 Logout Request // // swagger:parameters acceptOAuth2LogoutRequest -type acceptOAuth2LogoutRequest struct { +type _ struct { // OAuth 2.0 Logout Request Challenge // // in: query @@ -805,27 +869,27 @@ type acceptOAuth2LogoutRequest struct { // Responses: // 200: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) acceptOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) acceptOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request) { + challenge := cmp.Or( r.URL.Query().Get("logout_challenge"), r.URL.Query().Get("challenge"), ) - c, err 
:= h.r.ConsentManager().AcceptLogoutRequest(r.Context(), challenge) + c, err := h.r.LogoutManager().AcceptLogoutRequest(r.Context(), challenge) if err != nil { h.r.Writer().WriteError(w, r, err) return } - h.r.Writer().Write(w, r, &OAuth2RedirectTo{ - RedirectTo: urlx.SetQuery(urlx.AppendPaths(h.c.PublicURL(r.Context()), "/oauth2/sessions/logout"), url.Values{"logout_verifier": {c.Verifier}}).String(), + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(urlx.AppendPaths(h.r.Config().PublicURL(r.Context()), "/oauth2/sessions/logout"), url.Values{"logout_verifier": {c.Verifier}}).String(), }) } // Reject OAuth 2.0 Logout Request // // swagger:parameters rejectOAuth2LogoutRequest -type rejectOAuth2LogoutRequest struct { +type _ struct { // in: query // required: true Challenge string `json:"logout_challenge"` @@ -848,13 +912,13 @@ type rejectOAuth2LogoutRequest struct { // Responses: // 204: emptyResponse // default: errorOAuth2 -func (h *Handler) rejectOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) rejectOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request) { + challenge := cmp.Or( r.URL.Query().Get("logout_challenge"), r.URL.Query().Get("challenge"), ) - if err := h.r.ConsentManager().RejectLogoutRequest(r.Context(), challenge); err != nil { + if err := h.r.LogoutManager().RejectLogoutRequest(r.Context(), challenge); err != nil { h.r.Writer().WriteError(w, r, err) return } @@ -865,7 +929,7 @@ func (h *Handler) rejectOAuth2LogoutRequest(w http.ResponseWriter, r *http.Reque // Get OAuth 2.0 Logout Request // // swagger:parameters getOAuth2LogoutRequest -type getOAuth2LogoutRequest struct { +type _ struct { // in: query // required: true Challenge string `json:"logout_challenge"` @@ -886,13 +950,13 @@ type getOAuth2LogoutRequest struct { // 200: oAuth2LogoutRequest // 410: oAuth2RedirectTo // default: errorOAuth2 -func (h *Handler) getOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - challenge := stringsx.Coalesce( +func (h *Handler) getOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request) { + challenge := cmp.Or( r.URL.Query().Get("logout_challenge"), r.URL.Query().Get("challenge"), ) - request, err := h.r.ConsentManager().GetLogoutRequest(r.Context(), challenge) + request, err := h.r.LogoutManager().GetLogoutRequest(r.Context(), challenge) if err != nil { h.r.Writer().WriteError(w, r, err) return @@ -904,7 +968,7 @@ func (h *Handler) getOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request, } if request.WasHandled { - h.r.Writer().WriteCode(w, r, http.StatusGone, &OAuth2RedirectTo{ + h.r.Writer().WriteCode(w, r, http.StatusGone, &flow.OAuth2RedirectTo{ RedirectTo: request.RequestURL, }) return @@ -912,3 +976,119 @@ func (h *Handler) getOAuth2LogoutRequest(w http.ResponseWriter, r *http.Request, h.r.Writer().Write(w, r, request) } + +// Verify OAuth 2.0 User Code Request +// +// swagger:parameters acceptUserCodeRequest +type _ struct { + // in: query + // required: true + Challenge string `json:"device_challenge"` + + // in: body + Body flow.AcceptDeviceUserCodeRequest +} + +// swagger:route PUT /admin/oauth2/auth/requests/device/accept oAuth2 acceptUserCodeRequest +// +// # Accepts a device grant user_code request +// +// Accepts a device grant user_code request +// +// Consumes: +// - application/json +// +// Produces: +// - application/json +// +// Schemes: http, https +// +// Responses: +// 200: oAuth2RedirectTo +// 
default: errorOAuth2 +func (h *Handler) acceptUserCodeRequest(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + challenge := r.URL.Query().Get("device_challenge") + if challenge == "" { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'device_challenge' is not defined but should have been.`))) + return + } + + var reqBody flow.AcceptDeviceUserCodeRequest + if err := json.NewDecoder(r.Body).Decode(&reqBody); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHintf("Unable to decode request body: %s", err.Error()))) + return + } + + if reqBody.UserCode == "" { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Field 'user_code' must not be empty."))) + return + } + + f, err := flow.DecodeFromDeviceChallenge(ctx, h.r, challenge) + if err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + + userCodeSignature, err := h.r.UserCodeStrategy().UserCodeSignature(r.Context(), reqBody.UserCode) + if err != nil { + h.r.Writer().WriteError(w, r, fosite.ErrServerError.WithWrap(err).WithHint(`The 'user_code' signature could not be computed.`)) + return + } + + userCodeRequest, err := h.r.OAuth2Storage().GetUserCodeSession(r.Context(), userCodeSignature, nil) + if err != nil { + h.r.Writer().WriteError(w, r, fosite.ErrInvalidRequest.WithWrap(err).WithHint(`The 'user_code' session could not be found or has expired or is otherwise malformed.`)) + return + } + + if err := h.r.UserCodeStrategy().ValidateUserCode(ctx, userCodeRequest, reqBody.UserCode); err != nil { + h.r.Writer().WriteError(w, r, fosite.ErrInvalidRequest.WithWrap(err).WithHint(`The 'user_code' session could not be found or has expired or is otherwise malformed.`)) + return + } + + p := flow.HandledDeviceUserAuthRequest{ + Client: userCodeRequest.GetClient().(*client.Client), + DeviceCodeRequestID: userCodeRequest.GetID(), + RequestedScope: []string(userCodeRequest.GetRequestedScopes()), + RequestedAudience: []string(userCodeRequest.GetRequestedAudience()), + } + + // Append the client_id to the original RequestURL, as it is needed for the login flow + reqURL, err := url.Parse(f.RequestURL) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + + if reqURL.Query().Get("client_id") == "" { + q := reqURL.Query() + q.Add("client_id", userCodeRequest.GetClient().GetID()) + reqURL.RawQuery = q.Encode() + } + + f.RequestURL = reqURL.String() + if err := f.HandleDeviceUserAuthRequest(&p); err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + + ru, err := url.Parse(f.RequestURL) + if err != nil { + h.r.Writer().WriteError(w, r, fosite.ErrInvalidRequest.WithWrap(err).WithHint(`Unable to parse the request_url.`)) + return + } + + verifier, err := f.ToDeviceVerifier(ctx, h.r) + if err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + + events.Trace(ctx, events.DeviceUserCodeAccepted, events.WithClientID(userCodeRequest.GetClient().GetID())) + h.r.Writer().Write(w, r, &flow.OAuth2RedirectTo{ + RedirectTo: urlx.SetQuery(ru, url.Values{"device_verifier": {verifier}, "client_id": {userCodeRequest.GetClient().GetID()}}).String(), + }) +} diff --git a/consent/handler_test.go b/consent/handler_test.go index ca4e10fa596..6a46a39ba7f 100644 --- a/consent/handler_test.go +++ b/consent/handler_test.go @@ -5,275 +5,461 @@ package consent_test import ( "bytes" - "context" "encoding/json" - "fmt" "net/http" "net/http/httptest" "testing" "time" - 
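A sketch of the matching device-verification UI handler, assuming a hypothetical hydraAdminURL constant and form field name: it forwards the user_code together with the device_challenge to the accept endpoint above and then redirects the browser to the returned redirect_to, which the handler extends with device_verifier and client_id.

package main

import (
	"bytes"
	"encoding/json"
	"net/http"
	"net/url"
)

// hydraAdminURL is an assumed deployment detail for this sketch.
const hydraAdminURL = "http://127.0.0.1:4445"

// handleVerify receives the user_code typed in by the end user, forwards it to
// Hydra's admin API, and sends the browser on to the login/consent flow.
func handleVerify(w http.ResponseWriter, r *http.Request) {
	challenge := r.URL.Query().Get("device_challenge")
	userCode := r.FormValue("user_code")

	payload, err := json.Marshal(map[string]string{"user_code": userCode})
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	endpoint := hydraAdminURL + "/admin/oauth2/auth/requests/device/accept?device_challenge=" +
		url.QueryEscape(challenge)
	req, err := http.NewRequestWithContext(r.Context(), http.MethodPut, endpoint, bytes.NewReader(payload))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	req.Header.Set("Content-Type", "application/json")

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadGateway)
		return
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		http.Error(w, "user_code was not accepted", http.StatusBadRequest)
		return
	}

	var redirect struct {
		RedirectTo string `json:"redirect_to"`
	}
	if err := json.NewDecoder(res.Body).Decode(&redirect); err != nil {
		http.Error(w, err.Error(), http.StatusBadGateway)
		return
	}
	// The redirect_to URL now carries device_verifier and client_id.
	http.Redirect(w, r, redirect.RedirectTo, http.StatusFound)
}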
"github.com/ory/x/pointerx" - - "github.com/ory/hydra/consent" - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - "github.com/ory/x/sqlxx" - - "github.com/ory/hydra/internal" - + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/client" - . "github.com/ory/hydra/consent" + "github.com/ory/hydra/v2/client" + . "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/ioutilx" + "github.com/ory/x/prometheusx" + "github.com/ory/x/sqlxx" + "github.com/ory/x/uuidx" ) func TestGetLogoutRequest(t *testing.T) { - for k, tc := range []struct { - exists bool - handled bool - status int - }{ - {false, false, http.StatusNotFound}, - {true, false, http.StatusOK}, - {true, true, http.StatusGone}, - } { - t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { - key := fmt.Sprint(k) - challenge := "challenge" + key - requestURL := "http://192.0.2.1" - - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - - if tc.exists { - cl := &client.Client{LegacyClientID: "client" + key} - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cl)) - require.NoError(t, reg.ConsentManager().CreateLogoutRequest(context.TODO(), &LogoutRequest{ - Client: cl, - ID: challenge, - WasHandled: tc.handled, - RequestURL: requestURL, - })) - } - - h := NewHandler(reg, conf) - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r) - ts := httptest.NewServer(r) - defer ts.Close() - - c := &http.Client{} - resp, err := c.Get(ts.URL + "/admin" + LogoutPath + "?challenge=" + challenge) - require.NoError(t, err) - require.EqualValues(t, tc.status, resp.StatusCode) - - if tc.handled { - var result OAuth2RedirectTo - require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.Equal(t, requestURL, result.RedirectTo) - } else if tc.exists { - var result LogoutRequest - require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.Equal(t, challenge, result.ID) - require.Equal(t, requestURL, result.RequestURL) - } - }) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t) + + h := NewHandler(reg) + r := x.NewRouterAdmin(prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date)) + h.SetRoutes(r) + ts := httptest.NewServer(r) + defer ts.Close() + + cl := &client.Client{ + ID: "test client id", + Name: "test client name", } + require.NoError(t, reg.ClientManager().CreateClient(t.Context(), cl)) + + requestURL := "http://192.0.2.1" + + t.Run("unhandled logout request", func(t *testing.T) { + challenge := "test-challenge-unhandled" + require.NoError(t, reg.LogoutManager().CreateLogoutRequest(t.Context(), &flow.LogoutRequest{ + Client: cl, + ID: challenge, + RequestURL: requestURL, + Verifier: uuidx.NewV4().String(), + SessionID: "test-session-id", + Subject: "test-subject", + })) + + resp, err := ts.Client().Get(ts.URL + "/admin" + LogoutPath + 
"?challenge=" + challenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusOK, resp.StatusCode) + + var result flow.LogoutRequest + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Equal(t, challenge, result.ID) + assert.Equal(t, requestURL, result.RequestURL) + }) + + t.Run("handled logout request", func(t *testing.T) { + challenge := "test-challenge-handled" + require.NoError(t, reg.LogoutManager().CreateLogoutRequest(t.Context(), &flow.LogoutRequest{ + Client: cl, + ID: challenge, + RequestURL: requestURL, + Verifier: uuidx.NewV4().String(), + WasHandled: true, + })) + + resp, err := ts.Client().Get(ts.URL + "/admin" + LogoutPath + "?challenge=" + challenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusGone, resp.StatusCode) + + var result flow.OAuth2RedirectTo + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Equal(t, requestURL, result.RedirectTo) + }) + + t.Run("unknown challenge", func(t *testing.T) { + resp, err := ts.Client().Get(ts.URL + "/admin" + LogoutPath + "?challenge=unknown-challenge") + require.NoError(t, err) + assert.EqualValuesf(t, http.StatusNotFound, resp.StatusCode, "%s", ioutilx.MustReadAll(resp.Body)) + }) } func TestGetLoginRequest(t *testing.T) { - for k, tc := range []struct { - exists bool - handled bool - status int - }{ - {false, false, http.StatusNotFound}, - {true, false, http.StatusOK}, - {true, true, http.StatusGone}, - } { - t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { - key := fmt.Sprint(k) - challenge := "challenge" + key - requestURL := "http://192.0.2.1" - - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - - if tc.exists { - cl := &client.Client{LegacyClientID: "client" + key} - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cl)) - require.NoError(t, reg.ConsentManager().CreateLoginRequest(context.Background(), &LoginRequest{ - Client: cl, - ID: challenge, - RequestURL: requestURL, - })) - - if tc.handled { - _, err := reg.ConsentManager().HandleLoginRequest(context.Background(), challenge, &HandledLoginRequest{ID: challenge, WasHandled: true}) - require.NoError(t, err) - } - } - - h := NewHandler(reg, conf) - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r) - ts := httptest.NewServer(r) - defer ts.Close() - - c := &http.Client{} - resp, err := c.Get(ts.URL + "/admin" + LoginPath + "?challenge=" + challenge) - require.NoError(t, err) - require.EqualValues(t, tc.status, resp.StatusCode) - - if tc.handled { - var result OAuth2RedirectTo - require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.Equal(t, requestURL, result.RedirectTo) - } else if tc.exists { - var result LoginRequest - require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.Equal(t, challenge, result.ID) - require.Equal(t, requestURL, result.RequestURL) - require.NotNil(t, result.Client) - } - }) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t) + + h := NewHandler(reg) + r := x.NewRouterAdmin(prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date)) + h.SetRoutes(r) + ts := httptest.NewServer(r) + defer ts.Close() + + cl := &client.Client{ + ID: "test client id", + Name: "test client name", } + + requestURL := "http://192.0.2.1" + + f := &flow.Flow{ + Client: cl, + RequestURL: requestURL, + RequestedAt: time.Now(), + State: flow.FlowStateLoginUnused, + NID: reg.Persister().NetworkID(t.Context()), + 
RequestedAudience: []string{"audience1", "audience2"}, + RequestedScope: []string{"scope1", "scope2"}, + Subject: "test subject", + SessionID: "test session id", + } + + unhandledChallenge, err := f.ToLoginChallenge(t.Context(), reg) + require.NoError(t, err) + + t.Run("unhandled flow", func(t *testing.T) { + resp, err := ts.Client().Get(ts.URL + "/admin" + LoginPath + "?challenge=" + unhandledChallenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusOK, resp.StatusCode) + + var result flow.LoginRequest + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Equal(t, unhandledChallenge, result.ID) + assert.Equal(t, requestURL, result.RequestURL) + assert.NotNil(t, result.Client) + }) + + t.Run("handled flow", func(t *testing.T) { + f.State = flow.FlowStateLoginUnused + require.NoError(t, f.InvalidateLoginRequest()) + handledChallenge, err := f.ToLoginChallenge(t.Context(), reg) + require.NoError(t, err) + + resp, err := ts.Client().Get(ts.URL + "/admin" + LoginPath + "?challenge=" + handledChallenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusGone, resp.StatusCode) + + var result flow.OAuth2RedirectTo + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Equal(t, requestURL, result.RedirectTo) + }) + + t.Run("unknown challenge", func(t *testing.T) { + resp, err := ts.Client().Get(ts.URL + "/admin" + LoginPath + "?challenge=unknown-challenge") + require.NoError(t, err) + assert.EqualValuesf(t, http.StatusNotFound, resp.StatusCode, "%s", ioutilx.MustReadAll(resp.Body)) + }) } func TestGetConsentRequest(t *testing.T) { - for k, tc := range []struct { - exists bool - handled bool - status int - }{ - {false, false, http.StatusNotFound}, - {true, false, http.StatusOK}, - {true, true, http.StatusGone}, - } { - t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { - key := fmt.Sprint(k) - challenge := "challenge" + key - requestURL := "http://192.0.2.1" - - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - - if tc.exists { - cl := &client.Client{LegacyClientID: "client" + key} - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cl)) - lr := &LoginRequest{ID: "login-" + challenge, Client: cl, RequestURL: requestURL} - require.NoError(t, reg.ConsentManager().CreateLoginRequest(context.Background(), lr)) - _, err := reg.ConsentManager().HandleLoginRequest(context.Background(), lr.ID, &consent.HandledLoginRequest{ - ID: lr.ID, - }) - require.NoError(t, err) - require.NoError(t, reg.ConsentManager().CreateConsentRequest(context.Background(), &OAuth2ConsentRequest{ - Client: cl, - ID: challenge, - Verifier: challenge, - CSRF: challenge, - LoginChallenge: sqlxx.NullString(lr.ID), - })) - - if tc.handled { - _, err := reg.ConsentManager().HandleConsentRequest(context.Background(), &AcceptOAuth2ConsentRequest{ - ID: challenge, - WasHandled: true, - HandledAt: sqlxx.NullTime(time.Now()), - }) - require.NoError(t, err) - } - } - - h := NewHandler(reg, conf) - - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r) - ts := httptest.NewServer(r) - defer ts.Close() - - c := &http.Client{} - resp, err := c.Get(ts.URL + "/admin" + ConsentPath + "?challenge=" + challenge) - require.NoError(t, err) - require.EqualValues(t, tc.status, resp.StatusCode) - - if tc.handled { - var result OAuth2RedirectTo - require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.Equal(t, requestURL, result.RedirectTo) - } else if tc.exists { - var result 
OAuth2ConsentRequest - require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.Equal(t, challenge, result.ID) - require.Equal(t, requestURL, result.RequestURL) - require.NotNil(t, result.Client) - } - }) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t) + + h := NewHandler(reg) + r := x.NewRouterAdmin(prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date)) + h.SetRoutes(r) + ts := httptest.NewServer(r) + defer ts.Close() + + cl := &client.Client{ + ID: "test client id", + Name: "test client name", + } + + requestURL := "http://192.0.2.1" + consentRequestID := "test consent request id" + + f := &flow.Flow{ + Client: cl, + RequestURL: requestURL, + RequestedAt: time.Now(), + State: flow.FlowStateConsentUnused, + NID: reg.Persister().NetworkID(t.Context()), + ConsentRequestID: sqlxx.NullString(consentRequestID), } + + unhandledChallenge, err := f.ToConsentChallenge(t.Context(), reg) + require.NoError(t, err) + + t.Run("unhandled flow", func(t *testing.T) { + resp, err := ts.Client().Get(ts.URL + "/admin" + ConsentPath + "?challenge=" + unhandledChallenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusOK, resp.StatusCode) + + var result flow.OAuth2ConsentRequest + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Equal(t, unhandledChallenge, result.Challenge) + assert.Equal(t, requestURL, result.RequestURL) + assert.NotNil(t, result.Client) + }) + + t.Run("handled flow", func(t *testing.T) { + f.State = flow.FlowStateConsentUnused + require.NoError(t, f.InvalidateConsentRequest()) + handledChallenge, err := f.ToConsentChallenge(t.Context(), reg) + require.NoError(t, err) + + resp, err := ts.Client().Get(ts.URL + "/admin" + ConsentPath + "?challenge=" + handledChallenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusGone, resp.StatusCode) + + var result flow.OAuth2RedirectTo + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Equal(t, requestURL, result.RedirectTo) + }) + + t.Run("unknown challenge", func(t *testing.T) { + resp, err := ts.Client().Get(ts.URL + "/admin" + ConsentPath + "?challenge=unknown-challenge") + require.NoError(t, err) + assert.EqualValuesf(t, http.StatusNotFound, resp.StatusCode, "%s", ioutilx.MustReadAll(resp.Body)) + }) } -func TestGetLoginRequestWithDuplicateAccept(t *testing.T) { - t.Run("Test get login request with duplicate accept", func(t *testing.T) { - challenge := "challenge" - requestURL := "http://192.0.2.1" +func TestAcceptLoginRequestDouble(t *testing.T) { + t.Parallel() - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + requestURL := "http://192.0.2.1" - cl := &client.Client{LegacyClientID: "client"} - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cl)) - require.NoError(t, reg.ConsentManager().CreateLoginRequest(context.Background(), &LoginRequest{ - Client: cl, - ID: challenge, - RequestURL: requestURL, - })) + reg := testhelpers.NewRegistryMemory(t) + + f := flow.Flow{ + Client: &client.Client{ID: "client"}, + RequestURL: requestURL, + RequestedAt: time.Now(), + NID: reg.Persister().NetworkID(t.Context()), + State: flow.FlowStateLoginUnused, + } + challenge, err := f.ToLoginChallenge(t.Context(), reg) + require.NoError(t, err) + + h := NewHandler(reg) + r := x.NewRouterAdmin(prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date)) + 
h.SetRoutes(r) + ts := httptest.NewServer(r) + defer ts.Close() - h := NewHandler(reg, conf) - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r) - ts := httptest.NewServer(r) - defer ts.Close() + // marshal User to json + acceptLoginJson, err := json.Marshal(&flow.HandledLoginRequest{Subject: "sub123"}) + require.NoError(t, err) - c := &http.Client{} + req, err := http.NewRequest(http.MethodPut, ts.URL+"/admin"+LoginPath+"/accept?challenge="+challenge, bytes.NewReader(acceptLoginJson)) + require.NoError(t, err) - sub := "sub123" - acceptLogin := &hydra.AcceptOAuth2LoginRequest{Remember: pointerx.Bool(true), Subject: sub} + for range 2 { + resp, err := ts.Client().Do(req) + require.NoError(t, err) + require.EqualValues(t, http.StatusOK, resp.StatusCode) + + var result flow.OAuth2RedirectTo + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + require.Contains(t, result.RedirectTo, "login_verifier") + } +} + +func TestAcceptCodeDeviceRequest(t *testing.T) { + requestURL := "https://hydra.example.com/" + oauth2.DeviceVerificationPath - // marshal User to json - acceptLoginJson, err := json.Marshal(acceptLogin) - if err != nil { - panic(err) - } + reg := testhelpers.NewRegistryMemory(t) + + cl := &client.Client{ID: "client"} + require.NoError(t, reg.ClientManager().CreateClient(t.Context(), cl)) + f := &flow.Flow{ + Client: cl, + RequestURL: requestURL, + RequestedAt: time.Now(), + State: flow.DeviceFlowStateUnused, + } + f.NID = reg.Networker().NetworkID(t.Context()) + challenge, err := f.ToDeviceChallenge(t.Context(), reg) + require.NoError(t, err) + + h := NewHandler(reg) + r := x.NewRouterAdmin(prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date)) + h.SetRoutes(r) + ts := httptest.NewServer(r) + t.Cleanup(ts.Close) + + submitCode := func(t *testing.T, reqBody any, challenge string) *http.Response { + body, err := json.Marshal(reqBody) + require.NoError(t, err) // set the HTTP method, url, and request body - req, err := http.NewRequest(http.MethodPut, ts.URL+"/admin"+LoginPath+"/accept?challenge="+challenge, bytes.NewBuffer(acceptLoginJson)) - if err != nil { - panic(err) - } + req, err := http.NewRequest(http.MethodPut, ts.URL+"/admin"+DevicePath+"/accept?device_challenge="+challenge, bytes.NewReader(body)) + require.NoError(t, err) + + resp, err := ts.Client().Do(req) + require.NoError(t, err) + + return resp + } + + t.Run("case=successful user_code submission", func(t *testing.T) { + deviceRequest := fosite.NewDeviceRequest() + deviceRequest.Client = cl + deviceRequest.SetSession(oauth2.NewTestSession(t, "test-subject")) - resp, err := c.Do(req) + _, deviceCodeSig, err := reg.DeviceCodeStrategy().GenerateDeviceCode(t.Context()) + require.NoError(t, err) + userCode, sig, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) require.NoError(t, err) + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(t.Context(), deviceCodeSig, sig, deviceRequest)) + + resp := submitCode(t, &flow.AcceptDeviceUserCodeRequest{UserCode: userCode}, challenge) require.EqualValues(t, http.StatusOK, resp.StatusCode) - var result OAuth2RedirectTo + var result flow.OAuth2RedirectTo require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) - require.NotNil(t, result.RedirectTo) - require.Contains(t, result.RedirectTo, "login_verifier") + assert.Contains(t, result.RedirectTo, requestURL) + assert.Contains(t, result.RedirectTo, "device_verifier") + + t.Run("double submit", func(t *testing.T) { + resp := 
submitCode(t, &flow.AcceptDeviceUserCodeRequest{UserCode: userCode}, challenge) + require.NoError(t, err) + require.EqualValues(t, http.StatusOK, resp.StatusCode) + + var result flow.OAuth2RedirectTo + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.RedirectTo, requestURL) + assert.Contains(t, result.RedirectTo, "device_verifier") + }) + }) + + t.Run("case=random user_code, not persisted in the database", func(t *testing.T) { + userCode, _, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) + require.NoError(t, err) + + resp := submitCode(t, &flow.AcceptDeviceUserCodeRequest{UserCode: userCode}, challenge) + require.EqualValues(t, http.StatusBadRequest, resp.StatusCode) + + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, fosite.ErrInvalidRequest.DescriptionField) + assert.Contains(t, result.DescriptionField, "The 'user_code' session could not be found or has expired or is otherwise malformed.") + }) + + t.Run("case=empty user_code", func(t *testing.T) { + resp := submitCode(t, &flow.AcceptDeviceUserCodeRequest{UserCode: ""}, challenge) + require.EqualValues(t, http.StatusBadRequest, resp.StatusCode) + + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, fosite.ErrInvalidRequest.DescriptionField) + assert.Contains(t, result.DescriptionField, "'user_code' must not be empty") + }) + + t.Run("case=empty challenge", func(t *testing.T) { + userCode, _, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) + require.NoError(t, err) + resp := submitCode(t, &flow.AcceptDeviceUserCodeRequest{UserCode: userCode}, "") + require.EqualValues(t, http.StatusBadRequest, resp.StatusCode) + + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, fosite.ErrInvalidRequest.DescriptionField) + assert.Contains(t, result.DescriptionField, "'device_challenge' is not defined but should have been") + }) + + t.Run("case=invalid challenge", func(t *testing.T) { + userCode, _, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) + require.NoError(t, err) + resp := submitCode(t, &hydra.AcceptDeviceUserCodeRequest{UserCode: &userCode}, "invalid-challenge") + require.EqualValues(t, http.StatusNotFound, resp.StatusCode) + + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, x.ErrNotFound.DescriptionField) + }) + + t.Run("case=expired user_code", func(t *testing.T) { + deviceRequest := fosite.NewDeviceRequest() + deviceRequest.Client = cl + deviceRequest.SetSession(oauth2.NewTestSession(t, "test-subject")) + deviceRequest.Session.SetExpiresAt(fosite.UserCode, time.Now().Add(-time.Hour).UTC()) + + _, deviceCodeSig, err := reg.DeviceCodeStrategy().GenerateDeviceCode(t.Context()) + require.NoError(t, err) + userCode, sig, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) + require.NoError(t, err) + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(t.Context(), deviceCodeSig, sig, deviceRequest)) + + resp := submitCode(t, &flow.AcceptDeviceUserCodeRequest{UserCode: userCode}, challenge) + require.EqualValues(t, http.StatusBadRequest, resp.StatusCode) - req2, err := http.NewRequest(http.MethodPut, ts.URL+"/admin"+LoginPath+"/accept?challenge="+challenge, 
bytes.NewBuffer(acceptLoginJson)) - if err != nil { - panic(err) - } + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, fosite.ErrInvalidRequest.DescriptionField) + assert.Contains(t, result.DescriptionField, "The 'user_code' session could not be found or has expired or is otherwise malformed.") + }) + + t.Run("case=accepted user_code", func(t *testing.T) { + deviceRequest := fosite.NewDeviceRequest() + deviceRequest.Client = cl + deviceRequest.SetSession(oauth2.NewTestSession(t, "test-subject")) + deviceRequest.UserCodeState = fosite.UserCodeAccepted + + _, deviceCodeSig, err := reg.DeviceCodeStrategy().GenerateDeviceCode(t.Context()) + require.NoError(t, err) + userCode, sig, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) + require.NoError(t, err) + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(t.Context(), deviceCodeSig, sig, deviceRequest)) + + resp := submitCode(t, &hydra.AcceptDeviceUserCodeRequest{UserCode: &userCode}, challenge) + require.EqualValues(t, http.StatusBadRequest, resp.StatusCode) + + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, fosite.ErrInvalidRequest.DescriptionField) + assert.Contains(t, result.DescriptionField, "The 'user_code' session could not be found or has expired or is otherwise malformed.") + }) - resp2, err := c.Do(req2) + t.Run("case=rejected user_code", func(t *testing.T) { + deviceRequest := fosite.NewDeviceRequest() + deviceRequest.Client = cl + deviceRequest.SetSession(oauth2.NewTestSession(t, "test-subject")) + deviceRequest.UserCodeState = fosite.UserCodeRejected + + _, deviceCodesig, err := reg.DeviceCodeStrategy().GenerateDeviceCode(t.Context()) + require.NoError(t, err) + userCode, sig, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) + require.NoError(t, err) + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(t.Context(), deviceCodesig, sig, deviceRequest)) + + resp := submitCode(t, &hydra.AcceptDeviceUserCodeRequest{UserCode: &userCode}, challenge) + require.EqualValues(t, http.StatusBadRequest, resp.StatusCode) + + result := fosite.RFC6749Error{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&result)) + assert.Contains(t, result.DescriptionField, fosite.ErrInvalidRequest.DescriptionField) + assert.Contains(t, result.DescriptionField, "The 'user_code' session could not be found or has expired or is otherwise malformed.") + }) + + t.Run("case=extra fields", func(t *testing.T) { + deviceRequest := fosite.NewDeviceRequest() + deviceRequest.Client = cl + deviceRequest.SetSession(oauth2.NewTestSession(t, "test-subject")) + + _, deviceCodeSig, err := reg.DeviceCodeStrategy().GenerateDeviceCode(t.Context()) + require.NoError(t, err) + userCode, sig, err := reg.UserCodeStrategy().GenerateUserCode(t.Context()) require.NoError(t, err) - require.EqualValues(t, http.StatusOK, resp2.StatusCode) + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(t.Context(), deviceCodeSig, sig, deviceRequest)) - var result2 OAuth2RedirectTo - require.NoError(t, json.NewDecoder(resp2.Body).Decode(&result2)) - require.NotNil(t, result2.RedirectTo) - require.Contains(t, result2.RedirectTo, "login_verifier") + resp := submitCode(t, map[string]string{ + "user_code": userCode, + "extra": "extra", + }, challenge) + assert.EqualValues(t, http.StatusOK, resp.StatusCode) }) } diff --git a/consent/helper.go 
b/consent/helper.go index 42ed8963a6b..288999530d3 100644 --- a/consent/helper.go +++ b/consent/helper.go @@ -4,21 +4,11 @@ package consent import ( - "net/http" + "net/url" "strings" - "time" - - "github.com/ory/hydra/x" - - "github.com/ory/x/errorsx" - - "github.com/gorilla/sessions" - - "github.com/ory/fosite" - "github.com/ory/x/mapx" - - "github.com/ory/hydra/client" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite" ) func sanitizeClientFromRequest(ar fosite.AuthorizeRequester) *client.Client { @@ -33,75 +23,26 @@ func sanitizeClient(c *client.Client) *client.Client { return cc } -func matchScopes(scopeStrategy fosite.ScopeStrategy, previousConsent []AcceptOAuth2ConsentRequest, requestedScope []string) *AcceptOAuth2ConsentRequest { - for _, cs := range previousConsent { - var found = true - for _, scope := range requestedScope { - if !scopeStrategy(cs.GrantedScope, scope) { - found = false - break - } - } - - if found { - return &cs +func matchScopes(scopeStrategy fosite.ScopeStrategy, grantedScope, requestedScope []string) bool { + for _, scope := range requestedScope { + if !scopeStrategy(grantedScope, scope) { + return false } } - - return nil -} - -func createCsrfSession(w http.ResponseWriter, r *http.Request, conf x.CookieConfigProvider, store sessions.Store, name string, csrfValue string, maxAge time.Duration) error { - // Errors can be ignored here, because we always get a session back. Error typically means that the - // session doesn't exist yet. - session, _ := store.Get(r, name) - - sameSite := conf.CookieSameSiteMode(r.Context()) - if isLegacyCsrfSessionName(name) { - sameSite = 0 - } - - session.Values["csrf"] = csrfValue - session.Options.HttpOnly = true - session.Options.Secure = conf.CookieSecure(r.Context()) - session.Options.SameSite = sameSite - session.Options.Domain = conf.CookieDomain(r.Context()) - session.Options.MaxAge = int(maxAge.Seconds()) - if err := session.Save(r, w); err != nil { - return errorsx.WithStack(err) - } - - if sameSite == http.SameSiteNoneMode && conf.CookieSameSiteLegacyWorkaround(r.Context()) { - return createCsrfSession(w, r, conf, store, legacyCsrfSessionName(name), csrfValue, maxAge) - } - - return nil -} - -func validateCsrfSession(r *http.Request, conf x.CookieConfigProvider, store sessions.Store, name, expectedCSRF string) error { - if cookie, err := getCsrfSession(r, store, conf, name); err != nil { - return errorsx.WithStack(fosite.ErrRequestForbidden.WithHint("CSRF session cookie could not be decoded.")) - } else if csrf, err := mapx.GetString(cookie.Values, "csrf"); err != nil { - return errorsx.WithStack(fosite.ErrRequestForbidden.WithHint("No CSRF value available in the session cookie.")) - } else if csrf != expectedCSRF { - return errorsx.WithStack(fosite.ErrRequestForbidden.WithHint("The CSRF value from the token does not match the CSRF value from the data store.")) - } - - return nil -} - -func getCsrfSession(r *http.Request, store sessions.Store, conf x.CookieConfigProvider, name string) (*sessions.Session, error) { - cookie, err := store.Get(r, name) - if !isLegacyCsrfSessionName(name) && conf.CookieSameSiteMode(r.Context()) == http.SameSiteNoneMode && conf.CookieSameSiteLegacyWorkaround(r.Context()) && (err != nil || len(cookie.Values) == 0) { - return store.Get(r, legacyCsrfSessionName(name)) + return true +} + +func caseInsensitiveFilterParam(q url.Values, key string) url.Values { + query 
:= url.Values{} + key = strings.ToLower(key) + for k, vs := range q { + if key == strings.ToLower(k) { + query.Set(k, "****") + } else { + for _, v := range vs { + query.Add(k, v) + } + } } - return cookie, err -} - -func legacyCsrfSessionName(name string) string { - return name + "_legacy" -} - -func isLegacyCsrfSessionName(name string) bool { - return strings.HasSuffix(name, "_legacy") + return query } diff --git a/consent/helper_test.go b/consent/helper_test.go index 86fcc15940c..71fb3a75792 100644 --- a/consent/helper_test.go +++ b/consent/helper_test.go @@ -7,19 +7,20 @@ import ( "fmt" "net/http" "net/http/httptest" + "net/url" "testing" "time" "github.com/golang/mock/gomock" - "github.com/ory/hydra/internal/mock" + "github.com/ory/hydra/v2/internal/mock" "github.com/gorilla/securecookie" "github.com/gorilla/sessions" "github.com/stretchr/testify/assert" - "github.com/ory/fosite" - "github.com/ory/hydra/client" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite" ) func TestSanitizeClient(t *testing.T) { @@ -38,52 +39,27 @@ func TestSanitizeClient(t *testing.T) { func TestMatchScopes(t *testing.T) { for k, tc := range []struct { - granted []AcceptOAuth2ConsentRequest - requested []string - expectChallenge string - }{ - { - granted: []AcceptOAuth2ConsentRequest{{ID: "1", GrantedScope: []string{"foo", "bar"}}}, - requested: []string{"foo", "bar"}, - expectChallenge: "1", - }, - { - granted: []AcceptOAuth2ConsentRequest{{ID: "1", GrantedScope: []string{"foo", "bar"}}}, - requested: []string{"foo", "bar", "baz"}, - expectChallenge: "", - }, - { - granted: []AcceptOAuth2ConsentRequest{ - {ID: "1", GrantedScope: []string{"foo", "bar"}}, - {ID: "2", GrantedScope: []string{"foo", "bar"}}, - }, - requested: []string{"foo", "bar"}, - expectChallenge: "1", - }, - { - granted: []AcceptOAuth2ConsentRequest{ - {ID: "1", GrantedScope: []string{"foo", "bar"}}, - {ID: "2", GrantedScope: []string{"foo", "bar", "baz"}}, - }, - requested: []string{"foo", "bar", "baz"}, - expectChallenge: "2", - }, - { - granted: []AcceptOAuth2ConsentRequest{ - {ID: "1", GrantedScope: []string{"foo", "bar"}}, - {ID: "2", GrantedScope: []string{"foo", "bar", "baz"}}, - }, - requested: []string{"zab"}, - expectChallenge: "", - }, - } { + granted, requested []string + expected bool + }{{ + granted: []string{"foo", "bar"}, + requested: []string{"foo", "bar"}, + expected: true, + }, { + granted: []string{"foo", "bar"}, + requested: []string{"foo", "bar", "baz"}, + expected: false, + }, { + granted: []string{"foo", "bar"}, + requested: []string{"foo"}, + expected: true, + }, { + granted: []string{"foo", "bar"}, + requested: []string{"zab", "baz"}, + expected: false, + }} { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { - got := matchScopes(fosite.ExactScopeStrategy, tc.granted, tc.requested) - if tc.expectChallenge == "" { - assert.Nil(t, got) - return - } - assert.Equal(t, tc.expectChallenge, got.ID) + assert.Equal(t, tc.expected, matchScopes(fosite.ExactScopeStrategy, tc.granted, tc.requested)) }) } } @@ -166,7 +142,7 @@ func TestValidateCsrfSession(t *testing.T) { { cookies: []cookie{ { - name: legacyCsrfSessionName(name), + name: legacyCSRFCookieName(name), csrfValue: "CSRF-VALUE", sameSite: http.SameSiteDefaultMode, }, @@ -178,7 +154,7 @@ func TestValidateCsrfSession(t *testing.T) { { cookies: []cookie{ { - name: legacyCsrfSessionName(name), + name: 
legacyCSRFCookieName(name), csrfValue: "CSRF-VALUE", sameSite: http.SameSiteDefaultMode, }, @@ -191,7 +167,7 @@ func TestValidateCsrfSession(t *testing.T) { { cookies: []cookie{ { - name: legacyCsrfSessionName(name), + name: legacyCSRFCookieName(name), csrfValue: "CSRF-VALUE", sameSite: http.SameSiteDefaultMode, }, @@ -209,7 +185,7 @@ func TestValidateCsrfSession(t *testing.T) { sameSite: http.SameSiteNoneMode, }, { - name: legacyCsrfSessionName(name), + name: legacyCSRFCookieName(name), csrfValue: "CSRF-VALUE", sameSite: http.SameSiteDefaultMode, }, @@ -226,7 +202,7 @@ func TestValidateCsrfSession(t *testing.T) { sameSite: http.SameSiteNoneMode, }, { - name: legacyCsrfSessionName(name), + name: legacyCSRFCookieName(name), csrfValue: "CSRF-VALUE", sameSite: http.SameSiteDefaultMode, }, @@ -266,7 +242,7 @@ func TestValidateCsrfSession(t *testing.T) { assert.NoError(t, err, "failed to save cookie %s", c.name) } - err := validateCsrfSession(r, config, store, name, tc.csrfValue) + err := validateCSRFCookie(t.Context(), r, config, store, name, tc.csrfValue) if tc.expectError { assert.Error(t, err) } else { @@ -403,7 +379,7 @@ func TestCreateCsrfSession(t *testing.T) { config.EXPECT().CookieSecure(gomock.Any()).Return(tc.secure).AnyTimes() config.EXPECT().CookieDomain(gomock.Any()).Return(tc.domain).AnyTimes() - err := createCsrfSession(rr, req, config, store, tc.name, "value", tc.maxAge) + err := setCSRFCookie(t.Context(), rr, req, config, store, tc.name, "value", tc.maxAge) assert.NoError(t, err) cookies := make(map[string]cookie) @@ -420,3 +396,42 @@ func TestCreateCsrfSession(t *testing.T) { }) } } + +func TestCaseInsensitiveFilterParam(t *testing.T) { + for k, tc := range []struct { + requestedQuery string + key string + + expectedQuery url.Values + }{ + { + requestedQuery: "key=value", + key: "key2", + expectedQuery: url.Values{"key": []string{"value"}}, + }, + { + requestedQuery: "KeY=value", + key: "key", + expectedQuery: url.Values{"KeY": []string{"****"}}, + }, + { + requestedQuery: "KeY=value", + key: "kEy", + expectedQuery: url.Values{"KeY": []string{"****"}}, + }, + { + requestedQuery: "key=value&KEY2=value2", + key: "key2", + expectedQuery: url.Values{"key": []string{"value"}, "KEY2": []string{"****"}}, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + query, err := url.ParseQuery(tc.requestedQuery) + assert.NoError(t, err) + + q := caseInsensitiveFilterParam(query, tc.key) + + assert.Equal(t, tc.expectedQuery, q) + }) + } +} diff --git a/consent/janitor_consent_test_helper.go b/consent/janitor_consent_test_helper.go deleted file mode 100644 index 6467eb1a63d..00000000000 --- a/consent/janitor_consent_test_helper.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent - -import ( - "time" - - "github.com/ory/x/sqlxx" -) - -func NewHandledLoginRequest(challenge string, hasError bool, requestedAt time.Time, authenticatedAt sqlxx.NullTime) *HandledLoginRequest { - var deniedErr *RequestDeniedError - if hasError { - deniedErr = &RequestDeniedError{ - Name: "consent request denied", - Description: "some description", - Hint: "some hint", - Code: 403, - Debug: "some debug", - valid: true, - } - } - - return &HandledLoginRequest{ - ID: challenge, - Error: deniedErr, - WasHandled: true, - RequestedAt: requestedAt, - AuthenticatedAt: authenticatedAt, - } -} - -func NewHandledConsentRequest(challenge string, hasError bool, requestedAt time.Time, authenticatedAt sqlxx.NullTime) 
*AcceptOAuth2ConsentRequest { - var deniedErr *RequestDeniedError - if hasError { - deniedErr = &RequestDeniedError{ - Name: "consent request denied", - Description: "some description", - Hint: "some hint", - Code: 403, - Debug: "some debug", - valid: true, - } - } - - return &AcceptOAuth2ConsentRequest{ - ID: challenge, - HandledAt: sqlxx.NullTime(time.Now().Round(time.Second)), - Error: deniedErr, - RequestedAt: requestedAt, - AuthenticatedAt: authenticatedAt, - WasHandled: true, - } -} diff --git a/consent/manager.go b/consent/manager.go index bc84ddf91c9..515524e9aed 100644 --- a/consent/manager.go +++ b/consent/manager.go @@ -5,11 +5,12 @@ package consent import ( "context" - "time" "github.com/gofrs/uuid" - "github.com/ory/hydra/client" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/flow" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" ) type ForcedObfuscatedLoginSession struct { @@ -19,44 +20,46 @@ type ForcedObfuscatedLoginSession struct { NID uuid.UUID `db:"nid"` } -func (_ ForcedObfuscatedLoginSession) TableName() string { +func (ForcedObfuscatedLoginSession) TableName() string { return "hydra_oauth2_obfuscated_authentication_session" } -type Manager interface { - CreateConsentRequest(ctx context.Context, req *OAuth2ConsentRequest) error - GetConsentRequest(ctx context.Context, challenge string) (*OAuth2ConsentRequest, error) - HandleConsentRequest(ctx context.Context, r *AcceptOAuth2ConsentRequest) (*OAuth2ConsentRequest, error) - RevokeSubjectConsentSession(ctx context.Context, user string) error - RevokeSubjectClientConsentSession(ctx context.Context, user, client string) error - - VerifyAndInvalidateConsentRequest(ctx context.Context, verifier string) (*AcceptOAuth2ConsentRequest, error) - FindGrantedAndRememberedConsentRequests(ctx context.Context, client, user string) ([]AcceptOAuth2ConsentRequest, error) - FindSubjectsGrantedConsentRequests(ctx context.Context, user string, limit, offset int) ([]AcceptOAuth2ConsentRequest, error) - FindSubjectsSessionGrantedConsentRequests(ctx context.Context, user, sid string, limit, offset int) ([]AcceptOAuth2ConsentRequest, error) - CountSubjectsGrantedConsentRequests(ctx context.Context, user string) (int, error) - - // Cookie management - GetRememberedLoginSession(ctx context.Context, id string) (*LoginSession, error) - CreateLoginSession(ctx context.Context, session *LoginSession) error - DeleteLoginSession(ctx context.Context, id string) error - RevokeSubjectLoginSession(ctx context.Context, user string) error - ConfirmLoginSession(ctx context.Context, id string, authTime time.Time, subject string, remember bool) error - - CreateLoginRequest(ctx context.Context, req *LoginRequest) error - GetLoginRequest(ctx context.Context, challenge string) (*LoginRequest, error) - HandleLoginRequest(ctx context.Context, challenge string, r *HandledLoginRequest) (*LoginRequest, error) - VerifyAndInvalidateLoginRequest(ctx context.Context, verifier string) (*HandledLoginRequest, error) - - CreateForcedObfuscatedLoginSession(ctx context.Context, session *ForcedObfuscatedLoginSession) error - GetForcedObfuscatedLoginSession(ctx context.Context, client, obfuscated string) (*ForcedObfuscatedLoginSession, error) - - ListUserAuthenticatedClientsWithFrontChannelLogout(ctx context.Context, subject, sid string) ([]client.Client, error) - ListUserAuthenticatedClientsWithBackChannelLogout(ctx context.Context, subject, sid string) ([]client.Client, error) 
- - CreateLogoutRequest(ctx context.Context, request *LogoutRequest) error - GetLogoutRequest(ctx context.Context, challenge string) (*LogoutRequest, error) - AcceptLogoutRequest(ctx context.Context, challenge string) (*LogoutRequest, error) - RejectLogoutRequest(ctx context.Context, challenge string) error - VerifyAndInvalidateLogoutRequest(ctx context.Context, verifier string) (*LogoutRequest, error) -} +type ( + Manager interface { + RevokeSubjectConsentSession(ctx context.Context, subject string) error + RevokeSubjectClientConsentSession(ctx context.Context, subject, client string) error + RevokeConsentSessionByID(ctx context.Context, consentRequestID string) error + + CreateConsentSession(ctx context.Context, f *flow.Flow) error + FindGrantedAndRememberedConsentRequest(ctx context.Context, client, subject string) (*flow.Flow, error) + FindSubjectsGrantedConsentRequests(ctx context.Context, subject string, pageOpts ...keysetpagination.Option) ([]flow.Flow, *keysetpagination.Paginator, error) + FindSubjectsSessionGrantedConsentRequests(ctx context.Context, subject, sid string, pageOpts ...keysetpagination.Option) ([]flow.Flow, *keysetpagination.Paginator, error) + + ListUserAuthenticatedClientsWithFrontChannelLogout(ctx context.Context, subject, sid string) ([]client.Client, error) + ListUserAuthenticatedClientsWithBackChannelLogout(ctx context.Context, subject, sid string) ([]client.Client, error) + } + ObfuscatedSubjectManager interface { + CreateForcedObfuscatedLoginSession(ctx context.Context, session *ForcedObfuscatedLoginSession) error + GetForcedObfuscatedLoginSession(ctx context.Context, client, obfuscated string) (*ForcedObfuscatedLoginSession, error) + } + LoginManager interface { + GetRememberedLoginSession(ctx context.Context, id string) (*flow.LoginSession, error) + DeleteLoginSession(ctx context.Context, id string) (deletedSession *flow.LoginSession, err error) + RevokeSubjectLoginSession(ctx context.Context, subject string) error + ConfirmLoginSession(ctx context.Context, loginSession *flow.LoginSession) error + } + LogoutManager interface { + CreateLogoutRequest(ctx context.Context, request *flow.LogoutRequest) error + GetLogoutRequest(ctx context.Context, challenge string) (*flow.LogoutRequest, error) + AcceptLogoutRequest(ctx context.Context, challenge string) (*flow.LogoutRequest, error) + RejectLogoutRequest(ctx context.Context, challenge string) error + VerifyAndInvalidateLogoutRequest(ctx context.Context, verifier string) (*flow.LogoutRequest, error) + } + + ManagerProvider interface{ ConsentManager() Manager } + ObfuscatedSubjectManagerProvider interface { + ObfuscatedSubjectManager() ObfuscatedSubjectManager + } + LoginManagerProvider interface{ LoginManager() LoginManager } + LogoutManagerProvider interface{ LogoutManager() LogoutManager } +) diff --git a/consent/manager_test_helpers.go b/consent/manager_test_helpers.go deleted file mode 100644 index 0a2133c461f..00000000000 --- a/consent/manager_test_helpers.go +++ /dev/null @@ -1,1032 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent - -import ( - "context" - "errors" - "fmt" - "testing" - "time" - - "github.com/ory/x/assertx" - - gofrsuuid "github.com/gofrs/uuid" - "github.com/google/uuid" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/ory/x/sqlxx" - - "github.com/ory/fosite" - - 
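A minimal sketch of how a consumer can depend on just the capabilities it needs via the new provider interfaces; the sessionTerminator type and its dependency bundle are hypothetical and only illustrate the narrower contracts split out of the former catch-all Manager.

package example

import (
	"context"

	"github.com/ory/hydra/v2/consent"
)

// dependencies declares exactly the registry capabilities this component uses,
// instead of requiring the full consent manager surface.
type dependencies interface {
	consent.LoginManagerProvider
	consent.LogoutManagerProvider
}

type sessionTerminator struct {
	d dependencies
}

// Terminate revokes all remembered login sessions of a subject.
func (s *sessionTerminator) Terminate(ctx context.Context, subject string) error {
	return s.d.LoginManager().RevokeSubjectLoginSession(ctx, subject)
}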
"github.com/ory/hydra/client" - "github.com/ory/hydra/x" -) - -func MockConsentRequest(key string, remember bool, rememberFor int, hasError bool, skip bool, authAt bool, loginChallengeBase string, network string) (c *OAuth2ConsentRequest, h *AcceptOAuth2ConsentRequest) { - c = &OAuth2ConsentRequest{ - ID: makeID("challenge", network, key), - RequestedScope: []string{"scopea" + key, "scopeb" + key}, - RequestedAudience: []string{"auda" + key, "audb" + key}, - Skip: skip, - Subject: "subject" + key, - OpenIDConnectContext: &OAuth2ConsentRequestOpenIDConnectContext{ - ACRValues: []string{"1" + key, "2" + key}, - UILocales: []string{"fr" + key, "de" + key}, - Display: "popup" + key, - }, - Client: &client.Client{LegacyClientID: "fk-client-" + key}, - RequestURL: "https://request-url/path" + key, - LoginChallenge: sqlxx.NullString(makeID(loginChallengeBase, network, key)), - LoginSessionID: sqlxx.NullString(makeID("fk-login-session", network, key)), - ForceSubjectIdentifier: "forced-subject", - Verifier: makeID("verifier", network, key), - CSRF: "csrf" + key, - ACR: "1", - AuthenticatedAt: sqlxx.NullTime(time.Now().UTC().Add(-time.Hour)), - RequestedAt: time.Now().UTC().Add(-time.Hour), - Context: sqlxx.JSONRawMessage(`{"foo": "bar` + key + `"}`), - } - - var err *RequestDeniedError - if hasError { - err = &RequestDeniedError{ - Name: "error_name" + key, - Description: "error_description" + key, - Hint: "error_hint,omitempty" + key, - Code: 100, - Debug: "error_debug,omitempty" + key, - valid: true, - } - } - - var authenticatedAt sqlxx.NullTime - if authAt { - authenticatedAt = sqlxx.NullTime(time.Now().UTC().Add(-time.Minute)) - } - - h = &AcceptOAuth2ConsentRequest{ - ConsentRequest: c, - RememberFor: rememberFor, - Remember: remember, - ID: makeID("challenge", network, key), - RequestedAt: time.Now().UTC().Add(-time.Minute), - AuthenticatedAt: authenticatedAt, - GrantedScope: []string{"scopea" + key, "scopeb" + key}, - GrantedAudience: []string{"auda" + key, "audb" + key}, - Error: err, - HandledAt: sqlxx.NullTime(time.Now().UTC()), - // WasUsed: true, - } - - return c, h -} - -func MockLogoutRequest(key string, withClient bool, network string) (c *LogoutRequest) { - var cl *client.Client - if withClient { - cl = &client.Client{ - LegacyClientID: "fk-client-" + key, - } - } - return &LogoutRequest{ - Subject: "subject" + key, - ID: makeID("challenge", network, key), - Verifier: makeID("verifier", network, key), - SessionID: makeID("session", network, key), - RPInitiated: true, - RequestURL: "http://request-me/", - PostLogoutRedirectURI: "http://redirect-me/", - WasHandled: false, - Accepted: false, - Client: cl, - } -} - -func MockAuthRequest(key string, authAt bool, network string) (c *LoginRequest, h *HandledLoginRequest) { - c = &LoginRequest{ - OpenIDConnectContext: &OAuth2ConsentRequestOpenIDConnectContext{ - ACRValues: []string{"1" + key, "2" + key}, - UILocales: []string{"fr" + key, "de" + key}, - Display: "popup" + key, - }, - RequestedAt: time.Now().UTC().Add(-time.Minute), - Client: &client.Client{LegacyClientID: "fk-client-" + key}, - Subject: "subject" + key, - RequestURL: "https://request-url/path" + key, - Skip: true, - ID: makeID("challenge", network, key), - Verifier: makeID("verifier", network, key), - RequestedScope: []string{"scopea" + key, "scopeb" + key}, - CSRF: "csrf" + key, - SessionID: sqlxx.NullString(makeID("fk-login-session", network, key)), - } - - var err = &RequestDeniedError{ - Name: "error_name" + key, - Description: 
"error_description" + key, - Hint: "error_hint,omitempty" + key, - Code: 100, - Debug: "error_debug,omitempty" + key, - valid: true, - } - - var authenticatedAt time.Time - if authAt { - authenticatedAt = time.Now().UTC().Add(-time.Minute) - } - - h = &HandledLoginRequest{ - LoginRequest: c, - RememberFor: 120, - Remember: true, - ID: makeID("challenge", network, key), - RequestedAt: time.Now().UTC().Add(-time.Minute), - AuthenticatedAt: sqlxx.NullTime(authenticatedAt), - Error: err, - Subject: c.Subject, - ACR: "acr", - ForceSubjectIdentifier: "forced-subject", - WasHandled: false, - } - - return c, h -} - -func SaneMockHandleConsentRequest(t *testing.T, m Manager, c *OAuth2ConsentRequest, authAt time.Time, rememberFor int, remember bool, hasError bool) *AcceptOAuth2ConsentRequest { - var rde *RequestDeniedError - if hasError { - rde = &RequestDeniedError{ - Name: "error_name", - Description: "error_description", - Hint: "error_hint", - Code: 100, - Debug: "error_debug", - valid: true, - } - } - - h := &AcceptOAuth2ConsentRequest{ - ConsentRequest: c, - RememberFor: rememberFor, - Remember: remember, - ID: c.ID, - RequestedAt: time.Now().UTC().Add(-time.Minute), - AuthenticatedAt: sqlxx.NullTime(authAt), - GrantedScope: []string{"scopea", "scopeb"}, - GrantedAudience: []string{"auda", "audb"}, - Error: rde, - WasHandled: false, - HandledAt: sqlxx.NullTime(time.Now().UTC().Add(-time.Minute)), - } - - _, err := m.HandleConsentRequest(context.Background(), h) - require.NoError(t, err) - return h -} - -// SaneMockConsentRequest does the same thing as MockConsentRequest but uses less insanity and implicit dependencies. -func SaneMockConsentRequest(t *testing.T, m Manager, ar *LoginRequest, skip bool) (c *OAuth2ConsentRequest) { - c = &OAuth2ConsentRequest{ - RequestedScope: []string{"scopea", "scopeb"}, - RequestedAudience: []string{"auda", "audb"}, - Skip: skip, - Subject: ar.Subject, - OpenIDConnectContext: &OAuth2ConsentRequestOpenIDConnectContext{ - ACRValues: []string{"1", "2"}, - UILocales: []string{"fr", "de"}, - Display: "popup", - }, - Client: ar.Client, - RequestURL: "https://request-url/path", - LoginChallenge: sqlxx.NullString(ar.ID), - LoginSessionID: ar.SessionID, - ForceSubjectIdentifier: "forced-subject", - ACR: "1", - AuthenticatedAt: sqlxx.NullTime(time.Now().UTC().Add(-time.Hour)), - RequestedAt: time.Now().UTC().Add(-time.Hour), - Context: sqlxx.JSONRawMessage(`{"foo": "bar"}`), - - ID: uuid.New().String(), - Verifier: uuid.New().String(), - CSRF: uuid.New().String(), - } - - require.NoError(t, m.CreateConsentRequest(context.Background(), c)) - return c -} - -// SaneMockAuthRequest does the same thing as MockAuthRequest but uses less insanity and implicit dependencies. 
-func SaneMockAuthRequest(t *testing.T, m Manager, ls *LoginSession, cl *client.Client) (c *LoginRequest) { - c = &LoginRequest{ - OpenIDConnectContext: &OAuth2ConsentRequestOpenIDConnectContext{ - ACRValues: []string{"1", "2"}, - UILocales: []string{"fr", "de"}, - Display: "popup", - }, - RequestedAt: time.Now().UTC().Add(-time.Hour), - Client: cl, - Subject: ls.Subject, - RequestURL: "https://request-url/path", - Skip: true, - RequestedScope: []string{"scopea", "scopeb"}, - SessionID: sqlxx.NullString(ls.ID), - - CSRF: uuid.New().String(), - ID: uuid.New().String(), - Verifier: uuid.New().String(), - } - require.NoError(t, m.CreateLoginRequest(context.Background(), c)) - return c -} - -func makeID(base string, network string, key string) string { - return fmt.Sprintf("%s-%s-%s", base, network, key) -} - -func TestHelperNID(t1ClientManager client.Manager, t1ValidNID Manager, t2InvalidNID Manager) func(t *testing.T) { - testClient := client.Client{LegacyClientID: fmt.Sprintf("2022-03-11-client-nid-test-1")} - testLS := LoginSession{ - ID: "2022-03-11-ls-nid-test-1", - Subject: "2022-03-11-test-1-sub", - } - testLR := LoginRequest{ - ID: "2022-03-11-lr-nid-test-1", - Subject: "2022-03-11-test-1-sub", - Verifier: "2022-03-11-test-1-ver", - RequestedAt: time.Now(), - Client: &client.Client{LegacyClientID: fmt.Sprintf("2022-03-11-client-nid-test-1")}, - } - testHLR := HandledLoginRequest{ - LoginRequest: &testLR, - RememberFor: 120, - Remember: true, - ID: testLR.ID, - RequestedAt: testLR.RequestedAt, - AuthenticatedAt: sqlxx.NullTime(time.Now()), - Error: nil, - Subject: testLR.Subject, - ACR: "acr", - ForceSubjectIdentifier: "2022-03-11-test-1-forced-sub", - WasHandled: false, - } - - return func(t *testing.T) { - require.NoError(t, t1ClientManager.CreateClient(context.Background(), &testClient)) - require.Error(t, t2InvalidNID.CreateLoginSession(context.Background(), &testLS)) - require.NoError(t, t1ValidNID.CreateLoginSession(context.Background(), &testLS)) - require.Error(t, t2InvalidNID.CreateLoginRequest(context.Background(), &testLR)) - require.NoError(t, t1ValidNID.CreateLoginRequest(context.Background(), &testLR)) - _, err := t2InvalidNID.GetLoginRequest(context.Background(), testLR.ID) - require.Error(t, err) - _, err = t1ValidNID.GetLoginRequest(context.Background(), testLR.ID) - require.NoError(t, err) - _, err = t2InvalidNID.HandleLoginRequest(context.Background(), testLR.ID, &testHLR) - require.Error(t, err) - _, err = t1ValidNID.HandleLoginRequest(context.Background(), testLR.ID, &testHLR) - require.NoError(t, err) - require.NoError(t, t2InvalidNID.ConfirmLoginSession(context.Background(), testLS.ID, time.Now(), testLS.Subject, true)) - require.NoError(t, t1ValidNID.ConfirmLoginSession(context.Background(), testLS.ID, time.Now(), testLS.Subject, true)) - require.Error(t, t2InvalidNID.DeleteLoginSession(context.Background(), testLS.ID)) - require.NoError(t, t1ValidNID.DeleteLoginSession(context.Background(), testLS.ID)) - } -} - -func ManagerTests(m Manager, clientManager client.Manager, fositeManager x.FositeStorer, network string, parallel bool) func(t *testing.T) { - lr := make(map[string]*LoginRequest) - - return func(t *testing.T) { - if parallel { - t.Parallel() - } - t.Run("case=init-fks", func(t *testing.T) { - for _, k := range []string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "rv1", "rv2"} { - require.NoError(t, clientManager.CreateClient(context.Background(), &client.Client{LegacyClientID: fmt.Sprintf("fk-client-%s", k)})) - - require.NoError(t, 
m.CreateLoginSession(context.Background(), &LoginSession{ - ID: makeID("fk-login-session", network, k), - AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).UTC()), - Subject: fmt.Sprintf("subject-%s", k), - })) - - lr[k] = &LoginRequest{ - ID: makeID("fk-login-challenge", network, k), - Subject: fmt.Sprintf("subject%s", k), - SessionID: sqlxx.NullString(makeID("fk-login-session", network, k)), - Verifier: makeID("fk-login-verifier", network, k), - Client: &client.Client{LegacyClientID: fmt.Sprintf("fk-client-%s", k)}, - AuthenticatedAt: sqlxx.NullTime(time.Now()), - RequestedAt: time.Now(), - } - - require.NoError(t, m.CreateLoginRequest(context.Background(), lr[k])) - } - }) - - t.Run("case=auth-session", func(t *testing.T) { - for _, tc := range []struct { - s LoginSession - }{ - { - s: LoginSession{ - ID: makeID("session", network, "1"), - AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).Add(-time.Minute).UTC()), - Subject: "subject1", - }, - }, - { - s: LoginSession{ - ID: makeID("session", network, "2"), - AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Minute).Add(-time.Minute).UTC()), - Subject: "subject2", - }, - }, - } { - t.Run("case=create-get-"+tc.s.ID, func(t *testing.T) { - _, err := m.GetRememberedLoginSession(context.Background(), tc.s.ID) - require.EqualError(t, err, x.ErrNotFound.Error(), "%#v", err) - - err = m.CreateLoginSession(context.Background(), &tc.s) - require.NoError(t, err) - - _, err = m.GetRememberedLoginSession(context.Background(), tc.s.ID) - require.EqualError(t, err, x.ErrNotFound.Error()) - - updatedAuth := time.Time(tc.s.AuthenticatedAt).Add(time.Second) - require.NoError(t, m.ConfirmLoginSession(context.Background(), tc.s.ID, updatedAuth, tc.s.Subject, true)) - - got, err := m.GetRememberedLoginSession(context.Background(), tc.s.ID) - require.NoError(t, err) - assert.EqualValues(t, tc.s.ID, got.ID) - assert.Equal(t, updatedAuth.Unix(), time.Time(got.AuthenticatedAt).Unix()) // this was updated from confirm... - assert.EqualValues(t, tc.s.Subject, got.Subject) - - time.Sleep(time.Second) // Make sure AuthAt does not equal... - updatedAuth2 := time.Now().Truncate(time.Second).UTC() - require.NoError(t, m.ConfirmLoginSession(context.Background(), tc.s.ID, updatedAuth2, "some-other-subject", true)) - - got2, err := m.GetRememberedLoginSession(context.Background(), tc.s.ID) - require.NoError(t, err) - assert.EqualValues(t, tc.s.ID, got2.ID) - assert.Equal(t, updatedAuth2.Unix(), time.Time(got2.AuthenticatedAt).Unix()) // this was updated from confirm... 
- assert.EqualValues(t, "some-other-subject", got2.Subject) - }) - } - for _, tc := range []struct { - id string - }{ - { - id: makeID("session", network, "1"), - }, - { - id: makeID("session", network, "2"), - }, - } { - t.Run("case=delete-get-"+tc.id, func(t *testing.T) { - err := m.DeleteLoginSession(context.Background(), tc.id) - require.NoError(t, err) - - _, err = m.GetRememberedLoginSession(context.Background(), tc.id) - require.Error(t, err) - }) - } - }) - - t.Run("case=auth-request", func(t *testing.T) { - for _, tc := range []struct { - key string - authAt bool - }{ - {"1", true}, - {"2", true}, - {"3", true}, - {"4", true}, - {"5", true}, - {"6", false}, - {"7", true}, - } { - t.Run("key="+tc.key, func(t *testing.T) { - c, h := MockAuthRequest(tc.key, tc.authAt, network) - _ = clientManager.CreateClient(context.Background(), c.Client) // Ignore errors that are caused by duplication - - _, err := m.GetLoginRequest(context.Background(), makeID("challenge", network, tc.key)) - require.Error(t, err) - - require.NoError(t, m.CreateLoginRequest(context.Background(), c)) - - got1, err := m.GetLoginRequest(context.Background(), makeID("challenge", network, tc.key)) - require.NoError(t, err) - assert.False(t, got1.WasHandled) - compareAuthenticationRequest(t, c, got1) - - got1, err = m.HandleLoginRequest(context.Background(), makeID("challenge", network, tc.key), h) - require.NoError(t, err) - compareAuthenticationRequest(t, c, got1) - - got2, err := m.VerifyAndInvalidateLoginRequest(context.Background(), makeID("verifier", network, tc.key)) - require.NoError(t, err) - compareAuthenticationRequest(t, c, got2.LoginRequest) - assert.Equal(t, c.ID, got2.ID) - - _, err = m.VerifyAndInvalidateLoginRequest(context.Background(), makeID("verifier", network, tc.key)) - require.Error(t, err) - - got1, err = m.GetLoginRequest(context.Background(), makeID("challenge", network, tc.key)) - require.NoError(t, err) - assert.True(t, got1.WasHandled) - }) - } - }) - - t.Run("case=consent-request", func(t *testing.T) { - for _, tc := range []struct { - key string - remember bool - rememberFor int - hasError bool - skip bool - authAt bool - }{ - {"1", true, 0, false, false, true}, - {"2", true, 0, true, false, true}, - {"3", true, 1, false, false, true}, - {"4", false, 0, false, false, true}, - {"5", true, 120, false, false, true}, - {"6", true, 120, false, true, true}, - {"7", false, 0, false, false, false}, - } { - t.Run("key="+tc.key, func(t *testing.T) { - c, h := MockConsentRequest(tc.key, tc.remember, tc.rememberFor, tc.hasError, tc.skip, tc.authAt, "challenge", network) - _ = clientManager.CreateClient(context.Background(), c.Client) // Ignore errors that are caused by duplication - - consentChallenge := makeID("challenge", network, tc.key) - - _, err := m.GetConsentRequest(context.Background(), consentChallenge) - require.Error(t, err) - - require.NoError(t, m.CreateConsentRequest(context.Background(), c)) - - got1, err := m.GetConsentRequest(context.Background(), consentChallenge) - require.NoError(t, err) - compareConsentRequest(t, c, got1) - assert.False(t, got1.WasHandled) - - got1, err = m.HandleConsentRequest(context.Background(), h) - require.NoError(t, err) - assertx.TimeDifferenceLess(t, time.Now(), time.Time(h.HandledAt), 5) - compareConsentRequest(t, c, got1) - - h.GrantedAudience = sqlxx.StringSliceJSONFormat{"new-audience"} - _, err = m.HandleConsentRequest(context.Background(), h) - require.NoError(t, err) - - got2, err := m.VerifyAndInvalidateConsentRequest(context.Background(), 
makeID("verifier", network, tc.key)) - require.NoError(t, err) - compareConsentRequest(t, c, got2.ConsentRequest) - assert.Equal(t, c.ID, got2.ID) - assert.Equal(t, h.GrantedAudience, got2.GrantedAudience) - - // Trying to update this again should return an error because the consent request was used. - h.GrantedAudience = sqlxx.StringSliceJSONFormat{"new-audience", "new-audience-2"} - _, err = m.HandleConsentRequest(context.Background(), h) - require.Error(t, err) - - if tc.hasError { - assert.True(t, got2.HasError()) - } - assert.Equal(t, tc.remember, got2.Remember) - assert.Equal(t, tc.rememberFor, got2.RememberFor) - - _, err = m.VerifyAndInvalidateConsentRequest(context.Background(), makeID("verifier", network, tc.key)) - require.Error(t, err) - - got1, err = m.GetConsentRequest(context.Background(), consentChallenge) - require.NoError(t, err) - assert.True(t, got1.WasHandled) - }) - } - - for _, tc := range []struct { - keyC string - keyS string - expectedLength int - }{ - {"1", "1", 1}, - {"2", "2", 0}, - {"3", "3", 0}, - {"4", "4", 0}, - {"1", "2", 0}, - {"2", "1", 0}, - {"5", "5", 1}, - {"6", "6", 0}, - } { - t.Run("key="+tc.keyC+"-"+tc.keyS, func(t *testing.T) { - rs, err := m.FindGrantedAndRememberedConsentRequests(context.Background(), "fk-client-"+tc.keyC, "subject"+tc.keyS) - if tc.expectedLength == 0 { - assert.EqualError(t, err, ErrNoPreviousConsentFound.Error()) - } else { - require.NoError(t, err) - assert.Len(t, rs, tc.expectedLength) - } - }) - } - }) - - t.Run("case=revoke-auth-request", func(t *testing.T) { - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ - ID: makeID("rev-session", network, "-1"), - AuthenticatedAt: sqlxx.NullTime(time.Now()), - Subject: "subject-1", - })) - - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ - ID: makeID("rev-session", network, "-2"), - AuthenticatedAt: sqlxx.NullTime(time.Now()), - Subject: "subject-2", - })) - - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ - ID: makeID("rev-session", network, "-3"), - AuthenticatedAt: sqlxx.NullTime(time.Now()), - Subject: "subject-1", - })) - - for i, tc := range []struct { - subject string - ids []string - }{ - { - subject: "subject-1", - ids: []string{makeID("rev-session", network, "-1"), makeID("rev-session", network, "-3")}, - }, - { - subject: "subject-2", - ids: []string{makeID("rev-session", network, "-1"), makeID("rev-session", network, "-3"), makeID("rev-session", network, "-2")}, - }, - } { - t.Run(fmt.Sprintf("case=%d/subject=%s", i, tc.subject), func(t *testing.T) { - require.NoError(t, m.RevokeSubjectLoginSession(context.Background(), tc.subject)) - - for _, id := range tc.ids { - t.Run(fmt.Sprintf("id=%s", id), func(t *testing.T) { - _, err := m.GetRememberedLoginSession(context.Background(), id) - assert.EqualError(t, err, x.ErrNotFound.Error()) - }) - } - }) - } - }) - - challengerv1 := makeID("challenge", network, "rv1") - challengerv2 := makeID("challenge", network, "rv2") - t.Run("case=revoke-used-consent-request", func(t *testing.T) { - cr1, hcr1 := MockConsentRequest("rv1", false, 0, false, false, false, "fk-login-challenge", network) - cr2, hcr2 := MockConsentRequest("rv2", false, 0, false, false, false, "fk-login-challenge", network) - - // Ignore duplication errors - _ = clientManager.CreateClient(context.Background(), cr1.Client) - _ = clientManager.CreateClient(context.Background(), cr2.Client) - - require.NoError(t, m.CreateConsentRequest(context.Background(), cr1)) - 
require.NoError(t, m.CreateConsentRequest(context.Background(), cr2)) - _, err := m.HandleConsentRequest(context.Background(), hcr1) - require.NoError(t, err) - _, err = m.HandleConsentRequest(context.Background(), hcr2) - require.NoError(t, err) - - require.NoError(t, fositeManager.CreateAccessTokenSession(context.Background(), makeID("", network, "trva1"), &fosite.Request{Client: cr1.Client, ID: challengerv1, RequestedAt: time.Now()})) - require.NoError(t, fositeManager.CreateRefreshTokenSession(context.Background(), makeID("", network, "rrva1"), &fosite.Request{Client: cr1.Client, ID: challengerv1, RequestedAt: time.Now()})) - require.NoError(t, fositeManager.CreateAccessTokenSession(context.Background(), makeID("", network, "trva2"), &fosite.Request{Client: cr2.Client, ID: challengerv2, RequestedAt: time.Now()})) - require.NoError(t, fositeManager.CreateRefreshTokenSession(context.Background(), makeID("", network, "rrva2"), &fosite.Request{Client: cr2.Client, ID: challengerv2, RequestedAt: time.Now()})) - - for i, tc := range []struct { - subject string - client string - at string - rt string - ids []string - }{ - { - at: makeID("", network, "trva1"), rt: makeID("", network, "rrva1"), - subject: "subjectrv1", - client: "", - ids: []string{challengerv1}, - }, - { - at: makeID("", network, "trva2"), rt: makeID("", network, "rrva2"), - subject: "subjectrv2", - client: "fk-client-rv2", - ids: []string{challengerv2}, - }, - } { - t.Run(fmt.Sprintf("case=%d/subject=%s", i, tc.subject), func(t *testing.T) { - _, err := fositeManager.GetAccessTokenSession(context.Background(), tc.at, nil) - assert.NoError(t, err) - _, err = fositeManager.GetRefreshTokenSession(context.Background(), tc.rt, nil) - assert.NoError(t, err) - - if tc.client == "" { - require.NoError(t, m.RevokeSubjectConsentSession(context.Background(), tc.subject)) - } else { - require.NoError(t, m.RevokeSubjectClientConsentSession(context.Background(), tc.subject, tc.client)) - } - - for _, id := range tc.ids { - t.Run(fmt.Sprintf("id=%s", id), func(t *testing.T) { - _, err := m.GetConsentRequest(context.Background(), id) - assert.True(t, errors.Is(err, x.ErrNotFound)) - }) - } - - r, err := fositeManager.GetAccessTokenSession(context.Background(), tc.at, nil) - assert.Error(t, err, "%+v", r) - r, err = fositeManager.GetRefreshTokenSession(context.Background(), tc.rt, nil) - assert.Error(t, err, "%+v", r) - }) - } - - require.EqualError(t, m.RevokeSubjectConsentSession(context.Background(), "i-do-not-exist"), x.ErrNotFound.Error()) - require.EqualError(t, m.RevokeSubjectClientConsentSession(context.Background(), "i-do-not-exist", "i-do-not-exist"), x.ErrNotFound.Error()) - }) - - t.Run("case=list-used-consent-requests", func(t *testing.T) { - require.NoError(t, m.CreateLoginRequest(context.Background(), lr["rv1"])) - require.NoError(t, m.CreateLoginRequest(context.Background(), lr["rv2"])) - - cr1, hcr1 := MockConsentRequest("rv1", true, 0, false, false, false, "fk-login-challenge", network) - cr2, hcr2 := MockConsentRequest("rv2", false, 0, false, false, false, "fk-login-challenge", network) - - // Ignore duplicate errors - _ = clientManager.CreateClient(context.Background(), cr1.Client) - _ = clientManager.CreateClient(context.Background(), cr2.Client) - - require.NoError(t, m.CreateConsentRequest(context.Background(), cr1)) - require.NoError(t, m.CreateConsentRequest(context.Background(), cr2)) - _, err := m.HandleConsentRequest(context.Background(), hcr1) - require.NoError(t, err) - _, err = 
m.HandleConsentRequest(context.Background(), hcr2) - require.NoError(t, err) - - for i, tc := range []struct { - subject string - sid string - challenges []string - clients []string - }{ - { - subject: cr1.Subject, - sid: makeID("fk-login-session", network, "rv1"), - challenges: []string{challengerv1}, - clients: []string{"fk-client-rv1"}, - }, - { - subject: cr2.Subject, - sid: makeID("fk-login-session", network, "rv2"), - challenges: []string{challengerv2}, - clients: []string{"fk-client-rv2"}, - }, - { - subject: "subjectrv3", - sid: makeID("fk-login-session", network, "rv2"), - challenges: []string{}, - clients: []string{}, - }, - } { - t.Run(fmt.Sprintf("case=%d/subject=%s/session=%s", i, tc.subject, tc.sid), func(t *testing.T) { - consents, err := m.FindSubjectsSessionGrantedConsentRequests(context.Background(), tc.subject, tc.sid, 100, 0) - assert.Equal(t, len(tc.challenges), len(consents)) - - if len(tc.challenges) == 0 { - assert.EqualError(t, err, ErrNoPreviousConsentFound.Error()) - } else { - require.NoError(t, err) - for _, consent := range consents { - assert.Contains(t, tc.challenges, consent.ID) - assert.Contains(t, tc.clients, consent.ConsentRequest.Client.GetID()) - } - } - - n, err := m.CountSubjectsGrantedConsentRequests(context.Background(), tc.subject) - require.NoError(t, err) - assert.Equal(t, n, len(tc.challenges)) - - }) - } - - for i, tc := range []struct { - subject string - challenges []string - clients []string - }{ - { - subject: "subjectrv1", - challenges: []string{challengerv1}, - clients: []string{"fk-client-rv1"}, - }, - { - subject: "subjectrv2", - challenges: []string{challengerv2}, - clients: []string{"fk-client-rv2"}, - }, - { - subject: "subjectrv3", - challenges: []string{}, - clients: []string{}, - }, - } { - t.Run(fmt.Sprintf("case=%d/subject=%s", i, tc.subject), func(t *testing.T) { - consents, err := m.FindSubjectsGrantedConsentRequests(context.Background(), tc.subject, 100, 0) - assert.Equal(t, len(tc.challenges), len(consents)) - - if len(tc.challenges) == 0 { - assert.EqualError(t, err, ErrNoPreviousConsentFound.Error()) - } else { - require.NoError(t, err) - for _, consent := range consents { - assert.Contains(t, tc.challenges, consent.ID) - assert.Contains(t, tc.clients, consent.ConsentRequest.Client.GetID()) - } - } - - n, err := m.CountSubjectsGrantedConsentRequests(context.Background(), tc.subject) - require.NoError(t, err) - assert.Equal(t, n, len(tc.challenges)) - - }) - } - - t.Run("case=obfuscated", func(t *testing.T) { - _, err := m.GetForcedObfuscatedLoginSession(context.Background(), "fk-client-1", "obfuscated-1") - require.True(t, errors.Is(err, x.ErrNotFound)) - - expect := &ForcedObfuscatedLoginSession{ - ClientID: "fk-client-1", - Subject: "subject-1", - SubjectObfuscated: "obfuscated-1", - } - require.NoError(t, m.CreateForcedObfuscatedLoginSession(context.Background(), expect)) - - got, err := m.GetForcedObfuscatedLoginSession(context.Background(), "fk-client-1", "obfuscated-1") - require.NoError(t, err) - require.NotEqual(t, got.NID, gofrsuuid.Nil) - got.NID = gofrsuuid.Nil - assert.EqualValues(t, expect, got) - - expect = &ForcedObfuscatedLoginSession{ - ClientID: "fk-client-1", - Subject: "subject-1", - SubjectObfuscated: "obfuscated-2", - } - require.NoError(t, m.CreateForcedObfuscatedLoginSession(context.Background(), expect)) - - got, err = m.GetForcedObfuscatedLoginSession(context.Background(), "fk-client-1", "obfuscated-2") - require.NotEqual(t, got.NID, gofrsuuid.Nil) - got.NID = gofrsuuid.Nil - require.NoError(t, 
err) - assert.EqualValues(t, expect, got) - - _, err = m.GetForcedObfuscatedLoginSession(context.Background(), "fk-client-1", "obfuscated-1") - require.True(t, errors.Is(err, x.ErrNotFound)) - }) - - t.Run("case=ListUserAuthenticatedClientsWithFrontAndBackChannelLogout", func(t *testing.T) { - // The idea of this test is to create two identities (subjects) with 4 sessions each, where - // only some sessions have been associated with a client that has a front channel logout url - - subjects := make([]string, 1) - for k := range subjects { - subjects[k] = fmt.Sprintf("subject-ListUserAuthenticatedClientsWithFrontAndBackChannelLogout-%d", k) - } - - sessions := make([]LoginSession, len(subjects)*1) - frontChannels := map[string][]client.Client{} - backChannels := map[string][]client.Client{} - for k := range sessions { - id := uuid.New().String() - subject := subjects[k%len(subjects)] - t.Run(fmt.Sprintf("create/session=%s/subject=%s", id, subject), func(t *testing.T) { - ls := &LoginSession{ - ID: id, - AuthenticatedAt: sqlxx.NullTime(time.Now()), - Subject: subject, - } - require.NoError(t, m.CreateLoginSession(context.Background(), ls)) - - cl := &client.Client{LegacyClientID: uuid.New().String()} - switch k % 4 { - case 0: - cl.FrontChannelLogoutURI = "http://some-url.com/" - frontChannels[id] = append(frontChannels[id], *cl) - case 1: - cl.BackChannelLogoutURI = "http://some-url.com/" - backChannels[id] = append(backChannels[id], *cl) - case 2: - cl.FrontChannelLogoutURI = "http://some-url.com/" - cl.BackChannelLogoutURI = "http://some-url.com/" - frontChannels[id] = append(frontChannels[id], *cl) - backChannels[id] = append(backChannels[id], *cl) - } - require.NoError(t, clientManager.CreateClient(context.Background(), cl)) - - ar := SaneMockAuthRequest(t, m, ls, cl) - cr := SaneMockConsentRequest(t, m, ar, false) - _ = SaneMockHandleConsentRequest(t, m, cr, time.Time{}, 0, false, false) - - sessions[k] = *ls - }) - } - - for _, ls := range sessions { - check := func(t *testing.T, expected map[string][]client.Client, actual []client.Client) { - es, ok := expected[ls.ID] - if !ok { - require.Len(t, actual, 0) - return - } - require.Len(t, actual, len(es)) - - for _, e := range es { - var found bool - for _, a := range actual { - if e.GetID() == a.GetID() { - found = true - } - assert.Equal(t, e.GetID(), a.GetID()) - assert.Equal(t, e.FrontChannelLogoutURI, a.FrontChannelLogoutURI) - assert.Equal(t, e.BackChannelLogoutURI, a.BackChannelLogoutURI) - } - require.True(t, found) - } - } - - t.Run(fmt.Sprintf("method=ListUserAuthenticatedClientsWithFrontChannelLogout/session=%s/subject=%s", ls.ID, ls.Subject), func(t *testing.T) { - actual, err := m.ListUserAuthenticatedClientsWithFrontChannelLogout(context.Background(), ls.Subject, ls.ID) - require.NoError(t, err) - check(t, frontChannels, actual) - }) - - t.Run(fmt.Sprintf("method=ListUserAuthenticatedClientsWithBackChannelLogout/session=%s", ls.ID), func(t *testing.T) { - actual, err := m.ListUserAuthenticatedClientsWithBackChannelLogout(context.Background(), ls.Subject, ls.ID) - require.NoError(t, err) - check(t, backChannels, actual) - }) - } - }) - - t.Run("case=LogoutRequest", func(t *testing.T) { - for k, tc := range []struct { - key string - authAt bool - withClient bool - }{ - {"LogoutRequest-1", true, true}, - {"LogoutRequest-2", true, true}, - {"LogoutRequest-3", true, true}, - {"LogoutRequest-4", true, true}, - {"LogoutRequest-5", true, false}, - {"LogoutRequest-6", false, false}, - } { - t.Run("key="+tc.key, func(t *testing.T) { 
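The ListUserAuthenticatedClientsWithFrontAndBackChannelLogout cases above bucket each session's clients by the logout channel they advertise, and a client that configures both URIs must appear in both buckets. The helper below is a purely illustrative version of that bookkeeping; clientStub carries only the two fields the test compares.

package logoutsketch

// clientStub is an illustrative stand-in for the full client type, reduced to
// the two logout URIs the deleted test inspects.
type clientStub struct {
	ID                    string
	FrontChannelLogoutURI string
	BackChannelLogoutURI  string
}

// partitionByLogoutChannel mirrors how the test buckets a session's clients:
// a client lands in the front-channel list, the back-channel list, or both,
// depending on which logout URIs it has configured.
func partitionByLogoutChannel(clients []clientStub) (frontChannel, backChannel []clientStub) {
	for _, c := range clients {
		if c.FrontChannelLogoutURI != "" {
			frontChannel = append(frontChannel, c)
		}
		if c.BackChannelLogoutURI != "" {
			backChannel = append(backChannel, c)
		}
	}
	return frontChannel, backChannel
}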
- challenge := makeID("challenge", network, tc.key) - verifier := makeID("verifier", network, tc.key) - c := MockLogoutRequest(tc.key, tc.withClient, network) - if tc.withClient { - require.NoError(t, clientManager.CreateClient(context.Background(), c.Client)) // Ignore errors that are caused by duplication - } - - _, err := m.GetLogoutRequest(context.Background(), challenge) - require.Error(t, err) - - require.NoError(t, m.CreateLogoutRequest(context.Background(), c)) - - got2, err := m.GetLogoutRequest(context.Background(), challenge) - require.NoError(t, err) - assert.False(t, got2.WasHandled) - assert.False(t, got2.Accepted) - compareLogoutRequest(t, c, got2) - - if k%2 == 0 { - got2, err = m.AcceptLogoutRequest(context.Background(), challenge) - require.NoError(t, err) - assert.True(t, got2.Accepted) - compareLogoutRequest(t, c, got2) - - got3, err := m.VerifyAndInvalidateLogoutRequest(context.Background(), verifier) - require.NoError(t, err) - assert.True(t, got3.Accepted) - assert.True(t, got3.WasHandled) - compareLogoutRequest(t, c, got3) - - _, err = m.VerifyAndInvalidateLogoutRequest(context.Background(), verifier) - require.Error(t, err) - - got2, err = m.GetLogoutRequest(context.Background(), challenge) - require.NoError(t, err) - compareLogoutRequest(t, got3, got2) - assert.True(t, got2.WasHandled) - } else { - require.NoError(t, m.RejectLogoutRequest(context.Background(), challenge)) - _, err = m.GetLogoutRequest(context.Background(), challenge) - require.Error(t, err) - } - }) - } - }) - }) - - t.Run("case=foreign key regression", func(t *testing.T) { - cl := &client.Client{LegacyClientID: uuid.New().String()} - require.NoError(t, clientManager.CreateClient(context.Background(), cl)) - - subject := uuid.New().String() - s := LoginSession{ - ID: uuid.New().String(), - AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Minute).Add(-time.Minute).UTC()), - Subject: subject, - } - - err := m.CreateLoginSession(context.Background(), &s) - require.NoError(t, err) - - lr := &LoginRequest{ - ID: uuid.New().String(), - Subject: uuid.New().String(), - Verifier: uuid.New().String(), - Client: cl, - AuthenticatedAt: sqlxx.NullTime(time.Now()), - RequestedAt: time.Now(), - SessionID: sqlxx.NullString(s.ID), - } - - require.NoError(t, m.CreateLoginRequest(context.Background(), lr)) - expected := &OAuth2ConsentRequest{ - ID: uuid.New().String(), - Skip: true, - Subject: subject, - OpenIDConnectContext: nil, - Client: cl, - ClientID: cl.LegacyClientID, - RequestURL: "", - LoginChallenge: sqlxx.NullString(lr.ID), - LoginSessionID: sqlxx.NullString(s.ID), - Verifier: uuid.New().String(), - CSRF: uuid.New().String(), - } - require.NoError(t, m.CreateConsentRequest(context.Background(), expected)) - - result, err := m.GetConsentRequest(context.Background(), expected.ID) - require.NoError(t, err) - assert.EqualValues(t, expected.ID, result.ID) - - require.NoError(t, m.DeleteLoginSession(context.Background(), s.ID)) - - result, err = m.GetConsentRequest(context.Background(), expected.ID) - require.NoError(t, err) - assert.EqualValues(t, expected.ID, result.ID) - }) - } -} - -func compareLogoutRequest(t *testing.T, a, b *LogoutRequest) { - require.True(t, (a.Client != nil && b.Client != nil) || (a.Client == nil && b.Client == nil)) - if a.Client != nil { - assert.EqualValues(t, a.Client.GetID(), b.Client.GetID()) - } - - assert.EqualValues(t, a.ID, b.ID) - assert.EqualValues(t, a.Subject, b.Subject) - assert.EqualValues(t, a.Verifier, b.Verifier) - assert.EqualValues(t, a.RequestURL, 
b.RequestURL) - assert.EqualValues(t, a.PostLogoutRedirectURI, b.PostLogoutRedirectURI) - assert.EqualValues(t, a.RPInitiated, b.RPInitiated) - assert.EqualValues(t, a.SessionID, b.SessionID) -} - -func compareAuthenticationRequest(t *testing.T, a, b *LoginRequest) { - assert.EqualValues(t, a.Client.GetID(), b.Client.GetID()) - assert.EqualValues(t, a.ID, b.ID) - assert.EqualValues(t, *a.OpenIDConnectContext, *b.OpenIDConnectContext) - assert.EqualValues(t, a.Subject, b.Subject) - assert.EqualValues(t, a.RequestedScope, b.RequestedScope) - assert.EqualValues(t, a.Verifier, b.Verifier) - assert.EqualValues(t, a.RequestURL, b.RequestURL) - assert.EqualValues(t, a.CSRF, b.CSRF) - assert.EqualValues(t, a.Skip, b.Skip) - assert.EqualValues(t, a.SessionID, b.SessionID) -} - -func compareConsentRequest(t *testing.T, a, b *OAuth2ConsentRequest) { - assert.EqualValues(t, a.Client.GetID(), b.Client.GetID()) - assert.EqualValues(t, a.ID, b.ID) - assert.EqualValues(t, *a.OpenIDConnectContext, *b.OpenIDConnectContext) - assert.EqualValues(t, a.Subject, b.Subject) - assert.EqualValues(t, a.RequestedScope, b.RequestedScope) - assert.EqualValues(t, a.Verifier, b.Verifier) - assert.EqualValues(t, a.RequestURL, b.RequestURL) - assert.EqualValues(t, a.CSRF, b.CSRF) - assert.EqualValues(t, a.Skip, b.Skip) - assert.EqualValues(t, a.LoginChallenge, b.LoginChallenge) - assert.EqualValues(t, a.LoginSessionID, b.LoginSessionID) -} diff --git a/consent/registry.go b/consent/registry.go index aa5fae8b758..552c6cd6090 100644 --- a/consent/registry.go +++ b/consent/registry.go @@ -4,11 +4,11 @@ package consent import ( - "context" - - "github.com/ory/fosite/handler/openid" - "github.com/ory/hydra/client" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/hydra/v2/x" ) type InternalRegistry interface { @@ -16,15 +16,22 @@ type InternalRegistry interface { x.RegistryCookieStore x.RegistryLogger x.HTTPClientProvider + x.TracingProvider + x.NetworkProvider + kratos.Provider Registry client.Registry + FlowCipher() *aead.XChaCha20Poly1305 OAuth2Storage() x.FositeStorer OpenIDConnectRequestValidator() *openid.OpenIDConnectRequestValidator } type Registry interface { - ConsentManager() Manager + ManagerProvider + ObfuscatedSubjectManagerProvider + LoginManagerProvider + LogoutManagerProvider + ConsentStrategy() Strategy - SubjectIdentifierAlgorithm(ctx context.Context) map[string]SubjectIdentifierAlgorithm } diff --git a/consent/sdk_test.go b/consent/sdk_test.go deleted file mode 100644 index ffcfb74fd8a..00000000000 --- a/consent/sdk_test.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent_test - -import ( - "context" - "fmt" - "net/http/httptest" - "testing" - "time" - - hydra "github.com/ory/hydra-client-go/v2" - - "github.com/ory/x/httprouterx" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - . 
"github.com/ory/hydra/consent" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" -) - -func makeID(base string, network string, key string) string { - return fmt.Sprintf("%s-%s-%s", base, network, key) -} - -func TestSDK(t *testing.T) { - ctx := context.Background() - network := "t1" - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(ctx, config.KeyIssuerURL, "https://www.ory.sh") - conf.MustSet(ctx, config.KeyAccessTokenLifespan, time.Minute) - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - - router := x.NewRouterPublic() - h := NewHandler(reg, conf) - - h.SetRoutes(httprouterx.NewRouterAdminWithPrefixAndRouter(router.Router, "/admin", conf.AdminURL)) - ts := httptest.NewServer(router) - - sdk := hydra.NewAPIClient(hydra.NewConfiguration()) - sdk.GetConfig().Servers = hydra.ServerConfigurations{{URL: ts.URL}} - - m := reg.ConsentManager() - - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ - ID: "session1", - Subject: "subject1", - })) - - ar1, _ := MockAuthRequest("ar-1", false, network) - ar2, _ := MockAuthRequest("ar-2", false, network) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), ar1.Client)) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), ar2.Client)) - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ - ID: ar1.SessionID.String(), - Subject: ar1.Subject, - })) - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ - ID: ar2.SessionID.String(), - Subject: ar2.Subject, - })) - require.NoError(t, m.CreateLoginRequest(context.Background(), ar1)) - require.NoError(t, m.CreateLoginRequest(context.Background(), ar2)) - - cr1, hcr1 := MockConsentRequest("1", false, 0, false, false, false, "fk-login-challenge", network) - cr2, hcr2 := MockConsentRequest("2", false, 0, false, false, false, "fk-login-challenge", network) - cr3, hcr3 := MockConsentRequest("3", true, 3600, false, false, false, "fk-login-challenge", network) - cr4, hcr4 := MockConsentRequest("4", true, 3600, false, false, false, "fk-login-challenge", network) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cr1.Client)) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cr2.Client)) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cr3.Client)) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), cr4.Client)) - require.NoError(t, m.CreateLoginRequest(context.Background(), &LoginRequest{ID: cr1.LoginChallenge.String(), Subject: cr1.Subject, Client: cr1.Client, Verifier: cr1.ID})) - require.NoError(t, m.CreateLoginRequest(context.Background(), &LoginRequest{ID: cr2.LoginChallenge.String(), Subject: cr2.Subject, Client: cr2.Client, Verifier: cr2.ID})) - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ID: cr3.LoginSessionID.String()})) - require.NoError(t, m.CreateLoginRequest(context.Background(), &LoginRequest{ID: cr3.LoginChallenge.String(), Subject: cr3.Subject, Client: cr3.Client, Verifier: cr3.ID, RequestedAt: hcr3.RequestedAt, SessionID: cr3.LoginSessionID})) - require.NoError(t, m.CreateLoginSession(context.Background(), &LoginSession{ID: cr4.LoginSessionID.String()})) - require.NoError(t, m.CreateLoginRequest(context.Background(), &LoginRequest{ID: cr4.LoginChallenge.String(), 
Client: cr4.Client, Verifier: cr4.ID, SessionID: cr4.LoginSessionID})) - require.NoError(t, m.CreateConsentRequest(context.Background(), cr1)) - require.NoError(t, m.CreateConsentRequest(context.Background(), cr2)) - require.NoError(t, m.CreateConsentRequest(context.Background(), cr3)) - require.NoError(t, m.CreateConsentRequest(context.Background(), cr4)) - _, err := m.HandleConsentRequest(context.Background(), hcr1) - require.NoError(t, err) - _, err = m.HandleConsentRequest(context.Background(), hcr2) - require.NoError(t, err) - _, err = m.HandleConsentRequest(context.Background(), hcr3) - require.NoError(t, err) - _, err = m.HandleConsentRequest(context.Background(), hcr4) - require.NoError(t, err) - - lur1 := MockLogoutRequest("testsdk-1", true, network) - require.NoError(t, reg.ClientManager().CreateClient(context.Background(), lur1.Client)) - require.NoError(t, m.CreateLogoutRequest(context.Background(), lur1)) - - lur2 := MockLogoutRequest("testsdk-2", false, network) - require.NoError(t, m.CreateLogoutRequest(context.Background(), lur2)) - - crGot, _, err := sdk.OAuth2Api.GetOAuth2ConsentRequest(ctx).ConsentChallenge(makeID("challenge", network, "1")).Execute() - require.NoError(t, err) - compareSDKConsentRequest(t, cr1, *crGot) - - crGot, _, err = sdk.OAuth2Api.GetOAuth2ConsentRequest(ctx).ConsentChallenge(makeID("challenge", network, "2")).Execute() - require.NoError(t, err) - compareSDKConsentRequest(t, cr2, *crGot) - - arGot, _, err := sdk.OAuth2Api.GetOAuth2LoginRequest(ctx).LoginChallenge(makeID("challenge", network, "ar-1")).Execute() - require.NoError(t, err) - compareSDKLoginRequest(t, ar1, *arGot) - - arGot, _, err = sdk.OAuth2Api.GetOAuth2LoginRequest(ctx).LoginChallenge(makeID("challenge", network, "ar-2")).Execute() - require.NoError(t, err) - compareSDKLoginRequest(t, ar2, *arGot) - - _, err = sdk.OAuth2Api.RevokeOAuth2LoginSessions(ctx).Subject("subject1").Execute() - require.NoError(t, err) - - _, err = sdk.OAuth2Api.RevokeOAuth2ConsentSessions(ctx).Subject("subject1").Execute() - require.Error(t, err) - - _, err = sdk.OAuth2Api.RevokeOAuth2ConsentSessions(ctx).Subject(cr4.Subject).Client(cr4.Client.GetID()).Execute() - require.NoError(t, err) - - _, err = sdk.OAuth2Api.RevokeOAuth2ConsentSessions(ctx).Subject("subject1").All(true).Execute() - require.NoError(t, err) - - _, _, err = sdk.OAuth2Api.GetOAuth2ConsentRequest(ctx).ConsentChallenge(makeID("challenge", network, "1")).Execute() - require.Error(t, err) - - crGot, _, err = sdk.OAuth2Api.GetOAuth2ConsentRequest(ctx).ConsentChallenge(makeID("challenge", network, "2")).Execute() - require.NoError(t, err) - compareSDKConsentRequest(t, cr2, *crGot) - - _, err = sdk.OAuth2Api.RevokeOAuth2ConsentSessions(ctx).Subject("subject2").Client("fk-client-2").Execute() - require.NoError(t, err) - - _, _, err = sdk.OAuth2Api.GetOAuth2ConsentRequest(ctx).ConsentChallenge(makeID("challenge", network, "2")).Execute() - require.Error(t, err) - - csGot, _, err := sdk.OAuth2Api.ListOAuth2ConsentSessions(ctx).Subject("subject3").Execute() - require.NoError(t, err) - assert.Equal(t, 1, len(csGot)) - cs := csGot[0] - assert.Equal(t, makeID("challenge", network, "3"), cs.ConsentRequest.Challenge) - - csGot, _, err = sdk.OAuth2Api.ListOAuth2ConsentSessions(ctx).Subject("subject2").Execute() - require.NoError(t, err) - assert.Equal(t, 0, len(csGot)) - - csGot, _, err = sdk.OAuth2Api.ListOAuth2ConsentSessions(ctx).Subject("subject3").LoginSessionId("fk-login-session-t1-3").Execute() - require.NoError(t, err) - assert.Equal(t, 1, 
len(csGot)) - cs = csGot[0] - assert.Equal(t, makeID("challenge", network, "3"), cs.ConsentRequest.Challenge) - - csGot, _, err = sdk.OAuth2Api.ListOAuth2ConsentSessions(ctx).Subject("subject3").LoginSessionId("fk-login-session-t1-X").Execute() - require.NoError(t, err) - assert.Equal(t, 0, len(csGot)) - - luGot, _, err := sdk.OAuth2Api.GetOAuth2LogoutRequest(ctx).LogoutChallenge(makeID("challenge", network, "testsdk-1")).Execute() - require.NoError(t, err) - compareSDKLogoutRequest(t, lur1, luGot) - - luaGot, _, err := sdk.OAuth2Api.AcceptOAuth2LogoutRequest(ctx).LogoutChallenge(makeID("challenge", network, "testsdk-1")).Execute() - require.NoError(t, err) - assert.EqualValues(t, "https://www.ory.sh/oauth2/sessions/logout?logout_verifier="+makeID("verifier", network, "testsdk-1"), luaGot.RedirectTo) - - _, err = sdk.OAuth2Api.RejectOAuth2LogoutRequest(ctx).LogoutChallenge(lur2.ID).Execute() - require.NoError(t, err) - - _, _, err = sdk.OAuth2Api.GetOAuth2LogoutRequest(ctx).LogoutChallenge(lur2.ID).Execute() - require.Error(t, err) -} - -func compareSDKLoginRequest(t *testing.T, expected *LoginRequest, got hydra.OAuth2LoginRequest) { - assert.EqualValues(t, expected.ID, got.Challenge) - assert.EqualValues(t, expected.Subject, got.Subject) - assert.EqualValues(t, expected.Skip, got.Skip) - assert.EqualValues(t, expected.Client.GetID(), *got.Client.ClientId) -} - -func compareSDKConsentRequest(t *testing.T, expected *OAuth2ConsentRequest, got hydra.OAuth2ConsentRequest) { - assert.EqualValues(t, expected.ID, got.Challenge) - assert.EqualValues(t, expected.Subject, *got.Subject) - assert.EqualValues(t, expected.Skip, *got.Skip) - assert.EqualValues(t, expected.Client.GetID(), *got.Client.ClientId) -} - -func compareSDKLogoutRequest(t *testing.T, expected *LogoutRequest, got *hydra.OAuth2LogoutRequest) { - assert.EqualValues(t, expected.Subject, *got.Subject) - assert.EqualValues(t, expected.SessionID, *got.Sid) - assert.EqualValues(t, expected.SessionID, *got.Sid) - assert.EqualValues(t, expected.RequestURL, *got.RequestUrl) - assert.EqualValues(t, expected.RPInitiated, *got.RpInitiated) -} diff --git a/consent/strategy.go b/consent/strategy.go index 3ae24d0172a..8c033b741a7 100644 --- a/consent/strategy.go +++ b/consent/strategy.go @@ -7,13 +7,25 @@ import ( "context" "net/http" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" ) -var _ Strategy = new(DefaultStrategy) +var _ Strategy = (*defaultStrategy)(nil) type Strategy interface { - HandleOAuth2AuthorizationRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, req fosite.AuthorizeRequester) (*AcceptOAuth2ConsentRequest, error) - HandleOpenIDConnectLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*LogoutResult, error) + HandleOAuth2AuthorizationRequest( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + req fosite.AuthorizeRequester, + ) (*flow.Flow, error) + HandleOAuth2DeviceAuthorizationRequest( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + ) (*flow.Flow, error) + HandleOpenIDConnectLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*flow.LogoutResult, error) + HandleHeadlessLogout(ctx context.Context, w http.ResponseWriter, r *http.Request, sid string) error ObfuscateSubjectIdentifier(ctx context.Context, cl fosite.Client, subject, forcedIdentifier string) (string, error) } diff --git a/consent/strategy_default.go b/consent/strategy_default.go index 
ecbeccb76e9..b692a51c9ea 100644 --- a/consent/strategy_default.go +++ b/consent/strategy_default.go @@ -5,74 +5,61 @@ package consent import ( "context" + stderrs "errors" "fmt" + "io" "net/http" "net/url" + "slices" "strconv" "strings" "time" - "github.com/twmb/murmur3" - - "github.com/ory/hydra/driver/config" - - "github.com/ory/x/errorsx" - - "github.com/ory/x/sqlcon" - "github.com/gorilla/sessions" + "github.com/hashicorp/go-retryablehttp" "github.com/pborman/uuid" "github.com/pkg/errors" "github.com/sirupsen/logrus" - - jwtgo "github.com/ory/fosite/token/jwt" - - "github.com/ory/x/sqlxx" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/token/jwt" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/x" "github.com/ory/x/mapx" - "github.com/ory/x/stringslice" + "github.com/ory/x/otelx" + "github.com/ory/x/sqlcon" + "github.com/ory/x/sqlxx" "github.com/ory/x/stringsx" "github.com/ory/x/urlx" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/x" ) const ( + deviceVerificationPath = "/oauth2/device/verify" CookieAuthenticationSIDName = "sid" ) -type DefaultStrategy struct { - c *config.DefaultProvider - r InternalRegistry -} +type defaultStrategy struct{ r InternalRegistry } -func NewStrategy( - r InternalRegistry, - c *config.DefaultProvider, -) *DefaultStrategy { - return &DefaultStrategy{ - c: c, - r: r, - } +func NewStrategy(r InternalRegistry) Strategy { + return &defaultStrategy{r: r} } -var ErrAbortOAuth2Request = errors.New("the OAuth 2.0 Authorization request must be aborted") -var ErrNoPreviousConsentFound = errors.New("no previous OAuth 2.0 Consent could be found for this access request") -var ErrNoAuthenticationSessionFound = errors.New("no previous login session was found") -var ErrHintDoesNotMatchAuthentication = errors.New("subject from hint does not match subject from session") +var ErrAbortOAuth2Request = stderrs.New("the OAuth 2.0 Authorization request must be aborted") +var ErrNoPreviousConsentFound = stderrs.New("no previous OAuth 2.0 Consent could be found for this access request") +var ErrNoAuthenticationSessionFound = stderrs.New("no previous login session was found") +var ErrHintDoesNotMatchAuthentication = stderrs.New("subject from hint does not match subject from session") -func (s *DefaultStrategy) matchesValueFromSession(ctx context.Context, c fosite.Client, hintSubject string, sessionSubject string) error { +func (s *defaultStrategy) matchesValueFromSession(ctx context.Context, c fosite.Client, hintSubject, sessionSubject string) error { obfuscatedUserID, err := s.ObfuscateSubjectIdentifier(ctx, c, sessionSubject, "") if err != nil { return err } var forcedObfuscatedUserID string - if s, err := s.r.ConsentManager().GetForcedObfuscatedLoginSession(ctx, c.GetID(), hintSubject); errors.Is(err, x.ErrNotFound) { + if s, err := s.r.ObfuscatedSubjectManager().GetForcedObfuscatedLoginSession(ctx, c.GetID(), 
hintSubject); errors.Is(err, x.ErrNotFound) { // do nothing } else if err != nil { return err @@ -87,19 +74,19 @@ func (s *DefaultStrategy) matchesValueFromSession(ctx context.Context, c fosite. return nil } -func (s *DefaultStrategy) authenticationSession(ctx context.Context, w http.ResponseWriter, r *http.Request) (*LoginSession, error) { +func (s *defaultStrategy) authenticationSession(ctx context.Context, _ http.ResponseWriter, r *http.Request) (*flow.LoginSession, error) { store, err := s.r.CookieStore(ctx) if err != nil { return nil, err } // We try to open the session cookie. If it does not exist (indicated by the error), we must authenticate the user. - cookie, err := store.Get(r, s.c.SessionCookieName(ctx)) + cookie, err := store.Get(r, s.r.Config().SessionCookieName(ctx)) if err != nil { s.r.Logger(). WithRequest(r). WithError(err).Debug("User logout skipped because cookie store returned an error.") - return nil, errorsx.WithStack(ErrNoAuthenticationSessionFound) + return nil, errors.WithStack(ErrNoAuthenticationSessionFound) } sessionID := mapx.GetStringDefault(cookie.Values, CookieAuthenticationSIDName, "") @@ -107,14 +94,14 @@ func (s *DefaultStrategy) authenticationSession(ctx context.Context, w http.Resp s.r.Logger(). WithRequest(r). Debug("User logout skipped because cookie exists but session value is empty.") - return nil, errorsx.WithStack(ErrNoAuthenticationSessionFound) + return nil, errors.WithStack(ErrNoAuthenticationSessionFound) } - session, err := s.r.ConsentManager().GetRememberedLoginSession(r.Context(), sessionID) + session, err := s.r.LoginManager().GetRememberedLoginSession(ctx, sessionID) if errors.Is(err, x.ErrNotFound) { s.r.Logger().WithRequest(r).WithError(err). Debug("User logout skipped because cookie exists and session value exist but are not remembered any more.") - return nil, errorsx.WithStack(ErrNoAuthenticationSessionFound) + return nil, errors.WithStack(ErrNoAuthenticationSessionFound) } else if err != nil { return nil, err } @@ -122,22 +109,30 @@ func (s *DefaultStrategy) authenticationSession(ctx context.Context, w http.Resp return session, nil } -func (s *DefaultStrategy) requestAuthentication(ctx context.Context, w http.ResponseWriter, r *http.Request, ar fosite.AuthorizeRequester) error { +func (s *defaultStrategy) requestAuthentication( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + ar fosite.AuthorizeRequester, + f *flow.Flow, +) (err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.requestAuthentication") + defer otelx.End(span, &err) + prompt := stringsx.Splitx(ar.GetRequestForm().Get("prompt"), " ") - if stringslice.Has(prompt, "login") { - return s.forwardAuthenticationRequest(ctx, w, r, ar, "", time.Time{}, nil) + if slices.Contains(prompt, "login") { + return s.forwardAuthenticationRequest(ctx, w, r, ar, nil, f) } session, err := s.authenticationSession(ctx, w, r) if errors.Is(err, ErrNoAuthenticationSessionFound) { - return s.forwardAuthenticationRequest(ctx, w, r, ar, "", time.Time{}, nil) + return s.forwardAuthenticationRequest(ctx, w, r, ar, nil, f) } else if err != nil { return err } maxAge := int64(-1) if ma := ar.GetRequestForm().Get("max_age"); len(ma) > 0 { - var err error maxAge, err = strconv.ParseInt(ma, 10, 64) if err != nil { return err @@ -145,40 +140,40 @@ func (s *DefaultStrategy) requestAuthentication(ctx context.Context, w http.Resp } if maxAge > -1 && 
time.Time(session.AuthenticatedAt).UTC().Add(time.Second*time.Duration(maxAge)).Before(time.Now().UTC()) { - if stringslice.Has(prompt, "none") { - return errorsx.WithStack(fosite.ErrLoginRequired.WithHint("Request failed because prompt is set to 'none' and authentication time reached 'max_age'.")) + if slices.Contains(prompt, "none") { + return errors.WithStack(fosite.ErrLoginRequired.WithHint("Request failed because prompt is set to 'none' and authentication time reached 'max_age'.")) } - return s.forwardAuthenticationRequest(ctx, w, r, ar, "", time.Time{}, nil) + return s.forwardAuthenticationRequest(ctx, w, r, ar, nil, f) } idTokenHint := ar.GetRequestForm().Get("id_token_hint") if idTokenHint == "" { - return s.forwardAuthenticationRequest(ctx, w, r, ar, session.Subject, time.Time(session.AuthenticatedAt), session) + return s.forwardAuthenticationRequest(ctx, w, r, ar, session, f) } - hintSub, err := s.getSubjectFromIDTokenHint(r.Context(), idTokenHint) + hintSub, err := s.getSubjectFromIDTokenHint(ctx, idTokenHint) if err != nil { return err } - if err := s.matchesValueFromSession(r.Context(), ar.GetClient(), hintSub, session.Subject); errors.Is(err, ErrHintDoesNotMatchAuthentication) { - return errorsx.WithStack(fosite.ErrLoginRequired.WithHint("Request failed because subject claim from id_token_hint does not match subject from authentication session.")) + if err := s.matchesValueFromSession(ctx, ar.GetClient(), hintSub, session.Subject); errors.Is(err, ErrHintDoesNotMatchAuthentication) { + return errors.WithStack(fosite.ErrLoginRequired.WithHint("Request failed because subject claim from id_token_hint does not match subject from authentication session.")) } - return s.forwardAuthenticationRequest(ctx, w, r, ar, session.Subject, time.Time(session.AuthenticatedAt), session) + return s.forwardAuthenticationRequest(ctx, w, r, ar, session, f) } -func (s *DefaultStrategy) getIDTokenHintClaims(ctx context.Context, idTokenHint string) (jwtgo.MapClaims, error) { - token, err := s.r.OpenIDJWTStrategy().Decode(ctx, idTokenHint) - if ve := new(jwtgo.ValidationError); errors.As(err, &ve) && ve.Errors == jwtgo.ValidationErrorExpired { +func (s *defaultStrategy) getIDTokenHintClaims(ctx context.Context, idTokenHint string) (jwt.MapClaims, error) { + token, err := s.r.OpenIDJWTSigner().Decode(ctx, idTokenHint) + if ve := new(jwt.ValidationError); errors.As(err, &ve) && ve.Errors == jwt.ValidationErrorExpired { // Expired is ok } else if err != nil { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(err.Error())) + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithHint(err.Error())) } return token.Claims, nil } -func (s *DefaultStrategy) getSubjectFromIDTokenHint(ctx context.Context, idTokenHint string) (string, error) { +func (s *defaultStrategy) getSubjectFromIDTokenHint(ctx context.Context, idTokenHint string) (string, error) { claims, err := s.getIDTokenHintClaims(ctx, idTokenHint) if err != nil { return "", err @@ -186,40 +181,55 @@ func (s *DefaultStrategy) getSubjectFromIDTokenHint(ctx context.Context, idToken sub, _ := claims["sub"].(string) if sub == "" { - return "", errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Failed to validate OpenID Connect request because provided id token from id_token_hint does not have a subject.")) + return "", errors.WithStack(fosite.ErrInvalidRequest.WithHint("Failed to validate OpenID Connect request because provided id token from id_token_hint does not have a subject.")) } return sub, nil } -func (s *DefaultStrategy) 
forwardAuthenticationRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, ar fosite.AuthorizeRequester, subject string, authenticatedAt time.Time, session *LoginSession) error { - if (subject != "" && authenticatedAt.IsZero()) || (subject == "" && !authenticatedAt.IsZero()) { - return errorsx.WithStack(fosite.ErrServerError.WithHint("Consent strategy returned a non-empty subject with an empty auth date, or an empty subject with a non-empty auth date.")) - } - +func (s *defaultStrategy) forwardAuthenticationRequest( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + ar fosite.AuthorizeRequester, + session *flow.LoginSession, + f *flow.Flow, +) error { + sessionID := uuid.New() skip := false - if subject != "" { + subject := "" + authenticatedAt := time.Time{} + + if session != nil { + sessionID = session.ID skip = true + subject = session.Subject + authenticatedAt = time.Time(session.AuthenticatedAt) } - // Let'id validate that prompt is actually not "none" if we can't skip authentication + // Let's validate that prompt is actually not "none" if we can't skip authentication prompt := stringsx.Splitx(ar.GetRequestForm().Get("prompt"), " ") - if stringslice.Has(prompt, "none") && !skip { - return errorsx.WithStack(fosite.ErrLoginRequired.WithHint(`Prompt 'none' was requested, but no existing login session was found.`)) + if slices.Contains(prompt, "none") && !skip { + return errors.WithStack(fosite.ErrLoginRequired.WithHint(`Prompt 'none' was requested, but no existing login session was found.`)) } // Set up csrf/challenge/verifier values - verifier := strings.Replace(uuid.New(), "-", "", -1) - challenge := strings.Replace(uuid.New(), "-", "", -1) - csrf := strings.Replace(uuid.New(), "-", "", -1) + challenge := strings.ReplaceAll(uuid.New(), "-", "") + csrf := strings.ReplaceAll(uuid.New(), "-", "") // Generate the request URL - iu := s.c.OAuth2AuthURL(ctx) - iu.RawQuery = r.URL.RawQuery + var requestURL string + if f != nil { + requestURL = f.RequestURL + } else { + oauth2URL := s.r.Config().OAuth2AuthURL(ctx) + oauth2URL.RawQuery = r.URL.RawQuery + requestURL = oauth2URL.String() + } - var idTokenHintClaims jwtgo.MapClaims + var idTokenHintClaims jwt.MapClaims if idTokenHint := ar.GetRequestForm().Get("id_token_hint"); len(idTokenHint) > 0 { - claims, err := s.getIDTokenHintClaims(r.Context(), idTokenHint) + claims, err := s.getIDTokenHintClaims(ctx, idTokenHint) if err != nil { return err } @@ -227,43 +237,45 @@ func (s *DefaultStrategy) forwardAuthenticationRequest(ctx context.Context, w ht idTokenHintClaims = claims } - sessionID := uuid.New() - if session != nil { - sessionID = session.ID - } else { - // Create a stub session so that we can later update it. 
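Stepping back from the mechanics, the max_age handling in requestAuthentication a few hunks above reduces to one time comparison: if the session's authentication time plus max_age already lies in the past, the login UI must be shown again, and with prompt=none the request instead fails with login_required. A self-contained sketch of that rule, with function and variable names that are mine rather than Hydra's:

package authsketch

import (
	"errors"
	"strconv"
	"time"
)

var errLoginRequired = errors.New("login_required: prompt=none but re-authentication is needed")

// needsReauthentication applies the OIDC max_age rule as used above:
// maxAgeParam is the raw "max_age" form value, authenticatedAt the session's
// authentication time. It reports whether the login screen must be shown, or
// returns an error when prompt=none forbids showing it.
func needsReauthentication(maxAgeParam string, authenticatedAt time.Time, promptNone bool) (bool, error) {
	if maxAgeParam == "" {
		return false, nil
	}
	maxAge, err := strconv.ParseInt(maxAgeParam, 10, 64)
	if err != nil {
		return false, err
	}
	expired := maxAge > -1 && authenticatedAt.UTC().Add(time.Duration(maxAge)*time.Second).Before(time.Now().UTC())
	if !expired {
		return false, nil
	}
	if promptNone {
		return false, errLoginRequired
	}
	return true, nil
}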
- if err := s.r.ConsentManager().CreateLoginSession(r.Context(), &LoginSession{ID: sessionID}); err != nil { - return err - } - } - // Set the session cl := sanitizeClientFromRequest(ar) - if err := s.r.ConsentManager().CreateLoginRequest( - r.Context(), - &LoginRequest{ + + if f == nil { + // Regular grant + f = &flow.Flow{ ID: challenge, - Verifier: verifier, - CSRF: csrf, - Skip: skip, RequestedScope: []string(ar.GetRequestedScopes()), RequestedAudience: []string(ar.GetRequestedAudience()), + LoginSkip: skip, Subject: subject, - Client: cl, - RequestURL: iu.String(), - AuthenticatedAt: sqlxx.NullTime(authenticatedAt), - RequestedAt: time.Now().Truncate(time.Second).UTC(), - SessionID: sqlxx.NullString(sessionID), - OpenIDConnectContext: &OAuth2ConsentRequestOpenIDConnectContext{ + OpenIDConnectContext: &flow.OAuth2ConsentRequestOpenIDConnectContext{ IDTokenHintClaims: idTokenHintClaims, ACRValues: stringsx.Splitx(ar.GetRequestForm().Get("acr_values"), " "), UILocales: stringsx.Splitx(ar.GetRequestForm().Get("ui_locales"), " "), Display: ar.GetRequestForm().Get("display"), LoginHint: ar.GetRequestForm().Get("login_hint"), }, - }, - ); err != nil { - return errorsx.WithStack(err) + Client: cl, + ClientID: cl.ID, + RequestURL: requestURL, + SessionID: sqlxx.NullString(sessionID), + LoginCSRF: csrf, + LoginAuthenticatedAt: sqlxx.NullTime(authenticatedAt), + RequestedAt: time.Now().Truncate(time.Second).UTC(), + State: flow.FlowStateLoginUnused, + NID: s.r.Networker().NetworkID(ctx), + } + } else { + // Device auth grant + f.ID = challenge + f.LoginSkip = skip + f.Subject = subject + f.SessionID = sqlxx.NullString(sessionID) + f.LoginCSRF = csrf + f.LoginAuthenticatedAt = sqlxx.NullTime(authenticatedAt) + f.RequestedAt = time.Now().Truncate(time.Second).UTC() + f.State = flow.FlowStateLoginUnused + f.NID = s.r.Networker().NetworkID(ctx) } store, err := s.r.CookieStore(ctx) @@ -271,24 +283,43 @@ func (s *DefaultStrategy) forwardAuthenticationRequest(ctx context.Context, w ht return err } - clientSpecificCookieNameLoginCSRF := fmt.Sprintf("%s_%d", s.r.Config().CookieNameLoginCSRF(ctx), murmur3.Sum32(cl.ID.Bytes())) - if err := createCsrfSession(w, r, s.r.Config(), store, clientSpecificCookieNameLoginCSRF, csrf, s.c.ConsentRequestMaxAge(ctx)); err != nil { - return errorsx.WithStack(err) + clientSpecificCookieNameLoginCSRF := fmt.Sprintf("%s_%s", s.r.Config().CookieNameLoginCSRF(ctx), cl.CookieSuffix()) + if err := setCSRFCookie(ctx, w, r, s.r.Config(), store, clientSpecificCookieNameLoginCSRF, csrf, s.r.Config().ConsentRequestMaxAge(ctx)); err != nil { + return err } - http.Redirect(w, r, urlx.SetQuery(s.c.LoginURL(ctx), url.Values{"login_challenge": {challenge}}).String(), http.StatusFound) + encodedFlow, err := f.ToLoginChallenge(ctx, s.r) + if err != nil { + return err + } + + var authURL url.URL + if slices.Contains(prompt, "registration") { + authURL = *s.r.Config().RegistrationURL(ctx) + } else { + authURL = *s.r.Config().LoginURL(ctx) + } + + query := url.Values{"login_challenge": {encodedFlow}} + if idSchema := ar.GetRequestForm().Get("identity_schema"); idSchema != "" { + query.Set("identity_schema", idSchema) + } + + authURL.RawQuery = query.Encode() + + http.Redirect(w, r, authURL.String(), http.StatusFound) // generate the verifier - return errorsx.WithStack(ErrAbortOAuth2Request) + return errors.WithStack(ErrAbortOAuth2Request) } -func (s *DefaultStrategy) revokeAuthenticationSession(ctx context.Context, w http.ResponseWriter, r *http.Request) error { +func (s *defaultStrategy) 
revokeAuthenticationSession(ctx context.Context, w http.ResponseWriter, r *http.Request) error { store, err := s.r.CookieStore(ctx) if err != nil { return err } - sid, err := s.revokeAuthenticationCookie(w, r, store) + sid, err := s.revokeAuthenticationCookie(ctx, w, r, store) if err != nil { return err } @@ -297,45 +328,47 @@ func (s *DefaultStrategy) revokeAuthenticationSession(ctx context.Context, w htt return nil } - return s.r.ConsentManager().DeleteLoginSession(r.Context(), sid) + _, err = s.r.LoginManager().DeleteLoginSession(ctx, sid) + return err } -func (s *DefaultStrategy) revokeAuthenticationCookie(w http.ResponseWriter, r *http.Request, ss sessions.Store) (string, error) { - ctx := r.Context() - cookie, _ := ss.Get(r, s.c.SessionCookieName(ctx)) +func (s *defaultStrategy) revokeAuthenticationCookie(ctx context.Context, w http.ResponseWriter, r *http.Request, ss sessions.Store) (string, error) { + cookie, _ := ss.Get(r, s.r.Config().SessionCookieName(ctx)) sid, _ := mapx.GetString(cookie.Values, CookieAuthenticationSIDName) cookie.Values[CookieAuthenticationSIDName] = "" cookie.Options.HttpOnly = true - cookie.Options.Path = "/" - cookie.Options.SameSite = s.c.CookieSameSiteMode(ctx) - cookie.Options.Secure = s.c.CookieSecure(ctx) - cookie.Options.Domain = s.c.CookieDomain(ctx) + cookie.Options.Path = s.r.Config().SessionCookiePath(ctx) + cookie.Options.SameSite = s.r.Config().CookieSameSiteMode(ctx) + cookie.Options.Secure = s.r.Config().CookieSecure(ctx) + cookie.Options.Domain = s.r.Config().CookieDomain(ctx) cookie.Options.MaxAge = -1 if err := cookie.Save(r, w); err != nil { - return "", errorsx.WithStack(err) + return "", errors.WithStack(err) } return sid, nil } -func (s *DefaultStrategy) verifyAuthentication(w http.ResponseWriter, r *http.Request, req fosite.AuthorizeRequester, verifier string) (*HandledLoginRequest, error) { - ctx := r.Context() - session, err := s.r.ConsentManager().VerifyAndInvalidateLoginRequest(ctx, verifier) - if errors.Is(err, sqlcon.ErrNoRows) { - return nil, errorsx.WithStack(fosite.ErrAccessDenied.WithHint("The login verifier has already been used, has not been granted, or is invalid.")) - } else if err != nil { +func (s *defaultStrategy) verifyAuthentication( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + req fosite.AuthorizeRequester, + verifier string, +) (_ *flow.Flow, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.verifyAuthentication") + defer otelx.End(span, &err) + + f, err := flow.DecodeAndInvalidateLoginVerifier(ctx, s.r, verifier) + if err != nil { return nil, err } - if session.HasError() { - session.Error.SetDefaults(loginRequestDeniedErrorName) - return nil, errorsx.WithStack(session.Error.toRFCError()) - } - - if session.RequestedAt.Add(s.c.ConsentRequestMaxAge(ctx)).Before(time.Now()) { - return nil, errorsx.WithStack(fosite.ErrRequestUnauthorized.WithHint("The login request has expired. 
Please try again.")) + if f.LoginError.IsError() { + f.LoginError.SetDefaults(flow.LoginRequestDeniedErrorName) + return nil, errors.WithStack(f.LoginError.ToRFCError()) } store, err := s.r.CookieStore(ctx) @@ -343,31 +376,21 @@ func (s *DefaultStrategy) verifyAuthentication(w http.ResponseWriter, r *http.Re return nil, err } - clientSpecificCookieNameLoginCSRF := fmt.Sprintf("%s_%d", s.r.Config().CookieNameLoginCSRF(ctx), murmur3.Sum32(session.LoginRequest.Client.ID.Bytes())) - if err := validateCsrfSession(r, s.r.Config(), store, clientSpecificCookieNameLoginCSRF, session.LoginRequest.CSRF); err != nil { + clientSpecificCookieNameLoginCSRF := fmt.Sprintf("%s_%s", s.r.Config().CookieNameLoginCSRF(ctx), f.Client.CookieSuffix()) + if err := validateCSRFCookie(ctx, r, s.r.Config(), store, clientSpecificCookieNameLoginCSRF, f.LoginCSRF); err != nil { return nil, err } - if session.LoginRequest.Skip && !session.Remember { - return nil, errorsx.WithStack(fosite.ErrServerError.WithHint("The login request was previously remembered and can only be forgotten using the reject feature.")) - } - - if session.LoginRequest.Skip && session.Subject != session.LoginRequest.Subject { - // Revoke the session because there's clearly a mix up wrt the subject that's being authenticated - if err := s.revokeAuthenticationSession(ctx, w, r); err != nil { - return nil, err - } - - return nil, errorsx.WithStack(fosite.ErrServerError.WithHint("The login request is marked as remember, but the subject from the login confirmation does not match the original subject from the cookie.")) + if f.LoginSkip && !f.LoginRemember { + return nil, errors.WithStack(fosite.ErrServerError.WithHint("The login request was previously remembered and can only be forgotten using the reject feature.")) } - subjectIdentifier, err := s.ObfuscateSubjectIdentifier(ctx, req.GetClient(), session.Subject, session.ForceSubjectIdentifier) + subjectIdentifier, err := s.ObfuscateSubjectIdentifier(ctx, req.GetClient(), f.Subject, f.ForceSubjectIdentifier) if err != nil { return nil, err } - sessionID := session.LoginRequest.SessionID.String() - + sessionID := f.SessionID.String() if err := s.r.OpenIDConnectRequestValidator().ValidatePrompt(ctx, &fosite.AuthorizeRequest{ ResponseTypes: req.GetResponseTypes(), RedirectURI: req.GetRedirectURI(), @@ -387,11 +410,11 @@ func (s *DefaultStrategy) verifyAuthentication(w http.ResponseWriter, r *http.Re Subject: subjectIdentifier, IssuedAt: time.Now().UTC(), // doesn't matter ExpiresAt: time.Now().Add(time.Hour).UTC(), // doesn't matter - AuthTime: time.Time(session.AuthenticatedAt), - RequestedAt: session.RequestedAt, + AuthTime: time.Time(f.LoginAuthenticatedAt), + RequestedAt: f.RequestedAt, }, Headers: &jwt.Headers{}, - Subject: session.Subject, + Subject: f.Subject, }, }, }); errors.Is(err, fosite.ErrLoginRequired) { @@ -405,27 +428,44 @@ func (s *DefaultStrategy) verifyAuthentication(w http.ResponseWriter, r *http.Re return nil, err } - if session.ForceSubjectIdentifier != "" { - if err := s.r.ConsentManager().CreateForcedObfuscatedLoginSession(r.Context(), &ForcedObfuscatedLoginSession{ - Subject: session.Subject, + if f.ForceSubjectIdentifier != "" { + if err := s.r.ObfuscatedSubjectManager().CreateForcedObfuscatedLoginSession(ctx, &ForcedObfuscatedLoginSession{ + Subject: f.Subject, ClientID: req.GetClient().GetID(), - SubjectObfuscated: session.ForceSubjectIdentifier, + SubjectObfuscated: f.ForceSubjectIdentifier, }); err != nil { return nil, err } } - if !session.LoginRequest.Skip { - if 
time.Time(session.AuthenticatedAt).IsZero() { - return nil, errorsx.WithStack(fosite.ErrServerError.WithHint("Expected the handled login request to contain a valid authenticated_at value but it was zero. This is a bug which should be reported to https://github.com/ory/hydra.")) + rememberFor := s.r.Config().GetAuthenticationSessionLifespan(ctx) + if f.LoginRememberFor > 0 { + rememberFor = min(time.Second*time.Duration(f.LoginRememberFor), rememberFor) + } + + if !f.LoginSkip { + if time.Time(f.LoginAuthenticatedAt).IsZero() { + return nil, errors.WithStack(fosite.ErrServerError.WithHint( + "Expected the handled login request to contain a valid authenticated_at value but it was zero. " + + "This is a bug which should be reported to https://github.com/ory/hydra.")) } - if err := s.r.ConsentManager().ConfirmLoginSession(r.Context(), sessionID, time.Time(session.AuthenticatedAt), session.Subject, session.Remember); err != nil { + if err := s.r.LoginManager().ConfirmLoginSession(ctx, &flow.LoginSession{ + ID: sessionID, + AuthenticatedAt: f.LoginAuthenticatedAt, + Subject: f.Subject, + IdentityProviderSessionID: f.IdentityProviderSessionID, + Remember: f.LoginRemember, + ExpiresAt: sqlxx.NullTime(time.Now().Add(rememberFor).UTC()), + }); err != nil { + if errors.Is(err, sqlcon.ErrUniqueViolation) { + return nil, errors.WithStack(fosite.ErrAccessDenied.WithHint("The login verifier has already been used.")) + } return nil, err } } - if !session.Remember && !session.LoginRequest.Skip { + if !f.LoginRemember && !f.LoginSkip { // If the session should not be remembered (and we're actually not skipping), than the user clearly don't // wants us to store a cookie. So let's bust the authentication session (if one exists). if err := s.revokeAuthenticationSession(ctx, w, r); err != nil { @@ -433,41 +473,52 @@ func (s *DefaultStrategy) verifyAuthentication(w http.ResponseWriter, r *http.Re } } - if !session.Remember || session.LoginRequest.Skip { + if !f.LoginRemember || f.LoginSkip && !f.LoginExtendSessionLifespan { // If the user doesn't want to remember the session, we do not store a cookie. // If login was skipped, it means an authentication cookie was present and // we don't want to touch it (in order to preserve its original expiry date) - return session, nil + return f, nil } // Not a skipped login and the user asked to remember its session, store a cookie - cookie, _ := store.Get(r, s.c.SessionCookieName(ctx)) + cookie, _ := store.Get(r, s.r.Config().SessionCookieName(ctx)) cookie.Values[CookieAuthenticationSIDName] = sessionID - if session.RememberFor >= 0 { - cookie.Options.MaxAge = session.RememberFor + cookie.Options.MaxAge = int(s.r.Config().GetAuthenticationSessionLifespan(ctx).Seconds()) + if f.LoginRememberFor > 0 { + cookie.Options.MaxAge = f.LoginRememberFor } cookie.Options.HttpOnly = true - cookie.Options.Path = "/" - cookie.Options.SameSite = s.c.CookieSameSiteMode(ctx) - cookie.Options.Secure = s.c.CookieSecure(ctx) + cookie.Options.Path = s.r.Config().SessionCookiePath(ctx) + cookie.Options.SameSite = s.r.Config().CookieSameSiteMode(ctx) + cookie.Options.Secure = s.r.Config().CookieSecure(ctx) if err := cookie.Save(r, w); err != nil { - return nil, errorsx.WithStack(err) + return nil, errors.WithStack(err) } s.r.Logger().WithRequest(r). 
WithFields(logrus.Fields{ - "cookie_name": s.c.SessionCookieName(ctx), + "cookie_name": s.r.Config().SessionCookieName(ctx), "cookie_http_only": true, - "cookie_same_site": s.c.CookieSameSiteMode(ctx), - "cookie_secure": s.c.CookieSecure(ctx), + "cookie_same_site": s.r.Config().CookieSameSiteMode(ctx), + "cookie_secure": s.r.Config().CookieSecure(ctx), }).Debug("Authentication session cookie was set.") - return session, nil + + return f, nil } -func (s *DefaultStrategy) requestConsent(ctx context.Context, w http.ResponseWriter, r *http.Request, ar fosite.AuthorizeRequester, authenticationSession *HandledLoginRequest) error { +func (s *defaultStrategy) requestConsent( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + ar fosite.AuthorizeRequester, + f *flow.Flow, +) (err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.requestConsent") + defer otelx.End(span, &err) + prompt := stringsx.Splitx(ar.GetRequestForm().Get("prompt"), " ") - if stringslice.Has(prompt, "consent") { - return s.forwardConsentRequest(ctx, w, r, ar, authenticationSession, nil) + if slices.Contains(prompt, "consent") { + return s.forwardConsentRequest(ctx, w, r, ar, f, false) } // https://tools.ietf.org/html/rfc6749 @@ -488,10 +539,14 @@ func (s *DefaultStrategy) requestConsent(ctx context.Context, w http.ResponseWri // The OpenID Connect Test Tool fails if this returns `consent_required` when `prompt=none` is used. // According to the quote above, it should be ok to allow https to skip consent. // + // Device initiated flows are never allowed to skip consent, the user must always explicitly authorize the device. + // // This is tracked as issue: https://github.com/ory/hydra/issues/866 // This is also tracked as upstream issue: https://github.com/openid-certification/oidctest/issues/97 - if !(ar.GetRedirectURI().Scheme == "https" || (fosite.IsLocalhost(ar.GetRedirectURI()) && ar.GetRedirectURI().Scheme == "http")) { - return s.forwardConsentRequest(ctx, w, r, ar, authenticationSession, nil) + if f.DeviceChallengeID != "" { + return s.forwardConsentRequest(ctx, w, r, ar, f, false) + } else if !(ar.GetRedirectURI().Scheme == "https" || (fosite.IsLocalhost(ar.GetRedirectURI()) && ar.GetRedirectURI().Scheme == "http")) { + return s.forwardConsentRequest(ctx, w, r, ar, f, false) } } @@ -502,61 +557,39 @@ func (s *DefaultStrategy) requestConsent(ctx context.Context, w http.ResponseWri // return s.forwardConsentRequest(w, r, ar, authenticationSession, nil) // } - consentSessions, err := s.r.ConsentManager().FindGrantedAndRememberedConsentRequests(r.Context(), ar.GetClient().GetID(), authenticationSession.Subject) + previousConsent, err := s.r.ConsentManager().FindGrantedAndRememberedConsentRequest(ctx, ar.GetClient().GetID(), f.Subject) if errors.Is(err, ErrNoPreviousConsentFound) { - return s.forwardConsentRequest(ctx, w, r, ar, authenticationSession, nil) + return s.forwardConsentRequest(ctx, w, r, ar, f, false) } else if err != nil { return err } - if found := matchScopes(s.r.Config().GetScopeStrategy(ctx), consentSessions, ar.GetRequestedScopes()); found != nil { - return s.forwardConsentRequest(ctx, w, r, ar, authenticationSession, found) - } - - return s.forwardConsentRequest(ctx, w, r, ar, authenticationSession, nil) + canSkip := matchScopes(s.r.Config().GetScopeStrategy(ctx), previousConsent.GrantedScope, ar.GetRequestedScopes()) + return s.forwardConsentRequest(ctx, w, r, ar, f, canSkip) } -func (s *DefaultStrategy) 
forwardConsentRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, ar fosite.AuthorizeRequester, as *HandledLoginRequest, cs *AcceptOAuth2ConsentRequest) error { - skip := false - if cs != nil { - skip = true - } - +func (s *defaultStrategy) forwardConsentRequest( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + ar fosite.AuthorizeRequester, + f *flow.Flow, + canSkipConsent bool, +) error { prompt := stringsx.Splitx(ar.GetRequestForm().Get("prompt"), " ") - if stringslice.Has(prompt, "none") && !skip { - return errorsx.WithStack(fosite.ErrConsentRequired.WithHint(`Prompt 'none' was requested, but no previous consent was found.`)) + if slices.Contains(prompt, "none") && !canSkipConsent { + return errors.WithStack(fosite.ErrConsentRequired.WithHint(`Prompt 'none' was requested, but no previous consent was found.`)) } - // Set up csrf/challenge/verifier values - verifier := strings.Replace(uuid.New(), "-", "", -1) - challenge := strings.Replace(uuid.New(), "-", "", -1) - csrf := strings.Replace(uuid.New(), "-", "", -1) + f.ToStateConsentUnused( + flow.WithConsentRequestID(strings.ReplaceAll(uuid.New(), "-", "")), + flow.WithConsentSkip(canSkipConsent), + flow.WithConsentCSRF(strings.ReplaceAll(uuid.New(), "-", "")), + ) - cl := sanitizeClientFromRequest(ar) - if err := s.r.ConsentManager().CreateConsentRequest( - r.Context(), - &OAuth2ConsentRequest{ - ID: challenge, - ACR: as.ACR, - AMR: as.AMR, - Verifier: verifier, - CSRF: csrf, - Skip: skip, - RequestedScope: []string(ar.GetRequestedScopes()), - RequestedAudience: []string(ar.GetRequestedAudience()), - Subject: as.Subject, - Client: cl, - RequestURL: as.LoginRequest.RequestURL, - AuthenticatedAt: as.AuthenticatedAt, - RequestedAt: as.RequestedAt, - ForceSubjectIdentifier: as.ForceSubjectIdentifier, - OpenIDConnectContext: as.LoginRequest.OpenIDConnectContext, - LoginSessionID: as.LoginRequest.SessionID, - LoginChallenge: sqlxx.NullString(as.LoginRequest.ID), - Context: as.Context, - }, - ); err != nil { - return errorsx.WithStack(err) + consentChallenge, err := f.ToConsentChallenge(ctx, s.r) + if err != nil { + return err } store, err := s.r.CookieStore(ctx) @@ -564,40 +597,49 @@ func (s *DefaultStrategy) forwardConsentRequest(ctx context.Context, w http.Resp return err } - clientSpecificCookieNameConsentCSRF := fmt.Sprintf("%s_%d", s.r.Config().CookieNameConsentCSRF(ctx), murmur3.Sum32(cl.ID.Bytes())) - if err := createCsrfSession(w, r, s.r.Config(), store, clientSpecificCookieNameConsentCSRF, csrf, s.c.ConsentRequestMaxAge(ctx)); err != nil { - return errorsx.WithStack(err) + if f.Client.GetID() != ar.GetClient().GetID() { + return errors.WithStack(fosite.ErrInvalidClient.WithHint("The flow client id does not match the authorize request client id.")) + } + + clientSpecificCookieNameConsentCSRF := fmt.Sprintf("%s_%s", s.r.Config().CookieNameConsentCSRF(ctx), f.Client.CookieSuffix()) + if err := setCSRFCookie(ctx, w, r, s.r.Config(), store, clientSpecificCookieNameConsentCSRF, f.ConsentCSRF.String(), s.r.Config().ConsentRequestMaxAge(ctx)); err != nil { + return errors.WithStack(err) } http.Redirect( w, r, - urlx.SetQuery(s.c.ConsentURL(ctx), url.Values{"consent_challenge": {challenge}}).String(), + urlx.SetQuery(s.r.Config().ConsentURL(ctx), url.Values{"consent_challenge": {consentChallenge}}).String(), http.StatusFound, ) // generate the verifier - return errorsx.WithStack(ErrAbortOAuth2Request) + return errors.WithStack(ErrAbortOAuth2Request) } -func (s *DefaultStrategy) verifyConsent(ctx 
context.Context, w http.ResponseWriter, r *http.Request, req fosite.AuthorizeRequester, verifier string) (*AcceptOAuth2ConsentRequest, error) { - session, err := s.r.ConsentManager().VerifyAndInvalidateConsentRequest(r.Context(), verifier) +func (s *defaultStrategy) verifyConsent(ctx context.Context, _ http.ResponseWriter, r *http.Request, verifier string) (_ *flow.Flow, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.verifyConsent") + defer otelx.End(span, &err) + + f, err := flow.DecodeAndInvalidateConsentVerifier(ctx, s.r, verifier) if errors.Is(err, sqlcon.ErrNoRows) { - return nil, errorsx.WithStack(fosite.ErrAccessDenied.WithHint("The consent verifier has already been used, has not been granted, or is invalid.")) + return nil, errors.WithStack(fosite.ErrAccessDenied.WithHint("The consent verifier has already been used, has not been granted, or is invalid.")) } else if err != nil { return nil, err + } else if f.Client.GetID() != r.URL.Query().Get("client_id") { + return nil, errors.WithStack(fosite.ErrInvalidClient.WithHint("The flow client id does not match the authorize request client id.")) } - if session.RequestedAt.Add(s.c.ConsentRequestMaxAge(ctx)).Before(time.Now()) { - return nil, errorsx.WithStack(fosite.ErrRequestUnauthorized.WithHint("The consent request has expired, please try again.")) + if f.ConsentError.IsError() { + f.ConsentError.SetDefaults(flow.ConsentRequestDeniedErrorName) + return nil, errors.WithStack(f.ConsentError.ToRFCError()) } - if session.HasError() { - session.Error.SetDefaults(consentRequestDeniedErrorName) - return nil, errorsx.WithStack(session.Error.toRFCError()) - } - - if time.Time(session.ConsentRequest.AuthenticatedAt).IsZero() { - return nil, errorsx.WithStack(fosite.ErrServerError.WithHint("The authenticatedAt value was not set.")) + if err := s.r.ConsentManager().CreateConsentSession(ctx, f); errors.Is(err, sqlcon.ErrUniqueViolation) { + return nil, errors.WithStack(fosite.ErrAccessDenied.WithHint("The consent verifier has already been used.")) + } else if errors.Is(err, sqlcon.ErrNoRows) { + return nil, errors.WithStack(fosite.ErrAccessDenied.WithHint("The consent verifier has already been used, has not been granted, or is invalid.")) + } else if err != nil { + return nil, err } store, err := s.r.CookieStore(ctx) @@ -605,28 +647,23 @@ func (s *DefaultStrategy) verifyConsent(ctx context.Context, w http.ResponseWrit return nil, err } - clientSpecificCookieNameConsentCSRF := fmt.Sprintf("%s_%d", s.r.Config().CookieNameConsentCSRF(ctx), murmur3.Sum32(session.ConsentRequest.Client.ID.Bytes())) - if err := validateCsrfSession(r, s.r.Config(), store, clientSpecificCookieNameConsentCSRF, session.ConsentRequest.CSRF); err != nil { + clientSpecificCookieNameConsentCSRF := fmt.Sprintf("%s_%s", s.r.Config().CookieNameConsentCSRF(ctx), f.Client.CookieSuffix()) + if err := validateCSRFCookie(ctx, r, s.r.Config(), store, clientSpecificCookieNameConsentCSRF, f.ConsentCSRF.String()); err != nil { return nil, err } - if session.Session == nil { - session.Session = NewConsentRequestSessionData() + if f.SessionAccessToken == nil { + f.SessionAccessToken = map[string]interface{}{} } - if session.Session.AccessToken == nil { - session.Session.AccessToken = map[string]interface{}{} + if f.SessionIDToken == nil { + f.SessionIDToken = map[string]interface{}{} } - if session.Session.IDToken == nil { - session.Session.IDToken = map[string]interface{}{} - } - - session.AuthenticatedAt = 
session.ConsentRequest.AuthenticatedAt - return session, nil + return f, nil } -func (s *DefaultStrategy) generateFrontChannelLogoutURLs(ctx context.Context, subject, sid string) ([]string, error) { +func (s *defaultStrategy) generateFrontChannelLogoutURLs(ctx context.Context, subject, sid string) ([]string, error) { clients, err := s.r.ConsentManager().ListUserAuthenticatedClientsWithFrontChannelLogout(ctx, subject, sid) if err != nil { return nil, err @@ -636,11 +673,11 @@ func (s *DefaultStrategy) generateFrontChannelLogoutURLs(ctx context.Context, su for _, c := range clients { u, err := url.Parse(c.FrontChannelLogoutURI) if err != nil { - return nil, errorsx.WithStack(fosite.ErrServerError.WithHintf("Unable to parse frontchannel_logout_uri because %s.", c.FrontChannelLogoutURI).WithDebug(err.Error())) + return nil, errors.WithStack(fosite.ErrServerError.WithHintf("Unable to parse frontchannel_logout_uri because %s.", c.FrontChannelLogoutURI).WithDebug(err.Error())) } urls = append(urls, urlx.SetQuery(u, url.Values{ - "iss": {s.c.IssuerURL(ctx).String()}, + "iss": {s.r.Config().IssuerURL(ctx).String()}, "sid": {sid}, }).String()) } @@ -648,13 +685,13 @@ func (s *DefaultStrategy) generateFrontChannelLogoutURLs(ctx context.Context, su return urls, nil } -func (s *DefaultStrategy) executeBackChannelLogout(ctx context.Context, r *http.Request, subject, sid string) error { +func (s *defaultStrategy) executeBackChannelLogout(ctx context.Context, r *http.Request, subject, sid string) error { clients, err := s.r.ConsentManager().ListUserAuthenticatedClientsWithBackChannelLogout(ctx, subject, sid) if err != nil { return err } - openIDKeyID, err := s.r.OpenIDJWTStrategy().GetPublicKeyID(ctx) + openIDKeyID, err := s.r.OpenIDJWTSigner().GetPublicKeyID(ctx) if err != nil { return err } @@ -674,9 +711,9 @@ func (s *DefaultStrategy) executeBackChannelLogout(ctx context.Context, r *http. // s.r.ConsentManager().GetForcedObfuscatedLoginSession(context.Background(), subject, ) // sub := s.obfuscateSubjectIdentifier(c, subject, ) - t, _, err := s.r.OpenIDJWTStrategy().Generate(ctx, jwtgo.MapClaims{ - "iss": s.c.IssuerURL(ctx).String(), - "aud": []string{c.LegacyClientID}, + t, _, err := s.r.OpenIDJWTSigner().Generate(ctx, jwt.MapClaims{ + "iss": s.r.Config().IssuerURL(ctx).String(), + "aud": []string{c.ID}, "iat": time.Now().UTC().Unix(), "jti": uuid.New(), "events": map[string]struct{}{"http://schemas.openid.net/event/backchannel-logout": {}}, @@ -691,20 +728,30 @@ func (s *DefaultStrategy) executeBackChannelLogout(ctx context.Context, r *http. tasks = append(tasks, task{url: c.BackChannelLogoutURI, clientID: c.GetID(), token: t}) } - var execute = func(t task) { + span := trace.SpanFromContext(ctx) + cl := s.r.HTTPClient(ctx) + execute := func(t task) { log := s.r.Logger().WithRequest(r). WithField("client_id", t.clientID). 
WithField("backchannel_logout_url", t.url) - res, err := s.r.HTTPClient(ctx).PostForm(t.url, url.Values{"logout_token": {t.token}}) + body := url.Values{"logout_token": {t.token}}.Encode() + req, err := retryablehttp.NewRequestWithContext(trace.ContextWithSpan(context.Background(), span), "POST", t.url, []byte(body)) + if err != nil { + log.WithError(err).Error("Unable to construct OpenID Connect Back-Channel Logout Request") + return + } + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + res, err := cl.Do(req) if err != nil { log.WithError(err).Error("Unable to execute OpenID Connect Back-Channel Logout Request") return } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck + res.Body = io.NopCloser(io.LimitReader(res.Body, 1<<20 /* 1 MB */)) // in case we ever start to read this response - if res.StatusCode != http.StatusOK { - log.WithError(errors.Errorf("expected HTTP status code %d but got %d", http.StatusOK, res.StatusCode)). + if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusNoContent { + log.WithError(errors.Errorf("expected HTTP status code %d or %d but got %d", http.StatusOK, http.StatusNoContent, res.StatusCode)). Error("Unable to execute OpenID Connect Back-Channel Logout Request") return } else { @@ -719,7 +766,7 @@ func (s *DefaultStrategy) executeBackChannelLogout(ctx context.Context, r *http. return nil } -func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.ResponseWriter, r *http.Request) (*LogoutResult, error) { +func (s *defaultStrategy) issueLogoutVerifier(ctx context.Context, w http.ResponseWriter, r *http.Request) (*flow.LogoutResult, error) { // There are two types of log out flows: // // - RP initiated logout @@ -727,10 +774,10 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon // Per default, we're redirecting to the global redirect URL. This is assuming that we're not an RP-initiated // logout flow. - redir := s.c.LogoutRedirectURL(ctx).String() + redir := s.r.Config().LogoutRedirectURL(ctx).String() if err := r.ParseForm(); err != nil { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest. + return nil, errors.WithStack(fosite.ErrInvalidRequest. WithHintf("Logout failed because the '%s' request could not be parsed.", r.Method), ) } @@ -744,34 +791,36 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon if len(state) > 0 { // state can only be set if it's an RP-initiated logout flow. If not, we should throw an error. - return nil, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter state is set but id_token_hint is missing.")) + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter state is set but id_token_hint is missing.")) } if len(requestedRedir) > 0 { // post_logout_redirect_uri can only be set if it's an RP-initiated logout flow. If not, we should throw an error. - return nil, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter post_logout_redirect_uri is set but id_token_hint is missing.")) + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter post_logout_redirect_uri is set but id_token_hint is missing.")) } session, err := s.authenticationSession(ctx, w, r) if errors.Is(err, ErrNoAuthenticationSessionFound) { // OP initiated log out but no session was found. Since we can not identify the user we can not call // any RPs. - s.r.AuditLogger(). 
+ s.r.Logger(). WithRequest(r). Info("User logout skipped because no authentication session exists.") http.Redirect(w, r, redir, http.StatusFound) - return nil, errorsx.WithStack(ErrAbortOAuth2Request) + return nil, errors.WithStack(ErrAbortOAuth2Request) } else if err != nil { return nil, err } challenge := uuid.New() - if err := s.r.ConsentManager().CreateLogoutRequest(r.Context(), &LogoutRequest{ + if err := s.r.LogoutManager().CreateLogoutRequest(ctx, &flow.LogoutRequest{ RequestURL: r.URL.String(), ID: challenge, Subject: session.Subject, SessionID: session.ID, Verifier: uuid.New(), + RequestedAt: sqlxx.NullTime(time.Now().UTC().Round(time.Second)), + ExpiresAt: sqlxx.NullTime(time.Now().UTC().Round(time.Second).Add(s.r.Config().ConsentRequestMaxAge(ctx))), RPInitiated: false, // PostLogoutRedirectURI is set to the value from config.Provider().LogoutRedirectURL() @@ -780,60 +829,59 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon return nil, err } - s.r.AuditLogger(). + s.r.Logger(). WithRequest(r). Info("User logout requires user confirmation, redirecting to Logout UI.") - http.Redirect(w, r, urlx.SetQuery(s.c.LogoutURL(ctx), url.Values{"logout_challenge": {challenge}}).String(), http.StatusFound) - return nil, errorsx.WithStack(ErrAbortOAuth2Request) + http.Redirect(w, r, urlx.SetQuery(s.r.Config().LogoutURL(ctx), url.Values{"logout_challenge": {challenge}}).String(), http.StatusFound) + return nil, errors.WithStack(ErrAbortOAuth2Request) } - claims, err := s.getIDTokenHintClaims(r.Context(), hint) + claims, err := s.getIDTokenHintClaims(ctx, hint) if err != nil { return nil, err } - mksi := mapx.KeyStringToInterface(claims) - if !claims.VerifyIssuer(s.c.IssuerURL(ctx).String(), true) { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest. + if !claims.VerifyIssuer(s.r.Config().IssuerURL(ctx).String(), true) { + return nil, errors.WithStack(fosite.ErrInvalidRequest. WithHintf( `Logout failed because issuer claim value '%s' from query parameter id_token_hint does not match with issuer value from configuration '%s'.`, - mapx.GetStringDefault(mksi, "iss", ""), - s.c.IssuerURL(ctx).String(), + mapx.GetStringDefault(claims, "iss", ""), + s.r.Config().IssuerURL(ctx).String(), ), ) } now := time.Now().UTC().Unix() if !claims.VerifyIssuedAt(now, true) { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest. + return nil, errors.WithStack(fosite.ErrInvalidRequest. WithHintf( `Logout failed because iat claim value '%.0f' from query parameter id_token_hint is before now ('%d').`, - mapx.GetFloat64Default(mksi, "iat", float64(0)), + mapx.GetFloat64Default(claims, "iat", float64(0)), now, ), ) } - hintSid := mapx.GetStringDefault(mksi, "sid", "") + hintSid := mapx.GetStringDefault(claims, "sid", "") if len(hintSid) == 0 { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter id_token_hint is missing sid claim.")) + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter id_token_hint is missing sid claim.")) } // It doesn't really make sense to use the subject value from the ID Token because it might be obfuscated. 
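// For pairwise clients the sub inside the id_token_hint is a salted pseudonym rather than
// the subject stored with the login session. As a rough illustration (assumed shape only,
// not necessarily the exact algorithm this package uses):
//
//   sub = hex(sha256(sectorIdentifier + localSubject + salt))
//
// Such a value cannot be compared against the login session's subject, which is why the
// sid claim is relied on as the stable session handle instead.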
- if hintSub := mapx.GetStringDefault(mksi, "sub", ""); len(hintSub) == 0 { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter id_token_hint is missing sub claim.")) + if hintSub := mapx.GetStringDefault(claims, "sub", ""); len(hintSub) == 0 { + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithHint("Logout failed because query parameter id_token_hint is missing sub claim.")) } // Let's find the client by cycling through the audiences. Typically, we only have one audience var cl *client.Client for _, aud := range mapx.GetStringSliceDefault( - mksi, + claims, "aud", []string{ - mapx.GetStringDefault(mksi, "aud", ""), + mapx.GetStringDefault(claims, "aud", ""), }, ) { - c, err := s.r.ClientManager().GetConcreteClient(r.Context(), aud) + c, err := s.r.ClientManager().GetConcreteClient(ctx, aud) if errors.Is(err, x.ErrNotFound) { continue } else if err != nil { @@ -844,7 +892,7 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon } if cl == nil { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest. + return nil, errors.WithStack(fosite.ErrInvalidRequest. WithHint("Logout failed because none of the listed audiences is a registered OAuth 2.0 Client.")) } @@ -854,7 +902,7 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon if w == requestedRedir { u, err := url.Parse(w) if err != nil { - return nil, errorsx.WithStack(fosite.ErrServerError.WithHintf("Unable to parse post_logout_redirect_uri '%s'.", w).WithDebug(err.Error())) + return nil, errors.WithStack(fosite.ErrServerError.WithHintf("Unable to parse post_logout_redirect_uri '%s'.", w).WithDebug(err.Error())) } f = u @@ -862,7 +910,7 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon } if f == nil { - return nil, errorsx.WithStack(fosite.ErrInvalidRequest. + return nil, errors.WithStack(fosite.ErrInvalidRequest. WithHint("Logout failed because query parameter post_logout_redirect_uri is not a whitelisted as a post_logout_redirect_uri for the client."), ) } @@ -877,18 +925,18 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon // We do not really want to verify if the user (from id token hint) has a session here because it doesn't really matter. // Instead, we'll check this when we're actually revoking the cookie! - session, err := s.r.ConsentManager().GetRememberedLoginSession(r.Context(), hintSid) + session, err := s.r.LoginManager().GetRememberedLoginSession(ctx, hintSid) if errors.Is(err, x.ErrNotFound) { // Such a session does not exist - maybe it has already been revoked? In any case, we can't do much except // leaning back and redirecting back. 
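// For orientation, the RP-initiated request validated above typically looks like the
// following. The endpoint path matches what the tests in this change exercise; all other
// values are placeholders:
//
//   GET {public_url}/oauth2/sessions/logout
//       ?id_token_hint=<JWT carrying iss, iat, sid, sub and aud>
//       &post_logout_redirect_uri=https://rp.example.com/logged-out
//       &state=<opaque value echoed back to the RP>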
http.Redirect(w, r, redir, http.StatusFound) - return nil, errorsx.WithStack(ErrAbortOAuth2Request) + return nil, errors.WithStack(ErrAbortOAuth2Request) } else if err != nil { return nil, err } challenge := uuid.New() - if err := s.r.ConsentManager().CreateLogoutRequest(r.Context(), &LogoutRequest{ + if err := s.r.LogoutManager().CreateLogoutRequest(ctx, &flow.LogoutRequest{ RequestURL: r.URL.String(), ID: challenge, SessionID: hintSid, @@ -903,14 +951,39 @@ func (s *DefaultStrategy) issueLogoutVerifier(ctx context.Context, w http.Respon return nil, err } - http.Redirect(w, r, urlx.SetQuery(s.c.LogoutURL(ctx), url.Values{"logout_challenge": {challenge}}).String(), http.StatusFound) - return nil, errorsx.WithStack(ErrAbortOAuth2Request) + http.Redirect(w, r, urlx.SetQuery(s.r.Config().LogoutURL(ctx), url.Values{"logout_challenge": {challenge}}).String(), http.StatusFound) + return nil, errors.WithStack(ErrAbortOAuth2Request) } -func (s *DefaultStrategy) completeLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*LogoutResult, error) { - verifier := r.URL.Query().Get("logout_verifier") +func (s *defaultStrategy) performBackChannelLogoutAndDeleteSession(ctx context.Context, r *http.Request, subject, sid string) error { + if err := s.executeBackChannelLogout(ctx, r, subject, sid); err != nil { + return err + } + + // We delete the session after back channel log out has worked as the session is otherwise removed + // from the store which will break the query for finding all the channels. + // + // executeBackChannelLogout only fails on system errors so not on URL errors, so this should be fine + // even if an upstream URL fails! + if session, err := s.r.LoginManager().DeleteLoginSession(ctx, sid); errors.Is(err, sqlcon.ErrNoRows) { + // This is ok (session probably already revoked), do nothing! + } else if err != nil { + return err + } else { + // revoke Kratos session asynchronously + go func(ctx context.Context, kratosSessionID string) { + innerErr := s.r.Kratos().DisableSession(ctx, kratosSessionID) + if innerErr != nil { + s.r.Logger().WithError(innerErr).WithField("sid", sid).WithField("kratos-sid", kratosSessionID).Error("Unable to revoke session in Ory Kratos.") + } + }(context.WithoutCancel(ctx), session.IdentityProviderSessionID.String()) + } - lr, err := s.r.ConsentManager().VerifyAndInvalidateLogoutRequest(r.Context(), verifier) + return nil +} + +func (s *defaultStrategy) completeLogout(ctx context.Context, w http.ResponseWriter, r *http.Request, verifier string) (*flow.LogoutResult, error) { + lr, err := s.r.LogoutManager().VerifyAndInvalidateLogoutRequest(ctx, verifier) if err != nil { return nil, err } @@ -929,7 +1002,7 @@ func (s *DefaultStrategy) completeLogout(ctx context.Context, w http.ResponseWri // OP initiated log out but no session was found. So let's just redirect back... http.Redirect(w, r, lr.PostLogoutRedirectURI, http.StatusFound) - return nil, errorsx.WithStack(ErrAbortOAuth2Request) + return nil, errors.WithStack(ErrAbortOAuth2Request) } else if err != nil { return nil, err } @@ -940,7 +1013,7 @@ func (s *DefaultStrategy) completeLogout(ctx context.Context, w http.ResponseWri // case there isn't really a lot to do because we don't want to sign out a different ID, so let's just // go to the post redirect uri without actually doing anything! 
http.Redirect(w, r, lr.PostLogoutRedirectURI, http.StatusFound) - return nil, errorsx.WithStack(ErrAbortOAuth2Request) + return nil, errors.WithStack(ErrAbortOAuth2Request) } } @@ -949,86 +1022,261 @@ func (s *DefaultStrategy) completeLogout(ctx context.Context, w http.ResponseWri return nil, err } - _, _ = s.revokeAuthenticationCookie(w, r, store) // Cookie removal is optional + _, _ = s.revokeAuthenticationCookie(ctx, w, r, store) // Cookie removal is optional - urls, err := s.generateFrontChannelLogoutURLs(r.Context(), lr.Subject, lr.SessionID) + urls, err := s.generateFrontChannelLogoutURLs(ctx, lr.Subject, lr.SessionID) if err != nil { return nil, err } - if err := s.executeBackChannelLogout(r.Context(), r, lr.Subject, lr.SessionID); err != nil { - return nil, err - } - - // We delete the session after back channel log out has worked as the session is otherwise removed - // from the store which will break the query for finding all the channels. - // - // executeBackChannelLogout only fails on system errors so not on URL errors, so this should be fine - // even if an upstream URL fails! - if err := s.r.ConsentManager().DeleteLoginSession(r.Context(), lr.SessionID); errors.Is(err, sqlcon.ErrNoRows) { - // This is ok (session probably already revoked), do nothing! - } else if err != nil { + if err := s.performBackChannelLogoutAndDeleteSession(ctx, r, lr.Subject, lr.SessionID); err != nil { return nil, err } - s.r.AuditLogger(). + s.r.Logger(). WithRequest(r). WithField("subject", lr.Subject). Info("User logout completed!") - return &LogoutResult{ + return &flow.LogoutResult{ RedirectTo: lr.PostLogoutRedirectURI, FrontChannelLogoutURLs: urls, }, nil } -func (s *DefaultStrategy) HandleOpenIDConnectLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*LogoutResult, error) { +func (s *defaultStrategy) HandleOpenIDConnectLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*flow.LogoutResult, error) { verifier := r.URL.Query().Get("logout_verifier") if verifier == "" { return s.issueLogoutVerifier(ctx, w, r) } + return s.completeLogout(ctx, w, r, verifier) +} + +func (s *defaultStrategy) HandleHeadlessLogout(ctx context.Context, _ http.ResponseWriter, r *http.Request, sid string) error { + loginSession, lsErr := s.r.LoginManager().GetRememberedLoginSession(ctx, sid) + + if errors.Is(lsErr, x.ErrNotFound) { + // This is ok (session probably already revoked), do nothing! + // Not triggering the back-channel logout because the subject is not available + // See https://github.com/ory/hydra/pull/3450#discussion_r1127798485 + return nil + } else if lsErr != nil { + return lsErr + } - return s.completeLogout(ctx, w, r) + if err := s.performBackChannelLogoutAndDeleteSession(ctx, r, loginSession.Subject, sid); err != nil { + return err + } + + s.r.Logger(). + WithRequest(r). + WithField("subject", loginSession.Subject). + WithField("sid", sid). 
+ Info("User logout completed via headless flow!") + + return nil } -func (s *DefaultStrategy) HandleOAuth2AuthorizationRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, req fosite.AuthorizeRequester) (*AcceptOAuth2ConsentRequest, error) { - authenticationVerifier := strings.TrimSpace(req.GetRequestForm().Get("login_verifier")) +func (s *defaultStrategy) HandleOAuth2AuthorizationRequest( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, + req fosite.AuthorizeRequester, +) (_ *flow.Flow, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.HandleOAuth2AuthorizationRequest") + defer otelx.End(span, &err) + + loginVerifier := strings.TrimSpace(req.GetRequestForm().Get("login_verifier")) consentVerifier := strings.TrimSpace(req.GetRequestForm().Get("consent_verifier")) - if authenticationVerifier == "" && consentVerifier == "" { - // ok, we need to process this request and redirect to auth endpoint - return nil, s.requestAuthentication(ctx, w, r, req) - } else if authenticationVerifier != "" { - authSession, err := s.verifyAuthentication(w, r, req, authenticationVerifier) + if loginVerifier == "" && consentVerifier == "" { + // ok, we need to process this request and redirect to the original endpoint + return nil, s.requestAuthentication(ctx, w, r, req, nil) + } else if loginVerifier != "" { + f, err := s.verifyAuthentication(ctx, w, r, req, loginVerifier) if err != nil { return nil, err } // ok, we need to process this request and redirect to auth endpoint - return nil, s.requestConsent(ctx, w, r, req, authSession) + return f, s.requestConsent(ctx, w, r, req, f) } - consentSession, err := s.verifyConsent(ctx, w, r, req, consentVerifier) + f, err := s.verifyConsent(ctx, w, r, consentVerifier) if err != nil { return nil, err } - return consentSession, nil + return f, nil } -func (s *DefaultStrategy) ObfuscateSubjectIdentifier(ctx context.Context, cl fosite.Client, subject, forcedIdentifier string) (string, error) { +// HandleOAuth2DeviceAuthorizationRequest handles the device authorization flow +func (s *defaultStrategy) HandleOAuth2DeviceAuthorizationRequest( + ctx context.Context, + w http.ResponseWriter, + r *http.Request, +) (_ *flow.Flow, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.HandleOAuth2DeviceAuthorizationRequest") + defer otelx.End(span, &err) + + // This handler has the following validation states: + // + // 1. The flow is initiated (no verifiers) -> we request a device verifier (can only be achieved by solving the device challenge) + // 2. Device verifier is given -> we request login verifier (can only be achieved by solving the login challenge) + // 3. Login verifier is given -> we request consent verifier (can only be achieved by solving the consent challenge) + // 4. Consent verifier is given -> done. + + deviceVerifier := strings.TrimSpace(r.URL.Query().Get("device_verifier")) + loginVerifier := strings.TrimSpace(r.URL.Query().Get("login_verifier")) + consentVerifier := strings.TrimSpace(r.URL.Query().Get("consent_verifier")) + + ar := fosite.NewAuthorizeRequest() + + var deviceFlow *flow.Flow + if deviceVerifier == "" && loginVerifier == "" && consentVerifier == "" { + // No verifiers are set, let's start by requesting the device verifier first. 
+ return nil, s.requestDevice(ctx, w, r) + } else if deviceVerifier != "" && loginVerifier == "" && consentVerifier == "" { + // Device verifier is set, but login and consent are not. So we need to verify the device. + var err error + deviceFlow, err = s.verifyDevice(ctx, w, r, deviceVerifier) + if err != nil { + return nil, err + } + + ar.RequestedScope = fosite.Arguments(deviceFlow.RequestedScope) + ar.RequestedAudience = fosite.Arguments(deviceFlow.RequestedAudience) + } + + // Validate client_id + clientID := r.URL.Query().Get("client_id") + if clientID == "" { + return nil, errors.WithStack(fosite.ErrInvalidClient.WithHintf(`Query parameter 'client_id' is missing.`)) + } + c, err := s.r.ClientManager().GetConcreteClient(ctx, clientID) + if errors.Is(err, x.ErrNotFound) { + return nil, errors.WithStack(fosite.ErrInvalidClient.WithWrap(err).WithHintf(`Client does not exist`)) + } else if err != nil { + return nil, err + } + + // Fake an authorization request to instantiate the flow. + ar.Client = c + ar.Form = r.Form + + if loginVerifier == "" && consentVerifier == "" { + // Here we end up if the device has been verified, but login and verification are still missing. + // Let's request authentication. + return nil, s.requestAuthentication(ctx, w, r, ar, deviceFlow) + } else if loginVerifier != "" { + // Login verification was given, let's verify! + f, err := s.verifyAuthentication(ctx, w, r, ar, loginVerifier) + if err != nil { + return nil, err + } + + // ok, we need to process this request and redirect to consent endpoint + return f, s.requestConsent(ctx, w, r, ar, f) + } + + return s.verifyConsent(ctx, w, r, consentVerifier) +} + +func (s *defaultStrategy) ObfuscateSubjectIdentifier(ctx context.Context, cl fosite.Client, subject, forcedIdentifier string) (string, error) { if c, ok := cl.(*client.Client); ok && c.SubjectType == "pairwise" { - algorithm, ok := s.r.SubjectIdentifierAlgorithm(ctx)[c.SubjectType] - if !ok { - return "", errorsx.WithStack(fosite.ErrInvalidRequest.WithHintf(`Subject Identifier Algorithm '%s' was requested by OAuth 2.0 Client '%s' but is not configured.`, c.SubjectType, c.GetID())) + if !slices.Contains(s.r.Config().SubjectTypesSupported(ctx), "pairwise") { + return "", errors.WithStack(fosite.ErrInvalidRequest.WithHintf(`Subject Identifier Algorithm '%s' was requested by OAuth 2.0 Client '%s' but is not configured.`, c.SubjectType, c.GetID())) } if len(forcedIdentifier) > 0 { return forcedIdentifier, nil } - return algorithm.Obfuscate(subject, c) + salt := s.r.Config().SubjectIdentifierAlgorithmSalt(ctx) + return pairwiseObfuscate(salt, subject, c) } else if !ok { return "", errors.New("Unable to type assert OAuth 2.0 Client to *client.Client") } return subject, nil } + +func (s *defaultStrategy) requestDevice(ctx context.Context, w http.ResponseWriter, r *http.Request) error { + return s.forwardDeviceRequest(ctx, w, r) +} + +func (s *defaultStrategy) forwardDeviceRequest(ctx context.Context, w http.ResponseWriter, r *http.Request) error { + // Set up csrf/challenge/verifier values + challenge := strings.ReplaceAll(uuid.New(), "-", "") + csrf := strings.ReplaceAll(uuid.New(), "-", "") + + // Generate the request URL + iu := s.getDeviceVerificationPath(ctx) + // We don't want the user_code persisted in the database + q := r.URL.Query() + q = caseInsensitiveFilterParam(q, "user_code") + iu.RawQuery = q.Encode() + + f := &flow.Flow{ + DeviceChallengeID: sqlxx.NullString(challenge), + RequestURL: iu.String(), + DeviceCSRF: sqlxx.NullString(csrf), + RequestedAt: 
time.Now().Truncate(time.Second).UTC(), + State: flow.DeviceFlowStateUnused, + NID: s.r.Networker().NetworkID(ctx), + } + + encodedFlow, err := f.ToDeviceChallenge(ctx, s.r) + if err != nil { + return err + } + + store, err := s.r.CookieStore(ctx) + if err != nil { + return err + } + + CookieNameDeviceCSRF := s.r.Config().CookieNameDeviceCSRF(ctx) + if err := setCSRFCookie(ctx, w, r, s.r.Config(), store, CookieNameDeviceCSRF, csrf, s.r.Config().ConsentRequestMaxAge(ctx)); err != nil { + return err + } + + query := url.Values{"device_challenge": {encodedFlow}} + if r.URL.Query().Has("user_code") { + query.Add("user_code", r.URL.Query().Get("user_code")) + } + + http.Redirect( + w, + r, + urlx.SetQuery(s.r.Config().DeviceVerificationURL(ctx), query).String(), + http.StatusFound, + ) + + // generate the verifier + return errors.WithStack(ErrAbortOAuth2Request) +} + +func (s *defaultStrategy) verifyDevice(ctx context.Context, _ http.ResponseWriter, r *http.Request, verifier string) (_ *flow.Flow, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, "DefaultStrategy.verifyDevice") + defer otelx.End(span, &err) + + f, err := flow.DecodeAndInvalidateDeviceVerifier(ctx, s.r, verifier) + if err != nil { + return nil, err + } + + store, err := s.r.CookieStore(ctx) + if err != nil { + return nil, err + } + + cookieNameDeviceCSRF := s.r.Config().CookieNameDeviceCSRF(ctx) + if err := validateCSRFCookie(ctx, r, s.r.Config(), store, cookieNameDeviceCSRF, f.DeviceCSRF.String()); err != nil { + return nil, err + } + + return f, nil +} + +func (s *defaultStrategy) getDeviceVerificationPath(ctx context.Context) *url.URL { + return urlx.AppendPaths(s.r.Config().PublicURL(ctx), deviceVerificationPath) +} diff --git a/consent/strategy_default_test.go b/consent/strategy_default_test.go index 18475064b3d..ceb01218c45 100644 --- a/consent/strategy_default_test.go +++ b/consent/strategy_default_test.go @@ -8,36 +8,49 @@ import ( "net/http" "net/http/cookiejar" "net/http/httptest" + "net/url" + "strings" "testing" - hydra "github.com/ory/hydra-client-go/v2" - + "github.com/gofrs/uuid" "github.com/stretchr/testify/require" - - jwtgo "github.com/ory/fosite/token/jwt" - - "github.com/ory/fosite/token/jwt" - "github.com/ory/x/urlx" - - "net/url" - - "github.com/google/uuid" "github.com/tidwall/gjson" - "github.com/ory/hydra/client" - . "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/internal/testhelpers" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/client" + . "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" "github.com/ory/x/ioutilx" + "github.com/ory/x/urlx" ) +func checkAndAcceptDeviceHandler(t *testing.T, apiClient *hydra.APIClient) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + userCode := r.URL.Query().Get("user_code") + payload := hydra.AcceptDeviceUserCodeRequest{ + UserCode: &userCode, + } + + v, _, err := apiClient.OAuth2API.AcceptUserCodeRequest(context.Background()). + DeviceChallenge(r.URL.Query().Get("device_challenge")). 
+ AcceptDeviceUserCodeRequest(payload). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + } +} + func checkAndAcceptLoginHandler(t *testing.T, apiClient *hydra.APIClient, subject string, cb func(*testing.T, *hydra.OAuth2LoginRequest, error) hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - res, _, err := apiClient.OAuth2Api.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() + res, _, err := apiClient.OAuth2API.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() payload := cb(t, res, err) payload.Subject = subject - v, _, err := apiClient.OAuth2Api.AcceptOAuth2LoginRequest(context.Background()). + v, _, err := apiClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). LoginChallenge(r.URL.Query().Get("login_challenge")). AcceptOAuth2LoginRequest(payload). Execute() @@ -49,11 +62,11 @@ func checkAndAcceptLoginHandler(t *testing.T, apiClient *hydra.APIClient, subjec func checkAndAcceptConsentHandler(t *testing.T, apiClient *hydra.APIClient, cb func(*testing.T, *hydra.OAuth2ConsentRequest, error) hydra.AcceptOAuth2ConsentRequest) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - res, _, err := apiClient.OAuth2Api.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() + res, _, err := apiClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() payload := cb(t, res, err) - v, _, err := apiClient.OAuth2Api.AcceptOAuth2ConsentRequest(context.Background()). - ConsentChallenge(r.URL.Query().Get("consent_challenge")). + v, _, err := apiClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()). + ConsentChallenge(res.Challenge). AcceptOAuth2ConsentRequest(payload). 
Execute() require.NoError(t, err) @@ -62,33 +75,74 @@ func checkAndAcceptConsentHandler(t *testing.T, apiClient *hydra.APIClient, cb f } } -func makeOAuth2Request(t *testing.T, reg driver.Registry, hc *http.Client, oc *client.Client, values url.Values) (gjson.Result, *http.Response) { +func makeOAuth2Request(t *testing.T, reg *driver.RegistrySQL, hc *http.Client, oc *client.Client, values url.Values) (gjson.Result, *http.Response) { ctx := context.Background() if hc == nil { hc = testhelpers.NewEmptyJarClient(t) } values.Add("response_type", "code") - values.Add("state", uuid.New().String()) + values.Add("state", uuid.Must(uuid.NewV4()).String()) values.Add("client_id", oc.GetID()) + values.Add("redirect_uri", oc.GetRedirectURIs()[0]) res, err := hc.Get(urlx.CopyWithQuery(reg.Config().OAuth2AuthURL(ctx), values).String()) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck + + return gjson.ParseBytes(ioutilx.MustReadAll(res.Body)), res +} + +func makeOAuth2DeviceAuthRequest(t *testing.T, reg *driver.RegistrySQL, hc *http.Client, oc *client.Client, scope string) (gjson.Result, *http.Response) { + ctx := context.Background() + if hc == nil { + hc = testhelpers.NewEmptyJarClient(t) + } + + data := url.Values{} + data.Set("scope", scope) + data.Set("client_id", oc.GetID()) + req, err := http.NewRequest( + http.MethodPost, + reg.Config().OAuth2DeviceAuthorisationURL(ctx).String(), + strings.NewReader(data.Encode()), + ) + require.NoError(t, err) + req.SetBasicAuth(oc.GetID(), oc.Secret) + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + + res, err := hc.Do(req) + require.NoError(t, err) + + defer res.Body.Close() //nolint:errcheck + + return gjson.ParseBytes(ioutilx.MustReadAll(res.Body)), res +} + +func makeOAuth2DeviceVerificationRequest(t *testing.T, reg *driver.RegistrySQL, hc *http.Client, oc *client.Client, values url.Values) (gjson.Result, *http.Response) { + ctx := context.Background() + if hc == nil { + hc = testhelpers.NewEmptyJarClient(t) + } + + values.Add("client_id", oc.GetID()) + res, err := hc.Get(urlx.CopyWithQuery(urlx.AppendPaths(reg.Config().PublicURL(ctx), oauth2.DeviceVerificationPath), values).String()) + require.NoError(t, err) + defer res.Body.Close() //nolint:errcheck return gjson.ParseBytes(ioutilx.MustReadAll(res.Body)), res } -func createClient(t *testing.T, reg driver.Registry, c *client.Client) *client.Client { - secret := uuid.New().String() +func createClient(t *testing.T, reg *driver.RegistrySQL, c *client.Client) *client.Client { + secret := uuid.Must(uuid.NewV4()).String() c.Secret = secret c.Scope = "openid offline" - c.LegacyClientID = uuid.New().String() + c.ID = uuid.Must(uuid.NewV4()).String() require.NoError(t, reg.ClientManager().CreateClient(context.Background(), c)) c.Secret = secret return c } -func newAuthCookieJar(t *testing.T, reg driver.Registry, u, sessionID string) http.CookieJar { +func newAuthCookieJar(t *testing.T, reg *driver.RegistrySQL, u, sessionID string) http.CookieJar { ctx := context.Background() cj, err := cookiejar.New(&cookiejar.Options{}) require.NoError(t, err) @@ -108,26 +162,26 @@ func newAuthCookieJar(t *testing.T, reg driver.Registry, u, sessionID string) ht return cj } -func genIDToken(t *testing.T, reg driver.Registry, c jwtgo.MapClaims) string { - r, _, err := reg.OpenIDJWTStrategy().Generate(context.Background(), c, jwt.NewHeaders()) +func genIDToken(t *testing.T, reg *driver.RegistrySQL, c jwt.MapClaims) string { + r, _, err := 
reg.OpenIDJWTSigner().Generate(context.Background(), c, jwt.NewHeaders()) require.NoError(t, err) return r } func checkAndDuplicateAcceptLoginHandler(t *testing.T, apiClient *hydra.APIClient, subject string, cb func(*testing.T, *hydra.OAuth2LoginRequest, error) hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - res, _, err := apiClient.OAuth2Api.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() + res, _, err := apiClient.OAuth2API.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() payload := cb(t, res, err) payload.Subject = subject - v, _, err := apiClient.OAuth2Api.AcceptOAuth2LoginRequest(context.Background()). + v, _, err := apiClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). LoginChallenge(r.URL.Query().Get("login_challenge")). AcceptOAuth2LoginRequest(payload). Execute() require.NoError(t, err) require.NotEmpty(t, v.RedirectTo) - v2, _, err := apiClient.OAuth2Api.AcceptOAuth2LoginRequest(context.Background()). + v2, _, err := apiClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). LoginChallenge(r.URL.Query().Get("login_challenge")). AcceptOAuth2LoginRequest(payload). Execute() @@ -139,22 +193,22 @@ func checkAndDuplicateAcceptLoginHandler(t *testing.T, apiClient *hydra.APIClien func checkAndDuplicateAcceptConsentHandler(t *testing.T, apiClient *hydra.APIClient, cb func(*testing.T, *hydra.OAuth2ConsentRequest, error) hydra.AcceptOAuth2ConsentRequest) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - res, _, err := apiClient.OAuth2Api.GetOAuth2ConsentRequest(context.Background()). + res, _, err := apiClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()). ConsentChallenge(r.URL.Query().Get("consent_challenge")). Execute() payload := cb(t, res, err) - v, _, err := apiClient.OAuth2Api.AcceptOAuth2ConsentRequest(context.Background()). + v, _, err := apiClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()). ConsentChallenge(r.URL.Query().Get("consent_challenge")). AcceptOAuth2ConsentRequest(payload). Execute() require.NoError(t, err) require.NotEmpty(t, v.RedirectTo) - res2, _, err := apiClient.OAuth2Api.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() + res2, _, err := apiClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() payload2 := cb(t, res2, err) - v2, _, err := apiClient.OAuth2Api.AcceptOAuth2ConsentRequest(context.Background()). + v2, _, err := apiClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()). ConsentChallenge(r.URL.Query().Get("consent_challenge")). AcceptOAuth2ConsentRequest(payload2). 
Execute() diff --git a/consent/strategy_logout_test.go b/consent/strategy_logout_test.go index fb98b4727aa..eccee3aecec 100644 --- a/consent/strategy_logout_test.go +++ b/consent/strategy_logout_test.go @@ -16,28 +16,32 @@ import ( "testing" "time" - "github.com/ory/x/pointerx" + "github.com/ory/hydra/v2/driver" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/tidwall/gjson" - jwtgo "github.com/ory/fosite/token/jwt" - hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/x/contextx" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver/config" + jwtgo "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/x/configx" "github.com/ory/x/ioutilx" + "github.com/ory/x/pointerx" ) func TestLogoutFlows(t *testing.T) { ctx := context.Background() - reg := internal.NewMockedRegistry(t, &contextx.Default{}) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") - reg.Config().MustSet(ctx, config.KeyConsentRequestMaxAge, time.Hour) + fakeKratos := kratos.NewFake() + reg := testhelpers.NewRegistryMemory(t, + driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyAccessTokenStrategy: "opaque", + config.KeyConsentRequestMaxAge: time.Hour, + })), + driver.WithKratosClient(fakeKratos)) defaultRedirectedMessage := "redirected to default server" postLogoutCallback := func(w http.ResponseWriter, r *http.Request) { @@ -72,7 +76,7 @@ func TestLogoutFlows(t *testing.T) { require.NoError(t, r.ParseForm()) lt := r.PostFormValue("logout_token") assert.NotEmpty(t, lt) - token, err := reg.OpenIDJWTStrategy().Decode(r.Context(), lt) + token, err := reg.OpenIDJWTSigner().Decode(r.Context(), lt) require.NoError(t, err) var b bytes.Buffer @@ -95,10 +99,33 @@ func TestLogoutFlows(t *testing.T) { resp, err = hc.PostForm(publicTS.URL+"/oauth2/sessions/logout", values) } require.NoError(t, err) - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck return string(ioutilx.MustReadAll(resp.Body)), resp } + makeHeadlessLogoutRequest := func(t *testing.T, hc *http.Client, values url.Values) (body string, resp *http.Response) { + var err error + req, err := http.NewRequest(http.MethodDelete, adminTS.URL+"/admin/oauth2/auth/sessions/login?"+values.Encode(), nil) + require.NoError(t, err) + + resp, err = hc.Do(req) + + require.NoError(t, err) + defer resp.Body.Close() //nolint:errcheck + return string(ioutilx.MustReadAll(resp.Body)), resp + } + + logoutViaHeadlessAndExpectNoContent := func(t *testing.T, browser *http.Client, values url.Values) { + _, res := makeHeadlessLogoutRequest(t, browser, values) + assert.EqualValues(t, http.StatusNoContent, res.StatusCode) + } + + logoutViaHeadlessAndExpectError := func(t *testing.T, browser *http.Client, values url.Values, expectedErrorMessage string) { + body, res := makeHeadlessLogoutRequest(t, browser, values) + assert.EqualValues(t, http.StatusBadRequest, res.StatusCode) + assert.Contains(t, body, expectedErrorMessage) + } + logoutAndExpectErrorPage := func(t 
*testing.T, browser *http.Client, method string, values url.Values, expectedErrorMessage string) { body, res := makeLogoutRequest(t, browser, method, values) assert.EqualValues(t, http.StatusInternalServerError, res.StatusCode) @@ -131,18 +158,20 @@ func TestLogoutFlows(t *testing.T) { return &wg } - checkAndAcceptLogout := func(t *testing.T, wg *sync.WaitGroup, cb func(*testing.T, *hydra.OAuth2LogoutRequest, error)) { + setupCheckAndAcceptLogoutHandler := func(t *testing.T, wg *sync.WaitGroup, cb func(*testing.T, *hydra.OAuth2LogoutRequest, error)) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if wg != nil { defer wg.Done() } - res, _, err := adminApi.OAuth2Api.GetOAuth2LogoutRequest(ctx).LogoutChallenge(r.URL.Query().Get("logout_challenge")).Execute() + res, _, err := adminApi.OAuth2API.GetOAuth2LogoutRequest(ctx).LogoutChallenge(r.URL.Query().Get("logout_challenge")).Execute() if cb != nil { cb(t, res, err) + } else { + require.NoError(t, err) } - v, _, err := adminApi.OAuth2Api.AcceptOAuth2LogoutRequest(ctx).LogoutChallenge(r.URL.Query().Get("logout_challenge")).Execute() + v, _, err := adminApi.OAuth2API.AcceptOAuth2LogoutRequest(ctx).LogoutChallenge(r.URL.Query().Get("logout_challenge")).Execute() require.NoError(t, err) require.NotEmpty(t, v.RedirectTo) http.Redirect(w, r, v.RedirectTo, http.StatusFound) @@ -153,27 +182,36 @@ func TestLogoutFlows(t *testing.T) { reg.Config().MustSet(ctx, config.KeyLogoutURL, server.URL) } - acceptLoginAsAndWatchSid := func(t *testing.T, subject string, sid chan string) { + acceptLoginAsAndWatchSidForConsumers := func(t *testing.T, subject string, sid chan string, remember bool, numSidConsumers int) { testhelpers.NewLoginConsentUI(t, reg.Config(), checkAndAcceptLoginHandler(t, adminApi, subject, func(t *testing.T, res *hydra.OAuth2LoginRequest, err error) hydra.AcceptOAuth2LoginRequest { require.NoError(t, err) - //res.Payload.SessionID - return hydra.AcceptOAuth2LoginRequest{Remember: pointerx.Bool(true)} + // res.Payload.SessionID + return hydra.AcceptOAuth2LoginRequest{ + Remember: pointerx.Ptr(true), + IdentityProviderSessionId: pointerx.Ptr(kratos.FakeSessionID), + } }), checkAndAcceptConsentHandler(t, adminApi, func(t *testing.T, res *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { require.NoError(t, err) if sid != nil { - go func() { + for range numSidConsumers { sid <- *res.LoginSessionId - }() + } } - return hydra.AcceptOAuth2ConsentRequest{Remember: pointerx.Bool(true)} + return hydra.AcceptOAuth2ConsentRequest{Remember: pointerx.Bool(remember)} })) } + acceptLoginAsAndWatchSid := func(t *testing.T, subject string) <-chan string { + sid := make(chan string, 1) + acceptLoginAsAndWatchSidForConsumers(t, subject, sid, true, 1) + return sid + } + acceptLoginAs := func(t *testing.T, subject string) { - acceptLoginAsAndWatchSid(t, subject, nil) + acceptLoginAsAndWatchSidForConsumers(t, subject, nil, true, 0) } subject := "aeneas-rekkas" @@ -207,7 +245,7 @@ func TestLogoutFlows(t *testing.T) { acceptLoginAs(t, subject) wg := newWg(2) - checkAndAcceptLogout(t, wg, func(t *testing.T, res *hydra.OAuth2LogoutRequest, err error) { + setupCheckAndAcceptLogoutHandler(t, wg, func(t *testing.T, res *hydra.OAuth2LogoutRequest, err error) { require.NoError(t, err) assert.EqualValues(t, subject, *res.Subject) assert.NotEmpty(t, subject, res.Sid) @@ -227,7 +265,7 @@ func TestLogoutFlows(t *testing.T) { // run once to invalidate session wg := newWg(1) - checkAndAcceptLogout(t, wg, nil) + 
setupCheckAndAcceptLogoutHandler(t, wg, nil) logoutAndExpectPostLogoutPage(t, browser, http.MethodGet, url.Values{}, defaultRedirectedMessage) t.Run("method=get", testExpectPostLogoutPage(browser, http.MethodGet, url.Values{}, defaultRedirectedMessage)) @@ -236,12 +274,58 @@ func TestLogoutFlows(t *testing.T) { wg.Wait() // we want to ensure that logout ui was called exactly once }) + t.Run("case=should handle double-submit of the logout challenge gracefully", func(t *testing.T) { + acceptLoginAs(t, subject) + browser := createBrowserWithSession(t, createSampleClient(t)) + + var logoutReq *hydra.OAuth2LogoutRequest + setupCheckAndAcceptLogoutHandler(t, nil, func(t *testing.T, req *hydra.OAuth2LogoutRequest, err error) { + require.NoError(t, err) + logoutReq = req + }) + + // run once to log out + logoutAndExpectPostLogoutPage(t, browser, http.MethodGet, url.Values{}, defaultRedirectedMessage) + + // run again to ensure that the logout challenge is invalid + _, _, err := adminApi.OAuth2API.GetOAuth2LogoutRequest(ctx).LogoutChallenge(logoutReq.GetChallenge()).Execute() + assert.Error(t, err) + + v, _, err := adminApi.OAuth2API.AcceptOAuth2LogoutRequest(ctx).LogoutChallenge(logoutReq.GetChallenge()).Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + + res, err := browser.Get(v.RedirectTo) + require.NoError(t, err) + assert.Equal(t, 200, res.StatusCode) + }) + + t.Run("case=should handle an invalid logout challenge", func(t *testing.T) { + _, res, err := adminApi.OAuth2API.GetOAuth2LogoutRequest(ctx).LogoutChallenge("some-invalid-challenge").Execute() + assert.Error(t, err) + assert.Equal(t, http.StatusNotFound, res.StatusCode) + + _, res, err = adminApi.OAuth2API.AcceptOAuth2LogoutRequest(ctx).LogoutChallenge("some-invalid-challenge").Execute() + assert.Error(t, err) + assert.Equal(t, http.StatusNotFound, res.StatusCode) + + res, err = adminApi.OAuth2API.RejectOAuth2LogoutRequest(ctx).LogoutChallenge("some-invalid-challenge").Execute() + assert.Error(t, err) + assert.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("case=should handle an invalid logout verifier", func(t *testing.T) { + setupCheckAndAcceptLogoutHandler(t, nil, nil) + logoutAndExpectErrorPage(t, http.DefaultClient, http.MethodGet, url.Values{ + "logout_verifier": {"an-invalid-verifier"}, + }, "Description: Unable to locate the requested resource") + }) + t.Run("case=should execute backchannel logout if issued without rp-involvement", func(t *testing.T) { - sid := make(chan string) - acceptLoginAsAndWatchSid(t, subject, sid) + sid := acceptLoginAsAndWatchSid(t, subject) logoutWg := newWg(2) - checkAndAcceptLogout(t, logoutWg, nil) + setupCheckAndAcceptLogoutHandler(t, logoutWg, nil) backChannelWG := newWg(2) c := createClientWithBackchannelLogout(t, backChannelWG, func(t *testing.T, logoutToken gjson.Result) { @@ -263,7 +347,7 @@ func TestLogoutFlows(t *testing.T) { t.Run("case=should fail several flows when id_token_hint is invalid", func(t *testing.T) { t.Run("case=should error when rp-flow without valid id token", func(t *testing.T) { acceptLoginAs(t, "aeneas-rekkas") - checkAndAcceptLogout(t, nil, nil) + setupCheckAndAcceptLogoutHandler(t, nil, nil) expectedMessage := "compact JWS format must have three parts" browser := createBrowserWithSession(t, createSampleClient(t)) @@ -278,14 +362,14 @@ func TestLogoutFlows(t *testing.T) { expectedErrMessage string }{ { - d: "should fail rp-inititated flow because id token hint is missing issuer", + d: "should fail rp-initiated flow because id token 
hint is missing issuer", claims: jwtgo.MapClaims{ "iat": time.Now().Add(-time.Hour * 2).Unix(), }, expectedErrMessage: "Logout failed because issuer claim value '' from query parameter id_token_hint does not match with issuer value from configuration", }, { - d: "should fail rp-inititated flow because id token hint is using wrong issuer", + d: "should fail rp-initiated flow because id token hint is using wrong issuer", claims: jwtgo.MapClaims{ "iss": "some-issuer", "iat": time.Now().Add(-time.Hour * 2).Unix(), @@ -293,7 +377,7 @@ func TestLogoutFlows(t *testing.T) { expectedErrMessage: "Logout failed because issuer claim value 'some-issuer' from query parameter id_token_hint does not match with issuer value from configuration", }, { - d: "should fail rp-inititated flow because iat is in the future", + d: "should fail rp-initiated flow because iat is in the future", claims: jwtgo.MapClaims{ "iss": reg.Config().IssuerURL(ctx).String(), "iat": time.Now().Add(time.Hour * 2).Unix(), @@ -304,12 +388,11 @@ func TestLogoutFlows(t *testing.T) { t.Run("case="+tc.d, func(t *testing.T) { c := createSampleClient(t) - sid := make(chan string) - acceptLoginAsAndWatchSid(t, subject, sid) + sid := acceptLoginAsAndWatchSid(t, subject) browser := createBrowserWithSession(t, c) wg := newWg(1) - checkAndAcceptLogout(t, wg, nil) + setupCheckAndAcceptLogoutHandler(t, wg, nil) tc.claims["sub"] = subject tc.claims["sid"] = <-sid tc.claims["aud"] = c.GetID() @@ -351,10 +434,9 @@ func TestLogoutFlows(t *testing.T) { c := createSampleClient(t) run := func(method string, claims jwtgo.MapClaims) func(t *testing.T) { return func(t *testing.T) { - sid := make(chan string) - acceptLoginAsAndWatchSid(t, subject, sid) + sid := acceptLoginAsAndWatchSid(t, subject) - checkAndAcceptLogout(t, nil, nil) + setupCheckAndAcceptLogoutHandler(t, nil, nil) browser := createBrowserWithSession(t, c) sendClaims := jwtgo.MapClaims{ @@ -383,25 +465,25 @@ func TestLogoutFlows(t *testing.T) { } t.Run("case=should pass even if expiry is in the past", func(t *testing.T) { - // formerly: should pass rp-inititated even when expiry is in the past + // formerly: should pass rp-initiated even when expiry is in the past claims := jwtgo.MapClaims{"exp": time.Now().Add(-time.Hour).Unix()} t.Run("method=GET", run("GET", claims)) t.Run("method=POST", run("POST", claims)) }) t.Run("case=should pass even if audience is an array not a string", func(t *testing.T) { - // formerly: should pass rp-inititated flow" + // formerly: should pass rp-initiated flow" claims := jwtgo.MapClaims{"aud": []string{c.GetID()}} t.Run("method=GET", run("GET", claims)) t.Run("method=POST", run("POST", claims)) }) }) - t.Run("case=should pass rp-inititated flow without any action because SID is unknown", func(t *testing.T) { + t.Run("case=should pass rp-initiated flow without any action because SID is unknown", func(t *testing.T) { c := createSampleClient(t) - acceptLoginAsAndWatchSid(t, subject, nil) + acceptLoginAs(t, subject) - checkAndAcceptLogout(t, nil, func(t *testing.T, res *hydra.OAuth2LogoutRequest, err error) { + setupCheckAndAcceptLogoutHandler(t, nil, func(t *testing.T, res *hydra.OAuth2LogoutRequest, err error) { t.Fatalf("Logout should not have been called") }) browser := createBrowserWithSession(t, c) @@ -422,10 +504,9 @@ func TestLogoutFlows(t *testing.T) { t.Run("case=should not append a state param if no state was passed to logout server", func(t *testing.T) { c := createSampleClient(t) - sid := make(chan string) - acceptLoginAsAndWatchSid(t, subject, sid) + 
sid := acceptLoginAsAndWatchSid(t, subject) - checkAndAcceptLogout(t, nil, nil) + setupCheckAndAcceptLogoutHandler(t, nil, nil) browser := createBrowserWithSession(t, c) body, res := makeLogoutRequest(t, browser, "GET", url.Values{ @@ -447,12 +528,13 @@ func TestLogoutFlows(t *testing.T) { }) t.Run("case=should return to default post logout because session was revoked in browser context", func(t *testing.T) { + fakeKratos.Reset() c := createSampleClient(t) - sid := make(chan string) - acceptLoginAsAndWatchSid(t, subject, sid) + sid := acceptLoginAsAndWatchSid(t, subject) - wg := newWg(2) - checkAndAcceptLogout(t, wg, nil) + wg := newWg(3) + fakeKratos.DisableSessionCB = wg.Done + setupCheckAndAcceptLogoutHandler(t, wg, nil) browser := createBrowserWithSession(t, c) // Use another browser (without session cookie) to make the logout request: @@ -476,11 +558,11 @@ func TestLogoutFlows(t *testing.T) { defer wg.Done() require.NoError(t, err) assert.False(t, res.Skip) - return hydra.AcceptOAuth2LoginRequest{Remember: pointerx.Bool(true)} + return hydra.AcceptOAuth2LoginRequest{Remember: pointerx.Ptr(true)} }), checkAndAcceptConsentHandler(t, adminApi, func(t *testing.T, res *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { require.NoError(t, err) - return hydra.AcceptOAuth2ConsentRequest{Remember: pointerx.Bool(true)} + return hydra.AcceptOAuth2ConsentRequest{Remember: pointerx.Ptr(true)} })) // Make an oauth 2 request to trigger the login check. @@ -489,5 +571,58 @@ func TestLogoutFlows(t *testing.T) { assert.NotEmpty(t, res.Request.URL.Query().Get("code")) wg.Wait() + + assert.True(t, fakeKratos.DisableSessionWasCalled) + assert.Equal(t, fakeKratos.LastDisabledSession, kratos.FakeSessionID) + }) + + t.Run("case=should execute backchannel logout in headless flow with sid", func(t *testing.T) { + fakeKratos.Reset() + numSidConsumers := 2 + sid := make(chan string, numSidConsumers) + acceptLoginAsAndWatchSidForConsumers(t, subject, sid, true, numSidConsumers) + + backChannelWG := newWg(2) + fakeKratos.DisableSessionCB = backChannelWG.Done + + c := createClientWithBackchannelLogout(t, backChannelWG, func(t *testing.T, logoutToken gjson.Result) { + assert.EqualValues(t, <-sid, logoutToken.Get("sid").String(), logoutToken.Raw) + assert.Empty(t, logoutToken.Get("sub").String(), logoutToken.Raw) // The sub claim should be empty because it doesn't work with forced obfuscation and thus we can't easily recover it. + assert.Empty(t, logoutToken.Get("nonce").String(), logoutToken.Raw) + }) + + logoutViaHeadlessAndExpectNoContent(t, createBrowserWithSession(t, c), url.Values{"sid": {<-sid}}) + + backChannelWG.Wait() // we want to ensure that all back channels have been called! 
+ assert.True(t, fakeKratos.DisableSessionWasCalled) + assert.Equal(t, fakeKratos.LastDisabledSession, kratos.FakeSessionID) + }) + + t.Run("case=should logout in headless flow with non-existing sid", func(t *testing.T) { + fakeKratos.Reset() + logoutViaHeadlessAndExpectNoContent(t, browserWithoutSession, url.Values{"sid": {"non-existing-sid"}}) + assert.False(t, fakeKratos.DisableSessionWasCalled) + }) + + t.Run("case=should logout in headless flow with session that has remember=false", func(t *testing.T) { + fakeKratos.Reset() + sid := make(chan string, 1) + acceptLoginAsAndWatchSidForConsumers(t, subject, sid, false, 1) + + wg := newWg(1) + fakeKratos.DisableSessionCB = wg.Done + + c := createSampleClient(t) + + logoutViaHeadlessAndExpectNoContent(t, createBrowserWithSession(t, c), url.Values{"sid": {<-sid}}) + wg.Wait() + assert.True(t, fakeKratos.DisableSessionWasCalled) + assert.Equal(t, fakeKratos.LastDisabledSession, kratos.FakeSessionID) + }) + + t.Run("case=should fail headless logout because neither sid nor subject were provided", func(t *testing.T) { + fakeKratos.Reset() + logoutViaHeadlessAndExpectError(t, browserWithoutSession, url.Values{}, `Either 'subject' or 'sid' query parameters need to be defined.`) + assert.False(t, fakeKratos.DisableSessionWasCalled) }) } diff --git a/consent/strategy_oauth_test.go b/consent/strategy_oauth_test.go index e032e2e451c..b21e0aeebe2 100644 --- a/consent/strategy_oauth_test.go +++ b/consent/strategy_oauth_test.go @@ -10,46 +10,42 @@ import ( "encoding/json" "fmt" "net/http" + "net/http/cookiejar" "net/url" + "regexp" "testing" "time" - "github.com/ory/x/ioutilx" - - "github.com/twmb/murmur3" - - "golang.org/x/oauth2" - - "github.com/ory/x/pointerx" - - "github.com/tidwall/gjson" - "github.com/pborman/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + "golang.org/x/exp/slices" + "golang.org/x/oauth2" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/x/contextx" - - "github.com/ory/fosite" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/x/configx" + "github.com/ory/x/pointerx" "github.com/ory/x/urlx" "github.com/ory/x/uuidx" - - hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" ) func TestStrategyLoginConsentNext(t *testing.T) { + t.Parallel() + ctx := context.Background() - reg := internal.NewMockedRegistry(t, &contextx.Default{}) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") - reg.Config().MustSet(ctx, config.KeyConsentRequestMaxAge, time.Hour) - reg.Config().MustSet(ctx, config.KeyConsentRequestMaxAge, time.Hour) - reg.Config().MustSet(ctx, config.KeyScopeStrategy, "exact") - reg.Config().MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise", "public"}) - reg.Config().MustSet(ctx, config.KeySubjectIdentifierAlgorithmSalt, "76d5d2bf-747f-4592-9fbd-d2b895a54b3a") + reg := 
testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyAccessTokenStrategy: "opaque", + config.KeyConsentRequestMaxAge: time.Hour, + config.KeyScopeStrategy: "exact", + config.KeySubjectTypesSupported: []string{"pairwise", "public"}, + config.KeySubjectIdentifierAlgorithmSalt: "76d5d2bf-747f-4592-9fbd-d2b895a54b3a", + }))) publicTS, adminTS := testhelpers.NewOAuth2Server(ctx, t, reg) adminClient := hydra.NewAPIClient(hydra.NewConfiguration()) @@ -113,8 +109,12 @@ func TestStrategyLoginConsentNext(t *testing.T) { t.Run("case=should fail because a login verifier was given that doesn't exist in the store", func(t *testing.T) { testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) c := createDefaultClient(t) + hc := testhelpers.NewEmptyJarClient(t) - makeRequestAndExpectError(t, nil, c, url.Values{"login_verifier": {"does-not-exist"}}, "The login verifier has already been used, has not been granted, or is invalid.") + makeRequestAndExpectError( + t, hc, c, url.Values{"login_verifier": {"does-not-exist"}}, + "The resource owner or authorization server denied the request. The login verifier has already been used, has not been granted, or is invalid.", + ) }) t.Run("case=should fail because a non-existing consent verifier was given", func(t *testing.T) { @@ -123,7 +123,12 @@ func TestStrategyLoginConsentNext(t *testing.T) { // - This should fail because a consent verifier was given but no login verifier testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) c := createDefaultClient(t) - makeRequestAndExpectError(t, nil, c, url.Values{"consent_verifier": {"does-not-exist"}}, "The consent verifier has already been used, has not been granted, or is invalid.") + hc := testhelpers.NewEmptyJarClient(t) + + makeRequestAndExpectError( + t, hc, c, url.Values{"consent_verifier": {"does-not-exist"}}, + "The consent verifier has already been used, has not been granted, or is invalid.", + ) }) t.Run("case=should fail because the request was redirected but the login endpoint doesn't do anything (like redirecting back)", func(t *testing.T) { @@ -147,7 +152,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { t.Run("case=should fail because the request was redirected but the login endpoint rejected the request", func(t *testing.T) { testhelpers.NewLoginConsentUI(t, reg.Config(), func(w http.ResponseWriter, r *http.Request) { - vr, _, err := adminClient.OAuth2Api.RejectOAuth2LoginRequest(context.Background()). + vr, _, err := adminClient.OAuth2API.RejectOAuth2LoginRequest(context.Background()). LoginChallenge(r.URL.Query().Get("login_challenge")). RejectOAuth2Request(hydra.RejectOAuth2Request{ Error: pointerx.String(fosite.ErrInteractionRequired.ErrorField), @@ -169,6 +174,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { testhelpers.HTTPServerNoExpectedCallHandler(t)) hc := new(http.Client) + hc.Jar = DropCookieJar(regexp.MustCompile("ory_hydra_.*_csrf_.*")) makeRequestAndExpectError(t, hc, c, url.Values{}, "No CSRF value available in the session cookie.") }) @@ -177,7 +183,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { testhelpers.NewLoginConsentUI(t, reg.Config(), acceptLoginHandler(t, "aeneas-rekkas", nil), func(w http.ResponseWriter, r *http.Request) { - vr, _, err := adminClient.OAuth2Api.RejectOAuth2ConsentRequest(context.Background()). 
+ vr, _, err := adminClient.OAuth2API.RejectOAuth2ConsentRequest(context.Background()). ConsentChallenge(r.URL.Query().Get("consent_challenge")). RejectOAuth2Request(hydra.RejectOAuth2Request{ Error: pointerx.String(fosite.ErrInteractionRequired.ErrorField), @@ -191,6 +197,78 @@ func TestStrategyLoginConsentNext(t *testing.T) { makeRequestAndExpectError(t, nil, c, url.Values{}, "expect-reject-consent") }) + t.Run("suite=double-submit", func(t *testing.T) { + ctx := context.Background() + c := createDefaultClient(t) + hc := testhelpers.NewEmptyJarClient(t) + var loginChallenge, consentChallenge string + + testhelpers.NewLoginConsentUI(t, reg.Config(), + func(w http.ResponseWriter, r *http.Request) { + loginChallenge = r.URL.Query().Get("login_challenge") + res, _, err := adminClient.OAuth2API.GetOAuth2LoginRequest(ctx). + LoginChallenge(loginChallenge). + Execute() + require.NoError(t, err) + require.Equal(t, loginChallenge, res.Challenge) + + v, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(ctx). + LoginChallenge(loginChallenge). + AcceptOAuth2LoginRequest(hydra.AcceptOAuth2LoginRequest{Subject: "aeneas-rekkas"}). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }, + func(w http.ResponseWriter, r *http.Request) { + consentChallenge = r.URL.Query().Get("consent_challenge") + res, _, err := adminClient.OAuth2API.GetOAuth2ConsentRequest(ctx). + ConsentChallenge(consentChallenge). + Execute() + require.NoError(t, err) + require.Equal(t, consentChallenge, res.Challenge) + + v, _, err := adminClient.OAuth2API.AcceptOAuth2ConsentRequest(ctx). + ConsentChallenge(consentChallenge). + AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{}). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }) + + makeRequestAndExpectCode(t, hc, c, url.Values{}) + + t.Run("case=double-submit login verifier", func(t *testing.T) { + v, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(ctx). + LoginChallenge(loginChallenge). + AcceptOAuth2LoginRequest(hydra.AcceptOAuth2LoginRequest{Subject: "aeneas-rekkas"}). + Execute() + require.NoError(t, err) + res, err := hc.Get(v.RedirectTo) + require.NoError(t, err) + q := res.Request.URL.Query() + assert.Equal(t, + "The resource owner or authorization server denied the request. The consent verifier has already been used.", + q.Get("error_description"), q) + }) + + t.Run("case=double-submit consent verifier", func(t *testing.T) { + v, _, err := adminClient.OAuth2API.AcceptOAuth2ConsentRequest(ctx). + ConsentChallenge(consentChallenge). + AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{}). + Execute() + require.NoError(t, err) + res, err := hc.Get(v.RedirectTo) + require.NoError(t, err) + q := res.Request.URL.Query() + assert.Equal(t, + "The resource owner or authorization server denied the request. 
The consent verifier has already been used.", + q.Get("error_description"), q) + }) + + }) + t.Run("case=should pass and set acr values properly", func(t *testing.T) { c := createDefaultClient(t) testhelpers.NewLoginConsentUI(t, reg.Config(), @@ -210,6 +288,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { subject := "aeneas-rekkas" c := createDefaultClient(t) + now := 1723546027 // Unix timestamps must round-trip through Hydra without converting to floats or similar testhelpers.NewLoginConsentUI(t, reg.Config(), acceptLoginHandler(t, subject, &hydra.AcceptOAuth2LoginRequest{ Remember: pointerx.Bool(true), @@ -218,8 +297,14 @@ func TestStrategyLoginConsentNext(t *testing.T) { Remember: pointerx.Bool(true), GrantScope: []string{"openid"}, Session: &hydra.AcceptOAuth2ConsentRequestSession{ - AccessToken: map[string]interface{}{"foo": "bar"}, - IdToken: map[string]interface{}{"bar": "baz"}, + AccessToken: map[string]interface{}{ + "foo": "bar", + "ts1": now, + }, + IdToken: map[string]interface{}{ + "bar": "baz", + "ts2": now, + }, }, })) @@ -234,13 +319,15 @@ func TestStrategyLoginConsentNext(t *testing.T) { token, err := conf.Exchange(context.Background(), code) require.NoError(t, err) - claims := testhelpers.IntrospectToken(t, conf, token.AccessToken, adminTS) - assert.Equal(t, "bar", claims.Get("ext.foo").String(), "%s", claims.Raw) + claims := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.Equalf(t, `"bar"`, claims.Get("ext.foo").Raw, "%s", claims.Raw) // Raw rather than .Int() or .Value() to verify the exact JSON payload + assert.Equalf(t, "1723546027", claims.Get("ext.ts1").Raw, "%s", claims.Raw) // must round-trip as integer idClaims := testhelpers.DecodeIDToken(t, token) - assert.Equal(t, "baz", idClaims.Get("bar").String(), "%s", idClaims.Raw) + assert.Equalf(t, `"baz"`, idClaims.Get("bar").Raw, "%s", idClaims.Raw) // Raw rather than .Int() or .Value() to verify the exact JSON payload + assert.Equalf(t, "1723546027", idClaims.Get("ts2").Raw, "%s", idClaims.Raw) // must round-trip as integer sid = idClaims.Get("sid").String() - assert.NotNil(t, sid) + assert.NotEmpty(t, sid) } t.Run("perform first flow", run) @@ -255,21 +342,28 @@ func TestStrategyLoginConsentNext(t *testing.T) { assert.Empty(t, pointerx.StringR(res.Client.ClientSecret)) return hydra.AcceptOAuth2LoginRequest{ Subject: subject, - Context: map[string]interface{}{"foo": "bar"}, + Context: map[string]interface{}{"xyz": "abc"}, } }), - checkAndAcceptConsentHandler(t, adminClient, func(t *testing.T, res *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { + checkAndAcceptConsentHandler(t, adminClient, func(t *testing.T, req *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { require.NoError(t, err) - assert.True(t, *res.Skip) - assert.Equal(t, sid, *res.LoginSessionId) - assert.Equal(t, subject, *res.Subject) - assert.Empty(t, pointerx.StringR(res.Client.ClientSecret)) + assert.True(t, *req.Skip) + assert.Equal(t, sid, *req.LoginSessionId) + assert.Equal(t, subject, *req.Subject) + assert.Empty(t, pointerx.StringR(req.Client.ClientSecret)) + assert.Equal(t, map[string]interface{}{"xyz": "abc"}, req.Context) return hydra.AcceptOAuth2ConsentRequest{ Remember: pointerx.Bool(true), GrantScope: []string{"openid"}, Session: &hydra.AcceptOAuth2ConsentRequestSession{ - AccessToken: map[string]interface{}{"foo": "bar"}, - IdToken: map[string]interface{}{"bar": "baz"}, + AccessToken: map[string]interface{}{ + "foo": "bar", + "ts1": now, + }, + IdToken: 
map[string]interface{}{ + "bar": "baz", + "ts2": now, + }, }, } })) @@ -331,38 +425,202 @@ func TestStrategyLoginConsentNext(t *testing.T) { assert.EqualValues(t, http.StatusFound, oauthRes.StatusCode) loginChallengeRedirect, err := oauthRes.Location() require.NoError(t, err) - defer oauthRes.Body.Close() - setCookieHeader := oauthRes.Header.Get("set-cookie") - assert.NotNil(t, setCookieHeader) + defer oauthRes.Body.Close() //nolint:errcheck - t.Run("login cookie client specific suffix is set", func(t *testing.T) { - assert.Regexp(t, fmt.Sprintf("ory_hydra_login_csrf_dev_%d=.*", murmur3.Sum32(c.ID.Bytes())), setCookieHeader) - }) - - t.Run("login cookie max age is set", func(t *testing.T) { - assert.Regexp(t, fmt.Sprintf("ory_hydra_login_csrf_dev_%d=.*Max-Age=%.0f;.*", murmur3.Sum32(c.ID.Bytes()), consentRequestMaxAge), setCookieHeader) + foundLoginCookie := slices.ContainsFunc(oauthRes.Header.Values("set-cookie"), func(sc string) bool { + ok, err := regexp.MatchString(fmt.Sprintf("ory_hydra_login_csrf_dev_%s=.*Max-Age=%.0f;.*", c.CookieSuffix(), consentRequestMaxAge), sc) + require.NoError(t, err) + return ok }) + require.True(t, foundLoginCookie, "client-specific login cookie with max age set") loginChallengeRes, err := hc.Get(loginChallengeRedirect.String()) require.NoError(t, err) - defer loginChallengeRes.Body.Close() + defer loginChallengeRes.Body.Close() //nolint:errcheck loginVerifierRedirect, err := loginChallengeRes.Location() + require.NoError(t, err) loginVerifierRes, err := hc.Get(loginVerifierRedirect.String()) require.NoError(t, err) - defer loginVerifierRes.Body.Close() - setCookieHeader = loginVerifierRes.Header.Values("set-cookie")[1] - assert.NotNil(t, setCookieHeader) + defer loginVerifierRes.Body.Close() //nolint:errcheck + + foundConsentCookie := slices.ContainsFunc(loginVerifierRes.Header.Values("set-cookie"), func(sc string) bool { + ok, err := regexp.MatchString(fmt.Sprintf("ory_hydra_consent_csrf_dev_%s=.*Max-Age=%.0f;.*", c.CookieSuffix(), consentRequestMaxAge), sc) + require.NoError(t, err) + return ok + }) + require.True(t, foundConsentCookie, "client-specific consent cookie with max age set") + }) + + t.Run("case=should pass if both login and consent are granted and check remember flows with refresh session cookie", func(t *testing.T) { + + subject := "subject-1" + c := createDefaultClient(t) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, subject, &hydra.AcceptOAuth2LoginRequest{ + Remember: pointerx.Bool(true), + }), + acceptConsentHandler(t, &hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + })) + + hc := testhelpers.NewEmptyJarClient(t) + + followUpHandler := func(extendSessionLifespan bool) { + rememberFor := int64(12345) + testhelpers.NewLoginConsentUI(t, reg.Config(), + checkAndAcceptLoginHandler(t, adminClient, subject, func(t *testing.T, res *hydra.OAuth2LoginRequest, err error) hydra.AcceptOAuth2LoginRequest { + require.NoError(t, err) + assert.True(t, res.Skip) + assert.Equal(t, subject, res.Subject) + assert.Empty(t, res.Client.ClientSecret) + return hydra.AcceptOAuth2LoginRequest{ + Subject: subject, + Remember: pointerx.Bool(true), + RememberFor: pointerx.Int64(rememberFor), + ExtendSessionLifespan: pointerx.Bool(extendSessionLifespan), + Context: map[string]interface{}{"foo": "bar"}, + } + }), + 
checkAndAcceptConsentHandler(t, adminClient, func(t *testing.T, res *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { + require.NoError(t, err) + assert.True(t, *res.Skip) + assert.Equal(t, subject, res.Subject) + assert.Empty(t, res.Client.ClientSecret) + return hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + } + })) + + hc := &http.Client{ + Jar: hc.Jar, + Transport: &http.Transport{}, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + }, + } - t.Run("consent cookie client specific suffix set", func(t *testing.T) { - assert.Regexp(t, fmt.Sprintf("ory_hydra_consent_csrf_dev_%d=.*", murmur3.Sum32(c.ID.Bytes())), setCookieHeader) + _, oauthRes := makeOAuth2Request(t, reg, hc, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}, "scope": {"openid"}}) + assert.EqualValues(t, http.StatusFound, oauthRes.StatusCode) + loginChallengeRedirect, err := oauthRes.Location() + require.NoError(t, err) + defer oauthRes.Body.Close() //nolint:errcheck + + loginChallengeRes, err := hc.Get(loginChallengeRedirect.String()) + require.NoError(t, err) + defer loginChallengeRes.Body.Close() //nolint:errcheck + loginVerifierRedirect, err := loginChallengeRes.Location() + require.NoError(t, err) + + loginVerifierRes, err := hc.Get(loginVerifierRedirect.String()) + require.NoError(t, err) + defer loginVerifierRes.Body.Close() //nolint:errcheck + + setCookieHeader := loginVerifierRes.Header.Get("set-cookie") + assert.NotNil(t, setCookieHeader) + if extendSessionLifespan { + assert.Regexp(t, fmt.Sprintf("ory_hydra_session_dev=.*; Path=/; Expires=.*Max-Age=%d; HttpOnly; SameSite=Lax", rememberFor), setCookieHeader) + } else { + assert.NotContains(t, setCookieHeader, "ory_hydra_session_dev") + } + } + + t.Run("perform first flow", func(t *testing.T) { + makeRequestAndExpectCode(t, hc, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}, + "scope": {"openid"}}) + }) + + t.Run("perform follow up flow with extend_session_lifespan=false", func(t *testing.T) { + followUpHandler(false) }) - t.Run("consent cookie max age is set", func(t *testing.T) { - assert.Regexp(t, fmt.Sprintf("ory_hydra_consent_csrf_dev_%d=.*Max-Age=%.0f;.*", murmur3.Sum32(c.ID.Bytes()), consentRequestMaxAge), setCookieHeader) + t.Run("perform follow up flow with extend_session_lifespan=true", func(t *testing.T) { + followUpHandler(true) }) }) + t.Run("case=should set session cookie with correct configuration", func(t *testing.T) { + cookiePath := "/foo" + reg.Config().MustSet(ctx, config.KeyCookieSessionPath, cookiePath) + defer reg.Config().MustSet(ctx, config.KeyCookieSessionPath, "/") + + subject := "subject-1" + c := createDefaultClient(t) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, subject, &hydra.AcceptOAuth2LoginRequest{ + Remember: pointerx.Bool(true), + }), + acceptConsentHandler(t, &hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + })) + testhelpers.NewLoginConsentUI(t, reg.Config(), + checkAndAcceptLoginHandler(t, adminClient, subject, func(t *testing.T, res *hydra.OAuth2LoginRequest, err error) 
hydra.AcceptOAuth2LoginRequest { + require.NoError(t, err) + assert.Empty(t, res.Subject) + assert.Empty(t, pointerx.StringR(res.Client.ClientSecret)) + return hydra.AcceptOAuth2LoginRequest{ + Subject: subject, + Context: map[string]interface{}{"foo": "bar"}, + } + }), + checkAndAcceptConsentHandler(t, adminClient, func(t *testing.T, res *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { + require.NoError(t, err) + assert.Equal(t, subject, *res.Subject) + assert.Empty(t, pointerx.StringR(res.Client.ClientSecret)) + return hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + } + })) + hc := &http.Client{ + Jar: testhelpers.NewEmptyCookieJar(t), + Transport: &http.Transport{}, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + }, + } + + _, oauthRes := makeOAuth2Request(t, reg, hc, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}, "scope": {"openid"}}) + assert.EqualValues(t, http.StatusFound, oauthRes.StatusCode) + loginChallengeRedirect, err := oauthRes.Location() + require.NoError(t, err) + defer oauthRes.Body.Close() //nolint:errcheck + + loginChallengeRes, err := hc.Get(loginChallengeRedirect.String()) + require.NoError(t, err) + defer loginChallengeRes.Body.Close() //nolint:errcheck + + loginVerifierRedirect, err := loginChallengeRes.Location() + require.NoError(t, err) + loginVerifierRes, err := hc.Get(loginVerifierRedirect.String()) + require.NoError(t, err) + defer loginVerifierRes.Body.Close() //nolint:errcheck + + setCookieHeader := loginVerifierRes.Header.Get("set-cookie") + assert.NotNil(t, setCookieHeader) + + assert.Regexp(t, fmt.Sprintf("ory_hydra_session_dev=.*; Path=%s; Expires=.*; Max-Age=0; HttpOnly; SameSite=Lax", cookiePath), setCookieHeader) + }) + t.Run("case=should pass and check if login context is set properly", func(t *testing.T) { // This should pass because login was remembered and session id should be set and session context should also work subject := "aeneas-rekkas" @@ -396,7 +654,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { // - This should fail because prompt=none, client is public, and redirection scheme is not HTTPS but a custom scheme // - This should pass because prompt=none, client is public, redirection scheme is HTTP and host is localhost - c := &client.Client{LegacyClientID: uuidx.NewV4().String(), TokenEndpointAuthMethod: "none", + c := &client.Client{ID: uuidx.NewV4().String(), TokenEndpointAuthMethod: "none", RedirectURIs: []string{ testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler), "custom://redirection-scheme/path", @@ -411,9 +669,8 @@ func TestStrategyLoginConsentNext(t *testing.T) { hc := testhelpers.NewEmptyJarClient(t) - t.Run("set up initial session", func(t *testing.T) { - makeRequestAndExpectCode(t, hc, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}}) - }) + // set up initial session + makeRequestAndExpectCode(t, hc, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}}) // By not waiting here we ensure that there are no race conditions when it comes to authenticated_at and // requested_at time comparisons: @@ -457,7 +714,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { }) }) - t.Run("case=should fail at login screen because subject in login challenge does not match subject from previous session", 
func(t *testing.T) { + t.Run("case=should retry the authorization with prompt=login if subject in login challenge does not match subject from previous session", func(t *testing.T) { // Previously: This should fail at login screen because subject from accept does not match subject from session c := createDefaultClient(t) testhelpers.NewLoginConsentUI(t, reg.Config(), @@ -470,13 +727,15 @@ func TestStrategyLoginConsentNext(t *testing.T) { testhelpers.NewLoginConsentUI(t, reg.Config(), func(w http.ResponseWriter, r *http.Request) { - _, res, err := adminClient.OAuth2Api.AcceptOAuth2LoginRequest(context.Background()). + res, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). LoginChallenge(r.URL.Query().Get("login_challenge")). AcceptOAuth2LoginRequest(hydra.AcceptOAuth2LoginRequest{ Subject: "not-aeneas-rekkas", }).Execute() - require.Error(t, err) - assert.Contains(t, string(ioutilx.MustReadAll(res.Body)), "Field 'subject' does not match subject from previous authentication") + require.NoError(t, err) + redirectURL, err := url.Parse(res.RedirectTo) + require.NoError(t, err) + assert.Equal(t, "login", redirectURL.Query().Get("prompt")) w.WriteHeader(http.StatusBadRequest) }, testhelpers.HTTPServerNoExpectedCallHandler(t)) @@ -486,6 +745,21 @@ func TestStrategyLoginConsentNext(t *testing.T) { assert.Empty(t, res.Request.URL.Query().Get("code")) }) + t.Run("case=should forward the identity schema in the login URL", func(t *testing.T) { + c := createDefaultClient(t) + hc := testhelpers.NewEmptyJarClient(t) + + testhelpers.NewLoginConsentUI(t, reg.Config(), + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "custom-id-schema", r.URL.Query().Get("identity_schema")) + w.WriteHeader(http.StatusBadRequest) // We do not want to continue the flow here, we just want to check the query parameter + }, + testhelpers.HTTPServerNoExpectedCallHandler(t)) + + _, res := makeOAuth2Request(t, reg, hc, c, url.Values{"identity_schema": {"custom-id-schema"}}) + assert.EqualValues(t, http.StatusBadRequest, res.StatusCode) + }) + t.Run("case=should require re-authentication when parameters mandate it", func(t *testing.T) { // Covers: // - should pass and require re-authentication although session is set (because prompt=login) @@ -563,7 +837,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { makeRequestAndExpectCode(t, hc, c, url.Values{}) // Make request with additional scope and prompt none, which fails - makeRequestAndExpectError(t, hc, c, url.Values{"prompt": {"none"}, "scope": {"openid"}}, + makeRequestAndExpectError(t, hc, c, url.Values{"prompt": {"none"}, "scope": {"openid"}, "redirect_uri": {c.RedirectURIs[0]}}, "Prompt 'none' was requested, but no previous consent was found") }) @@ -670,11 +944,11 @@ func TestStrategyLoginConsentNext(t *testing.T) { }{ { d: "check all the sub claims", - values: url.Values{"scope": {"openid"}}, + values: url.Values{"scope": {"openid"}, "redirect_uri": {c.RedirectURIs[0]}}, }, { d: "works with id_token_hint", - values: url.Values{"scope": {"openid"}, "id_token_hint": {testhelpers.NewIDToken(t, reg, hash)}}, + values: url.Values{"scope": {"openid"}, "redirect_uri": {c.RedirectURIs[0]}, "id_token_hint": {testhelpers.NewIDToken(t, reg, hash)}}, }, } { t.Run("case="+tc.d, func(t *testing.T) { @@ -691,7 +965,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { assert.EqualValues(t, hash, uiClaims.Get("sub").String()) // Access token data must not be obfuscated - atClaims := testhelpers.IntrospectToken(t, conf, token.AccessToken, 
adminTS) + atClaims := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) assert.EqualValues(t, subject, atClaims.Get("sub").String()) }) } @@ -714,7 +988,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { }), acceptConsentHandler(t, &hydra.AcceptOAuth2ConsentRequest{GrantScope: []string{"openid"}})) - code := makeRequestAndExpectCode(t, nil, c, url.Values{}) + code := makeRequestAndExpectCode(t, nil, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}}) conf := oauth2Config(t, c) token, err := conf.Exchange(context.Background(), code) @@ -727,7 +1001,7 @@ func TestStrategyLoginConsentNext(t *testing.T) { assert.EqualValues(t, obfuscated, uiClaims.Get("sub").String()) // Access token data must not be obfuscated - atClaims := testhelpers.IntrospectToken(t, conf, token.AccessToken, adminTS) + atClaims := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) assert.EqualValues(t, subject, atClaims.Get("sub").String()) }) @@ -848,3 +1122,251 @@ func TestStrategyLoginConsentNext(t *testing.T) { makeRequestAndExpectCode(t, hc, c, url.Values{"redirect_uri": {c.RedirectURIs[0]}}) }) } + +func TestStrategyDeviceLoginConsent(t *testing.T) { + t.Parallel() + + ctx := context.Background() + reg := testhelpers.NewRegistryMemory(t) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + reg.Config().MustSet(ctx, config.KeyConsentRequestMaxAge, time.Hour) + reg.Config().MustSet(ctx, config.KeyConsentRequestMaxAge, time.Hour) + reg.Config().MustSet(ctx, config.KeyScopeStrategy, "exact") + reg.Config().MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise", "public"}) + reg.Config().MustSet(ctx, config.KeySubjectIdentifierAlgorithmSalt, "76d5d2bf-747f-4592-9fbd-d2b895a54b3a") + + publicTS, adminTS := testhelpers.NewOAuth2Server(ctx, t, reg) + adminClient := hydra.NewAPIClient(hydra.NewConfiguration()) + adminClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: adminTS.URL}} + + oauth2Config := func(t *testing.T, c *client.Client) *oauth2.Config { + return &oauth2.Config{ + ClientID: c.GetID(), + ClientSecret: c.Secret, + Endpoint: oauth2.Endpoint{ + DeviceAuthURL: publicTS.URL + "/oauth2/device/auth", + TokenURL: publicTS.URL + "/oauth2/token", + AuthStyle: oauth2.AuthStyleInHeader, + }, + } + } + + now := 1723546027 // Unix timestamps must round-trip through Hydra without converting to floats or similar + acceptDeviceHandler := func(t *testing.T) http.HandlerFunc { + return checkAndAcceptDeviceHandler(t, adminClient) + } + + acceptLoginHandler := func(t *testing.T, subject string, payload *hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { + return checkAndAcceptLoginHandler(t, adminClient, subject, func(*testing.T, *hydra.OAuth2LoginRequest, error) hydra.AcceptOAuth2LoginRequest { + if payload == nil { + return hydra.AcceptOAuth2LoginRequest{} + } + return *payload + }) + } + + acceptConsentHandler := func(t *testing.T, payload *hydra.AcceptOAuth2ConsentRequest) http.HandlerFunc { + return checkAndAcceptConsentHandler(t, adminClient, func(*testing.T, *hydra.OAuth2ConsentRequest, error) hydra.AcceptOAuth2ConsentRequest { + if payload == nil { + return hydra.AcceptOAuth2ConsentRequest{} + } + return *payload + }) + } + + createDefaultClient := func(t *testing.T) *client.Client { + c := &client.Client{GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}} + return createClient(t, reg, c) + } + t.Run("case=should pass if both login and consent are granted and check remember flows as well as various payloads", func(t *testing.T) { + subject := 
"aeneas-rekkas" + c := createDefaultClient(t) + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + acceptDeviceHandler(t), + acceptLoginHandler(t, subject, &hydra.AcceptOAuth2LoginRequest{ + Remember: pointerx.Bool(true), + }), + acceptConsentHandler(t, &hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{ + "foo": "bar", + "ts1": now, + }, + IdToken: map[string]interface{}{ + "bar": "baz", + "ts1": now, + }, + }, + })) + + hc := testhelpers.NewEmptyJarClient(t) + + var sid string + var run = func(t *testing.T) { + res, resp := makeOAuth2DeviceAuthRequest(t, reg, hc, c, "openid") + assert.EqualValues(t, http.StatusOK, resp.StatusCode) + + devResp := new(oauth2.DeviceAuthResponse) + require.NoError(t, json.Unmarshal([]byte(res.Raw), devResp)) + + resp, err := hc.Get(devResp.VerificationURIComplete) + require.NoError(t, err) + require.Contains(t, reg.Config().DeviceDoneURL(ctx).String(), resp.Request.URL.Path, "did not end up in post device URL") + require.Equal(t, resp.Request.URL.Query().Get("client_id"), c.ID) + + conf := oauth2Config(t, c) + token, err := conf.DeviceAccessToken(ctx, devResp) + require.NoError(t, err) + + claims := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.Equal(t, "bar", claims.Get("ext.foo").String(), "%s", claims.Raw) + + idClaims := testhelpers.DecodeIDToken(t, token) + assert.Equal(t, "baz", idClaims.Get("bar").String(), "%s", idClaims.Raw) + sid = idClaims.Get("sid").String() + assert.NotNil(t, sid) + } + + t.Run("perform first flow", run) + + t.Run("perform follow up flows and check if session values are set", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), + checkAndAcceptLoginHandler(t, adminClient, subject, func(t *testing.T, res *hydra.OAuth2LoginRequest, err error) hydra.AcceptOAuth2LoginRequest { + require.NoError(t, err) + assert.True(t, res.Skip) + assert.Equal(t, sid, *res.SessionId) + assert.Equal(t, subject, res.Subject) + assert.Empty(t, pointerx.StringR(res.Client.ClientSecret)) + return hydra.AcceptOAuth2LoginRequest{ + Subject: subject, + Context: map[string]interface{}{"xyz": "abc"}, + } + }), + checkAndAcceptConsentHandler(t, adminClient, func(t *testing.T, req *hydra.OAuth2ConsentRequest, err error) hydra.AcceptOAuth2ConsentRequest { + require.NoError(t, err) + assert.True(t, *req.Skip) + assert.Equal(t, sid, *req.LoginSessionId) + assert.Equal(t, subject, *req.Subject) + assert.Empty(t, pointerx.StringR(req.Client.ClientSecret)) + assert.Equal(t, map[string]interface{}{"xyz": "abc"}, req.Context) + return hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{ + "foo": "bar", + "ts1": now, + }, + IdToken: map[string]interface{}{ + "bar": "baz", + "ts2": now, + }, + }, + } + })) + + for k := 0; k < 3; k++ { + t.Run(fmt.Sprintf("case=%d", k), run) + } + }) + }) + t.Run("case=should fail because we are reusing the same verifier", func(t *testing.T) { + subject := "aeneas-rekkas" + c := createDefaultClient(t) + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + acceptDeviceHandler(t), + acceptLoginHandler(t, subject, &hydra.AcceptOAuth2LoginRequest{ + Remember: pointerx.Bool(true), + }), + acceptConsentHandler(t, &hydra.AcceptOAuth2ConsentRequest{ + Remember: pointerx.Bool(true), + GrantScope: []string{"openid"}, + Session: 
&hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + })) + + hc := testhelpers.NewEmptyJarClient(t) + + res, resp := makeOAuth2DeviceAuthRequest(t, reg, hc, c, "openid") + assert.EqualValues(t, http.StatusOK, resp.StatusCode) + + devResp := new(oauth2.DeviceAuthResponse) + require.NoError(t, json.Unmarshal([]byte(res.Raw), devResp)) + + resp, err := hc.Get(devResp.VerificationURIComplete) + require.NoError(t, err) + require.Contains(t, reg.Config().DeviceDoneURL(ctx).String(), resp.Request.URL.Path, "did not end up in post device URL") + require.Equal(t, resp.Request.URL.Query().Get("client_id"), c.ID) + + conf := oauth2Config(t, c) + token, err := conf.DeviceAccessToken(ctx, devResp) + require.NoError(t, err) + + claims := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.Equal(t, "bar", claims.Get("ext.foo").String(), "%s", claims.Raw) + + idClaims := testhelpers.DecodeIDToken(t, token) + assert.Equal(t, "baz", idClaims.Get("bar").String(), "%s", idClaims.Raw) + sid := idClaims.Get("sid").String() + assert.NotNil(t, sid) + + }) + t.Run("case=should fail because a device verifier was given that doesn't exist in the store", func(t *testing.T) { + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) + c := createDefaultClient(t) + hc := testhelpers.NewEmptyJarClient(t) + + _, res := makeOAuth2DeviceVerificationRequest(t, reg, hc, c, url.Values{"device_verifier": {"does-not-exist"}}) + assert.EqualValues(t, http.StatusForbidden, res.StatusCode) + }) + + t.Run("case=should fail because a login verifier was given that doesn't exist in the store", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) + c := createDefaultClient(t) + hc := testhelpers.NewEmptyJarClient(t) + + _, res := makeOAuth2DeviceVerificationRequest(t, reg, hc, c, url.Values{"login_verifier": {"does-not-exist"}}) + assert.EqualValues(t, http.StatusForbidden, res.StatusCode) + }) + + t.Run("case=should fail because a consent verifier was given that doesn't exist in the store", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) + c := createDefaultClient(t) + hc := testhelpers.NewEmptyJarClient(t) + + _, res := makeOAuth2DeviceVerificationRequest(t, reg, hc, c, url.Values{"consent_verifier": {"does-not-exist"}}) + assert.EqualValues(t, http.StatusForbidden, res.StatusCode) + }) +} + +func DropCookieJar(drop *regexp.Regexp) http.CookieJar { + jar, _ := cookiejar.New(nil) + return &dropCSRFCookieJar{ + jar: jar, + drop: drop, + } +} + +type dropCSRFCookieJar struct { + jar *cookiejar.Jar + drop *regexp.Regexp +} + +var _ http.CookieJar = (*dropCSRFCookieJar)(nil) + +func (d *dropCSRFCookieJar) SetCookies(u *url.URL, cookies []*http.Cookie) { + for _, c := range cookies { + if d.drop.MatchString(c.Name) { + continue + } + d.jar.SetCookies(u, []*http.Cookie{c}) + } +} + +func (d *dropCSRFCookieJar) Cookies(u *url.URL) []*http.Cookie { + return d.jar.Cookies(u) +} diff --git a/consent/subject_identifier_algorithm.go b/consent/subject_identifier_algorithm.go deleted file mode 100644 index 343ab7d458e..00000000000 --- 
a/consent/subject_identifier_algorithm.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent - -import "github.com/ory/hydra/client" - -type SubjectIdentifierAlgorithm interface { - // Obfuscate derives a pairwise subject identifier from the given string. - Obfuscate(subject string, client *client.Client) (string, error) -} diff --git a/consent/subject_identifier_algorithm_pairwise.go b/consent/subject_identifier_algorithm_pairwise.go deleted file mode 100644 index b27f7690bb2..00000000000 --- a/consent/subject_identifier_algorithm_pairwise.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent - -import ( - "crypto/sha256" - "fmt" - "net/url" - - "github.com/ory/x/errorsx" - - "github.com/ory/fosite" - "github.com/ory/hydra/client" -) - -type SubjectIdentifierAlgorithmPairwise struct { - Salt []byte -} - -func NewSubjectIdentifierAlgorithmPairwise(salt []byte) *SubjectIdentifierAlgorithmPairwise { - return &SubjectIdentifierAlgorithmPairwise{Salt: salt} -} - -func (g *SubjectIdentifierAlgorithmPairwise) Obfuscate(subject string, client *client.Client) (string, error) { - // sub = SHA-256 ( sector_identifier || local_account_id || salt ). - var id string - if len(client.SectorIdentifierURI) == 0 && len(client.RedirectURIs) > 1 { - return "", errorsx.WithStack(fosite.ErrInvalidRequest.WithHintf("OAuth 2.0 Client %s has multiple redirect_uris but no sector_identifier_uri was set which is not allowed when performing using subject type pairwise. Please reconfigure the OAuth 2.0 client properly.", client.GetID())) - } else if len(client.SectorIdentifierURI) == 0 && len(client.RedirectURIs) == 0 { - return "", errorsx.WithStack(fosite.ErrInvalidRequest.WithHintf("OAuth 2.0 Client %s neither specifies a sector_identifier_uri nor a redirect_uri which is not allowed when performing using subject type pairwise. 
Please reconfigure the OAuth 2.0 client properly.", client.GetID())) - } else if len(client.SectorIdentifierURI) > 0 { - id = client.SectorIdentifierURI - } else { - redirectURL, err := url.Parse(client.RedirectURIs[0]) - if err != nil { - return "", errorsx.WithStack(err) - } - id = redirectURL.Host - } - - return fmt.Sprintf("%x", sha256.Sum256(append(append([]byte{}, []byte(id+subject)...), g.Salt...))), nil -} diff --git a/consent/subject_identifier_algorithm_public.go b/consent/subject_identifier_algorithm_public.go deleted file mode 100644 index a73062f3d64..00000000000 --- a/consent/subject_identifier_algorithm_public.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent - -import "github.com/ory/hydra/client" - -type SubjectIdentifierAlgorithmPublic struct{} - -func NewSubjectIdentifierAlgorithmPublic() *SubjectIdentifierAlgorithmPublic { - return &SubjectIdentifierAlgorithmPublic{} -} - -func (g *SubjectIdentifierAlgorithmPublic) Obfuscate(subject string, client *client.Client) (string, error) { - return subject, nil -} diff --git a/consent/subject_identifier_pairwise.go b/consent/subject_identifier_pairwise.go new file mode 100644 index 00000000000..ae930462164 --- /dev/null +++ b/consent/subject_identifier_pairwise.go @@ -0,0 +1,38 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package consent + +import ( + "crypto/sha256" + "fmt" + "net/url" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite" +) + +func pairwiseObfuscate(salt, subject string, client *client.Client) (string, error) { + // sub = SHA-256 ( sector_identifier || local_account_id || salt ). + var id string + if len(client.SectorIdentifierURI) == 0 && len(client.RedirectURIs) > 1 { + return "", errors.WithStack(fosite.ErrInvalidRequest.WithHintf("OAuth 2.0 Client %s has multiple redirect_uris but no sector_identifier_uri was set which is not allowed when performing using subject type pairwise. Please reconfigure the OAuth 2.0 client properly.", client.GetID())) + } else if len(client.SectorIdentifierURI) == 0 && len(client.RedirectURIs) == 0 { + return "", errors.WithStack(fosite.ErrInvalidRequest.WithHintf("OAuth 2.0 Client %s neither specifies a sector_identifier_uri nor a redirect_uri which is not allowed when performing using subject type pairwise. 
Please reconfigure the OAuth 2.0 client properly.", client.GetID())) + } else if len(client.SectorIdentifierURI) > 0 { + id = client.SectorIdentifierURI + } else { + redirectURL, err := url.Parse(client.RedirectURIs[0]) + if err != nil { + return "", errors.WithStack(err) + } + id = redirectURL.Host + } + h := sha256.New() + h.Write([]byte(id)) + h.Write([]byte(subject)) + h.Write([]byte(salt)) + return fmt.Sprintf("%x", h.Sum(make([]byte, 0, sha256.Size))), nil +} diff --git a/consent/subject_identifier_pairwise_test.go b/consent/subject_identifier_pairwise_test.go new file mode 100644 index 00000000000..eae67d87da8 --- /dev/null +++ b/consent/subject_identifier_pairwise_test.go @@ -0,0 +1,60 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package consent + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/uuidx" +) + +func Test_pairwiseObfuscate(t *testing.T) { + salt, sub, clientURI := uuidx.NewV4().String(), uuidx.NewV4().String(), uuidx.NewV4().String() + + t.Run("same result with same parameters", func(t *testing.T) { + baseLine, err := pairwiseObfuscate(salt, sub, &client.Client{SectorIdentifierURI: clientURI}) + require.NoError(t, err) + + other, err := pairwiseObfuscate(salt, sub, &client.Client{SectorIdentifierURI: clientURI}) + require.NoError(t, err) + assert.Equal(t, baseLine, other) + + other, err = pairwiseObfuscate(salt, sub, &client.Client{RedirectURIs: []string{"https://" + clientURI}}) + require.NoError(t, err) + assert.Equal(t, baseLine, other) + }) + + t.Run("different result with different parameters", func(t *testing.T) { + baseLine, err := pairwiseObfuscate(salt, sub, &client.Client{SectorIdentifierURI: clientURI}) + require.NoError(t, err) + + other, err := pairwiseObfuscate(uuidx.NewV4().String(), sub, &client.Client{SectorIdentifierURI: clientURI}) + require.NoError(t, err) + assert.NotEqual(t, baseLine, other) + + other, err = pairwiseObfuscate(salt, uuidx.NewV4().String(), &client.Client{SectorIdentifierURI: clientURI}) + require.NoError(t, err) + assert.NotEqual(t, baseLine, other) + + other, err = pairwiseObfuscate(salt, sub, &client.Client{SectorIdentifierURI: uuidx.NewV4().String()}) + require.NoError(t, err) + assert.NotEqual(t, baseLine, other) + + other, err = pairwiseObfuscate(salt, sub, &client.Client{RedirectURIs: []string{"https://" + uuidx.NewV4().String()}}) + require.NoError(t, err) + assert.NotEqual(t, baseLine, other) + }) + + t.Run("errors with invalid client setup", func(t *testing.T) { + _, err := pairwiseObfuscate(salt, sub, &client.Client{}) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + _, err = pairwiseObfuscate(salt, sub, &client.Client{RedirectURIs: []string{"https://" + uuidx.NewV4().String(), "https://" + uuidx.NewV4().String()}}) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) +} diff --git a/consent/test/manager_test_helpers.go b/consent/test/manager_test_helpers.go new file mode 100644 index 00000000000..47e95eccd13 --- /dev/null +++ b/consent/test/manager_test_helpers.go @@ -0,0 +1,600 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package test + +import ( + "fmt" + "testing" + "time" + + "github.com/gofrs/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + 
"github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/contextx" + "github.com/ory/x/pointerx" + "github.com/ory/x/sqlcon" + "github.com/ory/x/sqlxx" + "github.com/ory/x/uuidx" +) + +func MockConsentFlow(remember bool, rememberFor int, skip bool) *flow.Flow { + return &flow.Flow{ + ID: uuidx.NewV4().String(), + Client: &client.Client{ID: uuidx.NewV4().String()}, + State: flow.FlowStateConsentUsed, + ConsentRequestID: sqlxx.NullString(uuidx.NewV4().String()), + ConsentSkip: skip, + ConsentCSRF: sqlxx.NullString(uuidx.NewV4().String()), + OpenIDConnectContext: &flow.OAuth2ConsentRequestOpenIDConnectContext{ + ACRValues: []string{"1", "2"}, + UILocales: []string{"fr", "de"}, + Display: "popup", + }, + Subject: uuidx.NewV4().String(), + RequestedScope: []string{"scope_a", "scope_b"}, + RequestedAudience: []string{"aud_a", "aud_b"}, + RequestURL: "https://request-url/path", + RequestedAt: time.Now().UTC(), + ConsentRemember: remember, + ConsentRememberFor: pointerx.Ptr(rememberFor), + GrantedScope: []string{"scope_a", "scope_b"}, + GrantedAudience: []string{"aud_a", "aud_b"}, + ConsentHandledAt: sqlxx.NullTime(time.Now().UTC()), + } +} + +func mockLogoutRequest(withClient bool) (c *flow.LogoutRequest) { + req := &flow.LogoutRequest{ + Subject: uuidx.NewV4().String(), + ID: uuidx.NewV4().String(), + Verifier: uuidx.NewV4().String(), + SessionID: uuidx.NewV4().String(), + RPInitiated: true, + RequestURL: "http://request-me/", + PostLogoutRedirectURI: "http://redirect-me/", + WasHandled: false, + Accepted: false, + } + if withClient { + req.Client = &client.Client{ID: uuidx.NewV4().String()} + } + return req +} + +func LoginNIDTest(t1ValidNID, t2InvalidNID consent.LoginManager) func(t *testing.T) { + testLS := flow.LoginSession{ + ID: "2022-03-11-ls-nid-test-1", + Subject: "2022-03-11-test-1-sub", + } + return func(t *testing.T) { + ctx := t.Context() + + require.ErrorContains(t, t2InvalidNID.ConfirmLoginSession(ctx, &testLS), "foreign key constraint") + require.NoError(t, t1ValidNID.ConfirmLoginSession(ctx, &testLS)) + ls, err := t2InvalidNID.DeleteLoginSession(ctx, testLS.ID) + require.ErrorIs(t, err, sqlcon.ErrNoRows) + assert.Nil(t, ls) + ls, err = t1ValidNID.DeleteLoginSession(ctx, testLS.ID) + require.NoError(t, err) + assert.EqualValues(t, testLS.ID, ls.ID) + } +} + +type Deps interface { + contextx.Provider + x.TracingProvider + x.NetworkProvider + config.Provider +} + +func LoginManagerTest(t *testing.T, deps Deps, m consent.LoginManager) { + t.Run("get with random id", func(t *testing.T) { + _, err := m.GetRememberedLoginSession(t.Context(), uuidx.NewV4().String()) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("create get update", func(t *testing.T) { + sess := &flow.LoginSession{ + ID: uuidx.NewV4().String(), + AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).UTC()), + Subject: uuidx.NewV4().String(), + Remember: true, + } + require.NoError(t, m.ConfirmLoginSession(t.Context(), sess)) + + actual, err := m.GetRememberedLoginSession(t.Context(), sess.ID) + require.NoError(t, err) + assert.Equal(t, 
deps.Networker().NetworkID(t.Context()), sess.NID) + assert.Equal(t, sess, actual) + + sess.AuthenticatedAt = sqlxx.NullTime(time.Now().Add(10 * time.Minute).Round(time.Second).UTC()) + sess.Subject = uuidx.NewV4().String() // not sure why we should be able to update the subject, but ok... + require.NoError(t, m.ConfirmLoginSession(t.Context(), sess)) + + actual, err = m.GetRememberedLoginSession(t.Context(), sess.ID) + require.NoError(t, err) + assert.Equal(t, sess, actual) + }) + + t.Run("get non-remembered session", func(t *testing.T) { + id := uuidx.NewV4().String() + require.NoError(t, m.ConfirmLoginSession(t.Context(), &flow.LoginSession{ + ID: id, + AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).UTC()), + Subject: uuidx.NewV4().String(), + Remember: false, + })) + + _, err := m.GetRememberedLoginSession(t.Context(), id) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("delete", func(t *testing.T) { + expected := &flow.LoginSession{ + ID: uuidx.NewV4().String(), + AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).UTC()), + Subject: uuidx.NewV4().String(), + Remember: true, + } + require.NoError(t, m.ConfirmLoginSession(t.Context(), expected)) + + deleted, err := m.DeleteLoginSession(t.Context(), expected.ID) + require.NoError(t, err) + assert.EqualValues(t, expected, deleted) + + _, err = m.GetRememberedLoginSession(t.Context(), expected.ID) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("revoke by subject", func(t *testing.T) { + subs := make([]uuid.UUID, 2) + sessions := make([]*flow.LoginSession, 0) + for i := range subs { + subs[i] = uuid.Must(uuid.NewV4()) + sessions = append(sessions, &flow.LoginSession{ + ID: uuidx.NewV4().String(), + AuthenticatedAt: sqlxx.NullTime(time.Now()), + Subject: subs[i].String(), + Remember: true, + }, &flow.LoginSession{ + ID: uuidx.NewV4().String(), + AuthenticatedAt: sqlxx.NullTime(time.Now()), + Subject: subs[i].String(), + Remember: true, + }) + } + sessions = append(sessions, &flow.LoginSession{ + ID: uuidx.NewV4().String(), + AuthenticatedAt: sqlxx.NullTime(time.Now()), + Subject: uuid.Must(uuid.NewV4()).String(), + Remember: true, + }) + + for _, s := range sessions { + require.NoError(t, m.ConfirmLoginSession(t.Context(), s)) + } + + for _, sub := range subs { + require.NoError(t, m.RevokeSubjectLoginSession(t.Context(), sub.String())) + } + + for _, s := range sessions[:len(sessions)-1] { + _, err := m.GetRememberedLoginSession(t.Context(), s.ID) + assert.ErrorIs(t, err, x.ErrNotFound) + } + + // ensure the unrelated session still exists + _, err := m.GetRememberedLoginSession(t.Context(), sessions[len(sessions)-1].ID) + assert.NoError(t, err) + }) + + t.Run("revoke with random subject", func(t *testing.T) { + assert.NoError(t, m.RevokeSubjectLoginSession(t.Context(), uuidx.NewV4().String())) + }) +} + +func ConsentManagerTests(t *testing.T, deps Deps, m consent.Manager, loginManager consent.LoginManager, clientManager client.Manager, fositeManager x.FositeStorer) { + t.Run("case=consent-request", func(t *testing.T) { + for _, tc := range []struct { + key string + remember bool + rememberFor int + skip bool + expectRemembered bool + }{ + {"1", true, 0, false, true}, + {"3", true, 1, false, false}, + {"4", false, 0, false, false}, + {"5", true, 120, false, true}, + {"6", true, 120, true, false}, + } { + t.Run("key="+tc.key, func(t *testing.T) { + f := MockConsentFlow(tc.remember, tc.rememberFor, tc.skip) + require.NoError(t, clientManager.CreateClient(t.Context(), f.Client)) + f.NID = 
deps.Networker().NetworkID(t.Context()) + + require.NoError(t, m.CreateConsentSession(t.Context(), f)) + + t.Run("sub=detect double-submit for consent verifier", func(t *testing.T) { + require.ErrorIs(t, m.CreateConsentSession(t.Context(), f), sqlcon.ErrUniqueViolation) + }) + + t.Run("sub=find granted and remembered consent", func(t *testing.T) { + if tc.rememberFor == 1 { + // unfortunately the interface does not allow us to set the absolute time, so we have to wait + time.Sleep(2 * time.Second) + } + actual, err := m.FindGrantedAndRememberedConsentRequest(t.Context(), f.Client.ID, f.Subject) + if !tc.expectRemembered { + assert.Nil(t, actual) + assert.ErrorIs(t, err, consent.ErrNoPreviousConsentFound) + } else { + require.NoError(t, err) + assert.NotNil(t, actual) + } + }) + }) + } + + for _, tc := range []struct{ keyC, keyS string }{ + {"1", "5"}, + {"5", "1"}, + } { + t.Run(fmt.Sprintf("missmatched client %q and subject %q", tc.keyC, tc.keyS), func(t *testing.T) { + rs, err := m.FindGrantedAndRememberedConsentRequest(t.Context(), "fk-client-"+tc.keyC, "subject"+tc.keyS) + assert.Nil(t, rs) + assert.ErrorIs(t, err, consent.ErrNoPreviousConsentFound) + }) + } + }) + + t.Run("case=revoke consent request", func(t *testing.T) { + type tc struct { + f *flow.Flow + at, rt string + revoke func(*testing.T, tc) + } + revokeFuncs := []func(*testing.T, tc){ + func(t *testing.T, c tc) { + require.NoError(t, m.RevokeSubjectConsentSession(t.Context(), c.f.Subject)) + }, + func(t *testing.T, c tc) { + require.NoError(t, m.RevokeSubjectClientConsentSession(t.Context(), c.f.Subject, c.f.Client.ID)) + }, + func(t *testing.T, c tc) { + require.NoError(t, m.RevokeConsentSessionByID(t.Context(), c.f.ConsentRequestID.String())) + }, + } + tcs := make([]tc, 2*len(revokeFuncs)) + for i := range tcs { + f := MockConsentFlow(i < len(revokeFuncs), 0, true) + f.NID = deps.Networker().NetworkID(t.Context()) + + tcs[i] = tc{ + f: f, + at: uuidx.NewV4().String(), + rt: uuidx.NewV4().String(), + revoke: revokeFuncs[i%len(revokeFuncs)], + } + + require.NoError(t, clientManager.CreateClient(t.Context(), f.Client)) + require.NoError(t, m.CreateConsentSession(t.Context(), f)) + sess := &oauth2.Session{DefaultSession: openid.NewDefaultSession()} + sess.Subject = f.Subject + sess.ConsentChallenge = f.ConsentRequestID.String() + require.NoError(t, fositeManager.CreateAccessTokenSession(t.Context(), tcs[i].at, + &fosite.Request{Client: f.Client, ID: f.ConsentRequestID.String(), RequestedAt: time.Now(), Session: sess}), + ) + require.NoError(t, fositeManager.CreateRefreshTokenSession(t.Context(), tcs[i].rt, tcs[i].at, + &fosite.Request{Client: f.Client, ID: f.ConsentRequestID.String(), RequestedAt: time.Now(), Session: sess}, + )) + } + + for i, tc := range tcs { + t.Run(fmt.Sprintf("run=%d", i), func(t *testing.T) { + _, err := fositeManager.GetAccessTokenSession(t.Context(), tc.at, nil) + require.NoError(t, err) + _, err = fositeManager.GetRefreshTokenSession(t.Context(), tc.rt, nil) + require.NoError(t, err) + + tc.revoke(t, tc) + + r, err := fositeManager.GetAccessTokenSession(t.Context(), tc.at, nil) + assert.ErrorIsf(t, err, fosite.ErrNotFound, "%+v", r) + r, err = fositeManager.GetRefreshTokenSession(t.Context(), tc.rt, nil) + assert.ErrorIsf(t, err, fosite.ErrNotFound, "%+v", r) + }) + } + + t.Run("unknown subject/client return no error", func(t *testing.T) { + require.NoError(t, m.RevokeSubjectConsentSession(t.Context(), "i-do-not-exist")) + require.NoError(t, m.RevokeSubjectClientConsentSession(t.Context(), 
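The expectation table above encodes when a previous consent counts as remembered: remember must be set, the flow must not have skipped consent, and a non-zero remember_for window (in seconds, with 0 meaning no expiry) must not have elapsed since the consent was handled, which is why the remember_for=1 case sleeps before querying. An illustrative-only Go predicate restating those rules (this helper does not exist in the codebase):

package main

import (
	"fmt"
	"time"
)

// stillRemembered restates the table's expectations; it is not Hydra code.
func stillRemembered(remember bool, rememberFor int, skip bool, handledAt, now time.Time) bool {
	if !remember || skip {
		return false
	}
	if rememberFor == 0 {
		return true // remembered without expiry
	}
	return now.Before(handledAt.Add(time.Duration(rememberFor) * time.Second))
}

func main() {
	handled := time.Now().Add(-2 * time.Second)
	fmt.Println(stillRemembered(true, 0, false, handled, time.Now()))  // case 1: true
	fmt.Println(stillRemembered(true, 1, false, handled, time.Now()))  // case 3: false, window elapsed
	fmt.Println(stillRemembered(true, 120, true, handled, time.Now())) // case 6: false, consent was skipped
}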
"i-do-not-exist", "i-do-not-exist")) + }) + }) + + t.Run("case=list consents", func(t *testing.T) { + flows := make([]*flow.Flow, 2) + for i := range flows { + f := MockConsentFlow(true, 0, false) + f.NID = deps.Networker().NetworkID(t.Context()) + f.SessionID = sqlxx.NullString(uuidx.NewV4().String()) + flows[i] = f + + require.NoError(t, clientManager.CreateClient(t.Context(), f.Client)) + require.NoError(t, loginManager.ConfirmLoginSession(t.Context(), &flow.LoginSession{ + ID: string(f.SessionID), + NID: deps.Networker().NetworkID(t.Context()), + AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).UTC()), + Subject: f.Subject, + })) + require.NoError(t, m.CreateConsentSession(t.Context(), f)) + } + + t.Run("by subject and session", func(t *testing.T) { + for i, f := range flows { + t.Run(fmt.Sprintf("case=%d", i), func(t *testing.T) { + consents, nextPage, err := m.FindSubjectsSessionGrantedConsentRequests(t.Context(), f.Subject, f.SessionID.String()) + require.NoError(t, err) + require.Len(t, consents, 1) + + assert.True(t, nextPage.IsLast()) + assert.Equal(t, f.ConsentRequestID, consents[0].ConsentRequestID) + assert.Equal(t, f.Client.ID, consents[0].Client.GetID()) + }) + } + + t.Run("random subject", func(t *testing.T) { + res, _, err := m.FindSubjectsSessionGrantedConsentRequests(t.Context(), uuidx.NewV4().String(), flows[0].SessionID.String()) + assert.ErrorIsf(t, err, consent.ErrNoPreviousConsentFound, "%+v", res) + }) + }) + + for i, f := range flows { + t.Run(fmt.Sprintf("case=%d", i), func(t *testing.T) { + consents, nextPage, err := m.FindSubjectsGrantedConsentRequests(t.Context(), f.Subject) + require.NoError(t, err) + require.Len(t, consents, 1) + assert.True(t, nextPage.IsLast()) + + assert.Equal(t, f.ConsentRequestID, consents[0].ConsentRequestID) + assert.Equal(t, f.Client.ID, consents[0].Client.GetID()) + }) + + t.Run("random subject", func(t *testing.T) { + _, _, err := m.FindSubjectsGrantedConsentRequests(t.Context(), uuidx.NewV4().String()) + assert.ErrorIs(t, err, consent.ErrNoPreviousConsentFound) + }) + } + + t.Run("case=ListUserAuthenticatedClientsWithFrontAndBackChannelLogout", func(t *testing.T) { + // The idea of this test is to create two identities (subjects) with 4 sessions each, where + // only some sessions have been associated with a client that has a front channel logout url + + subjects := make([]string, 2) + for k := range subjects { + subjects[k] = fmt.Sprintf("subject-ListUserAuthenticatedClientsWithFrontAndBackChannelLogout-%d", k) + } + + sessions := make([]flow.LoginSession, len(subjects)*4) + frontChannels := map[string][]client.Client{} + backChannels := map[string][]client.Client{} + for k := range sessions { + id := uuidx.NewV4().String() + subject := subjects[k%len(subjects)] + t.Run(fmt.Sprintf("create/session=%s/subject=%s", id, subject), func(t *testing.T) { + ls := &flow.LoginSession{ + ID: id, + NID: deps.Networker().NetworkID(t.Context()), + AuthenticatedAt: sqlxx.NullTime(time.Now()), + Subject: subject, + Remember: true, + } + require.NoError(t, loginManager.ConfirmLoginSession(t.Context(), ls)) + + cl := &client.Client{ID: uuidx.NewV4().String()} + switch k % 4 { + case 0: + cl.FrontChannelLogoutURI = "http://some-url.com/" + frontChannels[id] = append(frontChannels[id], *cl) + case 1: + cl.BackChannelLogoutURI = "http://some-url.com/" + backChannels[id] = append(backChannels[id], *cl) + case 2: + cl.FrontChannelLogoutURI = "http://some-url.com/" + cl.BackChannelLogoutURI = "http://some-url.com/" + frontChannels[id] = 
append(frontChannels[id], *cl) + backChannels[id] = append(backChannels[id], *cl) + } + require.NoError(t, clientManager.CreateClient(t.Context(), cl)) + + f := &flow.Flow{ + ID: uuidx.NewV4().String(), + NID: deps.Networker().NetworkID(t.Context()), + OpenIDConnectContext: &flow.OAuth2ConsentRequestOpenIDConnectContext{ + ACRValues: []string{"1", "2"}, + UILocales: []string{"fr", "de"}, + Display: "popup", + }, + ACR: "1", + AMR: sqlxx.StringSliceJSONFormat{"passwd"}, + RequestedAt: time.Now().UTC().Add(-time.Hour), + Client: cl, + Subject: ls.Subject, + RequestURL: "https://request-url/path", + RequestedScope: []string{"scopea", "scopeb"}, + SessionID: sqlxx.NullString(ls.ID), + ConsentRequestID: sqlxx.NullString(uuid.Must(uuid.NewV4()).String()), + GrantedScope: sqlxx.StringSliceJSONFormat{"scopea", "scopeb"}, + ConsentRemember: true, + ConsentRememberFor: pointerx.Ptr(0), + } + + require.NoError(t, m.CreateConsentSession(t.Context(), f)) + + sessions[k] = *ls + }) + } + + for _, ls := range sessions { + check := func(t *testing.T, expected map[string][]client.Client, actual []client.Client) { + es, ok := expected[ls.ID] + if !ok { + require.Len(t, actual, 0) + return + } + require.Len(t, actual, len(es)) + + for _, e := range es { + var found bool + for _, a := range actual { + if e.GetID() == a.GetID() { + found = true + } + assert.EqualValues(t, e.GetID(), a.GetID()) + assert.EqualValues(t, e.FrontChannelLogoutURI, a.FrontChannelLogoutURI) + assert.EqualValues(t, e.BackChannelLogoutURI, a.BackChannelLogoutURI) + } + require.True(t, found) + } + } + + t.Run(fmt.Sprintf("method=ListUserAuthenticatedClientsWithFrontChannelLogout/session=%s/subject=%s", ls.ID, ls.Subject), func(t *testing.T) { + actual, err := m.ListUserAuthenticatedClientsWithFrontChannelLogout(t.Context(), ls.Subject, ls.ID) + require.NoError(t, err) + check(t, frontChannels, actual) + }) + + t.Run(fmt.Sprintf("method=ListUserAuthenticatedClientsWithBackChannelLogout/session=%s", ls.ID), func(t *testing.T) { + actual, err := m.ListUserAuthenticatedClientsWithBackChannelLogout(t.Context(), ls.Subject, ls.ID) + require.NoError(t, err) + check(t, backChannels, actual) + }) + } + }) + }) +} + +func ObfuscatedSubjectManagerTest(t *testing.T, deps Deps, m consent.ObfuscatedSubjectManager, clientManager client.Manager) { + t.Run("get with random keys", func(t *testing.T) { + _, err := m.GetForcedObfuscatedLoginSession(t.Context(), uuidx.NewV4().String(), uuidx.NewV4().String()) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("create and retrieve", func(t *testing.T) { + cl := &client.Client{ID: uuidx.NewV4().String()} + require.NoError(t, clientManager.CreateClient(t.Context(), cl)) + obfuscatedSession := &consent.ForcedObfuscatedLoginSession{ + ClientID: cl.ID, + Subject: uuidx.NewV4().String(), + SubjectObfuscated: uuidx.NewV4().String(), + NID: deps.Networker().NetworkID(t.Context()), + } + require.NoError(t, m.CreateForcedObfuscatedLoginSession(t.Context(), obfuscatedSession)) + + actual, err := m.GetForcedObfuscatedLoginSession(t.Context(), cl.ID, obfuscatedSession.SubjectObfuscated) + require.NoError(t, err) + assert.EqualValues(t, obfuscatedSession, actual) + + t.Run("with random client fails", func(t *testing.T) { + _, err = m.GetForcedObfuscatedLoginSession(t.Context(), uuidx.NewV4().String(), obfuscatedSession.SubjectObfuscated) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("with random obfuscated subject fails", func(t *testing.T) { + _, err = m.GetForcedObfuscatedLoginSession(t.Context(), cl.ID, 
uuidx.NewV4().String()) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + }) +} + +func LogoutManagerTest(t *testing.T, m consent.LogoutManager, clientManager client.Manager) { + for _, withClient := range []bool{true, false} { + t.Run("get with random challenge", func(t *testing.T) { + _, err := m.GetLogoutRequest(t.Context(), uuidx.NewV4().String()) + assert.ErrorIs(t, err, sqlcon.ErrNoRows) + }) + + t.Run(fmt.Sprintf("with client=%v", withClient), func(t *testing.T) { + setup := func(t *testing.T) *flow.LogoutRequest { + req := mockLogoutRequest(withClient) + if withClient { + require.NoError(t, clientManager.CreateClient(t.Context(), req.Client)) + } + require.NoError(t, m.CreateLogoutRequest(t.Context(), req)) + return req + } + + t.Run("get unhandled", func(t *testing.T) { + expected := setup(t) + + actual, err := m.GetLogoutRequest(t.Context(), expected.ID) + require.NoError(t, err) + assert.False(t, actual.WasHandled) + assert.False(t, actual.Accepted) + compareLogoutRequest(t, expected, actual) + }) + + t.Run("accept and verify", func(t *testing.T) { + expected := setup(t) + + actual, err := m.AcceptLogoutRequest(t.Context(), expected.ID) + require.NoError(t, err) + assert.True(t, actual.Accepted) + assert.False(t, actual.WasHandled) + compareLogoutRequest(t, expected, actual) + + actual, err = m.VerifyAndInvalidateLogoutRequest(t.Context(), expected.Verifier) + require.NoError(t, err) + assert.True(t, actual.Accepted) + assert.True(t, actual.WasHandled) + compareLogoutRequest(t, expected, actual) + + t.Run("double verify fails", func(t *testing.T) { + _, err = m.VerifyAndInvalidateLogoutRequest(t.Context(), expected.Verifier) + require.NotErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("get verified", func(t *testing.T) { + actual, err = m.GetLogoutRequest(t.Context(), expected.ID) + require.NoError(t, err) + assert.True(t, actual.WasHandled) + assert.True(t, actual.Accepted) + compareLogoutRequest(t, expected, actual) + }) + }) + + t.Run("reject", func(t *testing.T) { + expected := setup(t) + + require.NoError(t, m.RejectLogoutRequest(t.Context(), expected.ID)) + _, err := m.GetLogoutRequest(t.Context(), expected.ID) + assert.ErrorIs(t, err, sqlcon.ErrNoRows) + }) + }) + } +} + +func compareLogoutRequest(t *testing.T, a, b *flow.LogoutRequest) { + require.True(t, (a.Client != nil && b.Client != nil) || (a.Client == nil && b.Client == nil)) + if a.Client != nil { + assert.EqualValues(t, a.Client.GetID(), b.Client.GetID()) + } + + assert.EqualValues(t, a.ID, b.ID) + assert.EqualValues(t, a.Subject, b.Subject) + assert.EqualValues(t, a.Verifier, b.Verifier) + assert.EqualValues(t, a.RequestURL, b.RequestURL) + assert.EqualValues(t, a.PostLogoutRedirectURI, b.PostLogoutRedirectURI) + assert.EqualValues(t, a.RPInitiated, b.RPInitiated) + assert.EqualValues(t, a.SessionID, b.SessionID) +} diff --git a/consent/types_test.go b/consent/types_test.go deleted file mode 100644 index 6366404d9e9..00000000000 --- a/consent/types_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package consent - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/ory/fosite" -) - -func TestToRFCError(t *testing.T) { - for k, tc := range []struct { - input *RequestDeniedError - expect *fosite.RFC6749Error - }{ - { - input: &RequestDeniedError{ - Name: "not empty", - valid: true, - }, - expect: &fosite.RFC6749Error{ - ErrorField: "not 
empty", - DescriptionField: "", - CodeField: fosite.ErrInvalidRequest.CodeField, - DebugField: "", - }, - }, - { - input: &RequestDeniedError{ - Name: "", - Description: "not empty", - valid: true, - }, - expect: &fosite.RFC6749Error{ - ErrorField: "request_denied", - DescriptionField: "not empty", - CodeField: fosite.ErrInvalidRequest.CodeField, - DebugField: "", - }, - }, - { - input: &RequestDeniedError{valid: true}, - expect: &fosite.RFC6749Error{ - ErrorField: "request_denied", - DescriptionField: "", - HintField: "", - CodeField: fosite.ErrInvalidRequest.CodeField, - DebugField: "", - }, - }, - } { - t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { - require.EqualValues(t, tc.input.toRFCError(), tc.expect) - }) - } -} - -func TestRequestDeniedError(t *testing.T) { - var e *RequestDeniedError - v, err := e.Value() - require.NoError(t, err) - assert.EqualValues(t, "{}", fmt.Sprintf("%v", v)) -} diff --git a/contrib/quickstart/5-min/hydra.yml b/contrib/quickstart/5-min/hydra.yml index 8d69cc1d243..30c7862bc77 100644 --- a/contrib/quickstart/5-min/hydra.yml +++ b/contrib/quickstart/5-min/hydra.yml @@ -8,6 +8,9 @@ urls: consent: http://127.0.0.1:3000/consent login: http://127.0.0.1:3000/login logout: http://127.0.0.1:3000/logout + device: + verification: http://127.0.0.1:3000/device/verify + success: http://127.0.0.1:3000/device/success secrets: system: diff --git a/contrib/quickstart/gitlab/config/gitlab.rb b/contrib/quickstart/gitlab/config/gitlab.rb index 9540469d3bd..6cd8c378e34 100644 --- a/contrib/quickstart/gitlab/config/gitlab.rb +++ b/contrib/quickstart/gitlab/config/gitlab.rb @@ -70,7 +70,7 @@ ### Request duration ###! Tells the rails application how long it has to complete a request ###! This value needs to be lower than the worker timeout set in unicorn/puma. -###! By default, we'll allow 95% of the the worker timeout +###! 
By default, we'll allow 95% of the worker timeout # gitlab_rails['max_request_duration_seconds'] = 57 ### Email Settings diff --git a/cypress/integration/oauth2/authorize_code.js b/cypress/integration/oauth2/authorize_code.js index e204adc72ae..a6d81fe4480 100644 --- a/cypress/integration/oauth2/authorize_code.js +++ b/cypress/integration/oauth2/authorize_code.js @@ -3,91 +3,120 @@ import { prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("The OAuth 2.0 Authorization Code Grant", function () { - const nc = () => ({ - client_secret: prng(), - scope: "offline_access openid", - subject_type: "public", - token_endpoint_auth_method: "client_secret_basic", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = (extradata) => ({ + client_secret: prng(), + scope: "offline_access openid", + subject_type: "public", + token_endpoint_auth_method: "client_secret_basic", + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + ...extradata, + }) - it("should return an Access, Refresh, and ID Token when scope offline_access and openid are granted", function () { - const client = nc() - cy.authCodeFlow(client, { - consent: { scope: ["offline_access", "openid"] }, - }) + it("should return an Access, Refresh, and ID Token when scope offline_access and openid are granted", function () { + const client = nc() + cy.authCodeFlow(client, { + consent: { scope: ["offline_access", "openid"] }, + }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + token: { access_token, id_token, refresh_token }, + } = JSON.parse(content) - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - token: { access_token, id_token, refresh_token }, - } = JSON.parse(content) - - expect(result).to.equal("success") - expect(access_token).to.not.be.empty - expect(id_token).to.not.be.empty - expect(refresh_token).to.not.be.empty + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.not.be.empty + }) }) - }) - it("should return an Access and Refresh Token when scope offline_access is granted", function () { - const client = nc() - cy.authCodeFlow(client, { consent: { scope: ["offline_access"] } }) - - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - token: { access_token, id_token, refresh_token }, - } = JSON.parse(content) - - expect(result).to.equal("success") - expect(access_token).to.not.be.empty - expect(id_token).to.be.undefined - expect(refresh_token).to.not.be.empty + it("should return an Access and Refresh Token when scope offline_access is granted", function () { + const client = nc() + cy.authCodeFlow(client, { consent: { scope: ["offline_access"] } }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + token: { access_token, id_token, refresh_token }, + } = JSON.parse(content) + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.be.undefined + expect(refresh_token).to.not.be.empty + }) }) - }) - it("should return an Access and ID Token when scope offline_access is granted", function () { - const client = nc() - 
cy.authCodeFlow(client, { consent: { scope: ["openid"] } }) - - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - token: { access_token, id_token, refresh_token }, - } = JSON.parse(content) - - expect(result).to.equal("success") - expect(access_token).to.not.be.empty - expect(id_token).to.not.be.empty - expect(refresh_token).to.be.undefined + it("should return an Access and ID Token when scope offline_access is granted", function () { + const client = nc() + cy.authCodeFlow(client, { consent: { scope: ["openid"] } }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + token: { access_token, id_token, refresh_token }, + } = JSON.parse(content) + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.be.undefined + }) }) - }) - it("should return an Access Token when no scope is granted", function () { - const client = nc() - cy.authCodeFlow(client, { consent: { scope: [] } }) - - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - token: { access_token, id_token, refresh_token }, - } = JSON.parse(content) - - expect(result).to.equal("success") - expect(access_token).to.not.be.empty - expect(id_token).to.be.undefined - expect(refresh_token).to.be.undefined + it("should return an Access Token when no scope is granted", function () { + const client = nc() + cy.authCodeFlow(client, { consent: { scope: [] } }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + token: { access_token, id_token, refresh_token }, + } = JSON.parse(content) + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.be.undefined + expect(refresh_token).to.be.undefined + }) }) + + it("should skip consent if the client is confgured thus", function () { + const client = nc({ skip_consent: true }) + cy.authCodeFlow(client, { + consent: { scope: ["offline_access", "openid"], skip: true }, + }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + token: { access_token, id_token, refresh_token }, + } = JSON.parse(content) + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.not.be.empty + }) + }) + }) }) }) diff --git a/cypress/integration/oauth2/authorize_error.js b/cypress/integration/oauth2/authorize_error.js index c537aa920dd..517e9d11e80 100644 --- a/cypress/integration/oauth2/authorize_error.js +++ b/cypress/integration/oauth2/authorize_error.js @@ -4,173 +4,180 @@ import { createClient, prng } from "../../helpers" import qs from "querystring" +const accessTokenStrategies = ["opaque", "jwt"] + describe("OAuth 2.0 Authorization Endpoint Error Handling", () => { - describe("rejecting login and consent requests", () => { - const nc = () => ({ - client_secret: prng(), - scope: "offline_access openid", - subject_type: "public", - token_endpoint_auth_method: "client_secret_basic", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + describe("rejecting login and consent requests", () => { + const nc = () => ({ + client_secret: prng(), + scope: "offline_access openid", + subject_type: "public", + token_endpoint_auth_method: "client_secret_basic", + redirect_uris: 
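The skip_consent and access_token_strategy client settings exercised by these tests can also be set when registering a client against the admin API. A hedged Go sketch follows; the /admin/clients path, the admin port 4445, and the example values are assumptions about a typical deployment rather than part of this change:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Payload fields mirror the Cypress client factory above, including the
	// per-client access token strategy and the consent-skip flag.
	payload := map[string]any{
		"client_secret":              "some-secret",
		"scope":                      "offline_access openid",
		"grant_types":                []string{"authorization_code", "refresh_token"},
		"redirect_uris":              []string{"http://127.0.0.1:5555/callback"},
		"token_endpoint_auth_method": "client_secret_basic",
		"access_token_strategy":      "jwt", // or "opaque"
		"skip_consent":               true,  // consent screen is skipped for this client
	}
	body, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}
	// Assumed admin endpoint; adjust to match your deployment.
	resp, err := http.Post("http://127.0.0.1:4445/admin/clients", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}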
[`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should return an error when rejecting login", function () { - const client = nc() - cy.authCodeFlow(client, { - login: { accept: false }, - consent: { skip: true }, - createClient: true, - }) + it("should return an error when rejecting login", function () { + const client = nc() + cy.authCodeFlow(client, { + login: { accept: false }, + consent: { skip: true }, + createClient: true, + }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + error_description, + token: { access_token, id_token, refresh_token } = {}, + } = JSON.parse(content) + + expect(result).to.equal("error") + expect(error_description).to.equal( + "The resource owner denied the request", + ) + expect(access_token).to.be.undefined + expect(id_token).to.be.undefined + expect(refresh_token).to.be.undefined + }) + }) - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - error_description, - token: { access_token, id_token, refresh_token } = {}, - } = JSON.parse(content) - - expect(result).to.equal("error") - expect(error_description).to.equal( - "The resource owner denied the request", - ) - expect(access_token).to.be.undefined - expect(id_token).to.be.undefined - expect(refresh_token).to.be.undefined + it("should return an error when rejecting consent", function () { + const client = nc() + cy.authCodeFlow(client, { + consent: { accept: false }, + createClient: true, + }) + + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + error_description, + token: { access_token, id_token, refresh_token } = {}, + } = JSON.parse(content) + + expect(result).to.equal("error") + expect(error_description).to.equal( + "The resource owner denied the request", + ) + expect(access_token).to.be.undefined + expect(id_token).to.be.undefined + expect(refresh_token).to.be.undefined + }) }) - }) + }) - it("should return an error when rejecting consent", function () { - const client = nc() - cy.authCodeFlow(client, { - consent: { accept: false }, - createClient: true, + it("should return an error when an OAuth 2.0 Client ID is used that does not exist", () => { + cy.visit( + `${Cypress.env( + "client_url", + )}/oauth2/code?client_id=i-do-not-exist&client_secret=i-am-not-correct}`, + { failOnStatusCode: false }, + ) + + cy.location().should(({ search, port }) => { + const query = qs.parse(search.substr(1)) + expect(query.error).to.equal("invalid_client") + + // Should show Ory Hydra's Error URL because a redirect URL could not be determined + expect(port).to.equal(Cypress.env("public_port")) + }) }) - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - error_description, - token: { access_token, id_token, refresh_token } = {}, - } = JSON.parse(content) - - expect(result).to.equal("error") - expect(error_description).to.equal( - "The resource owner denied the request", + it("should return an error when an OAuth 2.0 Client requests a scope that is not allowed to be requested", () => { + createClient({ + client_secret: prng(), + scope: "foo", + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code"], + }).then((c) => { + cy.visit( + `${Cypress.env("client_url")}/oauth2/code?client_id=${ + c.client_id + }&client_secret=${c.client_secret}&scope=bar`, + { failOnStatusCode: false }, ) - expect(access_token).to.be.undefined - 
expect(id_token).to.be.undefined - expect(refresh_token).to.be.undefined - }) - }) - }) - it("should return an error when an OAuth 2.0 Client ID is used that does not exist", () => { - cy.visit( - `${Cypress.env( - "client_url", - )}/oauth2/code?client_id=i-do-not-exist&client_secret=i-am-not-correct}`, - { failOnStatusCode: false }, - ) + cy.location().should(({ search, port }) => { + const query = qs.parse(search.substr(1)) + expect(query.error).to.equal("invalid_scope") - cy.location().should(({ search, port }) => { - const query = qs.parse(search.substr(1)) - expect(query.error).to.equal("invalid_client") + // This is a client error so we expect the client app to show the error + expect(port).to.equal(Cypress.env("client_port")) + }) + }) + }) - // Should show Ory Hydra's Error URL because a redirect URL could not be determined - expect(port).to.equal(Cypress.env("public_port")) - }) - }) + it("should return an error when an OAuth 2.0 Client requests a response type it is not allowed to call", () => { + createClient({ + client_secret: prng(), + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + response_types: ["token"], // disallows Authorization Code Grant + }).then((c) => { + cy.visit( + `${Cypress.env("client_url")}/oauth2/code?client_id=${ + c.client_id + }&client_secret=${c.client_secret}`, + { failOnStatusCode: false }, + ) - it("should return an error when an OAuth 2.0 Client requests a scope that is not allowed to be requested", () => { - createClient({ - client_secret: prng(), - scope: "foo", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code"], - }).then((c) => { - cy.visit( - `${Cypress.env("client_url")}/oauth2/code?client_id=${ - c.client_id - }&client_secret=${c.client_secret}&scope=bar`, - { failOnStatusCode: false }, - ) - - cy.location().should(({ search, port }) => { - const query = qs.parse(search.substr(1)) - expect(query.error).to.equal("invalid_scope") - - // This is a client error so we expect the client app to show the error - expect(port).to.equal(Cypress.env("client_port")) + cy.get("body").should("contain", "unsupported_response_type") + }) }) - }) - }) - it("should return an error when an OAuth 2.0 Client requests a response type it is not allowed to call", () => { - createClient({ - client_secret: prng(), - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - response_types: ["token"], // disallows Authorization Code Grant - }).then((c) => { - cy.visit( - `${Cypress.env("client_url")}/oauth2/code?client_id=${ - c.client_id - }&client_secret=${c.client_secret}`, - { failOnStatusCode: false }, - ) - - cy.get("body").should("contain", "unsupported_response_type") - }) - }) + it("should return an error when an OAuth 2.0 Client requests a grant type it is not allowed to call", () => { + createClient({ + client_secret: prng(), + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["client_credentials"], + }).then((c) => { + cy.visit( + `${Cypress.env("client_url")}/oauth2/code?client_id=${ + c.client_id + }&client_secret=${c.client_secret}&scope=`, + { failOnStatusCode: false }, + ) - it("should return an error when an OAuth 2.0 Client requests a grant type it is not allowed to call", () => { - createClient({ - client_secret: prng(), - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["client_credentials"], - }).then((c) => { - cy.visit( - `${Cypress.env("client_url")}/oauth2/code?client_id=${ - c.client_id - 
}&client_secret=${c.client_secret}&scope=`, - { failOnStatusCode: false }, - ) - - cy.get("#email").type("foo@bar.com", { delay: 1 }) - cy.get("#password").type("foobar", { delay: 1 }) - cy.get("#accept").click() - cy.get("#accept").click() - - cy.get("body").should("contain", "unauthorized_client") - }) + cy.get("#email").type("foo@bar.com", { delay: 1 }) + cy.get("#password").type("foobar", { delay: 1 }) + cy.get("#accept").click() + cy.get("#accept").click() + + cy.get("body").should("contain", "unauthorized_client") + }) - it("should return an error when an OAuth 2.0 Client requests a redirect_uri that is not preregistered", () => { - const c = { - client_secret: prng(), - redirect_uris: ["http://some-other-domain/not-callback"], - grant_types: ["client_credentials"], - } - createClient(c) - - cy.visit( - `${Cypress.env("client_url")}/oauth2/code?client_id=${ - c.client_id - }&client_secret=${c.client_secret}&scope=`, - { failOnStatusCode: false }, - ) - - cy.location().should(({ search, port }) => { - const query = qs.parse(search.substr(1)) - console.log(query) - expect(query.error).to.equal("invalid_request") - expect(query.error_description).to.contain("redirect_uri") - - // Should show Ory Hydra's Error URL because a redirect URL could not be determined - expect(port).to.equal(Cypress.env("public_port")) + it("should return an error when an OAuth 2.0 Client requests a redirect_uri that is not preregistered", () => { + const c = { + client_secret: prng(), + redirect_uris: ["http://some-other-domain/not-callback"], + grant_types: ["client_credentials"], + } + createClient(c) + + cy.visit( + `${Cypress.env("client_url")}/oauth2/code?client_id=${ + c.client_id + }&client_secret=${c.client_secret}&scope=`, + { failOnStatusCode: false }, + ) + + cy.location().should(({ search, port }) => { + const query = qs.parse(search.substr(1)) + console.log(query) + expect(query.error).to.equal("invalid_request") + expect(query.error_description).to.contain("redirect_uri") + + // Should show Ory Hydra's Error URL because a redirect URL could not be determined + expect(port).to.equal(Cypress.env("public_port")) + }) + }) }) }) }) diff --git a/cypress/integration/oauth2/client_creds.js b/cypress/integration/oauth2/client_creds.js index a1419e8b1bd..04f31bf9c23 100644 --- a/cypress/integration/oauth2/client_creds.js +++ b/cypress/integration/oauth2/client_creds.js @@ -3,33 +3,40 @@ import { createClient, prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("The OAuth 2.0 Authorization Code Grant", function () { - const nc = () => ({ - client_secret: prng(), - scope: "foo openid offline_access", - grant_types: ["client_credentials"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "foo openid offline_access", + grant_types: ["client_credentials"], + access_token_strategy: accessTokenStrategy, + }) - it("should return an Access Token but not Refresh or ID Token for client_credentials flow", function () { - createClient(nc()).then((client) => { - cy.request( - `${Cypress.env("client_url")}/oauth2/cc?client_id=${ - client.client_id - }&client_secret=${client.client_secret}&scope=${client.scope}`, - { failOnStatusCode: false }, - ) - .its("body") - .then((body) => { - const { - result, - token: { access_token, id_token, refresh_token } = {}, - } = body + it("should return an Access Token but not Refresh or ID Token for 
client_credentials flow", function () { + createClient(nc()).then((client) => { + cy.request( + `${Cypress.env("client_url")}/oauth2/cc?client_id=${ + client.client_id + }&client_secret=${client.client_secret}&scope=${client.scope}`, + { failOnStatusCode: false }, + ) + .its("body") + .then((body) => { + const { + result, + token: { access_token, id_token, refresh_token } = {}, + } = body - expect(result).to.equal("success") - expect(access_token).to.not.be.empty - expect(id_token).to.be.undefined - expect(refresh_token).to.be.undefined + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.be.undefined + expect(refresh_token).to.be.undefined + }) }) + }) }) }) }) diff --git a/cypress/integration/oauth2/consent.js b/cypress/integration/oauth2/consent.js index 5e9829312d2..4d2614a3be1 100644 --- a/cypress/integration/oauth2/consent.js +++ b/cypress/integration/oauth2/consent.js @@ -3,89 +3,99 @@ import { createClient, prng } from "../../helpers" -describe("OAuth 2.0 End-User Authorization", () => { - const nc = () => ({ - client_secret: prng(), - scope: "offline_access", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) - - const hasConsent = (client, body) => { - let found = false - body.forEach( - ({ - consent_request: { - client: { client_id }, - }, - }) => { - if (client_id === client.client_id) { - found = true - } - }, - ) - return found - } +const accessTokenStrategies = ["opaque", "jwt"] - it("should check if end user authorization exists", () => { - createClient(nc()).then((client) => { - cy.authCodeFlow(client, { - consent: { - scope: ["offline_access"], - remember: true, - }, - createClient: false, +describe("OAuth 2.0 End-User Authorization", () => { + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "offline_access", + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, }) - console.log("got ", { client }) + const hasConsent = (client, body) => { + let found = false + body.forEach( + ({ + consent_request: { + client: { client_id }, + }, + }) => { + if (client_id === client.client_id) { + found = true + } + }, + ) + return found + } - cy.request( - Cypress.env("admin_url") + - "/oauth2/auth/sessions/consent?subject=foo@bar.com", - ) - .its("body") - .then((body) => { - expect(body.length).to.be.greaterThan(0) - console.log({ body, client }) - expect(hasConsent(client, body)).to.be.true - body.forEach((consent) => { - expect( - consent.handled_at.match( - /^[2-9]\d{3}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z$/, - ), - ).not.to.be.empty + it("should check if end user authorization exists", () => { + createClient(nc()).then((client) => { + cy.authCodeFlow(client, { + consent: { + scope: ["offline_access"], + remember: true, + }, + createClient: false, }) - }) - cy.request( - "DELETE", - Cypress.env("admin_url") + - "/oauth2/auth/sessions/consent?subject=foo@bar.com&all=true", - ) + console.log("got ", { client }) - cy.request( - Cypress.env("admin_url") + - "/oauth2/auth/sessions/consent?subject=foo@bar.com", - ) - .its("body") - .then((body) => { - expect(body.length).to.eq(0) - expect(hasConsent(client, body)).to.be.false - }) + cy.request( + Cypress.env("admin_url") + + 
"/oauth2/auth/sessions/consent?subject=foo@bar.com", + ) + .its("body") + .then((body) => { + expect(body.length).to.be.greaterThan(0) + console.log({ + body, + client, + }) + expect(hasConsent(client, body)).to.be.true + body.forEach((consent) => { + expect( + consent.handled_at.match( + /^[2-9]\d{3}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z$/, + ), + ).not.to.be.empty + }) + }) - cy.request(`${Cypress.env("client_url")}/oauth2/introspect/at`) - .its("body") - .then((body) => { - expect(body.result).to.equal("success") - expect(body.body.active).to.be.false - }) + cy.request( + "DELETE", + Cypress.env("admin_url") + + "/oauth2/auth/sessions/consent?subject=foo@bar.com&all=true", + ) + + cy.request( + Cypress.env("admin_url") + + "/oauth2/auth/sessions/consent?subject=foo@bar.com", + ) + .its("body") + .then((body) => { + expect(body.length).to.eq(0) + expect(hasConsent(client, body)).to.be.false + }) - cy.request(`${Cypress.env("client_url")}/oauth2/introspect/rt`) - .its("body") - .then((body) => { - expect(body.result).to.equal("success") - expect(body.body.active).to.be.false + cy.request(`${Cypress.env("client_url")}/oauth2/introspect/at`) + .its("body") + .then((body) => { + expect(body.result).to.equal("success") + expect(body.body.active).to.be.false + }) + + cy.request(`${Cypress.env("client_url")}/oauth2/introspect/rt`) + .its("body") + .then((body) => { + expect(body.result).to.equal("success") + expect(body.body.active).to.be.false + }) }) + }) }) }) }) diff --git a/cypress/integration/oauth2/device_auth.js b/cypress/integration/oauth2/device_auth.js new file mode 100644 index 00000000000..4e26abde8a0 --- /dev/null +++ b/cypress/integration/oauth2/device_auth.js @@ -0,0 +1,118 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +import { prng } from "../../helpers" + +const accessTokenStrategies = ["opaque", "jwt"] + +describe("The OAuth 2.0 Device Authorization Grant", function () { + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = (extradata) => ({ + client_secret: prng(), + scope: "offline_access openid", + subject_type: "public", + token_endpoint_auth_method: "client_secret_basic", + grant_types: [ + "urn:ietf:params:oauth:grant-type:device_code", + "refresh_token", + ], + access_token_strategy: accessTokenStrategy, + ...extradata, + }) + + it("should return an Access, Refresh, and ID Token when scope offline_access and openid are granted", function () { + const client = nc() + cy.deviceAuthFlow(client, { + consent: { scope: ["offline_access", "openid"] }, + }) + + cy.postDeviceAuthFlow().then((resp) => { + const { + result, + token: { access_token, id_token, refresh_token }, + } = resp.body + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.not.be.empty + }) + }) + + it("should return an Access and Refresh Token when scope offline_access is granted", function () { + const client = nc() + cy.deviceAuthFlow(client, { consent: { scope: ["offline_access"] } }) + + cy.postDeviceAuthFlow().then((resp) => { + console.log(resp) + const { + result, + token: { access_token, id_token, refresh_token }, + } = resp.body + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.be.undefined + expect(refresh_token).to.not.be.empty + }) + }) + + it("should return an Access and ID Token when scope offline_access is granted", function () { + const 
client = nc() + cy.deviceAuthFlow(client, { consent: { scope: ["openid"] } }) + + cy.postDeviceAuthFlow().then((resp) => { + console.log(resp) + const { + result, + token: { access_token, id_token, refresh_token }, + } = resp.body + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.be.undefined + }) + }) + + it("should return an Access Token when no scope is granted", function () { + const client = nc() + cy.deviceAuthFlow(client, { consent: { scope: [] } }) + + cy.postDeviceAuthFlow().then((resp) => { + console.log(resp) + const { + result, + token: { access_token, id_token, refresh_token }, + } = resp.body + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.be.undefined + expect(refresh_token).to.be.undefined + }) + }) + + it("should skip consent if the client is confgured thus", function () { + const client = nc({ skip_consent: true }) + cy.deviceAuthFlow(client, { + consent: { scope: ["offline_access", "openid"], skip: true }, + }) + + cy.postDeviceAuthFlow().then((resp) => { + console.log(resp) + const { + result, + token: { access_token, id_token, refresh_token }, + } = resp.body + + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.not.be.empty + }) + }) + }) + }) +}) diff --git a/cypress/integration/oauth2/grant_jwtbearer.js b/cypress/integration/oauth2/grant_jwtbearer.js index ccb2aac87aa..3ca08d3b4db 100644 --- a/cypress/integration/oauth2/grant_jwtbearer.js +++ b/cypress/integration/oauth2/grant_jwtbearer.js @@ -58,491 +58,512 @@ const initTestKeyPairs = async () => { invalidtestPrivatePem = invalidPrivatePem } -describe("The OAuth 2.0 JWT Bearer (RFC 7523) Grant", function () { - beforeEach(() => { - deleteGrants() - deleteClients() - }) - - before(() => { - return cy.wrap(initTestKeyPairs()) - }) - - const tokenUrl = `${Cypress.env("public_url")}/oauth2/token` - - const nc = () => ({ - client_secret: prng(), - scope: "foo openid offline_access", - grant_types: ["urn:ietf:params:oauth:grant-type:jwt-bearer"], - token_endpoint_auth_method: "client_secret_post", - response_types: ["token"], - }) - - const gr = (subject) => ({ - issuer: prng(), - subject: subject, - allow_any_subject: subject === "", - scope: ["foo", "openid", "offline_access"], - jwk: testPublicJwk, - expires_at: dayjs() - .utc() - .add(1, "year") - .set("millisecond", 0) - .toISOString(), - }) - - const jwtAssertion = (grant, override) => { - const assert = { - jti: prng(), - iss: grant.issuer, - sub: grant.subject, - aud: tokenUrl, - exp: dayjs().utc().add(2, "minute").set("millisecond", 0).unix(), - iat: dayjs().utc().subtract(2, "minute").set("millisecond", 0).unix(), - } - return { ...assert, ...override } - } - - it("should return an Access Token when given client credentials and a signed JWT assertion", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign(jwtAssertion(grant), testPrivatePem, { - algorithm: "RS256", +const accessTokenStrategies = ["opaque", "jwt"] + +accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + describe("The OAuth 2.0 JWT Bearer (RFC 7523) Grant", function () { + beforeEach(() => { + deleteGrants() + deleteClients() }) - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: 
"urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, + before(() => { + return cy.wrap(initTestKeyPairs()) }) - .its("body") - .then((body) => { - const { access_token, expires_in, scope, token_type } = body - - expect(access_token).to.not.be.empty - expect(expires_in).to.not.be.undefined - expect(scope).to.not.be.empty - expect(token_type).to.not.be.empty - }) - }) - }) - it("should return an Error (400) when not given client credentials", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) + const tokenUrl = `${Cypress.env("public_url")}/oauth2/token` - const assertion = jwt.sign(jwtAssertion(grant), testPrivatePem, { - algorithm: "RS256", + const nc = () => ({ + client_secret: prng(), + scope: "foo openid offline_access", + grant_types: ["urn:ietf:params:oauth:grant-type:jwt-bearer"], + token_endpoint_auth_method: "client_secret_post", + response_types: ["token"], + access_token_strategy: accessTokenStrategy, }) - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - }, - failOnStatusCode: false, + const gr = (subject) => ({ + issuer: prng(), + subject: subject, + allow_any_subject: subject === "", + scope: ["foo", "openid", "offline_access"], + jwk: testPublicJwk, + expires_at: dayjs() + .utc() + .add(1, "year") + .set("millisecond", 0) + .toISOString(), }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) - }) - }) - }) - it("should return an Error (400) when given client credentials and a JWT assertion without a jti", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - var ja = jwtAssertion(grant) - delete ja["jti"] - const assertion = jwt.sign(ja, testPrivatePem, { algorithm: "RS256" }) - - // first token request should work fine - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, - }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + const jwtAssertion = (grant, override) => { + const assert = { + jti: prng(), + iss: grant.issuer, + sub: grant.subject, + aud: tokenUrl, + exp: dayjs().utc().add(2, "minute").set("millisecond", 0).unix(), + iat: dayjs().utc().subtract(2, "minute").set("millisecond", 0).unix(), + } + return { ...assert, ...override } + } + + it("should return an Access Token when given client credentials and a signed JWT assertion", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign(jwtAssertion(grant), testPrivatePem, { + algorithm: "RS256", + }) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + }) + .its("body") + .then((body) => { + const { access_token, expires_in, scope, token_type } = body + + expect(access_token).to.not.be.empty + expect(expires_in).to.not.be.undefined + expect(scope).to.not.be.empty + expect(token_type).to.not.be.empty + }) }) - }) - }) - - 
it("should return an Error (400) when given client credentials and a JWT assertion with a duplicated jti", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const jwt1 = jwtAssertion(grant) - const assertion1 = jwt.sign(jwt1, testPrivatePem, { algorithm: "RS256" }) - - // first token request should work fine - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion1, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, }) - .its("body") - .then((body) => { - const { access_token, expires_in, scope, token_type } = body - - expect(access_token).to.not.be.empty - expect(expires_in).to.not.be.undefined - expect(scope).to.not.be.empty - expect(token_type).to.not.be.empty - }) - const assertion2 = jwt.sign( - jwtAssertion(grant, { jti: jwt1["jti"] }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - // the second should fail - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion2, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, - }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + it("should return an Error (400) when not given client credentials", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign(jwtAssertion(grant), testPrivatePem, { + algorithm: "RS256", + }) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) - - it("should return an Error (400) when given client credentials and a JWT assertion without an iat", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - var ja = jwtAssertion(grant) - delete ja["iat"] - const assertion = jwt.sign(ja, testPrivatePem, { - algorithm: "RS256", - noTimestamp: true, }) - // first token request should work fine - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, - }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + it("should return an Error (400) when given client credentials and a JWT assertion without a jti", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + var ja = jwtAssertion(grant) + delete ja["jti"] + const assertion = jwt.sign(ja, testPrivatePem, { algorithm: "RS256" }) + + // first token request should work fine + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) - - it("should return an Error (400) when given client 
credentials and a JWT assertion with an invalid signature", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign(jwtAssertion(grant), invalidtestPrivatePem, { - algorithm: "RS256", }) - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, - }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + it("should return an Error (400) when given client credentials and a JWT assertion with a duplicated jti", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const jwt1 = jwtAssertion(grant) + const assertion1 = jwt.sign(jwt1, testPrivatePem, { + algorithm: "RS256", + }) + + // first token request should work fine + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion1, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + }) + .its("body") + .then((body) => { + const { access_token, expires_in, scope, token_type } = body + + expect(access_token).to.not.be.empty + expect(expires_in).to.not.be.undefined + expect(scope).to.not.be.empty + expect(token_type).to.not.be.empty + }) + + const assertion2 = jwt.sign( + jwtAssertion(grant, { jti: jwt1["jti"] }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + // the second should fail + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion2, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) - - it("should return an Error (400) when given client credentials and a JWT assertion with an invalid subject", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { sub: "invalid_subject" }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) - }) - }) - }) - it("should return an Access Token when given client credentials and a JWT assertion with any subject", function () { - createClient(nc()).then((client) => { - const grant = gr("") // allow any subject - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { sub: "any-subject-is-valid" }), - testPrivatePem, - { - algorithm: "RS256", - }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - }) - .its("body") - .then((body) => { - const { access_token, expires_in, scope, token_type } = body - - expect(access_token).to.not.be.empty - 
expect(expires_in).to.not.be.undefined - expect(scope).to.not.be.empty - expect(token_type).to.not.be.empty + it("should return an Error (400) when given client credentials and a JWT assertion without an iat", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + var ja = jwtAssertion(grant) + delete ja["iat"] + const assertion = jwt.sign(ja, testPrivatePem, { + algorithm: "RS256", + noTimestamp: true, + }) + + // first token request should work fine + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) - - it("should return an Error (400) when given client credentials and a JWT assertion with an invalid issuer", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { iss: "invalid_issuer" }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) - }) - }) - }) - it("should return an Error (400) when given client credentials and a JWT assertion with an invalid audience", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { aud: "invalid_audience" }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, + it("should return an Error (400) when given client credentials and a JWT assertion with an invalid signature", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant), + invalidtestPrivatePem, + { + algorithm: "RS256", + }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) + }) }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + + it("should return an Error (400) when given client credentials and a JWT assertion with an invalid subject", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { sub: "invalid_subject" }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + 
client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) + }) - it("should return an Error (400) when given client credentials and a JWT assertion with an expired date", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { - exp: dayjs().utc().subtract(1, "minute").set("millisecond", 0).unix(), - }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, + it("should return an Access Token when given client credentials and a JWT assertion with any subject", function () { + createClient(nc()).then((client) => { + const grant = gr("") // allow any subject + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { sub: "any-subject-is-valid" }), + testPrivatePem, + { + algorithm: "RS256", + }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + }) + .its("body") + .then((body) => { + const { access_token, expires_in, scope, token_type } = body + + expect(access_token).to.not.be.empty + expect(expires_in).to.not.be.undefined + expect(scope).to.not.be.empty + expect(token_type).to.not.be.empty + }) + }) }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + + it("should return an Error (400) when given client credentials and a JWT assertion with an invalid issuer", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { iss: "invalid_issuer" }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) + }) - it("should return an Error (400) when given client credentials and a JWT assertion with a nbf that is still not valid", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { - nbf: dayjs().utc().add(1, "minute").set("millisecond", 0).unix(), - }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, - failOnStatusCode: false, + it("should return an Error (400) when given client credentials and a JWT assertion with an invalid audience", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { aud: 
"invalid_audience" }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) + }) }) - .its("status") - .then((status) => { - expect(status).to.be.equal(400) + + it("should return an Error (400) when given client credentials and a JWT assertion with an expired date", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { + exp: dayjs() + .utc() + .subtract(1, "minute") + .set("millisecond", 0) + .unix(), + }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) }) - }) - }) + }) - it("should return an Access Token when given client credentials and a JWT assertion with a nbf that is valid", function () { - createClient(nc()).then((client) => { - const grant = gr(prng()) - createGrant(grant) - - const assertion = jwt.sign( - jwtAssertion(grant, { - nbf: dayjs().utc().subtract(1, "minute").set("millisecond", 0).unix(), - }), - testPrivatePem, - { algorithm: "RS256" }, - ) - - cy.request({ - method: "POST", - url: tokenUrl, - form: true, - body: { - grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", - assertion: assertion, - scope: client.scope, - client_secret: client.client_secret, - client_id: client.client_id, - }, + it("should return an Error (400) when given client credentials and a JWT assertion with a nbf that is still not valid", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { + nbf: dayjs().utc().add(1, "minute").set("millisecond", 0).unix(), + }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + failOnStatusCode: false, + }) + .its("status") + .then((status) => { + expect(status).to.be.equal(400) + }) + }) }) - .its("body") - .then((body) => { - const { access_token, expires_in, scope, token_type } = body - - expect(access_token).to.not.be.empty - expect(expires_in).to.not.be.undefined - expect(scope).to.not.be.empty - expect(token_type).to.not.be.empty + + it("should return an Access Token when given client credentials and a JWT assertion with a nbf that is valid", function () { + createClient(nc()).then((client) => { + const grant = gr(prng()) + createGrant(grant) + + const assertion = jwt.sign( + jwtAssertion(grant, { + nbf: dayjs() + .utc() + .subtract(1, "minute") + .set("millisecond", 0) + .unix(), + }), + testPrivatePem, + { algorithm: "RS256" }, + ) + + cy.request({ + method: "POST", + url: tokenUrl, + form: true, + body: { + grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer", + 
assertion: assertion, + scope: client.scope, + client_secret: client.client_secret, + client_id: client.client_id, + }, + }) + .its("body") + .then((body) => { + const { access_token, expires_in, scope, token_type } = body + + expect(access_token).to.not.be.empty + expect(expires_in).to.not.be.undefined + expect(scope).to.not.be.empty + expect(token_type).to.not.be.empty + }) }) + }) }) }) }) diff --git a/cypress/integration/oauth2/introspect.js b/cypress/integration/oauth2/introspect.js index 655b7fb4cfc..c2dfd5feb1c 100644 --- a/cypress/integration/oauth2/introspect.js +++ b/cypress/integration/oauth2/introspect.js @@ -3,59 +3,72 @@ import { prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("OpenID Connect Token Introspection", () => { - const nc = () => ({ - client_secret: prng(), - scope: "offline_access", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "offline_access", + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should introspect access token", function () { - const client = nc() - cy.authCodeFlow(client, { - consent: { scope: ["offline_access"], createClient: true }, - }) + it("should introspect access token", function () { + const client = nc() + cy.authCodeFlow(client, { + consent: { + scope: ["offline_access"], + createClient: true, + }, + }) - cy.get("body") - .invoke("text") - .then((content) => { - const { result } = JSON.parse(content) - expect(result).to.equal("success") - }) + cy.get("body") + .invoke("text") + .then((content) => { + const { result } = JSON.parse(content) + expect(result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/oauth2/introspect/at`) - .its("body") - .then((body) => { - expect(body.result).to.equal("success") - expect(body.body.active).to.be.true - expect(body.body.sub).to.be.equal("foo@bar.com") - expect(body.body.token_type).to.be.equal("Bearer") - expect(body.body.token_use).to.be.equal("access_token") + cy.request(`${Cypress.env("client_url")}/oauth2/introspect/at`) + .its("body") + .then((body) => { + expect(body.result).to.equal("success") + expect(body.body.active).to.be.true + expect(body.body.sub).to.be.equal("foo@bar.com") + expect(body.body.token_type).to.be.equal("Bearer") + expect(body.body.token_use).to.be.equal("access_token") + }) }) - }) - it("should introspect refresh token", function () { - const client = nc() - cy.authCodeFlow(client, { - consent: { scope: ["offline_access"], createClient: true }, - }) + it("should introspect refresh token", function () { + const client = nc() + cy.authCodeFlow(client, { + consent: { + scope: ["offline_access"], + createClient: true, + }, + }) - cy.get("body") - .invoke("text") - .then((content) => { - const { result } = JSON.parse(content) - expect(result).to.equal("success") - }) + cy.get("body") + .invoke("text") + .then((content) => { + const { result } = JSON.parse(content) + expect(result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/oauth2/introspect/rt`) - .its("body") - .then((body) => { - expect(body.result).to.equal("success") - expect(body.body.active).to.be.true - 
expect(body.body.sub).to.be.equal("foo@bar.com") - expect(body.body.token_type).to.be.equal("Bearer") - expect(body.body.token_use).to.be.equal("refresh_token") + cy.request(`${Cypress.env("client_url")}/oauth2/introspect/rt`) + .its("body") + .then((body) => { + expect(body.result).to.equal("success") + expect(body.body.active).to.be.true + expect(body.body.sub).to.be.equal("foo@bar.com") + expect(body.body.token_type).to.be.equal("Bearer") + expect(body.body.token_use).to.be.equal("refresh_token") + }) }) + }) }) }) diff --git a/cypress/integration/oauth2/jwt.js b/cypress/integration/oauth2/jwt.js index 5054cfee0f2..8b9d0fe78f8 100644 --- a/cypress/integration/oauth2/jwt.js +++ b/cypress/integration/oauth2/jwt.js @@ -3,51 +3,59 @@ import { createClient, prng } from "../../helpers" -describe("OAuth 2.0 JSON Web Token Access Tokens", () => { - before(function () { - // this must be a function otherwise this.skip() fails because the context is wrong - if ( - Cypress.env("jwt_enabled") !== "true" && - !Boolean(Cypress.env("jwt_enabled")) - ) { - this.skip() - } - }) +const accessTokenStrategies = ["opaque", "jwt"] - const nc = () => ({ - client_secret: prng(), - scope: "offline_access", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) +describe("OAuth 2.0 JSON Web Token Access Tokens", () => { + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + before(function () { + // this must be a function otherwise this.skip() fails because the context is wrong + if ( + accessTokenStrategy === "opaque" || + (Cypress.env("jwt_enabled") !== "true" && + !Boolean(Cypress.env("jwt_enabled"))) + ) { + this.skip() + } + }) - it("should return an Access Token in JWT format and validate it and a Refresh Token in opaque format", () => { - createClient(nc()).then((client) => { - cy.authCodeFlow(client, { - consent: { scope: ["offline_access"], createClient: true }, - createClient: false, + const nc = () => ({ + client_secret: prng(), + scope: "offline_access", + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, }) - cy.request(`${Cypress.env("client_url")}/oauth2/refresh`) - .its("body") - .then((body) => { - const { result, token } = body - expect(result).to.equal("success") + it("should return an Access Token in JWT format and validate it and a Refresh Token in opaque format", () => { + createClient(nc()).then((client) => { + cy.authCodeFlow(client, { + consent: { scope: ["offline_access"], createClient: true }, + createClient: false, + }) - expect(token.access_token).to.not.be.empty - expect(token.refresh_token).to.not.be.empty - expect(token.access_token.split(".").length).to.equal(3) - expect(token.refresh_token.split(".").length).to.equal(2) - }) + cy.request(`${Cypress.env("client_url")}/oauth2/refresh`) + .its("body") + .then((body) => { + const { result, token } = body + expect(result).to.equal("success") - cy.request(`${Cypress.env("client_url")}/oauth2/validate-jwt`) - .its("body") - .then((body) => { - console.log(body) - expect(body.sub).to.eq("foo@bar.com") - expect(body.client_id).to.eq(client.client_id) - expect(body.jti).to.not.be.empty + expect(token.access_token).to.not.be.empty + expect(token.refresh_token).to.not.be.empty + expect(token.access_token.split(".").length).to.equal(3) + 
expect(token.refresh_token.split(".").length).to.equal(2) + }) + + cy.request(`${Cypress.env("client_url")}/oauth2/validate-jwt`) + .its("body") + .then((body) => { + console.log(body) + expect(body.sub).to.eq("foo@bar.com") + expect(body.client_id).to.eq(client.client_id) + expect(body.jti).to.not.be.empty + }) }) + }) }) }) }) diff --git a/cypress/integration/oauth2/refresh_token.js b/cypress/integration/oauth2/refresh_token.js index a3b1c6282bf..2ddf7d30f19 100644 --- a/cypress/integration/oauth2/refresh_token.js +++ b/cypress/integration/oauth2/refresh_token.js @@ -3,89 +3,102 @@ import { createClient, prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("The OAuth 2.0 Refresh Token Grant", function () { - const nc = () => ({ - client_secret: prng(), - scope: "offline_access openid", - redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "offline_access openid", + redirect_uris: [`${Cypress.env("client_url")}/oauth2/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should return an Access and Refresh Token and refresh the Access Token", function () { - const client = nc() - cy.authCodeFlow(client, { - consent: { scope: ["offline_access"], createClient: true }, - }) + it("should return an Access and Refresh Token and refresh the Access Token", function () { + const client = nc() + cy.authCodeFlow(client, { + consent: { + scope: ["offline_access"], + createClient: true, + }, + }) - cy.request(`${Cypress.env("client_url")}/oauth2/refresh`) - .its("body") - .then((body) => { - const { result, token } = body - expect(result).to.equal("success") - expect(token.access_token).to.not.be.empty - expect(token.refresh_token).to.not.be.empty + cy.request(`${Cypress.env("client_url")}/oauth2/refresh`) + .its("body") + .then((body) => { + const { result, token } = body + expect(result).to.equal("success") + expect(token.access_token).to.not.be.empty + expect(token.refresh_token).to.not.be.empty + }) }) - }) - it("should return an Access, ID, and Refresh Token and refresh the Access Token and ID Token", function () { - const client = nc() - cy.authCodeFlow(client, { - consent: { scope: ["offline_access", "openid"], createClient: true }, - }) + it("should return an Access, ID, and Refresh Token and refresh the Access Token and ID Token", function () { + const client = nc() + cy.authCodeFlow(client, { + consent: { + scope: ["offline_access", "openid"], + createClient: true, + }, + }) - cy.request(`${Cypress.env("client_url")}/oauth2/refresh`) - .its("body") - .then((body) => { - const { result, token } = body - expect(result).to.equal("success") - expect(token.access_token).to.not.be.empty - expect(token.id_token).to.not.be.empty - expect(token.refresh_token).to.not.be.empty + cy.request(`${Cypress.env("client_url")}/oauth2/refresh`) + .its("body") + .then((body) => { + const { result, token } = body + expect(result).to.equal("success") + expect(token.access_token).to.not.be.empty + expect(token.id_token).to.not.be.empty + expect(token.refresh_token).to.not.be.empty + }) }) - }) - it("should revoke Refresh Token on reuse", function () { - const referrer = `${Cypress.env("client_url")}/empty` - cy.visit(referrer, { - failOnStatusCode: 
false, - }) + it("should revoke Refresh Token on reuse", function () { + const referrer = `${Cypress.env("client_url")}/empty` + cy.visit(referrer, { + failOnStatusCode: false, + }) - createClient({ - scope: "offline_access", - redirect_uris: [referrer], - grant_types: ["authorization_code", "refresh_token"], - response_types: ["code"], - token_endpoint_auth_method: "none", - }).then((client) => { - cy.authCodeFlowBrowser(client, { - consent: { scope: ["offline_access"] }, - createClient: false, - }).then((originalResponse) => { - expect(originalResponse.status).to.eq(200) - expect(originalResponse.body.refresh_token).to.not.be.empty + createClient({ + scope: "offline_access", + redirect_uris: [referrer], + grant_types: ["authorization_code", "refresh_token"], + response_types: ["code"], + token_endpoint_auth_method: "none", + }).then((client) => { + cy.authCodeFlowBrowser(client, { + consent: { scope: ["offline_access"] }, + createClient: false, + }).then((originalResponse) => { + expect(originalResponse.status).to.eq(200) + expect(originalResponse.body.refresh_token).to.not.be.empty - const originalToken = originalResponse.body.refresh_token + const originalToken = originalResponse.body.refresh_token - cy.refreshTokenBrowser(client, originalToken).then( - (refreshedResponse) => { - expect(refreshedResponse.status).to.eq(200) - expect(refreshedResponse.body.refresh_token).to.not.be.empty + cy.refreshTokenBrowser(client, originalToken).then( + (refreshedResponse) => { + expect(refreshedResponse.status).to.eq(200) + expect(refreshedResponse.body.refresh_token).to.not.be.empty - const refreshedToken = refreshedResponse.body.refresh_token + const refreshedToken = refreshedResponse.body.refresh_token - return cy - .refreshTokenBrowser(client, originalToken) - .then((response) => { - expect(response.status).to.eq(401) - expect(response.body.error).to.eq("token_inactive") - }) - .then(() => cy.refreshTokenBrowser(client, refreshedToken)) - .then((response) => { - expect(response.status).to.eq(401) - expect(response.body.error).to.eq("token_inactive") - }) - }, - ) + return cy + .refreshTokenBrowser(client, originalToken) + .then((response) => { + expect(response.status).to.eq(400) + expect(response.body.error).to.eq("invalid_grant") + }) + .then(() => cy.refreshTokenBrowser(client, refreshedToken)) + .then((response) => { + expect(response.status).to.eq(400) + expect(response.body.error).to.eq("invalid_grant") + }) + }, + ) + }) + }) }) }) }) diff --git a/cypress/integration/openid/authorize_code.js b/cypress/integration/openid/authorize_code.js index 96d2a594578..7449249ca56 100644 --- a/cypress/integration/openid/authorize_code.js +++ b/cypress/integration/openid/authorize_code.js @@ -3,36 +3,43 @@ import { prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("OpenID Connect Authorize Code Grant", () => { - const nc = () => ({ - client_secret: prng(), - scope: "openid", - subject_type: "public", - token_endpoint_auth_method: "client_secret_basic", - redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "openid", + subject_type: "public", + token_endpoint_auth_method: "client_secret_basic", + redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], + grant_types: ["authorization_code", 
"refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should return an access, refresh, and ID token", function () { - const client = nc() - cy.authCodeFlow(client, { consent: { scope: ["openid"] } }, "openid") + it("should return an access, refresh, and ID token", function () { + const client = nc() + cy.authCodeFlow(client, { consent: { scope: ["openid"] } }, "openid") - cy.get("body") - .invoke("text") - .then((content) => { - const { - result, - token: { access_token, id_token, refresh_token }, - claims: { sub, sid }, - } = JSON.parse(content) + cy.get("body") + .invoke("text") + .then((content) => { + const { + result, + token: { access_token, id_token, refresh_token }, + claims: { sub, sid }, + } = JSON.parse(content) - expect(result).to.equal("success") - expect(access_token).to.not.be.empty - expect(id_token).to.not.be.empty - expect(refresh_token).to.be.undefined + expect(result).to.equal("success") + expect(access_token).to.not.be.empty + expect(id_token).to.not.be.empty + expect(refresh_token).to.be.undefined - expect(sub).to.eq("foo@bar.com") - expect(sid).to.not.be.empty + expect(sub).to.eq("foo@bar.com") + expect(sid).to.not.be.empty + }) }) + }) }) }) diff --git a/cypress/integration/openid/logout.js b/cypress/integration/openid/logout.js index 4495198d109..1de78c3917d 100644 --- a/cypress/integration/openid/logout.js +++ b/cypress/integration/openid/logout.js @@ -3,207 +3,236 @@ import { deleteClients, prng } from "../../helpers" -const nc = () => ({ - client_secret: prng(), - scope: "openid", - subject_type: "public", - redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], - grant_types: ["authorization_code"], -}) - -describe("OpenID Connect Logout", () => { - before(() => { - cy.clearCookies({ domain: null }) - }) - - after(() => { - deleteClients() - }) - - describe("logout without id_token_hint", () => { - beforeEach(() => { - Cypress.Cookies.preserveOnce( - "oauth2_authentication_session", - "oauth2_authentication_session_insecure", - "connect.sid", - ) - }) - - before(() => { - deleteClients() +const accessTokenStrategies = ["opaque", "jwt"] + +accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "openid", + subject_type: "public", + redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], + grant_types: ["authorization_code"], + access_token_strategy: accessTokenStrategy, }) - const client = { - ...nc(), - backchannel_logout_uri: `${Cypress.env( - "client_url", - )}/openid/session/end/bc`, - } - - it("should log in and remember login without id_token_hint", function () { - cy.authCodeFlow( - client, - { - login: { remember: true }, - consent: { scope: ["openid"], remember: true }, - }, - "openid", - ) - - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.true - }) - }) - - it("should show the logout page and complete logout without id_token_hint", () => { - // cy.request(`${Cypress.env('client_url')}/openid/session/check`) - // .its('body') - // .then(({ has_session }) => { - // expect(has_session).to.be.true; - // }); - - cy.visit(`${Cypress.env("client_url")}/openid/session/end?simple=1`, { - failOnStatusCode: false, + describe("OpenID Connect Logout", () => { + before(() => { + cy.clearCookies({ domain: null }) }) - cy.get("#accept").click() - - cy.get("h1").should("contain", "Your log out request however 
succeeded.") - }) - - it("should show the login screen again because we logged out", () => { - cy.authCodeFlow( - client, - { - login: { remember: false }, // login should have skip false because we removed the session.mak - consent: { scope: ["openid"], remember: false, skip: true }, - createClient: false, - }, - "openid", - ) - }) - }) - - // The Back-Channel test should run before the front-channel test because otherwise both tests need a long time to finish. - describe.only("Back-Channel", () => { - beforeEach(() => { - Cypress.Cookies.preserveOnce( - "oauth2_authentication_session", - "oauth2_authentication_session_insecure", - "connect.sid", - ) - }) + after(() => { + deleteClients() + }) - before(() => { - deleteClients() - }) + describe("logout without id_token_hint", () => { + beforeEach(() => { + Cypress.Cookies.preserveOnce( + "oauth2_authentication_session", + "oauth2_authentication_session_insecure", + "connect.sid", + ) + }) - const client = { - ...nc(), - backchannel_logout_uri: `${Cypress.env( - "client_url", - )}/openid/session/end/bc`, - } - - it("should log in and remember login with back-channel", function () { - cy.authCodeFlow( - client, - { - login: { remember: true }, - consent: { scope: ["openid"], remember: true }, - }, - "openid", - ) - - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.true + before(() => { + deleteClients() }) - }) - it("should show the logout page and complete logout with back-channel", () => { - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.true + const client = { + ...nc(), + backchannel_logout_uri: `${Cypress.env( + "client_url", + )}/openid/session/end/bc`, + } + + it("should log in and remember login without id_token_hint", function () { + cy.authCodeFlow( + client, + { + login: { remember: true }, + consent: { + scope: ["openid"], + remember: true, + }, + }, + "openid", + ) + + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.true + }) }) - cy.visit(`${Cypress.env("client_url")}/openid/session/end`, { - failOnStatusCode: false, - }) + it("should show the logout page and complete logout without id_token_hint", () => { + // cy.request(`${Cypress.env('client_url')}/openid/session/check`) + // .its('body') + // .then(({ has_session }) => { + // expect(has_session).to.be.true; + // }); - cy.get("#accept").click() + cy.visit(`${Cypress.env("client_url")}/openid/session/end?simple=1`, { + failOnStatusCode: false, + }) - cy.get("h1").should("contain", "Your log out request however succeeded.") + cy.get("#accept").click() - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.false + cy.get("h1").should( + "contain", + "Your log out request however succeeded.", + ) }) - }) - }) - describe("Front-Channel", () => { - beforeEach(() => { - Cypress.Cookies.preserveOnce( - "oauth2_authentication_session", - "oauth2_authentication_session_insecure", - "connect.sid", - ) - }) + it("should show the login screen again because we logged out", () => { + cy.authCodeFlow( + client, + { + login: { remember: false }, // login should have skip false because we removed the session.mak + consent: { + scope: ["openid"], + remember: false, + skip: true, + }, + createClient: false, + }, + "openid", + ) + }) + }) - 
before(() => { - deleteClients() - }) + // The Back-Channel test should run before the front-channel test because otherwise both tests need a long time to finish. + describe.only("Back-Channel", () => { + beforeEach(() => { + Cypress.Cookies.preserveOnce( + "oauth2_authentication_session", + "oauth2_authentication_session_insecure", + "connect.sid", + ) + }) - const client = { - ...nc(), - frontchannel_logout_uri: `${Cypress.env( - "client_url", - )}/openid/session/end/fc`, - } - - it("should log in and remember login with front-channel", () => { - cy.authCodeFlow( - client, - { - login: { remember: true }, - consent: { scope: ["openid"], remember: true }, - }, - "openid", - ) - - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.true + before(() => { + deleteClients() }) - }) - it("should show the logout page and complete logout with front-channel", () => { - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.true + const client = { + ...nc(), + backchannel_logout_uri: `${Cypress.env( + "client_url", + )}/openid/session/end/bc`, + } + + it("should log in and remember login with back-channel", function () { + cy.authCodeFlow( + client, + { + login: { remember: true }, + consent: { + scope: ["openid"], + remember: true, + }, + }, + "openid", + ) + + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.true + }) }) - cy.visit(`${Cypress.env("client_url")}/openid/session/end`, { - failOnStatusCode: false, + it("should show the logout page and complete logout with back-channel", () => { + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.true + }) + + cy.visit(`${Cypress.env("client_url")}/openid/session/end`, { + failOnStatusCode: false, + }) + + cy.get("#accept").click() + + cy.get("h1").should( + "contain", + "Your log out request however succeeded.", + ) + + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.false + }) + }) }) - cy.get("#accept").click() + describe("Front-Channel", () => { + beforeEach(() => { + Cypress.Cookies.preserveOnce( + "oauth2_authentication_session", + "oauth2_authentication_session_insecure", + "connect.sid", + ) + }) - cy.get("h1").should("contain", "Your log out request however succeeded.") + before(() => { + deleteClients() + }) + + const client = { + ...nc(), + frontchannel_logout_uri: `${Cypress.env( + "client_url", + )}/openid/session/end/fc`, + } + + it("should log in and remember login with front-channel", () => { + cy.authCodeFlow( + client, + { + login: { remember: true }, + consent: { + scope: ["openid"], + remember: true, + }, + }, + "openid", + ) + + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.true + }) + }) - cy.request(`${Cypress.env("client_url")}/openid/session/check`) - .its("body") - .then(({ has_session }) => { - expect(has_session).to.be.false + it("should show the logout page and complete logout with front-channel", () => { + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.true + }) + + 
cy.visit(`${Cypress.env("client_url")}/openid/session/end`, { + failOnStatusCode: false, + }) + + cy.get("#accept").click() + + cy.get("h1").should( + "contain", + "Your log out request however succeeded.", + ) + + cy.request(`${Cypress.env("client_url")}/openid/session/check`) + .its("body") + .then(({ has_session }) => { + expect(has_session).to.be.false + }) }) + }) }) }) }) diff --git a/cypress/integration/openid/prompt.js b/cypress/integration/openid/prompt.js index 111784c982e..7953734894b 100644 --- a/cypress/integration/openid/prompt.js +++ b/cypress/integration/openid/prompt.js @@ -4,107 +4,125 @@ import { createClient, prng } from "../../helpers" import qs from "querystring" +const accessTokenStrategies = ["opaque", "jwt"] + describe("OpenID Connect Prompt", () => { - const nc = () => ({ - client_secret: prng(), - scope: "openid", - redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "openid", + redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should fail prompt=none when no session exists", function () { - createClient(nc()).then((client) => { - cy.visit( - `${Cypress.env("client_url")}/openid/code?client_id=${ - client.client_id - }&client_secret=${client.client_secret}&prompt=none`, - { failOnStatusCode: false }, - ) + it("should fail prompt=none when no session exists", function () { + createClient(nc()).then((client) => { + cy.visit( + `${Cypress.env("client_url")}/openid/code?client_id=${ + client.client_id + }&client_secret=${client.client_secret}&prompt=none`, + { failOnStatusCode: false }, + ) - cy.location().should(({ search, port }) => { - const query = qs.parse(search.substr(1)) - expect(query.error).to.equal("login_required") - expect(port).to.equal(Cypress.env("client_port")) + cy.location().should(({ search, port }) => { + const query = qs.parse(search.substr(1)) + expect(query.error).to.equal("login_required") + expect(port).to.equal(Cypress.env("client_port")) + }) + }) }) - }) - }) - it("should pass with prompt=none if both login and consent were remembered", function () { - createClient(nc()).then((client) => { - cy.authCodeFlow( - client, - { - login: { remember: true }, - consent: { scope: ["openid"], remember: true }, - createClient: false, - }, - "openid", - ) + it("should pass with prompt=none if both login and consent were remembered", function () { + createClient(nc()).then((client) => { + cy.authCodeFlow( + client, + { + login: { remember: true }, + consent: { + scope: ["openid"], + remember: true, + }, + createClient: false, + }, + "openid", + ) - cy.request( - `${Cypress.env("client_url")}/openid/code?client_id=${ - client.client_id - }&client_secret=${client.client_secret}&scope=openid`, - ) - .its("body") - .then((body) => { - const { - result, - token: { access_token }, - } = body - expect(result).to.equal("success") - expect(access_token).to.not.be.empty + cy.request( + `${Cypress.env("client_url")}/openid/code?client_id=${ + client.client_id + }&client_secret=${client.client_secret}&scope=openid`, + ) + .its("body") + .then((body) => { + const { + result, + token: { access_token }, + } = body + expect(result).to.equal("success") + 
expect(access_token).to.not.be.empty + }) }) - }) - }) + }) - it("should require login with prompt=login even when session exists", function () { - createClient(nc()).then((client) => { - cy.authCodeFlow( - client, - { - login: { remember: true }, - consent: { scope: ["openid"], remember: true }, - createClient: false, - }, - "openid", - ) + it("should require login with prompt=login even when session exists", function () { + createClient(nc()).then((client) => { + cy.authCodeFlow( + client, + { + login: { remember: true }, + consent: { + scope: ["openid"], + remember: true, + }, + createClient: false, + }, + "openid", + ) - cy.request( - `${Cypress.env("client_url")}/openid/code?client_id=${ - client.client_id - }&client_secret=${client.client_secret}&scope=openid&prompt=login`, - ) - .its("body") - .then((body) => { - expect(body).to.contain("Please log in") + cy.request( + `${Cypress.env("client_url")}/openid/code?client_id=${ + client.client_id + }&client_secret=${client.client_secret}&scope=openid&prompt=login`, + ) + .its("body") + .then((body) => { + expect(body).to.contain("Please log in") + }) }) - }) - }) + }) - it("should require consent with prompt=consent even when session exists", function () { - createClient(nc()).then((client) => { - cy.authCodeFlow( - client, - { - login: { remember: true }, - consent: { scope: ["openid"], remember: true }, - createClient: false, - }, - "openid", - ) + it("should require consent with prompt=consent even when session exists", function () { + createClient(nc()).then((client) => { + cy.authCodeFlow( + client, + { + login: { remember: true }, + consent: { + scope: ["openid"], + remember: true, + }, + createClient: false, + }, + "openid", + ) - cy.request( - `${Cypress.env("client_url")}/openid/code?client_id=${ - client.client_id - }&client_secret=${client.client_secret}&scope=openid&prompt=consent`, - ) - .its("body") - .then((body) => { - expect(body).to.contain( - "An application requests access to your data!", + cy.request( + `${Cypress.env("client_url")}/openid/code?client_id=${ + client.client_id + }&client_secret=${ + client.client_secret + }&scope=openid&prompt=consent`, ) + .its("body") + .then((body) => { + expect(body).to.contain( + "An application requests access to your data!", + ) + }) }) + }) }) }) }) diff --git a/cypress/integration/openid/revoke.js b/cypress/integration/openid/revoke.js index 08c6bf29a1c..3793d3e089d 100644 --- a/cypress/integration/openid/revoke.js +++ b/cypress/integration/openid/revoke.js @@ -3,73 +3,80 @@ import { prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("OpenID Connect Token Revokation", () => { - const nc = () => ({ - client_secret: prng(), - scope: "openid offline_access", - redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "openid offline_access", + redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should be able to revoke the access token", function () { - const client = nc() - cy.authCodeFlow( - client, - { consent: { scope: ["openid", "offline_access"] } }, - "openid", - ) + it("should be able to revoke the access token", function () { + const client = nc() + 
cy.authCodeFlow( + client, + { consent: { scope: ["openid", "offline_access"] } }, + "openid", + ) - cy.get("body") - .invoke("text") - .then((content) => { - const { result } = JSON.parse(content) - expect(result).to.equal("success") - }) + cy.get("body") + .invoke("text") + .then((content) => { + const { result } = JSON.parse(content) + expect(result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/openid/revoke/at`) - .its("body") - .then((response) => { - expect(response.result).to.equal("success") - }) + cy.request(`${Cypress.env("client_url")}/openid/revoke/at`) + .its("body") + .then((response) => { + expect(response.result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/openid/userinfo`, { - failOnStatusCode: false, - }) - .its("body") - .then((response) => { - expect(response.error).to.contain("request_unauthorized") + cy.request(`${Cypress.env("client_url")}/openid/userinfo`, { + failOnStatusCode: false, + }) + .its("body") + .then((response) => { + expect(response.error).to.contain("request_unauthorized") + }) }) - }) - it("should be able to revoke the refresh token", function () { - const client = nc() - cy.authCodeFlow( - client, - { consent: { scope: ["openid", "offline_access"] } }, - "openid", - ) + it("should be able to revoke the refresh token", function () { + const client = nc() + cy.authCodeFlow( + client, + { consent: { scope: ["openid", "offline_access"] } }, + "openid", + ) - cy.get("body") - .invoke("text") - .then((content) => { - const { result } = JSON.parse(content) - expect(result).to.equal("success") - }) + cy.get("body") + .invoke("text") + .then((content) => { + const { result } = JSON.parse(content) + expect(result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/openid/revoke/rt`, { - failOnStatusCode: false, - }) - .its("body") - .then((response) => { - expect(response.result).to.equal("success") - }) + cy.request(`${Cypress.env("client_url")}/openid/revoke/rt`, { + failOnStatusCode: false, + }) + .its("body") + .then((response) => { + expect(response.result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/openid/userinfo`, { - failOnStatusCode: false, - }) - .its("body") - .then((response) => { - expect(response.error).to.contain("request_unauthorized") + cy.request(`${Cypress.env("client_url")}/openid/userinfo`, { + failOnStatusCode: false, + }) + .its("body") + .then((response) => { + expect(response.error).to.contain("request_unauthorized") + }) }) + }) }) }) diff --git a/cypress/integration/openid/userinfo.js b/cypress/integration/openid/userinfo.js index 7df40c65772..079f4fba846 100644 --- a/cypress/integration/openid/userinfo.js +++ b/cypress/integration/openid/userinfo.js @@ -3,30 +3,37 @@ import { prng } from "../../helpers" +const accessTokenStrategies = ["opaque", "jwt"] + describe("OpenID Connect Userinfo", () => { - const nc = () => ({ - client_secret: prng(), - scope: "openid", - redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], - grant_types: ["authorization_code", "refresh_token"], - }) + accessTokenStrategies.forEach((accessTokenStrategy) => { + describe("access_token_strategy=" + accessTokenStrategy, function () { + const nc = () => ({ + client_secret: prng(), + scope: "openid", + redirect_uris: [`${Cypress.env("client_url")}/openid/callback`], + grant_types: ["authorization_code", "refresh_token"], + access_token_strategy: accessTokenStrategy, + }) - it("should return a proper userinfo response", function () { - const client = nc() - 
cy.authCodeFlow(client, { consent: { scope: ["openid"] } }, "openid") + it("should return a proper userinfo response", function () { + const client = nc() + cy.authCodeFlow(client, { consent: { scope: ["openid"] } }, "openid") - cy.get("body") - .invoke("text") - .then((content) => { - const { result } = JSON.parse(content) - expect(result).to.equal("success") - }) + cy.get("body") + .invoke("text") + .then((content) => { + const { result } = JSON.parse(content) + expect(result).to.equal("success") + }) - cy.request(`${Cypress.env("client_url")}/openid/userinfo`) - .its("body") - .then(({ aud, sub } = {}) => { - expect(sub).to.eq("foo@bar.com") - expect(aud).to.not.be.empty + cy.request(`${Cypress.env("client_url")}/openid/userinfo`) + .its("body") + .then(({ aud, sub } = {}) => { + expect(sub).to.eq("foo@bar.com") + expect(aud).to.not.be.empty + }) }) + }) }) }) diff --git a/cypress/support/commands.js b/cypress/support/commands.js index 2f75293404d..0e8700177dc 100644 --- a/cypress/support/commands.js +++ b/cypress/support/commands.js @@ -216,3 +216,90 @@ Cypress.Commands.add("refreshTokenBrowser", (client, token) => failOnStatusCode: false, }), ) + +Cypress.Commands.add( + "deviceAuthFlow", + ( + client, + { + override: { scope, client_id, client_secret } = {}, + consent: { + accept: acceptConsent = true, + skip: skipConsent = false, + remember: rememberConsent = false, + scope: acceptScope = [], + } = {}, + login: { + accept: acceptLogin = true, + skip: skipLogin = false, + remember: rememberLogin = false, + username = "foo@bar.com", + password = "foobar", + } = {}, + prompt = "", + createClient: doCreateClient = true, + } = {}, + path = "oauth2", + ) => { + const run = (client) => { + cy.visit( + `${Cypress.env("client_url")}/${path}/device?client_id=${ + client_id || client.client_id + }&client_secret=${client_secret || client.client_secret}&scope=${ + scope || client.scope + }`, + { failOnStatusCode: false }, + ) + + cy.get("#verify").click() + + if (!skipLogin) { + cy.get("#email").type(username, { delay: 1 }) + cy.get("#password").type(password, { delay: 1 }) + + if (rememberLogin) { + cy.get("#remember").click() + } + + if (acceptLogin) { + cy.get("#accept").click() + } else { + cy.get("#reject").click() + } + } + + if (!skipConsent) { + acceptScope.forEach((s) => { + cy.get(`#${s}`).click() + }) + + if (rememberConsent) { + cy.get("#remember").click() + } + + if (acceptConsent) { + cy.get("#accept").click() + } else { + cy.get("#reject").click() + } + + cy.location().should((loc) => { + expect(loc.origin).to.eq(Cypress.env("consent_url")) + expect(loc.pathname).to.eq("/oauth2/device/success") + }) + } + } + + if (doCreateClient) { + createClient(client).should((client) => { + run(client) + }) + return + } + run(client) + }, +) + +Cypress.Commands.add("postDeviceAuthFlow", (path = "oauth2") => + cy.request(`${Cypress.env("client_url")}/${path}/device/success`), +) diff --git a/docs/flow-cache-design-doc.md b/docs/flow-cache-design-doc.md new file mode 100644 index 00000000000..22916348936 --- /dev/null +++ b/docs/flow-cache-design-doc.md @@ -0,0 +1,167 @@ +# Flow Cache Design Doc + +## Overview + +This design doc outlines the proposed solution for caching the flow object in +the OAuth2 exchange between the Client, Ory Hydra, and the Consent and Login +UIs. The flow object contains the state of the authorization request. + +## Problem Statement + +Currently, the flow object is stored in the database on the Ory Hydra server. 
+This approach has several drawbacks: + +- Each step of the OAuth2 flow (initialization, consent, login, etc.) requires a + database query to retrieve the flow object, and another to update it. +- Each part of the exchanges supplies different values (login challenge, consent + challenge, etc.) to identify the flow object. This means the database table + has multiple indices that slow down insertions. + +## Proposed Solution + +The proposed solution is to store the flow object in client cookies and URLs. +This way, the flow object is written only once when the flow is completed and +the final authorization code is generated. + +### Requirements + +- The flow object must be stored in client cookies and URLs. +- The flow object must be secure and protect against unauthorized access. +- The flow object must be persistent, so that the flow can be resumed if the + user navigates away from the page or closes the browser. +- The flow object must be scalable and able to handle a large number of + concurrent requests. + +### Architecture + +The proposed architecture for the flow cache is as follows: + +- Store the flow object in an AEAD encrypted cookie. +- Pass a partial flow around in the URL. +- Use a secure connection to protect against unauthorized access. + +```mermaid +sequenceDiagram + actor Client + participant Hydra + participant LoginUI as Login UI + participant ConsentUI as Consent UI + % participant Callback + + autonumber + + Client->>+Hydra: GET /oauth2/auth?client_id=CLIENT_ID&response_type=code&scope=SCOPES&state=STATE + Hydra->>-Client: Redirect to
http://login.local/?login_challenge=LOGIN_CHALLENGE + + Client->>+LoginUI: GET /?login_challenge=LOGIN_CHALLENGE + LoginUI->>Hydra: GET /admin/oauth2/auth/requests/login + Hydra->>LoginUI: oAuth2LoginRequest + alt accept login + LoginUI->>Hydra: PUT /admin/oauth2/auth/requests/login/accept + else reject login + LoginUI->>Hydra: PUT /admin/oauth2/auth/requests/login/reject + end + Hydra->>LoginUI: oAuth2RedirectTo + LoginUI->>-Client: Redirect to
http://hydra.local/oauth2/auth?client_id=CLIENT_ID&login_verifier=LOGIN_VERIFIER&response_type=code&scope=SCOPES&state=STATE + + Client->>+Hydra: GET /oauth2/auth?client_id=CLIENT_ID&login_verifier=LOGIN_VERIFIER&response_type=code&scope=SCOPES&state=STATE + Hydra->>-Client: Redirect to
http://consent.local/?consent_challenge=CONSENT_CHALLENGE + + Client->>+ConsentUI: GET /?consent_challenge=CONSENT_CHALLENGE + ConsentUI->>Hydra: GET /admin/oauth2/auth/requests/consent + Hydra->>ConsentUI: oAuth2ConsentRequest + alt accept consent + ConsentUI->>Hydra: PUT /admin/oauth2/auth/requests/consent/accept + else reject consent + ConsentUI->>Hydra: PUT /admin/oauth2/auth/requests/consent/reject + end + Hydra->>ConsentUI: oAuth2RedirectTo + ConsentUI->>-Client: Redirect to
http://hydra.local/oauth2/auth?client_id=CLIENT_ID&consent_verifier=CONSENT_VERIFIER&response_type=code&scope=SCOPES&state=STATE + + Client->>+Hydra: GET /oauth2/auth?client_id=CLIENT_ID&consent_verifier=CONSENT_VERIFIER&response_type=code&scope=SCOPES&state=STATE + Hydra->>-Client: Redirect to
http://callback.local/callback?code=AUTH_CODE&scope=SCOPES&state=STATE + Note over Hydra,Client: next, exchange code for token. + + + % Client->>+Callback: GET /callback?code=AUTH_CODE&scope=SCOPES&state=STATE + % Callback->>-Client: Return Authorization Code +``` + +Step 2: + +- Set the whole flow as an AEAD encrypted cookie on the client +- The cookie is keyed by the `state`, so that multiple flows can run in parallel + from one cookie jar +- Set the `LOGIN_CHALLENGE` to the AEAD-encrypted flow + +Step 5: + +- Decrypt the flow from the `LOGIN_CHALLENGE`, return the `oAuth2LoginRequest` + +Step 8: + +- Encode the flow into the redirect URL in `oAuth2RedirectTo` as the + `LOGIN_VERIFIER` + +Step 11 + +- Check that the login challenge in the `LOGIN_VERIFIER` matches the challenge + in the flow cookie. +- Update the flow based on the request from the `LOGIN_VERIFIER` +- Update the cookie +- Set the `CONSENT_CHALLENGE` to the AEAD-encrypted flow + +Step 14: + +- Decrypt the flow from the `CONSENT_CHALLENGE` + +Step 17: + +- Encode the flow into the redirect URL in `oAuth2RedirectTo` as the + `CONSENT_VERIFIER` + +Step 20 + +- Check that the consent challenge in the `CONSENT_VERIFIER` matches the + challenge in the flow cookie. +- Update the flow based on the request from the `CONSENT_VERIFIER` +- Update the cookie +- Write the flow to the database +- Continue the flow as currently implemented (generate the authentication code, + return the code, etc.) + +### Client HTTP requests + +For reference, these HTTP requests are issued by the client: + +``` +GET http://hydra.local/oauth2/auth?client_id=CLIENT_ID&nonce=NONCE&response_type=code&scope=SCOPES&state=STATE +Redirect to http://login.local/?login_challenge=LOGIN_CHALLENGE +GET http://login.local/?login_challenge=LOGIN_CHALLENGE +Redirect to http://hydra.local/oauth2/auth?client_id=CLIENT_ID&login_verifier=LOGIN_VERIFIER&nonce=NONCE&response_type=code&scope=SCOPES&state=STATE +GET http://hydra.local/oauth2/auth?client_id=CLIENT_ID&login_verifier=LOGIN_VERIFIER&nonce=NONCE&response_type=code&scope=SCOPES&state=STATE +Redirect to http://consent.local/?consent_challenge=CONSENT_CHALLENGE +GET http://consent.local/?consent_challenge=CONSENT_CHALLENGE +Redirect to http://hydra.local/oauth2/auth?client_id=CLIENT_ID&consent_verifier=CONSENT_VERIFIER&nonce=NONCE&response_type=code&scope=SCOPES&state=STATE +GET http://hydra.local/oauth2/auth?client_id=CLIENT_ID&consent_verifier=CONSENT_VERIFIER&nonce=NONCE&response_type=code&scope=SCOPES&state=STATE +Redirect to http://callback.local/callback?code=AUTH_CODE&scope=SCOPES&state=STATE +GET http://callback.local/callback?code=AUTH_CODE&scope=SCOPES&state=STATE +``` + +### Implementation + +The implementation of the flow cache will involve the following steps: + +1. Modify the Ory Hydra server to store the flow object in an AEAD encrypted + cookie. +2. Modify the Consent and Login UIs to include the flow object in the URL. +3. Use HTTPS to protect against unauthorized access. + +## Conclusion + +The proposed solution for caching the flow object in the OAuth2 exchange between +the Client, Ory Hydra, and the Consent and Login UIs is to store the flow object +in client cookies and URLs. This approach eliminates the need for a distributed +cache and provides a scalable and secure solution. The flow object will be +stored in an AEAD encrypted cookie and passed around in the URL. HTTPS will be +used to protect against unauthorized access. 
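To make the challenge/verifier encoding in the steps above more concrete, here is a minimal, hypothetical Go sketch that is not part of this change set: it assumes a simplified `Flow` struct, a 32-byte key (in practice derived from `secrets.system`), and standard-library AES-GCM, and shows how a flow could be sealed into a URL-safe challenge with the OAuth2 `state` as additional authenticated data and opened again later, so the database is written only once at the end of the flow. Hydra's actual flow and AEAD types are not shown in this diff and differ in detail.

```go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/base64"
	"encoding/json"
	"fmt"
)

// Flow is a simplified stand-in for the real flow object, which carries the
// full login and consent state.
type Flow struct {
	ClientID string `json:"client_id"`
	State    string `json:"state"`
	Subject  string `json:"subject,omitempty"`
}

// seal AEAD-encrypts the flow into a URL-safe challenge. The OAuth2 state is
// used as additional authenticated data so a challenge cannot be replayed
// into a different flow.
func seal(key []byte, f Flow) (string, error) {
	plaintext, err := json.Marshal(f)
	if err != nil {
		return "", err
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	aead, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonce := make([]byte, aead.NonceSize())
	if _, err := rand.Read(nonce); err != nil {
		return "", err
	}
	// Prepend the nonce so open() can recover it.
	ciphertext := aead.Seal(nonce, nonce, plaintext, []byte(f.State))
	return base64.RawURLEncoding.EncodeToString(ciphertext), nil
}

// open decrypts a challenge or verifier back into a flow, authenticating it
// against the state remembered in the flow cookie.
func open(key []byte, challenge, state string) (Flow, error) {
	var f Flow
	raw, err := base64.RawURLEncoding.DecodeString(challenge)
	if err != nil {
		return f, err
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return f, err
	}
	aead, err := cipher.NewGCM(block)
	if err != nil {
		return f, err
	}
	if len(raw) < aead.NonceSize() {
		return f, fmt.Errorf("challenge too short")
	}
	plaintext, err := aead.Open(nil, raw[:aead.NonceSize()], raw[aead.NonceSize():], []byte(state))
	if err != nil {
		return f, err
	}
	return f, json.Unmarshal(plaintext, &f)
}

func main() {
	key := make([]byte, 32) // hypothetical key; in practice derived from secrets.system
	if _, err := rand.Read(key); err != nil {
		panic(err)
	}

	challenge, err := seal(key, Flow{ClientID: "CLIENT_ID", State: "STATE"})
	if err != nil {
		panic(err)
	}

	// The login UI echoes the challenge back; instead of loading the flow
	// from the database, it is decrypted from the challenge itself.
	restored, err := open(key, challenge, "STATE")
	if err != nil {
		panic(err)
	}
	fmt.Println(restored.ClientID) // CLIENT_ID
}
```

Binding the ciphertext to the `state` (and keeping the same flow in the encrypted cookie) is what would let steps 11 and 20 reject a verifier that was issued for a different flow.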
diff --git a/driver/config/provider.go b/driver/config/provider.go index 5e7252d6e66..6670b4a05bc 100644 --- a/driver/config/provider.go +++ b/driver/config/provider.go @@ -5,27 +5,29 @@ package config import ( "context" + "crypto/sha512" "fmt" + "math" "net/http" "net/url" "strings" + "testing" "time" - "github.com/ory/x/hasherx" + "github.com/stretchr/testify/require" "github.com/gofrs/uuid" + "github.com/pkg/errors" - "github.com/ory/x/otelx" - - "github.com/ory/hydra/spec" - "github.com/ory/x/dbal" - + "github.com/ory/hydra/v2/spec" + "github.com/ory/hydra/v2/x" "github.com/ory/x/configx" - - "github.com/ory/x/logrusx" - - "github.com/ory/hydra/x" "github.com/ory/x/contextx" + "github.com/ory/x/dbal" + "github.com/ory/x/hasherx" + "github.com/ory/x/logrusx" + "github.com/ory/x/otelx" + "github.com/ory/x/randx" "github.com/ory/x/stringslice" "github.com/ory/x/urlx" ) @@ -42,14 +44,17 @@ const ( KeyOAuth2ClientRegistrationURL = "webfinger.oidc_discovery.client_registration_url" KeyOAuth2TokenURL = "webfinger.oidc_discovery.token_url" // #nosec G101 KeyOAuth2AuthURL = "webfinger.oidc_discovery.auth_url" + KeyVerifiableCredentialsURL = "webfinger.oidc_discovery.verifiable_credentials_url" // #nosec G101 KeyJWKSURL = "webfinger.oidc_discovery.jwks_url" KeyOIDCDiscoverySupportedClaims = "webfinger.oidc_discovery.supported_claims" KeyOIDCDiscoverySupportedScope = "webfinger.oidc_discovery.supported_scope" KeyOIDCDiscoveryUserinfoEndpoint = "webfinger.oidc_discovery.userinfo_url" + KeyOAuth2DeviceAuthorisationURL = "webfinger.oidc_discovery.device_authorization_url" KeySubjectTypesSupported = "oidc.subject_identifiers.supported_types" KeyDefaultClientScope = "oidc.dynamic_client_registration.default_scope" KeyDSN = "dsn" - ViperKeyClientHTTPNoPrivateIPRanges = "clients.http.disallow_private_ip_ranges" + KeyClientHTTPNoPrivateIPRanges = "clients.http.disallow_private_ip_ranges" + KeyClientHTTPPrivateIPExceptionURLs = "clients.http.private_ip_exception_urls" KeyHasherAlgorithm = "oauth2.hashers.algorithm" KeyBCryptCost = "oauth2.hashers.bcrypt.cost" KeyPBKDF2Iterations = "oauth2.hashers.pbkdf2.iterations" @@ -59,28 +64,45 @@ const ( KeyCookieDomain = "serve.cookies.domain" KeyCookieSecure = "serve.cookies.secure" KeyCookieLoginCSRFName = "serve.cookies.names.login_csrf" + KeyCookieDeviceCSRFName = "serve.cookies.names.device_csrf" KeyCookieConsentCSRFName = "serve.cookies.names.consent_csrf" KeyCookieSessionName = "serve.cookies.names.session" + KeyCookieSessionPath = "serve.cookies.paths.session" KeyConsentRequestMaxAge = "ttl.login_consent_request" KeyAccessTokenLifespan = "ttl.access_token" // #nosec G101 KeyRefreshTokenLifespan = "ttl.refresh_token" // #nosec G101 + KeyVerifiableCredentialsNonceLifespan = "ttl.vc_nonce" // #nosec G101 KeyIDTokenLifespan = "ttl.id_token" // #nosec G101 KeyAuthCodeLifespan = "ttl.auth_code" + KeyDeviceAndUserCodeLifespan = "ttl.device_user_code" + KeyAuthenticationSessionLifespan = "ttl.authentication_session" KeyScopeStrategy = "strategies.scope" KeyGetCookieSecrets = "secrets.cookie" KeyGetSystemSecret = "secrets.system" + KeyPaginationSecrets = "secrets.pagination" KeyLogoutRedirectURL = "urls.post_logout_redirect" KeyLoginURL = "urls.login" + KeyRegistrationURL = 
"urls.registration" KeyLogoutURL = "urls.logout" KeyConsentURL = "urls.consent" KeyErrorURL = "urls.error" + KeyDeviceVerificationURL = "urls.device.verification" + KeyDeviceDoneURL = "urls.device.success" KeyPublicURL = "urls.self.public" KeyAdminURL = "urls.self.admin" KeyIssuerURL = "urls.self.issuer" + KeyIdentityProviderAdminURL = "urls.identity_provider.url" + KeyIdentityProviderPublicURL = "urls.identity_provider.publicUrl" + KeyIdentityProviderHeaders = "urls.identity_provider.headers" KeyAccessTokenStrategy = "strategies.access_token" + KeyJWTScopeClaimStrategy = "strategies.jwt.scope_claim" KeyDBIgnoreUnknownTableColumns = "db.ignore_unknown_table_columns" KeySubjectIdentifierAlgorithmSalt = "oidc.subject_identifiers.pairwise.salt" KeyPublicAllowDynamicRegistration = "oidc.dynamic_client_registration.enabled" + KeyDeviceAuthTokenPollingInterval = "oauth2.device_authorization.token_polling_interval" // #nosec G101 + KeyDeviceAuthUserCodeEntropyPreset = "oauth2.device_authorization.user_code.entropy_preset" + KeyDeviceAuthUserCodeLength = "oauth2.device_authorization.user_code.length" + KeyDeviceAuthUserCodeCharacterSet = "oauth2.device_authorization.user_code.character_set" KeyPKCEEnforced = "oauth2.pkce.enforced" KeyPKCEEnforcedForPublicClients = "oauth2.pkce.enforced_for_public_clients" KeyLogLevel = "log.level" @@ -89,57 +111,72 @@ const ( KeyExposeOAuth2Debug = "oauth2.expose_internal_errors" KeyExcludeNotBeforeClaim = "oauth2.exclude_not_before_claim" KeyAllowedTopLevelClaims = "oauth2.allowed_top_level_claims" + KeyMirrorTopLevelClaims = "oauth2.mirror_top_level_claims" + KeyRefreshTokenRotationGracePeriod = "oauth2.grant.refresh_token.rotation_grace_period" // #nosec G101 + KeyRefreshTokenRotationGraceReuseCount = "oauth2.grant.refresh_token.rotation_grace_reuse_count" // #nosec G101 KeyOAuth2GrantJWTIDOptional = "oauth2.grant.jwt.jti_optional" KeyOAuth2GrantJWTIssuedDateOptional = "oauth2.grant.jwt.iat_optional" KeyOAuth2GrantJWTMaxDuration = "oauth2.grant.jwt.max_ttl" - KeyRefreshTokenHookURL = "oauth2.refresh_token_hook" // #nosec G101 + KeyRefreshTokenHook = "oauth2.refresh_token_hook" // #nosec G101 + KeyTokenHook = "oauth2.token_hook" // #nosec G101 KeyDevelopmentMode = "dev" ) const DSNMemory = "memory" -var _ hasherx.PBKDF2Configurator = (*DefaultProvider)(nil) -var _ hasherx.BCryptConfigurator = (*DefaultProvider)(nil) +var ( + _ hasherx.PBKDF2Configurator = (*DefaultProvider)(nil) + _ hasherx.BCryptConfigurator = (*DefaultProvider)(nil) +) type DefaultProvider struct { - generatedSecret []byte - l *logrusx.Logger - + l *logrusx.Logger p *configx.Provider c contextx.Contextualizer } -func (p *DefaultProvider) GetHasherAlgorithm(ctx context.Context) x.HashAlgorithm { - switch strings.ToLower(p.getProvider(ctx).String(KeyHasherAlgorithm)) { - case x.HashAlgorithmBCrypt.String(): - return x.HashAlgorithmBCrypt - case x.HashAlgorithmPBKDF2.String(): - fallthrough - default: - return x.HashAlgorithmPBKDF2 - } +func (p *DefaultProvider) GetHasherAlgorithm(ctx context.Context) string { + return strings.ToLower(p.getProvider(ctx).String(KeyHasherAlgorithm)) } func (p *DefaultProvider) HasherBcryptConfig(ctx context.Context) *hasherx.BCryptConfig { + var cost uint32 + costInt := int64(p.GetBCryptCost(ctx)) + if costInt < 0 { + cost = 10 + } else if costInt > math.MaxUint32 { + cost = math.MaxUint32 + } else { + cost = uint32(costInt) + } return &hasherx.BCryptConfig{ - Cost: uint32(p.GetBCryptCost(ctx)), + Cost: cost, } } func (p *DefaultProvider) HasherPBKDF2Config(ctx 
context.Context) *hasherx.PBKDF2Config { + var iters uint32 + itersInt := p.getProvider(ctx).Int64(KeyPBKDF2Iterations) + if itersInt < 1 { + iters = 1 + } else if int64(itersInt) > math.MaxUint32 { + iters = math.MaxUint32 + } else { + iters = uint32(itersInt) + } + return &hasherx.PBKDF2Config{ Algorithm: "sha256", - Iterations: uint32(p.getProvider(ctx).Int(KeyPBKDF2Iterations)), + Iterations: iters, SaltLength: 16, KeyLength: 32, } } -func MustNew(ctx context.Context, l *logrusx.Logger, opts ...configx.OptionModifier) *DefaultProvider { - p, err := New(ctx, l, opts...) - if err != nil { - l.WithError(err).Fatalf("Unable to load config.") - } +func MustNew(t testing.TB, l *logrusx.Logger, opts ...configx.OptionModifier) *DefaultProvider { + ctxt := contextx.NewTestConfigProvider(spec.ConfigValidationSchema, opts...) + p, err := New(t.Context(), l, ctxt, opts...) + require.NoError(t, err) return p } @@ -147,7 +184,7 @@ func (p *DefaultProvider) getProvider(ctx context.Context) *configx.Provider { return p.c.Config(ctx, p.p) } -func New(ctx context.Context, l *logrusx.Logger, opts ...configx.OptionModifier) (*DefaultProvider, error) { +func New(ctx context.Context, l *logrusx.Logger, ctxt contextx.Contextualizer, opts ...configx.OptionModifier) (*DefaultProvider, error) { opts = append( []configx.OptionModifier{ configx.WithStderrValidationReporter(), @@ -161,7 +198,7 @@ func New(ctx context.Context, l *logrusx.Logger, opts ...configx.OptionModifier) if err != nil { return nil, err } - return NewCustom(l, p, &contextx.Default{}), nil + return NewCustom(l, p, ctxt), nil } func NewCustom(l *logrusx.Logger, p *configx.Provider, ctxt contextx.Contextualizer) *DefaultProvider { @@ -169,16 +206,23 @@ func NewCustom(l *logrusx.Logger, p *configx.Provider, ctxt contextx.Contextuali return &DefaultProvider{l: l, p: p, c: ctxt} } +// Deprecated: use context-based test setters func (p *DefaultProvider) Set(ctx context.Context, key string, value interface{}) error { return p.getProvider(ctx).Set(key, value) } +// Deprecated: use context-based test setters func (p *DefaultProvider) MustSet(ctx context.Context, key string, value interface{}) { if err := p.Set(ctx, key, value); err != nil { p.l.WithError(err).Fatalf("Unable to set \"%s\" to \"%s\".", key, value) } } +// Deprecated: use context-based test setters +func (p *DefaultProvider) Delete(ctx context.Context, key string) { + p.getProvider(ctx).Delete(key) +} + func (p *DefaultProvider) Source(ctx context.Context) *configx.Provider { return p.getProvider(ctx) } @@ -188,47 +232,44 @@ func (p *DefaultProvider) IsDevelopmentMode(ctx context.Context) bool { } func (p *DefaultProvider) WellKnownKeys(ctx context.Context, include ...string) []string { - if p.AccessTokenStrategy(ctx) == AccessTokenJWTStrategy { - include = append(include, x.OAuth2JWTKeyName) - } - - include = append(include, x.OpenIDConnectKeyName) + include = append(include, x.OAuth2JWTKeyName, x.OpenIDConnectKeyName) return stringslice.Unique(append(p.getProvider(ctx).Strings(KeyWellKnownKeys), include...)) } -func (p *DefaultProvider) IsUsingJWTAsAccessTokens(ctx context.Context) bool { - return p.AccessTokenStrategy(ctx) != "opaque" +func (p *DefaultProvider) ClientHTTPNoPrivateIPRanges() bool { + return p.getProvider(contextx.RootContext).Bool(KeyClientHTTPNoPrivateIPRanges) } -func (p *DefaultProvider) ClientHTTPNoPrivateIPRanges() bool { - return p.getProvider(contextx.RootContext).Bool(ViperKeyClientHTTPNoPrivateIPRanges) +func (p *DefaultProvider) 
ClientHTTPPrivateIPExceptionURLs() []string { + return p.getProvider(contextx.RootContext).Strings(KeyClientHTTPPrivateIPExceptionURLs) } func (p *DefaultProvider) AllowedTopLevelClaims(ctx context.Context) []string { return stringslice.Unique(p.getProvider(ctx).Strings(KeyAllowedTopLevelClaims)) } -func (p *DefaultProvider) SubjectTypesSupported(ctx context.Context) []string { - types := stringslice.Filter( - p.getProvider(ctx).StringsF(KeySubjectTypesSupported, []string{"public"}), - func(s string) bool { - return !(s == "public" || s == "pairwise") - }, - ) +func (p *DefaultProvider) MirrorTopLevelClaims(ctx context.Context) bool { + return p.getProvider(ctx).BoolF(KeyMirrorTopLevelClaims, true) +} - if len(types) == 0 { - types = []string{"public"} +func (p *DefaultProvider) SubjectTypesSupported(ctx context.Context, additionalSources ...AccessTokenStrategySource) []string { + public, pairwise := false, false + for _, t := range p.getProvider(ctx).StringsF(KeySubjectTypesSupported, []string{"public"}) { + switch t { + case "public": + public = true + case "pairwise": + pairwise = true + } } - if stringslice.Has(types, "pairwise") { - if p.AccessTokenStrategy(ctx) == AccessTokenJWTStrategy { + // when neither public nor pairwise are set, force public + public = public || !pairwise + + if pairwise { + if p.AccessTokenStrategy(ctx, additionalSources...) == AccessTokenJWTStrategy { p.l.Warn(`The pairwise subject identifier algorithm is not supported by the JWT OAuth 2.0 Access Token Strategy and is thus being disabled. Please remove "pairwise" from oidc.subject_identifiers.supported_types" (e.g. oidc.subject_identifiers.supported_types=public) or set strategies.access_token to "opaque".`) - types = stringslice.Filter( - types, - func(s string) bool { - return !(s == "public") - }, - ) + pairwise = false } else if len(p.SubjectIdentifierAlgorithmSalt(ctx)) < 8 { p.l.Fatalf( `The pairwise subject identifier algorithm was set but length of oidc.subject_identifier.salt is too small (%d < 8), please set oidc.subject_identifiers.pairwise.salt to a random string with 8 characters or more.`, @@ -237,6 +278,13 @@ func (p *DefaultProvider) SubjectTypesSupported(ctx context.Context) []string { } } + types := make([]string, 0, 2) + if public { + types = append(types, "public") + } + if pairwise { + types = append(types, "pairwise") + } return types } @@ -285,7 +333,8 @@ func (p *DefaultProvider) CookieSameSiteMode(ctx context.Context) http.SameSite case "strict": return http.SameSiteStrictMode case "none": - if p.IsDevelopmentMode(ctx) { + if p.IssuerURL(ctx).Scheme != "https" { + // SameSite=None can only be set for HTTPS issuers. 
return http.SameSiteLaxMode } return http.SameSiteNoneMode @@ -339,29 +388,88 @@ func (p *DefaultProvider) LogoutRedirectURL(ctx context.Context) *url.URL { } func (p *DefaultProvider) publicFallbackURL(ctx context.Context, path string) *url.URL { - if len(p.PublicURL(ctx).String()) > 0 { - return urlx.AppendPaths(p.PublicURL(ctx), path) + if publicURL := p.PublicURL(ctx); len(publicURL.String()) > 0 { + return urlx.AppendPaths(publicURL, path) } - return p.fallbackURL(ctx, path, p.host(PublicInterface), p.port(PublicInterface)) + return p.fallbackURL(ctx, path, p.ServePublic(ctx)) } -func (p *DefaultProvider) fallbackURL(ctx context.Context, path string, host string, port int) *url.URL { - var u url.URL - u.Scheme = "http" - if tls := p.TLS(ctx, PublicInterface); tls.Enabled() || !p.IsDevelopmentMode(ctx) { +func (p *DefaultProvider) fallbackURL(ctx context.Context, path string, serve *configx.Serve) *url.URL { + u := url.URL{ + Scheme: "http", + Host: serve.GetAddress(), + } + if serve.TLS.Enabled || !p.IsDevelopmentMode(ctx) { u.Scheme = "https" } - if host == "" { - u.Host = fmt.Sprintf("%s:%d", "localhost", port) + if serve.Host == "" { + u.Host = fmt.Sprintf("%s:%d", "localhost", serve.Port) } u.Path = path return &u } +// GetDeviceAndUserCodeLifespan returns the device_code and user_code lifespan. Defaults to 15 minutes. +func (p *DefaultProvider) GetDeviceAndUserCodeLifespan(ctx context.Context) time.Duration { + return p.p.DurationF(KeyDeviceAndUserCodeLifespan, time.Minute*15) +} + +// GetAuthenticationSessionLifespan returns the authentication_session lifespan. +func (p *DefaultProvider) GetAuthenticationSessionLifespan(ctx context.Context) time.Duration { + lifespan := p.p.Duration(KeyAuthenticationSessionLifespan) + if lifespan > time.Hour*24*180 { + return time.Hour * 24 * 180 + } + return lifespan +} + +// GetDeviceAuthTokenPollingInterval returns device grant token endpoint polling interval. Defaults to 5 seconds. 
+func (p *DefaultProvider) GetDeviceAuthTokenPollingInterval(ctx context.Context) time.Duration { + return p.p.DurationF(KeyDeviceAuthTokenPollingInterval, time.Second*5) +} + +func (p *DefaultProvider) userCodeEntropyPreset(t string) (int, []rune) { + switch t { + default: + p.l.Errorf(`invalid user code entropy preset %q, allowed values are "high", "medium", or "low"`, t) + fallthrough + case "high": + return 8, randx.AlphaNumNoAmbiguous + case "medium": + return 8, randx.AlphaUpper + case "low": + return 9, randx.Numeric + } +} + +// GetUserCodeLength returns configured user_code length +func (p *DefaultProvider) GetUserCodeLength(ctx context.Context) int { + if l := p.getProvider(ctx).Int(KeyDeviceAuthUserCodeLength); l > 0 { + return l + } + k := p.getProvider(ctx).StringF(KeyDeviceAuthUserCodeEntropyPreset, "high") + l, _ := p.userCodeEntropyPreset(k) + return l +} + +// GetUserCodeSymbols returns configured user_code allowed symbols +func (p *DefaultProvider) GetUserCodeSymbols(ctx context.Context) []rune { + if s := p.getProvider(ctx).String(KeyDeviceAuthUserCodeCharacterSet); s != "" { + return []rune(s) + } + k := p.getProvider(ctx).StringF(KeyDeviceAuthUserCodeEntropyPreset, "high") + _, s := p.userCodeEntropyPreset(k) + return s +} + func (p *DefaultProvider) LoginURL(ctx context.Context) *url.URL { return urlRoot(p.getProvider(ctx).URIF(KeyLoginURL, p.publicFallbackURL(ctx, "oauth2/fallbacks/login"))) } +func (p *DefaultProvider) RegistrationURL(ctx context.Context) *url.URL { + return urlRoot(p.getProvider(ctx).URIF(KeyRegistrationURL, p.LoginURL(ctx))) +} + func (p *DefaultProvider) LogoutURL(ctx context.Context) *url.URL { return urlRoot(p.getProvider(ctx).RequestURIF(KeyLogoutURL, p.publicFallbackURL(ctx, "oauth2/fallbacks/logout"))) } @@ -374,6 +482,16 @@ func (p *DefaultProvider) ErrorURL(ctx context.Context) *url.URL { return urlRoot(p.getProvider(ctx).RequestURIF(KeyErrorURL, p.publicFallbackURL(ctx, "oauth2/fallbacks/error"))) } +// DeviceVerificationURL returns user_code verification page URL. Defaults to "oauth2/fallbacks/device". +func (p *DefaultProvider) DeviceVerificationURL(ctx context.Context) *url.URL { + return urlRoot(p.getProvider(ctx).URIF(KeyDeviceVerificationURL, p.publicFallbackURL(ctx, "oauth2/fallbacks/device"))) +} + +// DeviceDoneURL returns the post device authorization URL. Defaults to "oauth2/fallbacks/device/done". 
+func (p *DefaultProvider) DeviceDoneURL(ctx context.Context) *url.URL { + return urlRoot(p.getProvider(ctx).RequestURIF(KeyDeviceDoneURL, p.publicFallbackURL(ctx, "oauth2/fallbacks/device/done"))) +} + func (p *DefaultProvider) PublicURL(ctx context.Context) *url.URL { return urlRoot(p.getProvider(ctx).RequestURIF(KeyPublicURL, p.IssuerURL(ctx))) } @@ -381,15 +499,40 @@ func (p *DefaultProvider) PublicURL(ctx context.Context) *url.URL { func (p *DefaultProvider) AdminURL(ctx context.Context) *url.URL { return urlRoot( p.getProvider(ctx).RequestURIF( - KeyAdminURL, p.fallbackURL(ctx, "/", p.host(AdminInterface), p.port(AdminInterface)), + KeyAdminURL, p.fallbackURL(ctx, "/", p.ServeAdmin(ctx)), ), ) } func (p *DefaultProvider) IssuerURL(ctx context.Context) *url.URL { - return p.getProvider(ctx).RequestURIF( - KeyIssuerURL, p.fallbackURL(ctx, "/", p.host(PublicInterface), p.port(PublicInterface)), - ) + return p.getProvider(ctx).RequestURIF(KeyIssuerURL, p.fallbackURL(ctx, "/", p.ServePublic(ctx))) +} + +func (p *DefaultProvider) KratosAdminURL(ctx context.Context) (*url.URL, bool) { + u := p.getProvider(ctx).RequestURIF(KeyIdentityProviderAdminURL, nil) + + return u, u != nil +} +func (p *DefaultProvider) KratosPublicURL(ctx context.Context) (*url.URL, bool) { + u := p.getProvider(ctx).RequestURIF(KeyIdentityProviderPublicURL, nil) + + return u, u != nil +} + +func (p *DefaultProvider) KratosRequestHeader(ctx context.Context) http.Header { + hh := map[string]string{} + if err := p.getProvider(ctx).Unmarshal(KeyIdentityProviderHeaders, &hh); err != nil { + p.l.WithError(errors.WithStack(err)). + Errorf("Configuration value from key %s could not be decoded.", KeyIdentityProviderHeaders) + return nil + } + + h := make(http.Header) + for k, v := range hh { + h.Set(k, v) + } + + return h } func (p *DefaultProvider) OAuth2ClientRegistrationURL(ctx context.Context) *url.URL { @@ -404,11 +547,32 @@ func (p *DefaultProvider) OAuth2AuthURL(ctx context.Context) *url.URL { return p.getProvider(ctx).RequestURIF(KeyOAuth2AuthURL, urlx.AppendPaths(p.PublicURL(ctx), "/oauth2/auth")) } +// OAuth2DeviceAuthorisationURL returns device authorization endpoint. Defaults to "/oauth2/device/auth". 
+func (p *DefaultProvider) OAuth2DeviceAuthorisationURL(ctx context.Context) *url.URL { + return p.getProvider(ctx).RequestURIF(KeyOAuth2DeviceAuthorisationURL, urlx.AppendPaths(p.PublicURL(ctx), "/oauth2/device/auth")) +} + func (p *DefaultProvider) JWKSURL(ctx context.Context) *url.URL { return p.getProvider(ctx).RequestURIF(KeyJWKSURL, urlx.AppendPaths(p.IssuerURL(ctx), "/.well-known/jwks.json")) } -func (p *DefaultProvider) AccessTokenStrategy(ctx context.Context) AccessTokenStrategyType { +func (p *DefaultProvider) CredentialsEndpointURL(ctx context.Context) *url.URL { + return p.getProvider(ctx).RequestURIF(KeyVerifiableCredentialsURL, urlx.AppendPaths(p.PublicURL(ctx), "/credentials")) +} + +type AccessTokenStrategySource interface { + GetAccessTokenStrategy() AccessTokenStrategyType +} + +func (p *DefaultProvider) AccessTokenStrategy(ctx context.Context, additionalSources ...AccessTokenStrategySource) AccessTokenStrategyType { + for _, src := range additionalSources { + if src == nil { + continue + } + if strategy := src.GetAccessTokenStrategy(); strategy != "" { + return strategy + } + } s, err := ToAccessTokenStrategyType(p.getProvider(ctx).String(KeyAccessTokenStrategy)) if err != nil { p.l.WithError(err).Warn("Key `strategies.access_token` contains an invalid value, falling back to `opaque` strategy.") @@ -418,12 +582,61 @@ func (p *DefaultProvider) AccessTokenStrategy(ctx context.Context) AccessTokenSt return s } -func (p *DefaultProvider) TokenRefreshHookURL(ctx context.Context) *url.URL { - if len(p.getProvider(ctx).String(KeyRefreshTokenHookURL)) == 0 { +type ( + Auth struct { + Type string `json:"type"` + Config AuthConfig `json:"config"` + } + AuthConfig struct { + In string `json:"in"` + Name string `json:"name"` + Value string `json:"value"` + } + HookConfig struct { + URL string `json:"url"` + Auth *Auth `json:"auth"` + } +) + +func (p *DefaultProvider) getHookConfig(ctx context.Context, key string) *HookConfig { + if p.getProvider(ctx).String(key) == "" { return nil } - return p.getProvider(ctx).RequestURIF(KeyRefreshTokenHookURL, nil) + if hookURL := p.getProvider(ctx).RequestURIF(key, nil); hookURL != nil { + return &HookConfig{ + URL: hookURL.String(), + } + } + + var hookConfig *HookConfig + if err := p.getProvider(ctx).Unmarshal(key, &hookConfig); err != nil { + p.l.WithError(errors.WithStack(err)). + Errorf("Configuration value from key %s could not be decoded.", key) + return nil + } + if hookConfig == nil { + return nil + } + + // validate URL by parsing it + u, err := url.ParseRequestURI(hookConfig.URL) + if err != nil { + p.l.WithError(errors.WithStack(err)). 
+ Errorf("Configuration value from key %s could not be decoded.", key) + return nil + } + hookConfig.URL = u.String() + + return hookConfig +} + +func (p *DefaultProvider) TokenHookConfig(ctx context.Context) *HookConfig { + return p.getHookConfig(ctx, KeyTokenHook) +} + +func (p *DefaultProvider) TokenRefreshHookConfig(ctx context.Context) *HookConfig { + return p.getHookConfig(ctx, KeyRefreshTokenHook) } func (p *DefaultProvider) DbIgnoreUnknownTableColumns() bool { @@ -519,10 +732,19 @@ func (p *DefaultProvider) CookieDomain(ctx context.Context) string { return p.getProvider(ctx).String(KeyCookieDomain) } +func (p *DefaultProvider) SessionCookiePath(ctx context.Context) string { + return p.getProvider(ctx).StringF(KeyCookieSessionPath, "/") +} + func (p *DefaultProvider) CookieNameLoginCSRF(ctx context.Context) string { return p.cookieSuffix(ctx, KeyCookieLoginCSRFName) } +// CookieNameDeviceCSRF returns the device CSRF cookie name. +func (p *DefaultProvider) CookieNameDeviceCSRF(ctx context.Context) string { + return p.cookieSuffix(ctx, KeyCookieDeviceCSRFName) +} + func (p *DefaultProvider) CookieNameConsentCSRF(ctx context.Context) string { return p.cookieSuffix(ctx, KeyCookieConsentCSRFName) } @@ -539,3 +761,36 @@ func (p *DefaultProvider) cookieSuffix(ctx context.Context, key string) string { return p.getProvider(ctx).String(key) + suffix } + +type GracefulRefreshTokenRotation struct { + Period time.Duration + Count int32 +} + +func (p *DefaultProvider) GracefulRefreshTokenRotation(ctx context.Context) (cfg GracefulRefreshTokenRotation) { + //nolint:gosec + cfg.Count = int32(x.Clamp(p.getProvider(ctx).IntF(KeyRefreshTokenRotationGraceReuseCount, 0), 0, math.MaxInt32)) + + // The maximum value is 5 minutes, unless also a reuse count is configured, in + // which case the maximum is 180 days + maxPeriod := 5 * time.Minute + if cfg.Count > 0 { + maxPeriod = 180 * 24 * time.Hour + } + cfg.Period = x.Clamp(p.getProvider(ctx).DurationF(KeyRefreshTokenRotationGracePeriod, 0), 0, maxPeriod) + + return +} + +func (p *DefaultProvider) GetPaginationEncryptionKeys(ctx context.Context) [][32]byte { + secrets := p.getProvider(ctx).Strings(KeyPaginationSecrets) + if len(secrets) == 0 { + secrets = p.getProvider(ctx).Strings(KeyGetSystemSecret) + } + + hashed := make([][32]byte, len(secrets)) + for i := range secrets { + hashed[i] = sha512.Sum512_256([]byte(secrets[i])) + } + return hashed +} diff --git a/driver/config/provider_fosite.go b/driver/config/provider_fosite.go index 07c43042edd..8b3719ff399 100644 --- a/driver/config/provider_fosite.go +++ b/driver/config/provider_fosite.go @@ -10,8 +10,9 @@ import ( "github.com/pkg/errors" - "github.com/ory/fosite" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/x" ) var _ fosite.GlobalSecretProvider = (*DefaultProvider)(nil) @@ -42,9 +43,9 @@ func (p *DefaultProvider) GetRotatedGlobalSecrets(ctx context.Context) ([][]byte return nil, nil } - var rotated [][]byte - for _, secret := range secrets[1:] { - rotated = append(rotated, x.HashStringSecret(secret)) + rotated := make([][]byte, len(secrets)-1) + for i, secret := range secrets[1:] { + rotated[i] = x.HashStringSecret(secret) } return rotated, nil @@ -68,6 +69,12 @@ func (p *DefaultProvider) GetRefreshTokenLifespan(ctx context.Context) time.Dura return p.getProvider(ctx).DurationF(KeyRefreshTokenLifespan, time.Hour*720) } +var _ 
fosite.VerifiableCredentialsNonceLifespanProvider = (*DefaultProvider)(nil) + +func (p *DefaultProvider) GetVerifiableCredentialsNonceLifespan(ctx context.Context) time.Duration { + return p.getProvider(ctx).DurationF(KeyVerifiableCredentialsNonceLifespan, time.Hour) +} + var _ fosite.IDTokenLifespanProvider = (*DefaultProvider)(nil) func (p *DefaultProvider) GetIDTokenLifespan(ctx context.Context) time.Duration { @@ -89,6 +96,21 @@ func (p *DefaultProvider) GetScopeStrategy(ctx context.Context) fosite.ScopeStra return fosite.ExactScopeStrategy } +var _ fosite.JWTScopeFieldProvider = (*DefaultProvider)(nil) + +func (p *DefaultProvider) GetJWTScopeField(ctx context.Context) jwt.JWTScopeFieldEnum { + switch strings.ToLower(p.getProvider(ctx).String(KeyJWTScopeClaimStrategy)) { + case "string": + return jwt.JWTScopeFieldString + case "both": + return jwt.JWTScopeFieldBoth + case "list": + return jwt.JWTScopeFieldList + default: + return jwt.JWTScopeFieldUnset + } +} + func (p *DefaultProvider) GetUseLegacyErrorFormat(context.Context) bool { return false } diff --git a/driver/config/provider_test.go b/driver/config/provider_test.go index fbe8cb2fc62..cc8d5c11a96 100644 --- a/driver/config/provider_test.go +++ b/driver/config/provider_test.go @@ -5,90 +5,90 @@ package config import ( "context" - "fmt" + "encoding/json" "io" "net/http" - "net/url" "os" - "strings" "testing" "time" - "github.com/ory/x/configx" - "github.com/ory/x/otelx" - "github.com/rs/cors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/x/urlx" - + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" "github.com/ory/x/logrusx" - - "github.com/ory/hydra/x" + "github.com/ory/x/otelx" + "github.com/ory/x/randx" + "github.com/ory/x/urlx" ) -func newProvider() *DefaultProvider { - return MustNew(context.Background(), logrusx.New("", "")) -} - -func setupEnv(env map[string]string) func(t *testing.T) func() { - return func(t *testing.T) (setup func()) { - setup = func() { - for k, v := range env { - t.Setenv(k, v) - } - } - return - } +func newProvider(t *testing.T, opts ...configx.OptionModifier) *DefaultProvider { + return MustNew(t, logrusx.New("", ""), opts...) 
} func TestSubjectTypesSupported(t *testing.T) { - ctx := context.Background() - for k, tc := range []struct { - d string - env func(t *testing.T) func() - e []string - }{ - { - d: "Load legacy environment variable in legacy format", - env: setupEnv(map[string]string{ - strings.ToUpper(strings.Replace(KeySubjectTypesSupported, ".", "_", -1)): "public,pairwise", - strings.ToUpper(strings.Replace("oidc.subject_identifiers.pairwise.salt", ".", "_", -1)): "some-salt", - }), - e: []string{"public", "pairwise"}, + ctx := t.Context() + for _, tc := range []struct { + d string + vals map[string]any + e []string + }{{ + d: "no subject types", + vals: map[string]any{KeySubjectTypesSupported: []string{}}, + e: []string{"public"}, + }, { + d: "public", + vals: map[string]any{KeySubjectTypesSupported: []string{"public"}}, + e: []string{"public"}, + }, { + d: "pairwise", + vals: map[string]any{ + KeySubjectTypesSupported: []string{"pairwise"}, + KeySubjectIdentifierAlgorithmSalt: "00000000", }, - { - d: "Load legacy environment variable in legacy format with JWT enabled", - env: setupEnv(map[string]string{ - strings.ToUpper(strings.Replace(KeySubjectTypesSupported, ".", "_", -1)): "public,pairwise", - strings.ToUpper(strings.Replace("oidc.subject_identifiers.pairwise.salt", ".", "_", -1)): "some-salt", - strings.ToUpper(strings.Replace(KeyAccessTokenStrategy, ".", "_", -1)): "jwt", - }), - e: []string{"public"}, + e: []string{"pairwise"}, + }, { + d: "public and pairwise", + vals: map[string]any{ + KeySubjectTypesSupported: []string{"public", "pairwise"}, + KeySubjectIdentifierAlgorithmSalt: "00000000", }, - } { - t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { - setup := tc.env(t) - setup() - p := newProvider() - p.MustSet(ctx, KeySubjectIdentifierAlgorithmSalt, "00000000") - assert.EqualValues(t, tc.e, p.SubjectTypesSupported(ctx)) + e: []string{"public", "pairwise"}, + }, { + d: "pairwise disabled with jwt", + vals: map[string]any{ + KeySubjectTypesSupported: []string{"public", "pairwise"}, + KeyAccessTokenStrategy: "jwt", + }, + e: []string{"public"}, + }, { + d: "unknown subject type", + vals: map[string]any{ + KeySubjectTypesSupported: []string{"public", "pairwise", "unknown"}, + KeySubjectIdentifierAlgorithmSalt: "00000000", + }, + e: []string{"public", "pairwise"}, + }} { + t.Run(tc.d, func(t *testing.T) { + p := newProvider(t, configx.WithValues(tc.vals), configx.SkipValidation()) + assert.Equal(t, tc.e, p.SubjectTypesSupported(ctx)) }) } } func TestWellKnownKeysUnique(t *testing.T) { - p := newProvider() - assert.EqualValues(t, []string{x.OpenIDConnectKeyName, x.OAuth2JWTKeyName}, p.WellKnownKeys(context.Background(), x.OAuth2JWTKeyName, x.OpenIDConnectKeyName, x.OpenIDConnectKeyName)) + p := newProvider(t) + assert.EqualValues(t, []string{x.OpenIDConnectKeyName, x.OAuth2JWTKeyName}, p.WellKnownKeys(t.Context(), x.OAuth2JWTKeyName, x.OpenIDConnectKeyName, x.OpenIDConnectKeyName)) } func TestCORSOptions(t *testing.T) { ctx := context.Background() - p := newProvider() - p.MustSet(ctx, "serve.public.cors.enabled", true) + p := newProvider(t, configx.WithValue("serve.public.cors.enabled", true)) - conf, enabled := p.CORS(ctx, PublicInterface) + conf, enabled := p.CORSPublic(ctx) assert.True(t, enabled) assert.EqualValues(t, cors.Options{ @@ -101,130 +101,122 @@ func TestCORSOptions(t *testing.T) { } func TestProviderAdminDisableHealthAccessLog(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) + p := newProvider(t) 
+ serve := p.ServeAdmin(t.Context()) + assert.False(t, serve.RequestLog.DisableHealth) - p := MustNew(context.Background(), l) - - value := p.DisableHealthAccessLog(AdminInterface) - assert.Equal(t, false, value) - - p.MustSet(ctx, AdminInterface.Key(KeySuffixDisableHealthAccessLog), "true") - - value = p.DisableHealthAccessLog(AdminInterface) - assert.Equal(t, true, value) + p = newProvider(t, configx.WithValue("serve.admin.requestlog.disable_health", true)) + serve = p.ServeAdmin(t.Context()) + assert.True(t, serve.RequestLog.DisableHealth) } func TestProviderPublicDisableHealthAccessLog(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) - - p := MustNew(context.Background(), l) - - value := p.DisableHealthAccessLog(PublicInterface) - assert.Equal(t, false, value) + p := newProvider(t) + serve := p.ServePublic(t.Context()) + assert.False(t, serve.RequestLog.DisableHealth) - p.MustSet(ctx, PublicInterface.Key(KeySuffixDisableHealthAccessLog), "true") - - value = p.DisableHealthAccessLog(PublicInterface) - assert.Equal(t, true, value) + p = newProvider(t, configx.WithValue("serve.public.requestlog.disable_health", true)) + serve = p.ServePublic(t.Context()) + assert.True(t, serve.RequestLog.DisableHealth) } func TestPublicAllowDynamicRegistration(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) - - p := MustNew(context.Background(), l) - - value := p.PublicAllowDynamicRegistration(ctx) - assert.Equal(t, false, value) - - p.MustSet(ctx, KeyPublicAllowDynamicRegistration, "true") + p := newProvider(t) + value := p.PublicAllowDynamicRegistration(t.Context()) + assert.False(t, value) - value = p.PublicAllowDynamicRegistration(ctx) - assert.Equal(t, true, value) + p = newProvider(t, configx.WithValue(KeyPublicAllowDynamicRegistration, true)) + value = p.PublicAllowDynamicRegistration(t.Context()) + assert.True(t, value) } func TestProviderIssuerURL(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) - p := MustNew(context.Background(), l) - p.MustSet(ctx, KeyIssuerURL, "http://hydra.localhost") - assert.Equal(t, "http://hydra.localhost", p.IssuerURL(ctx).String()) - - p2 := MustNew(context.Background(), l) - p2.MustSet(ctx, KeyIssuerURL, "http://hydra.localhost/") - assert.Equal(t, "http://hydra.localhost/", p2.IssuerURL(ctx).String()) + p := newProvider(t, configx.WithValue(KeyIssuerURL, "http://hydra.localhost")) + assert.Equal(t, "http://hydra.localhost", p.IssuerURL(t.Context()).String()) } func TestProviderIssuerPublicURL(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) - p := MustNew(context.Background(), l) - p.MustSet(ctx, KeyIssuerURL, "http://hydra.localhost") - p.MustSet(ctx, KeyPublicURL, "http://hydra.example") - - assert.Equal(t, "http://hydra.localhost", p.IssuerURL(ctx).String()) - assert.Equal(t, "http://hydra.example/", p.PublicURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/.well-known/jwks.json", p.JWKSURL(ctx).String()) - assert.Equal(t, "http://hydra.example/oauth2/fallbacks/consent", p.ConsentURL(ctx).String()) - assert.Equal(t, "http://hydra.example/oauth2/fallbacks/login", p.LoginURL(ctx).String()) - assert.Equal(t, "http://hydra.example/oauth2/fallbacks/logout", p.LogoutURL(ctx).String()) - assert.Equal(t, "http://hydra.example/oauth2/token", p.OAuth2TokenURL(ctx).String()) - assert.Equal(t, "http://hydra.example/oauth2/auth", 
p.OAuth2AuthURL(ctx).String()) - assert.Equal(t, "http://hydra.example/userinfo", p.OIDCDiscoveryUserinfoEndpoint(ctx).String()) - - p2 := MustNew(context.Background(), l) - p2.MustSet(ctx, KeyIssuerURL, "http://hydra.localhost/") - assert.Equal(t, "http://hydra.localhost/", p2.IssuerURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/", p2.PublicURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/.well-known/jwks.json", p2.JWKSURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/oauth2/fallbacks/consent", p2.ConsentURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/oauth2/fallbacks/login", p2.LoginURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/oauth2/fallbacks/logout", p2.LogoutURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/oauth2/token", p2.OAuth2TokenURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/oauth2/auth", p2.OAuth2AuthURL(ctx).String()) - assert.Equal(t, "http://hydra.localhost/userinfo", p2.OIDCDiscoveryUserinfoEndpoint(ctx).String()) + p := newProvider(t, configx.WithValues(map[string]any{ + KeyIssuerURL: "http://hydra.localhost", + KeyPublicURL: "http://hydra.example", + })) + + assert.Equal(t, "http://hydra.localhost", p.IssuerURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/", p.PublicURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/.well-known/jwks.json", p.JWKSURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/oauth2/fallbacks/consent", p.ConsentURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/oauth2/fallbacks/login", p.LoginURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/oauth2/fallbacks/logout", p.LogoutURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/oauth2/token", p.OAuth2TokenURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/oauth2/auth", p.OAuth2AuthURL(t.Context()).String()) + assert.Equal(t, "http://hydra.example/userinfo", p.OIDCDiscoveryUserinfoEndpoint(t.Context()).String()) + + p = newProvider(t, configx.WithValue(KeyIssuerURL, "http://hydra.localhost/")) + assert.Equal(t, "http://hydra.localhost/", p.IssuerURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/", p.PublicURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/.well-known/jwks.json", p.JWKSURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/oauth2/fallbacks/consent", p.ConsentURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/oauth2/fallbacks/login", p.LoginURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/oauth2/fallbacks/logout", p.LogoutURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/oauth2/token", p.OAuth2TokenURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/oauth2/auth", p.OAuth2AuthURL(t.Context()).String()) + assert.Equal(t, "http://hydra.localhost/userinfo", p.OIDCDiscoveryUserinfoEndpoint(t.Context()).String()) } func TestProviderCookieSameSiteMode(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) - - p := MustNew(context.Background(), l, configx.SkipValidation()) - p.MustSet(ctx, KeyTLSEnabled, true) - - p.MustSet(ctx, KeyCookieSameSiteMode, "") - assert.Equal(t, http.SameSiteDefaultMode, p.CookieSameSiteMode(ctx)) - - p.MustSet(ctx, KeyCookieSameSiteMode, "none") - assert.Equal(t, http.SameSiteNoneMode, p.CookieSameSiteMode(ctx)) - - p = MustNew(context.Background(), l, configx.SkipValidation()) - 
p.MustSet(ctx, "dev", true) - assert.Equal(t, http.SameSiteLaxMode, p.CookieSameSiteMode(ctx)) - p.MustSet(ctx, KeyCookieSameSiteMode, "none") - assert.Equal(t, http.SameSiteLaxMode, p.CookieSameSiteMode(ctx)) + for _, tc := range []struct { + d, mode string + others map[string]any + expected http.SameSite + }{{ + d: "default", + mode: "", + expected: http.SameSiteDefaultMode, + }, { + d: "default dev", + mode: "", + others: map[string]any{KeyDevelopmentMode: true}, + expected: http.SameSiteLaxMode, + }, { + d: "none with http", + mode: "none", + others: map[string]any{KeyIssuerURL: "http://example.com"}, + expected: http.SameSiteLaxMode, + }, { + d: "none with https", + mode: "none", + others: map[string]any{KeyIssuerURL: "https://example.com"}, + expected: http.SameSiteNoneMode, + }, { + d: "lax", + mode: "lax", + expected: http.SameSiteLaxMode, + }, { + d: "strict", + mode: "strict", + expected: http.SameSiteStrictMode, + }} { + t.Run(tc.d, func(t *testing.T) { + p := newProvider(t, configx.WithValue(KeyCookieSameSiteMode, tc.mode), configx.WithValues(tc.others), configx.SkipValidation()) + assert.Equal(t, tc.expected, p.CookieSameSiteMode(t.Context())) + }) + } } -func TestViperProviderValidates(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - c := MustNew(context.Background(), l, configx.WithConfigFiles("../../internal/.hydra.yaml")) +func TestProviderValidates(t *testing.T) { + ctx := t.Context() + c := newProvider(t, configx.WithConfigFiles("../../internal/.hydra.yaml")) // log assert.Equal(t, "debug", c.Source(ctx).String(KeyLogLevel)) assert.Equal(t, "json", c.Source(ctx).String("log.format")) // serve - assert.Equal(t, "localhost:1", c.ListenOn(PublicInterface)) - assert.Equal(t, "localhost:2", c.ListenOn(AdminInterface)) + servePublic, serveAdmin := c.ServePublic(ctx), c.ServeAdmin(ctx) + assert.Equal(t, "localhost", servePublic.Host) + assert.Equal(t, 1, servePublic.Port) + assert.Equal(t, "localhost", serveAdmin.Host) + assert.Equal(t, 2, serveAdmin.Port) expectedPublicPermission := &configx.UnixPermission{ Owner: "hydra", @@ -236,8 +228,8 @@ func TestViperProviderValidates(t *testing.T) { Group: "hydra-admin-api", Mode: 0770, } - assert.Equal(t, expectedPublicPermission, c.SocketPermission(PublicInterface)) - assert.Equal(t, expectedAdminPermission, c.SocketPermission(AdminInterface)) + assert.Equal(t, expectedPublicPermission, &servePublic.Socket) + assert.Equal(t, expectedAdminPermission, &serveAdmin.Socket) expectedCors := cors.Options{ AllowedOrigins: []string{"https://example.com"}, @@ -249,17 +241,23 @@ func TestViperProviderValidates(t *testing.T) { Debug: false, } - gc, enabled := c.CORS(ctx, AdminInterface) + gc, enabled := c.CORSAdmin(ctx) assert.False(t, enabled) assert.Equal(t, expectedCors, gc) - gc, enabled = c.CORS(ctx, PublicInterface) + gc, enabled = c.CORSPublic(ctx) assert.False(t, enabled) assert.Equal(t, expectedCors, gc) - assert.Equal(t, []string{"127.0.0.1/32"}, c.TLS(ctx, PublicInterface).AllowTerminationFrom()) + assert.Equal(t, []string{"127.0.0.1/32"}, c.Source(ctx).Strings("serve.tls.allow_termination_from")) + assert.Equal(t, []string{"127.0.0.1/32"}, servePublic.TLS.AllowTerminationFrom) + assert.Equal(t, []string{"127.0.0.1/32"}, serveAdmin.TLS.AllowTerminationFrom) assert.Equal(t, "/path/to/file.pem", c.Source(ctx).String("serve.tls.key.path")) + assert.Equal(t, "/path/to/file.pem", servePublic.TLS.KeyPath) + assert.Equal(t, "/path/to/file.pem", serveAdmin.TLS.KeyPath) assert.Equal(t, "b3J5IGh5ZHJhIGlzIGF3ZXNvbWUK", 
c.Source(ctx).String("serve.tls.cert.base64")) + assert.Equal(t, "b3J5IGh5ZHJhIGlzIGF3ZXNvbWUK", servePublic.TLS.CertBase64) + assert.Equal(t, "b3J5IGh5ZHJhIGlzIGF3ZXNvbWUK", serveAdmin.TLS.CertBase64) assert.Equal(t, http.SameSiteLaxMode, c.CookieSameSiteMode(ctx)) assert.Equal(t, true, c.CookieSameSiteLegacyWorkaround(ctx)) @@ -267,8 +265,9 @@ func TestViperProviderValidates(t *testing.T) { assert.Contains(t, c.DSN(), "sqlite://") // webfinger - assert.Equal(t, []string{"hydra.openid.id-token"}, c.WellKnownKeys(ctx)) + assert.Equal(t, []string{"hydra.openid.id-token", "hydra.jwt.access-token"}, c.WellKnownKeys(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://example.com"), c.OAuth2ClientRegistrationURL(ctx)) + assert.Equal(t, urlx.ParseOrPanic("https://example.com/device_authorization"), c.OAuth2DeviceAuthorisationURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://example.com/jwks.json"), c.JWKSURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://example.com/auth"), c.OAuth2AuthURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://example.com/token"), c.OAuth2TokenURL(ctx)) @@ -281,12 +280,25 @@ func TestViperProviderValidates(t *testing.T) { assert.Equal(t, "random_salt", c.SubjectIdentifierAlgorithmSalt(ctx)) assert.Equal(t, []string{"whatever"}, c.DefaultClientScope(ctx)) + // refresh + assert.Equal(t, GracefulRefreshTokenRotation{}, c.GracefulRefreshTokenRotation(ctx)) + require.NoError(t, c.Set(ctx, KeyRefreshTokenRotationGracePeriod, "1s")) + assert.Equal(t, time.Second, c.GracefulRefreshTokenRotation(ctx).Period) + require.NoError(t, c.Set(ctx, KeyRefreshTokenRotationGracePeriod, "2h")) + assert.Equal(t, 5*time.Minute, c.GracefulRefreshTokenRotation(ctx).Period) + require.NoError(t, c.Set(ctx, KeyRefreshTokenRotationGraceReuseCount, "2")) + assert.Equal(t, GracefulRefreshTokenRotation{Count: 2, Period: 2 * time.Hour}, c.GracefulRefreshTokenRotation(ctx)) + require.NoError(t, c.Set(ctx, KeyRefreshTokenRotationGracePeriod, (time.Hour*24*200).String())) + assert.Equal(t, GracefulRefreshTokenRotation{Count: 2, Period: time.Hour * 24 * 180}, c.GracefulRefreshTokenRotation(ctx)) + // urls assert.Equal(t, urlx.ParseOrPanic("https://issuer"), c.IssuerURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://public/"), c.PublicURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://admin/"), c.AdminURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://login/"), c.LoginURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://consent/"), c.ConsentURL(ctx)) + assert.Equal(t, urlx.ParseOrPanic("https://device/"), c.DeviceVerificationURL(ctx)) + assert.Equal(t, urlx.ParseOrPanic("https://device/callback"), c.DeviceDoneURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://logout/"), c.LogoutURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://error/"), c.ErrorURL(ctx)) assert.Equal(t, urlx.ParseOrPanic("https://post_logout/"), c.LogoutRedirectURL(ctx)) @@ -296,6 +308,7 @@ func TestViperProviderValidates(t *testing.T) { assert.False(t, c.GetScopeStrategy(ctx)([]string{"openid.*"}, "openid.email"), "should us fosite.ExactScopeStrategy") assert.Equal(t, AccessTokenDefaultStrategy, c.AccessTokenStrategy(ctx)) assert.Equal(t, false, c.GrantAllClientCredentialsScopesPerDefault(ctx)) + assert.Equal(t, jwt.JWTScopeFieldList, c.GetJWTScopeField(ctx)) // ttl assert.Equal(t, 2*time.Hour, c.ConsentRequestMaxAge(ctx)) @@ -303,12 +316,17 @@ func TestViperProviderValidates(t *testing.T) { assert.Equal(t, 2*time.Hour, c.GetRefreshTokenLifespan(ctx)) assert.Equal(t, 2*time.Hour, c.GetIDTokenLifespan(ctx)) assert.Equal(t, 
2*time.Hour, c.GetAuthorizeCodeLifespan(ctx)) + assert.Equal(t, 2*time.Hour, c.GetDeviceAndUserCodeLifespan(ctx)) + assert.Equal(t, 24*time.Hour, c.GetAuthenticationSessionLifespan(ctx)) // oauth2 assert.Equal(t, true, c.GetSendDebugMessagesToClients(ctx)) assert.Equal(t, 20, c.GetBCryptCost(ctx)) assert.Equal(t, true, c.GetEnforcePKCE(ctx)) assert.Equal(t, true, c.GetEnforcePKCEForPublicClients(ctx)) + assert.Equal(t, 2*time.Hour, c.GetDeviceAuthTokenPollingInterval(ctx)) + assert.Equal(t, 8, c.GetUserCodeLength(ctx)) + assert.Equal(t, string(randx.AlphaUpper), string(c.GetUserCodeSymbols(ctx))) // secrets secret, err := c.GetGlobalSecret(ctx) @@ -317,7 +335,11 @@ func TestViperProviderValidates(t *testing.T) { cookieSecret, err := c.GetCookieSecrets(ctx) require.NoError(t, err) - assert.Equal(t, [][]uint8{[]byte("some-random-cookie-secret")}, cookieSecret) + assert.Equal(t, [][]byte{[]byte("some-random-cookie-secret")}, cookieSecret) + + paginationKeys := c.GetPaginationEncryptionKeys(ctx) + require.Len(t, paginationKeys, 1) + assert.Equal(t, [32]byte{0x1a, 0x4c, 0x1, 0xbc, 0x1b, 0xd1, 0x4c, 0xdf, 0x23, 0x3, 0xd9, 0x1a, 0x2a, 0x1b, 0x68, 0xdc, 0x69, 0x17, 0xf4, 0x31, 0xd, 0x27, 0x6d, 0x86, 0x70, 0xb0, 0xae, 0x2d, 0x45, 0xe2, 0xf, 0xab}, paginationKeys[0]) // profiling assert.Equal(t, "cpu", c.Source(ctx).String("profiling")) @@ -330,12 +352,20 @@ func TestViperProviderValidates(t *testing.T) { Jaeger: otelx.JaegerConfig{ LocalAgentAddress: "127.0.0.1:6831", Sampling: otelx.JaegerSampling{ - ServerURL: "http://sampling", + ServerURL: "http://sampling", + TraceIdRatio: 1, }, }, Zipkin: otelx.ZipkinConfig{ ServerURL: "http://zipkin/api/v2/spans", }, + OTLP: otelx.OTLPConfig{ + ServerURL: "localhost:4318", + Insecure: true, + Sampling: otelx.OTLPSampling{ + SamplingRatio: 1.0, + }, + }, }, }, c.Tracing()) } @@ -363,38 +393,49 @@ func TestSetPerm(t *testing.T) { } func TestLoginConsentURL(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - l.Logrus().SetOutput(io.Discard) - p := MustNew(context.Background(), l) - p.MustSet(ctx, KeyLoginURL, "http://localhost:8080/oauth/login") - p.MustSet(ctx, KeyConsentURL, "http://localhost:8080/oauth/consent") - - assert.Equal(t, "http://localhost:8080/oauth/login", p.LoginURL(ctx).String()) - assert.Equal(t, "http://localhost:8080/oauth/consent", p.ConsentURL(ctx).String()) + p := newProvider(t, configx.WithValues(map[string]any{ + KeyLoginURL: "http://localhost:8080/oauth/login", + KeyConsentURL: "http://localhost:8080/oauth/consent", + KeyDeviceVerificationURL: "http://localhost:8080/oauth/device", + })) + + assert.Equal(t, "http://localhost:8080/oauth/login", p.LoginURL(t.Context()).String()) + assert.Equal(t, "http://localhost:8080/oauth/consent", p.ConsentURL(t.Context()).String()) + assert.Equal(t, "http://localhost:8080/oauth/device", p.DeviceVerificationURL(t.Context()).String()) + + p = newProvider(t, configx.WithValues(map[string]any{ + KeyLoginURL: "http://localhost:3000/#/oauth/login", + KeyConsentURL: "http://localhost:3000/#/oauth/consent", + KeyDeviceVerificationURL: "http://localhost:3000/#/oauth/device", + })) + + assert.Equal(t, "http://localhost:3000/#/oauth/login", p.LoginURL(t.Context()).String()) + assert.Equal(t, "http://localhost:3000/#/oauth/consent", p.ConsentURL(t.Context()).String()) + assert.Equal(t, "http://localhost:3000/#/oauth/device", p.DeviceVerificationURL(t.Context()).String()) +} - p2 := MustNew(context.Background(), l) - p2.MustSet(ctx, KeyLoginURL, "http://localhost:3000/#/oauth/login") - 
p2.MustSet(ctx, KeyConsentURL, "http://localhost:3000/#/oauth/consent") +func TestInfinityRefreshTokenTTL(t *testing.T) { + c := newProvider(t, configx.WithValue("ttl.refresh_token", -1)) - assert.Equal(t, "http://localhost:3000/#/oauth/login", p2.LoginURL(ctx).String()) - assert.Equal(t, "http://localhost:3000/#/oauth/consent", p2.ConsentURL(ctx).String()) + assert.Equal(t, time.Duration(-1), c.GetRefreshTokenLifespan(t.Context())) } -func TestInfinitRefreshTokenTTL(t *testing.T) { +func TestLimitAuthSessionLifespan(t *testing.T) { ctx := context.Background() l := logrusx.New("", "") l.Logrus().SetOutput(io.Discard) - c := MustNew(context.Background(), l, configx.WithValue("ttl.refresh_token", -1)) + c := MustNew(t, l) + assert.Equal(t, 30*24*time.Hour, c.GetAuthenticationSessionLifespan(ctx)) - assert.Equal(t, -1*time.Nanosecond, c.GetRefreshTokenLifespan(ctx)) + require.NoError(t, c.Set(ctx, KeyAuthenticationSessionLifespan, (time.Hour*24*300).String())) + assert.Equal(t, 180*24*time.Hour, c.GetAuthenticationSessionLifespan(ctx)) } func TestCookieSecure(t *testing.T) { ctx := context.Background() l := logrusx.New("", "") l.Logrus().SetOutput(io.Discard) - c := MustNew(context.Background(), l, configx.WithValue(KeyDevelopmentMode, true)) + c := MustNew(t, l, configx.WithValue(KeyDevelopmentMode, true)) c.MustSet(ctx, KeyCookieSecure, true) assert.True(t, c.CookieSecure(ctx)) @@ -406,44 +447,107 @@ func TestCookieSecure(t *testing.T) { assert.True(t, c.CookieSecure(ctx)) } -func TestTokenRefreshHookURL(t *testing.T) { +func TestHookConfigs(t *testing.T) { ctx := context.Background() l := logrusx.New("", "") l.Logrus().SetOutput(io.Discard) - c := MustNew(context.Background(), l, configx.SkipValidation()) + c := MustNew(t, l, configx.SkipValidation()) - assert.EqualValues(t, (*url.URL)(nil), c.TokenRefreshHookURL(ctx)) - c.MustSet(ctx, KeyRefreshTokenHookURL, "") - assert.EqualValues(t, (*url.URL)(nil), c.TokenRefreshHookURL(ctx)) - c.MustSet(ctx, KeyRefreshTokenHookURL, "http://localhost:8080/oauth/token_refresh") - assert.EqualValues(t, "http://localhost:8080/oauth/token_refresh", c.TokenRefreshHookURL(ctx).String()) + for key, getFunc := range map[string]func(context.Context) *HookConfig{ + KeyRefreshTokenHook: c.TokenRefreshHookConfig, + KeyTokenHook: c.TokenHookConfig, + } { + assert.Nil(t, getFunc(ctx)) + c.MustSet(ctx, key, "") + assert.Nil(t, getFunc(ctx)) + c.MustSet(ctx, key, "http://localhost:8080/hook") + hc := getFunc(ctx) + require.NotNil(t, hc) + assert.EqualValues(t, "http://localhost:8080/hook", hc.URL) + + c.MustSet(ctx, key, ` +{ + "url": "http://localhost:8080/hook2", + "auth": { + "type": "api_key", + "config": { + "in": "header", + "name": "my-header", + "value": "my-value" + } + } +}`) + hc = getFunc(ctx) + require.NotNil(t, hc) + assert.EqualValues(t, "http://localhost:8080/hook2", hc.URL) + assert.EqualValues(t, "api_key", hc.Auth.Type) + rawConfig, err := json.Marshal(hc.Auth.Config) + require.NoError(t, err) + assert.JSONEq(t, `{"in":"header","name":"my-header","value":"my-value"}`, string(rawConfig)) + } } func TestJWTBearer(t *testing.T) { l := logrusx.New("", "") l.Logrus().SetOutput(io.Discard) - p := MustNew(context.Background(), l) + p := MustNew(t, l) ctx := context.Background() - //p.MustSet(ctx, KeyOAuth2GrantJWTClientAuthOptional, false) + // p.MustSet(ctx, KeyOAuth2GrantJWTClientAuthOptional, false) p.MustSet(ctx, KeyOAuth2GrantJWTMaxDuration, "1h") p.MustSet(ctx, KeyOAuth2GrantJWTIssuedDateOptional, false) p.MustSet(ctx, KeyOAuth2GrantJWTIDOptional, 
false) - //assert.Equal(t, false, p.GetGrantTypeJWTBearerCanSkipClientAuth(ctx)) + // assert.Equal(t, false, p.GetGrantTypeJWTBearerCanSkipClientAuth(ctx)) assert.Equal(t, 1.0, p.GetJWTMaxDuration(ctx).Hours()) assert.Equal(t, false, p.GetGrantTypeJWTBearerIssuedDateOptional(ctx)) assert.Equal(t, false, p.GetGrantTypeJWTBearerIDOptional(ctx)) - p2 := MustNew(context.Background(), l) + p2 := MustNew(t, l) - //p2.MustSet(ctx, KeyOAuth2GrantJWTClientAuthOptional, true) + // p2.MustSet(ctx, KeyOAuth2GrantJWTClientAuthOptional, true) p2.MustSet(ctx, KeyOAuth2GrantJWTMaxDuration, "24h") p2.MustSet(ctx, KeyOAuth2GrantJWTIssuedDateOptional, true) p2.MustSet(ctx, KeyOAuth2GrantJWTIDOptional, true) - //assert.Equal(t, true, p2.GetGrantTypeJWTBearerCanSkipClientAuth(ctx)) + // assert.Equal(t, true, p2.GetGrantTypeJWTBearerCanSkipClientAuth(ctx)) assert.Equal(t, 24.0, p2.GetJWTMaxDuration(ctx).Hours()) assert.Equal(t, true, p2.GetGrantTypeJWTBearerIssuedDateOptional(ctx)) assert.Equal(t, true, p2.GetGrantTypeJWTBearerIDOptional(ctx)) } + +func TestJWTScopeClaimStrategy(t *testing.T) { + l := logrusx.New("", "") + l.Logrus().SetOutput(io.Discard) + p := MustNew(t, l) + + ctx := context.Background() + + assert.Equal(t, jwt.JWTScopeFieldList, p.GetJWTScopeField(ctx)) + p.MustSet(ctx, KeyJWTScopeClaimStrategy, "list") + assert.Equal(t, jwt.JWTScopeFieldList, p.GetJWTScopeField(ctx)) + p.MustSet(ctx, KeyJWTScopeClaimStrategy, "string") + assert.Equal(t, jwt.JWTScopeFieldString, p.GetJWTScopeField(ctx)) + p.MustSet(ctx, KeyJWTScopeClaimStrategy, "both") + assert.Equal(t, jwt.JWTScopeFieldBoth, p.GetJWTScopeField(ctx)) +} + +func TestDeviceUserCode(t *testing.T) { + l := logrusx.New("", "") + + t.Run("preset", func(t *testing.T) { + p := MustNew(t, l, configx.WithValue(KeyDeviceAuthUserCodeEntropyPreset, "low")) + assert.Equal(t, 9, p.GetUserCodeLength(t.Context())) + assert.Equal(t, string(randx.Numeric), string(p.GetUserCodeSymbols(t.Context()))) + }) + + t.Run("explicit values", func(t *testing.T) { + length, charSet := 15, "foobarbaz1234" + p := MustNew(t, l, configx.WithValues(map[string]any{ + KeyDeviceAuthUserCodeLength: length, + KeyDeviceAuthUserCodeCharacterSet: charSet, + })) + assert.Equal(t, length, p.GetUserCodeLength(t.Context())) + assert.Equal(t, charSet, string(p.GetUserCodeSymbols(t.Context()))) + }) +} diff --git a/driver/config/serve.go b/driver/config/serve.go index f37dcde41eb..ff80cf6e018 100644 --- a/driver/config/serve.go +++ b/driver/config/serve.go @@ -5,113 +5,68 @@ package config import ( "context" - "fmt" - "os" - "strings" - - "github.com/ory/x/contextx" "github.com/rs/cors" "github.com/ory/x/configx" ) -const ( - KeySuffixListenOnHost = "host" - KeySuffixListenOnPort = "port" - KeySuffixSocketOwner = "socket.owner" - KeySuffixSocketGroup = "socket.group" - KeySuffixSocketMode = "socket.mode" - KeySuffixDisableHealthAccessLog = "request_log.disable_for_health" -) - -var ( - PublicInterface ServeInterface = &servePrefix{ - prefix: "serve.public", - } - AdminInterface ServeInterface = &servePrefix{ - prefix: "serve.admin", - } -) - -type ServeInterface interface { - Key(suffix string) string - String() string -} - -type servePrefix struct { - prefix string -} - -func (iface *servePrefix) Key(suffix string) string { - if suffix == KeyRoot { - return iface.prefix - } - return fmt.Sprintf("%s.%s", iface.prefix, suffix) -} - -func (iface *servePrefix) String() string { - return iface.prefix -} - -func (p *DefaultProvider) ListenOn(iface 
ServeInterface) string { - host, port := p.host(iface), p.port(iface) - if strings.HasPrefix(host, "unix:") { - return host - } - return fmt.Sprintf("%s:%d", host, port) -} - -func (p *DefaultProvider) SocketPermission(iface ServeInterface) *configx.UnixPermission { - return &configx.UnixPermission{ - Owner: p.getProvider(contextx.RootContext).String(iface.Key(KeySuffixSocketOwner)), - Group: p.getProvider(contextx.RootContext).String(iface.Key(KeySuffixSocketGroup)), - Mode: os.FileMode(p.getProvider(contextx.RootContext).IntF(iface.Key(KeySuffixSocketMode), 0755)), - } +func (p *DefaultProvider) ServePublic(ctx context.Context) *configx.Serve { + sharedTLS := p.getProvider(ctx).TLS("serve.tls", configx.TLS{}) + c := p.getProvider(ctx).Serve("serve.public", p.IsDevelopmentMode(ctx), configx.Serve{ + Host: "localhost", + Port: 4444, + TLS: sharedTLS, + }) + return c } -func (p *DefaultProvider) CORS(ctx context.Context, iface ServeInterface) (cors.Options, bool) { - return p.getProvider(ctx).CORS(iface.Key(KeyRoot), cors.Options{ - AllowedMethods: []string{ - "POST", - "GET", - "PUT", - "PATCH", - "DELETE", - "CONNECT", - "HEAD", - "OPTIONS", - "TRACE", - }, - AllowedHeaders: []string{ - "Accept", - "Content-Type", - "Content-Length", - "Accept-Language", - "Content-Language", - "Authorization", - }, - ExposedHeaders: []string{ - "Cache-Control", - "Expires", - "Last-Modified", - "Pragma", - "Content-Length", - "Content-Language", - "Content-Type", - }, - AllowCredentials: true, +func (p *DefaultProvider) ServeAdmin(ctx context.Context) *configx.Serve { + sharedTLS := p.getProvider(ctx).TLS("serve.tls", configx.TLS{}) + return p.getProvider(ctx).Serve("serve.admin", p.IsDevelopmentMode(ctx), configx.Serve{ + Host: "localhost", + Port: 4445, + TLS: sharedTLS, }) } -func (p *DefaultProvider) DisableHealthAccessLog(iface ServeInterface) bool { - return p.getProvider(contextx.RootContext).Bool(iface.Key(KeySuffixDisableHealthAccessLog)) +var defaultCORSOptions = cors.Options{ + AllowedOrigins: []string{}, + AllowedMethods: []string{ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE", + }, + AllowedHeaders: []string{ + "Accept", + "Content-Type", + "Content-Length", + "Accept-Language", + "Content-Language", + "Authorization", + }, + ExposedHeaders: []string{ + "Cache-Control", + "Expires", + "Last-Modified", + "Pragma", + "Content-Length", + "Content-Language", + "Content-Type", + }, + AllowCredentials: true, } -func (p *DefaultProvider) host(iface ServeInterface) string { - return p.getProvider(contextx.RootContext).String(iface.Key(KeySuffixListenOnHost)) +func (p *DefaultProvider) CORSPublic(ctx context.Context) (cors.Options, bool) { + return p.getProvider(ctx).CORS("serve.public", defaultCORSOptions) } -func (p *DefaultProvider) port(iface ServeInterface) int { - return p.getProvider(contextx.RootContext).Int(iface.Key(KeySuffixListenOnPort)) +func (p *DefaultProvider) CORSAdmin(ctx context.Context) (cors.Options, bool) { + return p.getProvider(ctx).CORS("serve.admin", defaultCORSOptions) } diff --git a/driver/config/tls.go b/driver/config/tls.go deleted file mode 100644 index 078ef6ec26d..00000000000 --- a/driver/config/tls.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package config - -import ( - "context" - "crypto/tls" - - "github.com/pkg/errors" - - "github.com/ory/x/logrusx" - "github.com/ory/x/tlsx" -) - -const ( - KeySuffixTLSEnabled 
= "tls.enabled" - KeySuffixTLSAllowTerminationFrom = "tls.allow_termination_from" - KeySuffixTLSCertString = "tls.cert.base64" - KeySuffixTLSKeyString = "tls.key.base64" - KeySuffixTLSCertPath = "tls.cert.path" - KeySuffixTLSKeyPath = "tls.key.path" - - KeyTLSAllowTerminationFrom = "serve." + KeySuffixTLSAllowTerminationFrom - KeyTLSCertString = "serve." + KeySuffixTLSCertString - KeyTLSKeyString = "serve." + KeySuffixTLSKeyString - KeyTLSCertPath = "serve." + KeySuffixTLSCertPath - KeyTLSKeyPath = "serve." + KeySuffixTLSKeyPath - KeyTLSEnabled = "serve." + KeySuffixTLSEnabled -) - -type TLSConfig interface { - Enabled() bool - AllowTerminationFrom() []string - GetCertificateFunc(stopReload <-chan struct{}, _ *logrusx.Logger) (func(*tls.ClientHelloInfo) (*tls.Certificate, error), error) -} - -var _ TLSConfig = (*tlsConfig)(nil) - -type tlsConfig struct { - enabled bool - allowTerminationFrom []string - - certString string - keyString string - certPath string - keyPath string -} - -func (c *tlsConfig) Enabled() bool { - return c.enabled -} - -func (c *tlsConfig) AllowTerminationFrom() []string { - return c.allowTerminationFrom -} - -func (p *DefaultProvider) TLS(ctx context.Context, iface ServeInterface) TLSConfig { - return &tlsConfig{ - enabled: p.getProvider(ctx).BoolF(iface.Key(KeySuffixTLSEnabled), p.getProvider(ctx).Bool(KeyTLSEnabled)), - allowTerminationFrom: p.getProvider(ctx).StringsF(iface.Key(KeySuffixTLSAllowTerminationFrom), p.getProvider(ctx).Strings(KeyTLSAllowTerminationFrom)), - certString: p.getProvider(ctx).StringF(iface.Key(KeySuffixTLSCertString), p.getProvider(ctx).String(KeyTLSCertString)), - keyString: p.getProvider(ctx).StringF(iface.Key(KeySuffixTLSKeyString), p.getProvider(ctx).String(KeyTLSKeyString)), - certPath: p.getProvider(ctx).StringF(iface.Key(KeySuffixTLSCertPath), p.getProvider(ctx).String(KeyTLSCertPath)), - keyPath: p.getProvider(ctx).StringF(iface.Key(KeySuffixTLSKeyPath), p.getProvider(ctx).String(KeyTLSKeyPath)), - } -} - -func (c *tlsConfig) GetCertificateFunc(stopReload <-chan struct{}, log *logrusx.Logger) (func(*tls.ClientHelloInfo) (*tls.Certificate, error), error) { - if c.certPath != "" && c.keyPath != "" { // attempt to load from disk first (enables hot-reloading) - ctx, cancel := context.WithCancel(context.Background()) - go func() { - <-stopReload - cancel() - }() - errs := make(chan error, 1) - getCert, err := tlsx.GetCertificate(ctx, c.certPath, c.keyPath, errs) - if err != nil { - return nil, errors.WithStack(err) - } - go func() { - for err := range errs { - log.WithError(err).Error("Failed to reload TLS certificates. 
Using the previously loaded certificates.") - } - }() - return getCert, nil - } - if c.certString != "" && c.keyString != "" { // base64-encoded directly in config - cert, err := tlsx.CertificateFromBase64(c.certString, c.keyString) - if err != nil { - return nil, errors.WithStack(err) - } - return func(*tls.ClientHelloInfo) (*tls.Certificate, error) { - return &cert, nil - }, nil - } - return nil, tlsx.ErrNoCertificatesConfigured -} diff --git a/driver/config/types_test.go b/driver/config/types_test.go index e7fe500f91e..39523fde6ea 100644 --- a/driver/config/types_test.go +++ b/driver/config/types_test.go @@ -19,6 +19,6 @@ func TestToAccessTokenStrategyType(t *testing.T) { require.NoError(t, err) assert.Equal(t, AccessTokenJWTStrategy, actual) - actual, err = ToAccessTokenStrategyType("invalid") + _, err = ToAccessTokenStrategyType("invalid") require.Error(t, err) } diff --git a/driver/di.go b/driver/di.go new file mode 100644 index 00000000000..d8317e046e5 --- /dev/null +++ b/driver/di.go @@ -0,0 +1,68 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package driver + +import ( + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/jwk" +) + +type RegistryModifier func(r *RegistrySQL) error + +func WithRegistryModifiers(f ...RegistryModifier) OptionsModifier { + return func(o *options) { + o.registryModifiers = append(o.registryModifiers, f...) + } +} + +func RegistryWithHMACSHAStrategy(s func(r *RegistrySQL) oauth2.CoreStrategy) RegistryModifier { + return func(r *RegistrySQL) error { + r.hmacs = s(r) + return nil + } +} + +func RegistryWithJWTStrategy(s func(r *RegistrySQL) oauth2.AccessTokenStrategy) RegistryModifier { + return func(r *RegistrySQL) error { + r.jwtStrategy = s(r) + return nil + } +} + +func RegistryWithKeyManager(km func(r *RegistrySQL) (jwk.Manager, error)) RegistryModifier { + return func(r *RegistrySQL) (err error) { + r.keyManager, err = km(r) + return err + } +} + +func RegistryWithOAuth2Provider(pr func(r *RegistrySQL) fosite.OAuth2Provider) RegistryModifier { + return func(r *RegistrySQL) error { + r.fop = pr(r) + return nil + } +} + +func RegistryWithAccessTokenStorage(s func(r *RegistrySQL) oauth2.AccessTokenStorage) RegistryModifier { + return func(r *RegistrySQL) error { + r.accessTokenStorage = s(r) + return nil + } +} + +func RegistryWithAuthorizeCodeStorage(s func(r *RegistrySQL) oauth2.AuthorizeCodeStorage) RegistryModifier { + return func(r *RegistrySQL) error { + r.authorizeCodeStorage = s(r) + return nil + } +} + +func RegistryWithConsentManager(cm func(r *RegistrySQL) (consent.Manager, error)) RegistryModifier { + return func(r *RegistrySQL) (err error) { + r.consentManager, err = cm(r) + return err + } +} diff --git a/driver/factory.go b/driver/factory.go index 8f7a09928a6..aa4d46eaba0 100644 --- a/driver/factory.go +++ b/driver/factory.go @@ -5,46 +5,66 @@ package driver import ( "context" + "io/fs" - "github.com/ory/x/servicelocatorx" + "github.com/pkg/errors" - "github.com/ory/x/configx" + "github.com/ory/pop/v6" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fositex" + "github.com/ory/hydra/v2/hsm" + "github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/x/configx" 
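The new `driver/di.go` above turns previously hard-wired registry internals into injection points. A sketch of how an embedder or test might swap the key manager through these hooks; `New`, `SkipNetworkInit`, `WithConfigOptions`, `WithRegistryModifiers`, and `RegistryWithKeyManager` are taken from this diff, while the caller-provided `jwk.Manager` and the in-memory DSN (which assumes a build with SQLite support) are assumptions.

```go
// Sketch only, not part of the diff.
package diexample

import (
	"context"

	"github.com/ory/hydra/v2/driver"
	"github.com/ory/hydra/v2/driver/config"
	"github.com/ory/hydra/v2/jwk"
	"github.com/ory/x/configx"
)

// newRegistryWithCustomKeys builds a registry whose key manager is replaced
// by a caller-provided implementation, e.g. for tests.
func newRegistryWithCustomKeys(ctx context.Context, km jwk.Manager) (*driver.RegistrySQL, error) {
	return driver.New(ctx,
		driver.SkipNetworkInit(),
		driver.WithConfigOptions(configx.WithValues(map[string]any{
			config.KeyDSN: "memory",
		})),
		driver.WithRegistryModifiers(
			driver.RegistryWithKeyManager(func(*driver.RegistrySQL) (jwk.Manager, error) {
				return km, nil // swap in the caller-provided implementation
			}),
		),
	)
}
```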
"github.com/ory/x/logrusx" - - "github.com/ory/hydra/driver/config" - "github.com/ory/x/contextx" + "github.com/ory/x/otelx" + "github.com/ory/x/popx" + "github.com/ory/x/servicelocatorx" ) -type options struct { - forcedValues map[string]interface{} - preload bool - validate bool - opts []configx.OptionModifier - config *config.DefaultProvider - // The first default refers to determining the NID at startup; the second default referes to the fact that the Contextualizer may dynamically change the NID. - skipNetworkInit bool -} +type ( + options struct { + noPreload, + noValidate, + autoMigrate, + skipNetworkInit bool + configOpts []configx.OptionModifier + tracerWrapper TracerWrapper + extraMigrations []fs.FS + goMigrations []popx.Migration + fositexFactories []fositex.Factory + registryModifiers []RegistryModifier + inspect func(*RegistrySQL) error + serviceLocatorOpts []servicelocatorx.Option + hsmContext hsm.Context + kratos kratos.Client + fop fosite.OAuth2Provider + dbOptsModifier []func(details *pop.ConnectionDetails) + } + OptionsModifier func(*options) + + TracerWrapper func(*otelx.Tracer) *otelx.Tracer +) -func newOptions() *options { - return &options{ - validate: true, - preload: true, - opts: []configx.OptionModifier{}, +func newOptions(opts []OptionsModifier) *options { + o := &options{} + for _, f := range opts { + f(o) } + return o } -func WithConfig(config *config.DefaultProvider) func(o *options) { +func WithConfigOptions(opts ...configx.OptionModifier) OptionsModifier { return func(o *options) { - o.config = config + o.configOpts = append(o.configOpts, opts...) } } -type OptionsModifier func(*options) - -func WithOptions(opts ...configx.OptionModifier) OptionsModifier { +// WithDBOptionsModifier modifies the pop connection details before the connection is opened. +func WithDBOptionsModifier(f ...func(details *pop.ConnectionDetails)) OptionsModifier { return func(o *options) { - o.opts = append(o.opts, opts...) + o.dbOptsModifier = append(o.dbOptsModifier, f...) } } @@ -53,14 +73,14 @@ func WithOptions(opts ...configx.OptionModifier) OptionsModifier { // This does not affect schema validation! func DisableValidation() OptionsModifier { return func(o *options) { - o.validate = false + o.noValidate = true } } // DisablePreloading will not preload the config. func DisablePreloading() OptionsModifier { return func(o *options) { - o.preload = false + o.noPreload = true } } @@ -70,50 +90,125 @@ func SkipNetworkInit() OptionsModifier { } } -func New(ctx context.Context, sl *servicelocatorx.Options, opts []OptionsModifier) (Registry, error) { - o := newOptions() - for _, f := range opts { - f(o) +// WithTracerWrapper sets a function that wraps the tracer. +func WithTracerWrapper(wrapper TracerWrapper) OptionsModifier { + return func(o *options) { + o.tracerWrapper = wrapper } +} + +// WithExtraMigrations specifies additional database migration. +func WithExtraMigrations(m ...fs.FS) OptionsModifier { + return func(o *options) { + o.extraMigrations = append(o.extraMigrations, m...) + } +} + +func WithGoMigrations(m ...popx.Migration) OptionsModifier { + return func(o *options) { + o.goMigrations = append(o.goMigrations, m...) + } +} + +func WithExtraFositeFactories(f ...fositex.Factory) OptionsModifier { + return func(o *options) { + o.fositexFactories = append(o.fositexFactories, f...) 
+ } +} + +func Inspect(f func(*RegistrySQL) error) OptionsModifier { + return func(o *options) { + o.inspect = f + } +} + +func WithServiceLocatorOptions(opts ...servicelocatorx.Option) OptionsModifier { + return func(o *options) { + o.serviceLocatorOpts = append(o.serviceLocatorOpts, opts...) + } +} + +func WithAutoMigrate() OptionsModifier { + return func(o *options) { + o.autoMigrate = true + } +} + +func WithHSMContext(h hsm.Context) OptionsModifier { + return func(o *options) { + o.hsmContext = h + } +} + +func WithKratosClient(k kratos.Client) OptionsModifier { + return func(o *options) { + o.kratos = k + } +} + +func WithOAuth2Provider(p fosite.OAuth2Provider) OptionsModifier { + return func(o *options) { + o.fop = p + } +} + +func New(ctx context.Context, opts ...OptionsModifier) (*RegistrySQL, error) { + o := newOptions(opts) + sl := servicelocatorx.NewOptions(o.serviceLocatorOpts...) l := sl.Logger() if l == nil { l = logrusx.New("Ory Hydra", config.Version) } - ctxter := sl.Contextualizer() - c := o.config - if c == nil { - var err error - c, err = config.New(ctx, l, o.opts...) - if err != nil { - l.WithError(err).Error("Unable to instantiate configuration.") - return nil, err - } + c, err := config.New(ctx, l, sl.Contextualizer(), o.configOpts...) + if err != nil { + l.WithError(err).Error("Unable to instantiate configuration.") + return nil, err } - if o.validate { + if !o.noValidate { if err := config.Validate(ctx, l, c); err != nil { return nil, err } } - r, err := NewRegistryFromDSN(ctx, c, l, o.skipNetworkInit, false, ctxter) + r, err := newRegistryWithoutInit(c, l) if err != nil { l.WithError(err).Error("Unable to create service registry.") return nil, err } - if err = r.Init(ctx, o.skipNetworkInit, false, &contextx.Default{}); err != nil { + r.tracerWrapper = o.tracerWrapper + r.fositeFactories = o.fositexFactories + r.hsm = o.hsmContext + r.middlewares = sl.HTTPMiddlewares() + r.ctxer = sl.Contextualizer() + r.kratos = o.kratos + r.fop = o.fop + r.dbOptsModifier = o.dbOptsModifier + + if err = r.Init(ctx, o.skipNetworkInit, o.autoMigrate, o.extraMigrations, o.goMigrations); err != nil { l.WithError(err).Error("Unable to initialize service registry.") return nil, err } + for _, f := range o.registryModifiers { + if err := f(r); err != nil { + return nil, err + } + } + // Avoid cold cache issues on boot: - if o.preload { - CallRegistry(ctx, r) + if !o.noPreload { + callRegistry(ctx, r) + } + + if o.inspect != nil { + if err := o.inspect(r); err != nil { + return nil, errors.WithStack(err) + } } - c.Source(ctx).SetTracer(ctx, r.Tracer(ctx)) return r, nil } diff --git a/driver/registry.go b/driver/registry.go index f12dc429f0d..bb9ecea3f38 100644 --- a/driver/registry.go +++ b/driver/registry.go @@ -5,50 +5,29 @@ package driver import ( "context" - - "github.com/ory/x/httprouterx" - - "github.com/ory/hydra/hsm" - "github.com/ory/x/contextx" - - "github.com/ory/hydra/oauth2/trust" + "strings" "github.com/pkg/errors" - "github.com/ory/x/errorsx" - - "github.com/ory/fosite" - foauth2 "github.com/ory/fosite/handler/oauth2" - - "github.com/ory/x/logrusx" - - "github.com/ory/hydra/persistence" - - prometheus "github.com/ory/x/prometheusx" - + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver/config" + 
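With this hunk, `New` accepts its options directly as variadic `OptionsModifier` values and builds the service locator internally from `WithServiceLocatorOptions`. A sketch of a typical construction under that assumption; the option names are from the hunks above, while the DSN value and the pool tweak are placeholders.

```go
// Sketch only, not part of the diff.
package factoryexample

import (
	"context"

	"github.com/ory/hydra/v2/driver"
	"github.com/ory/hydra/v2/driver/config"
	"github.com/ory/pop/v6"
	"github.com/ory/x/configx"
)

func newRegistry(ctx context.Context, dsn string) (*driver.RegistrySQL, error) {
	return driver.New(ctx,
		driver.WithConfigOptions(configx.WithValues(map[string]any{
			config.KeyDSN: dsn,
		})),
		driver.WithAutoMigrate(), // run SQL migrations during Init
		driver.WithDBOptionsModifier(func(d *pop.ConnectionDetails) {
			d.Pool = 4 // tweak connection details before the connection is opened
		}),
	)
}
```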
"github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/persistence" + "github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" + "github.com/ory/x/contextx" "github.com/ory/x/dbal" - "github.com/ory/x/healthx" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" + "github.com/ory/x/logrusx" ) -type Registry interface { - dbal.Driver - - Init(ctx context.Context, skipNetworkInit bool, migrate bool, ctxer contextx.Contextualizer) error - - WithBuildInfo(v, h, d string) Registry - WithConfig(c *config.DefaultProvider) Registry - WithContextualizer(ctxer contextx.Contextualizer) Registry - WithLogger(l *logrusx.Logger) Registry +type registry interface { x.HTTPClientProvider - GetJWKSFetcherStrategy() fosite.JWKSFetcherStrategy + contextx.Provider config.Provider persistence.Provider x.RegistryLogger @@ -59,62 +38,41 @@ type Registry interface { jwk.Registry trust.Registry oauth2.Registry - PrometheusManager() *prometheus.MetricsManager x.TracingProvider + x.NetworkProvider - RegisterRoutes(ctx context.Context, admin *httprouterx.RouterAdmin, public *httprouterx.RouterPublic) - ClientHandler() *client.Handler - KeyHandler() *jwk.Handler - ConsentHandler() *consent.Handler - OAuth2Handler() *oauth2.Handler - HealthHandler() *healthx.Handler - - OAuth2HMACStrategy() *foauth2.HMACSHAStrategy - WithOAuth2Provider(f fosite.OAuth2Provider) - WithConsentStrategy(c consent.Strategy) - WithHsmContext(h hsm.Context) + kratos.Provider } -func NewRegistryFromDSN(ctx context.Context, c *config.DefaultProvider, l *logrusx.Logger, skipNetworkInit bool, migrate bool, ctxer contextx.Contextualizer) (Registry, error) { - registry, err := NewRegistryWithoutInit(c, l) - if err != nil { - return nil, err - } - if err := registry.Init(ctx, skipNetworkInit, migrate, ctxer); err != nil { - return nil, err - } - return registry, nil -} - -func NewRegistryWithoutInit(c *config.DefaultProvider, l *logrusx.Logger) (Registry, error) { - driver, err := dbal.GetDriverFor(c.DSN()) - if err != nil { - return nil, errorsx.WithStack(err) - } - registry, ok := driver.(Registry) - if !ok { - return nil, errors.Errorf("driver of type %T does not implement interface Registry", driver) +func newRegistryWithoutInit(c *config.DefaultProvider, l *logrusx.Logger) (*RegistrySQL, error) { + scheme, _, _ := strings.Cut(c.DSN(), "://") + if !pop.DialectSupported(pop.CanonicalDialect(scheme)) { + if dbal.IsSQLite(c.DSN()) { + return nil, errors.New("The DSN connection string looks like a SQLite connection, but SQLite support was not built into the binary. Please check if you have downloaded the correct binary or are using the correct Docker Image. 
Binary archives and Docker Images indicate SQLite support by appending the -sqlite suffix.") + } + return nil, errors.New("unsupported DSN type") } - registry = registry.WithLogger(l).WithConfig(c).WithBuildInfo(config.Version, config.Commit, config.Date) - return registry, nil + return &RegistrySQL{ + l: l, + conf: c, + initialPing: defaultInitialPing, + }, nil } -func CallRegistry(ctx context.Context, r Registry) { +func callRegistry(ctx context.Context, r *RegistrySQL) { r.ClientValidator() r.ClientManager() r.ClientHasher() r.ConsentManager() r.ConsentStrategy() - r.SubjectIdentifierAlgorithm(ctx) r.KeyManager() r.KeyCipher() + r.FlowCipher() r.OAuth2Storage() r.OAuth2Provider() - r.AudienceStrategy() - r.AccessTokenJWTStrategy() - r.OpenIDJWTStrategy() + r.AccessTokenJWTSigner() + r.OpenIDJWTSigner() r.OpenIDConnectRequestValidator() - r.PrometheusManager() r.Tracer(ctx) } diff --git a/driver/registry_base.go b/driver/registry_base.go deleted file mode 100644 index 8a81a8fa4b1..00000000000 --- a/driver/registry_base.go +++ /dev/null @@ -1,547 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package driver - -import ( - "context" - "crypto/sha256" - "fmt" - "net/http" - "time" - - "github.com/ory/x/popx" - - "github.com/ory/x/httprouterx" - - "github.com/rs/cors" - - "github.com/hashicorp/go-retryablehttp" - - "github.com/ory/hydra/fositex" - ctxx "github.com/ory/x/contextx" - "github.com/ory/x/httpx" - "github.com/ory/x/otelx" - - "github.com/ory/hydra/hsm" - - prometheus "github.com/ory/x/prometheusx" - - "github.com/pkg/errors" - - "github.com/ory/hydra/oauth2/trust" - "github.com/ory/hydra/x/oauth2cors" - "github.com/ory/x/contextx" - - "github.com/ory/hydra/persistence" - - "github.com/prometheus/client_golang/prometheus/promhttp" - - "github.com/ory/x/logrusx" - - "github.com/gorilla/sessions" - - "github.com/ory/fosite" - "github.com/ory/fosite/compose" - foauth2 "github.com/ory/fosite/handler/oauth2" - "github.com/ory/fosite/handler/openid" - "github.com/ory/herodot" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" - "github.com/ory/x/healthx" -) - -var ( - _ contextx.Provider = (*RegistryBase)(nil) -) - -type RegistryBase struct { - l *logrusx.Logger - al *logrusx.Logger - conf *config.DefaultProvider - ch *client.Handler - fh fosite.Hasher - jwtGrantH *trust.Handler - jwtGrantV *trust.GrantValidator - kh *jwk.Handler - cv *client.Validator - ctxer contextx.Contextualizer - hh *healthx.Handler - migrationStatus *popx.MigrationStatuses - kc *jwk.AEAD - cos consent.Strategy - writer herodot.Writer - fsc fosite.ScopeStrategy - atjs jwk.JWTSigner - idtjs jwk.JWTSigner - hsm hsm.Context - fscPrev string - forv *openid.OpenIDConnectRequestValidator - fop fosite.OAuth2Provider - coh *consent.Handler - oah *oauth2.Handler - sia map[string]consent.SubjectIdentifierAlgorithm - trc *otelx.Tracer - pmm *prometheus.MetricsManager - oa2mw func(h http.Handler) http.Handler - o2mc *foauth2.HMACSHAStrategy - 
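Because the dbal driver registry is gone, an unsupported DSN (or a SQLite DSN in a binary built without the sqlite tag) now surfaces as a regular error from `driver.New` with the message shown above, so callers can simply fail fast. A purely illustrative sketch; configuration is assumed to come from files or the environment.

```go
// Sketch only, not part of the diff.
package guardexample

import (
	"context"
	"log"

	"github.com/ory/hydra/v2/driver"
)

func mustRegistry(ctx context.Context) *driver.RegistrySQL {
	r, err := driver.New(ctx)
	if err != nil {
		// e.g. "... SQLite support was not built into the binary ..."
		log.Fatalf("unable to initialize the service registry: %s", err)
	}
	return r
}
```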
o2jwt *foauth2.DefaultJWTStrategy - arhs []oauth2.AccessRequestHook - buildVersion string - buildHash string - buildDate string - r Registry - persister persistence.Persister - jfs fosite.JWKSFetcherStrategy - oc fosite.Configurator - oidcs jwk.JWTSigner - ats jwk.JWTSigner - hmacs *foauth2.HMACSHAStrategy - fc *fositex.Config - publicCORS *cors.Cors -} - -func (m *RegistryBase) GetJWKSFetcherStrategy() fosite.JWKSFetcherStrategy { - if m.jfs == nil { - m.jfs = fosite.NewDefaultJWKSFetcherStrategy(fosite.JWKSFetcherWithHTTPClientSource(func(ctx context.Context) *retryablehttp.Client { - return m.HTTPClient(ctx) - })) - } - return m.jfs -} - -func (m *RegistryBase) WithContextualizer(ctxer contextx.Contextualizer) Registry { - m.ctxer = ctxer - return m.r -} - -func (m *RegistryBase) Contextualizer() contextx.Contextualizer { - if m.ctxer == nil { - panic("registry Contextualizer not set") - } - return m.ctxer -} - -func (m *RegistryBase) with(r Registry) *RegistryBase { - m.r = r - return m -} - -func (m *RegistryBase) WithBuildInfo(version, hash, date string) Registry { - m.buildVersion = version - m.buildHash = hash - m.buildDate = date - return m.r -} - -func (m *RegistryBase) OAuth2AwareMiddleware(ctx context.Context) func(h http.Handler) http.Handler { - if m.oa2mw == nil { - m.oa2mw = oauth2cors.Middleware(ctx, m.r) - } - return m.oa2mw -} - -func (m *RegistryBase) addPublicCORSOnHandler(ctx context.Context) func(http.Handler) http.Handler { - corsConfig, corsEnabled := m.Config().CORS(ctx, config.PublicInterface) - if !corsEnabled { - return func(h http.Handler) http.Handler { - return h - } - } - if m.publicCORS == nil { - m.publicCORS = cors.New(corsConfig) - } - return func(h http.Handler) http.Handler { - return m.publicCORS.Handler(h) - } -} - -func (m *RegistryBase) RegisterRoutes(ctx context.Context, admin *httprouterx.RouterAdmin, public *httprouterx.RouterPublic) { - m.HealthHandler().SetHealthRoutes(admin.Router, true) - m.HealthHandler().SetVersionRoutes(admin.Router) - - m.HealthHandler().SetHealthRoutes(public.Router, false, healthx.WithMiddleware(m.addPublicCORSOnHandler(ctx))) - - admin.Handler("GET", prometheus.MetricsPrometheusPath, promhttp.Handler()) - - m.ConsentHandler().SetRoutes(admin) - m.KeyHandler().SetRoutes(admin, public, m.OAuth2AwareMiddleware(ctx)) - m.ClientHandler().SetRoutes(admin, public) - m.OAuth2Handler().SetRoutes(admin, public, m.OAuth2AwareMiddleware(ctx)) - m.JWTGrantHandler().SetRoutes(admin) -} - -func (m *RegistryBase) BuildVersion() string { - return m.buildVersion -} - -func (m *RegistryBase) BuildDate() string { - return m.buildDate -} - -func (m *RegistryBase) BuildHash() string { - return m.buildHash -} - -func (m *RegistryBase) WithConfig(c *config.DefaultProvider) Registry { - m.conf = c - return m.r -} - -func (m *RegistryBase) Writer() herodot.Writer { - if m.writer == nil { - h := herodot.NewJSONWriter(m.Logger()) - h.ErrorEnhancer = x.ErrorEnhancer - m.writer = h - } - return m.writer -} - -func (m *RegistryBase) WithLogger(l *logrusx.Logger) Registry { - m.l = l - return m.r -} - -func (m *RegistryBase) Logger() *logrusx.Logger { - if m.l == nil { - m.l = logrusx.New("Ory Hydra", m.BuildVersion()) - } - return m.l -} - -func (m *RegistryBase) AuditLogger() *logrusx.Logger { - if m.al == nil { - m.al = logrusx.NewAudit("Ory Hydra", m.BuildVersion()) - m.al.UseConfig(m.Config().Source(ctxx.RootContext)) - } - return m.al -} - -func (m *RegistryBase) ClientHasher() fosite.Hasher { - if m.fh == nil { - m.fh = 
x.NewHasher(m.Config()) - } - return m.fh -} - -func (m *RegistryBase) ClientHandler() *client.Handler { - if m.ch == nil { - m.ch = client.NewHandler(m.r) - } - return m.ch -} - -func (m *RegistryBase) ClientValidator() *client.Validator { - if m.cv == nil { - m.cv = client.NewValidator(m.r) - } - return m.cv -} - -func (m *RegistryBase) KeyHandler() *jwk.Handler { - if m.kh == nil { - m.kh = jwk.NewHandler(m.r) - } - return m.kh -} - -func (m *RegistryBase) JWTGrantHandler() *trust.Handler { - if m.jwtGrantH == nil { - m.jwtGrantH = trust.NewHandler(m.r) - } - return m.jwtGrantH -} - -func (m *RegistryBase) GrantValidator() *trust.GrantValidator { - if m.jwtGrantV == nil { - m.jwtGrantV = trust.NewGrantValidator() - } - return m.jwtGrantV -} - -func (m *RegistryBase) HealthHandler() *healthx.Handler { - if m.hh == nil { - m.hh = healthx.NewHandler(m.Writer(), m.buildVersion, healthx.ReadyCheckers{ - "database": func(_ *http.Request) error { - return m.r.Ping() - }, - "migrations": func(r *http.Request) error { - if m.migrationStatus != nil && !m.migrationStatus.HasPending() { - return nil - } - - status, err := m.r.Persister().MigrationStatus(r.Context()) - if err != nil { - return err - } - - if status.HasPending() { - err := errors.Errorf("migrations have not yet been fully applied: %+v", status) - m.Logger().WithField("status", fmt.Sprintf("%+v", status)).WithError(err).Warn("Instance is not yet ready because migrations have not yet been fully applied.") - return err - } - - m.migrationStatus = &status - return nil - }, - }) - } - - return m.hh -} - -func (m *RegistryBase) ConsentStrategy() consent.Strategy { - if m.cos == nil { - m.cos = consent.NewStrategy(m.r, m.Config()) - } - return m.cos -} - -func (m *RegistryBase) KeyCipher() *jwk.AEAD { - if m.kc == nil { - m.kc = jwk.NewAEAD(m.Config()) - } - return m.kc -} - -func (m *RegistryBase) CookieStore(ctx context.Context) (sessions.Store, error) { - var keys [][]byte - secrets, err := m.conf.GetCookieSecrets(ctx) - if err != nil { - return nil, err - } - - for _, k := range secrets { - encrypt := sha256.Sum256(k) - keys = append(keys, k, encrypt[:]) - } - - cs := sessions.NewCookieStore(keys...) - cs.Options.Secure = m.Config().CookieSecure(ctx) - cs.Options.HttpOnly = true - - // CookieStore MaxAge is set to 86400 * 30 by default. This prevents secure cookies retrieval with expiration > 30 days. - // MaxAge(0) disables internal MaxAge check by SecureCookie, see: - // - // https://github.com/ory/hydra/pull/2488#discussion_r618992698 - cs.MaxAge(0) - - if domain := m.Config().CookieDomain(ctx); domain != "" { - cs.Options.Domain = domain - } - - cs.Options.Path = "/" - if sameSite := m.Config().CookieSameSiteMode(ctx); sameSite != 0 { - cs.Options.SameSite = sameSite - } - - return cs, nil -} - -func (m *RegistryBase) HTTPClient(ctx context.Context, opts ...httpx.ResilientOptions) *retryablehttp.Client { - opts = append(opts, - httpx.ResilientClientWithLogger(m.Logger()), - httpx.ResilientClientWithMaxRetry(2), - httpx.ResilientClientWithConnectionTimeout(30*time.Second)) - - tracer := m.Tracer(ctx) - if tracer.IsLoaded() { - opts = append(opts, httpx.ResilientClientWithTracer(tracer.Tracer())) - } - - if m.Config().ClientHTTPNoPrivateIPRanges() { - opts = append(opts, httpx.ResilientClientDisallowInternalIPs()) - } - return httpx.NewResilientClient(opts...) 
-} - -func (m *RegistryBase) OAuth2Provider() fosite.OAuth2Provider { - if m.fop != nil { - return m.fop - } - - m.fop = fosite.NewOAuth2Provider(m.r.OAuth2Storage(), m.OAuth2ProviderConfig()) - return m.fop -} - -func (m *RegistryBase) OpenIDJWTStrategy() jwk.JWTSigner { - if m.oidcs != nil { - return m.oidcs - } - - m.oidcs = jwk.NewDefaultJWTSigner(m.Config(), m.r, x.OpenIDConnectKeyName) - return m.oidcs -} - -func (m *RegistryBase) AccessTokenJWTStrategy() jwk.JWTSigner { - if m.ats != nil { - return m.ats - } - - m.ats = jwk.NewDefaultJWTSigner(m.Config(), m.r, x.OAuth2JWTKeyName) - return m.ats -} - -func (m *RegistryBase) OAuth2HMACStrategy() *foauth2.HMACSHAStrategy { - if m.hmacs != nil { - return m.hmacs - } - - m.hmacs = compose.NewOAuth2HMACStrategy(m.OAuth2Config()) - return m.hmacs -} - -func (m *RegistryBase) OAuth2Config() *fositex.Config { - if m.fc != nil { - return m.fc - } - - m.fc = fositex.NewConfig(m.r) - return m.fc -} - -func (m *RegistryBase) OAuth2ProviderConfig() fosite.Configurator { - if m.oc != nil { - return m.oc - } - - conf := m.OAuth2Config() - hmacAtStrategy := m.OAuth2HMACStrategy() - oidcSigner := m.OpenIDJWTStrategy() - atSigner := m.AccessTokenJWTStrategy() - jwtAtStrategy := &foauth2.DefaultJWTStrategy{ - Signer: atSigner, - HMACSHAStrategy: hmacAtStrategy, - Config: conf, - } - - conf.LoadDefaultHanlders(&compose.CommonStrategy{ - CoreStrategy: fositex.NewTokenStrategy(m.Config(), hmacAtStrategy, &foauth2.DefaultJWTStrategy{ - Signer: jwtAtStrategy, - HMACSHAStrategy: hmacAtStrategy, - Config: conf, - }), - OpenIDConnectTokenStrategy: &openid.DefaultStrategy{ - Config: conf, - Signer: oidcSigner, - }, - Signer: oidcSigner, - }) - - m.oc = conf - return m.oc -} - -func (m *RegistryBase) OpenIDConnectRequestValidator() *openid.OpenIDConnectRequestValidator { - if m.forv == nil { - m.forv = openid.NewOpenIDConnectRequestValidator(&openid.DefaultStrategy{ - Config: m.OAuth2ProviderConfig(), - Signer: m.OpenIDJWTStrategy(), - }, m.OAuth2ProviderConfig()) - } - return m.forv -} - -func (m *RegistryBase) AudienceStrategy() fosite.AudienceMatchingStrategy { - return fosite.DefaultAudienceMatchingStrategy -} - -func (m *RegistryBase) ConsentHandler() *consent.Handler { - if m.coh == nil { - m.coh = consent.NewHandler(m.r, m.Config()) - } - return m.coh -} - -func (m *RegistryBase) OAuth2Handler() *oauth2.Handler { - if m.oah == nil { - m.oah = oauth2.NewHandler(m.r, m.Config()) - } - return m.oah -} - -func (m *RegistryBase) SubjectIdentifierAlgorithm(ctx context.Context) map[string]consent.SubjectIdentifierAlgorithm { - if m.sia == nil { - m.sia = map[string]consent.SubjectIdentifierAlgorithm{} - for _, t := range m.Config().SubjectTypesSupported(ctx) { - switch t { - case "public": - m.sia["public"] = consent.NewSubjectIdentifierAlgorithmPublic() - case "pairwise": - m.sia["pairwise"] = consent.NewSubjectIdentifierAlgorithmPairwise([]byte(m.Config().SubjectIdentifierAlgorithmSalt(ctx))) - } - } - } - return m.sia -} - -func (m *RegistryBase) Tracer(ctx context.Context) *otelx.Tracer { - if m.trc == nil { - t, err := otelx.New("Ory Hydra", m.l, m.conf.Tracing()) - if err != nil { - m.Logger().WithError(err).Error("Unable to initialize Tracer.") - } else { - m.trc = t - } - } - if m.trc.Tracer() == nil { - m.trc = otelx.NewNoop(m.l, m.Config().Tracing()) - } - - return m.trc -} - -func (m *RegistryBase) PrometheusManager() *prometheus.MetricsManager { - if m.pmm == nil { - m.pmm = prometheus.NewMetricsManagerWithPrefix("hydra", prometheus.HTTPMetrics, 
m.buildVersion, m.buildHash, m.buildDate) - } - return m.pmm -} - -func (m *RegistryBase) Persister() persistence.Persister { - return m.persister -} - -// Config returns the configuration for the given context. It may or may not be the same as the global configuration. -func (m *RegistryBase) Config() *config.DefaultProvider { - return m.conf -} - -// WithOAuth2Provider forces an oauth2 provider which is only used for testing. -func (m *RegistryBase) WithOAuth2Provider(f fosite.OAuth2Provider) { - m.fop = f -} - -// WithConsentStrategy forces a consent strategy which is only used for testing. -func (m *RegistryBase) WithConsentStrategy(c consent.Strategy) { - m.cos = c -} - -func (m *RegistryBase) AccessRequestHooks() []oauth2.AccessRequestHook { - if m.arhs == nil { - m.arhs = []oauth2.AccessRequestHook{ - oauth2.RefreshTokenHook(m), - } - } - return m.arhs -} - -func (m *RegistryBase) WithHsmContext(h hsm.Context) { - m.hsm = h -} - -func (m *RegistryBase) HSMContext() hsm.Context { - if m.hsm == nil { - m.hsm = hsm.NewContext(m.Config(), m.l) - } - return m.hsm -} - -func (m *RegistrySQL) ClientAuthenticator() x.ClientAuthenticator { - return m.OAuth2Provider().(*fosite.Fosite) -} diff --git a/driver/registry_base_test.go b/driver/registry_base_test.go deleted file mode 100644 index abf7c1a4303..00000000000 --- a/driver/registry_base_test.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package driver - -import ( - "context" - "errors" - "io" - "testing" - - "github.com/ory/x/randx" - - "github.com/stretchr/testify/require" - - "github.com/ory/x/httpx" - - "github.com/sirupsen/logrus" - "github.com/sirupsen/logrus/hooks/test" - "github.com/stretchr/testify/assert" - - "github.com/ory/hydra/driver/config" - "github.com/ory/x/configx" - "github.com/ory/x/contextx" - "github.com/ory/x/logrusx" - - "github.com/gorilla/sessions" -) - -func TestGetJWKSFetcherStrategyHostEnforcment(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - c := config.MustNew(context.Background(), l, configx.WithConfigFiles("../internal/.hydra.yaml")) - c.MustSet(ctx, config.KeyDSN, "memory") - c.MustSet(ctx, config.HSMEnabled, "false") - c.MustSet(ctx, config.ViperKeyClientHTTPNoPrivateIPRanges, true) - - registry, err := NewRegistryWithoutInit(c, l) - require.NoError(t, err) - - _, err = registry.GetJWKSFetcherStrategy().Resolve(ctx, "http://localhost:8080", true) - require.ErrorAs(t, err, new(httpx.ErrPrivateIPAddressDisallowed)) -} - -func TestRegistryBase_newKeyStrategy_handlesNetworkError(t *testing.T) { - // Test ensures any network specific error is logged with a - // specific message when attempting to create a new key strategy: issue #2338 - - hook := test.Hook{} // Test hook for asserting log messages - ctx := context.Background() - - l := logrusx.New("", "", logrusx.WithHook(&hook)) - l.Logrus().SetOutput(io.Discard) - l.Logrus().ExitFunc = func(int) {} // Override the exit func to avoid call to os.Exit - - // Create a config and set a valid but unresolvable DSN - c := config.MustNew(context.Background(), l, configx.WithConfigFiles("../internal/.hydra.yaml")) - c.MustSet(ctx, config.KeyDSN, "postgres://user:password@127.0.0.1:9999/postgres") - c.MustSet(ctx, config.HSMEnabled, "false") - - registry, err := NewRegistryWithoutInit(c, l) - if err != nil { - t.Errorf("Failed to 
create registry: %s", err) - return - } - - r := registry.(*RegistrySQL) - r.initialPing = failedPing(errors.New("snizzles")) - - _ = r.Init(context.Background(), true, false, &contextx.TestContextualizer{}) - - registryBase := RegistryBase{r: r, l: l} - registryBase.WithConfig(c) - - assert.Equal(t, logrus.FatalLevel, hook.LastEntry().Level) - assert.Contains(t, hook.LastEntry().Message, "snizzles") -} - -func TestRegistryBase_CookieStore_MaxAgeZero(t *testing.T) { - // Test ensures that CookieStore MaxAge option is equal to zero after initialization - - ctx := context.Background() - r := new(RegistryBase) - r.WithConfig(config.MustNew(context.Background(), logrusx.New("", ""), configx.WithValue(config.KeyGetSystemSecret, []string{randx.MustString(32, randx.AlphaNum)}))) - - s, err := r.CookieStore(ctx) - require.NoError(t, err) - cs := s.(*sessions.CookieStore) - - assert.Equal(t, cs.Options.MaxAge, 0) -} diff --git a/driver/registry_nosqlite.go b/driver/registry_nosqlite.go deleted file mode 100644 index bd57c32359c..00000000000 --- a/driver/registry_nosqlite.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -//go:build !sqlite -// +build !sqlite - -package driver - -func (m *RegistrySQL) CanHandle(dsn string) bool { - return m.alwaysCanHandle(dsn) -} diff --git a/driver/registry_sql.go b/driver/registry_sql.go index 8162b5c13c0..ced6d24d47f 100644 --- a/driver/registry_sql.go +++ b/driver/registry_sql.go @@ -5,103 +5,186 @@ package driver import ( "context" - "strings" + "crypto/sha256" + "fmt" + "io/fs" + "net/http" "time" + "github.com/gorilla/sessions" + "github.com/hashicorp/go-retryablehttp" + _ "github.com/jackc/pgx/v5/stdlib" "github.com/pkg/errors" - - "github.com/ory/hydra/hsm" + "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/rs/cors" + "github.com/urfave/negroni" + "go.uber.org/automaxprocs/maxprocs" + + "github.com/ory/herodot" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + foauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/pkce" + "github.com/ory/hydra/v2/fosite/handler/rfc7523" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/handler/verifiable" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/fositex" + "github.com/ory/hydra/v2/hsm" + "github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/persistence" + "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/hydra/v2/x" + "github.com/ory/hydra/v2/x/oauth2cors" + "github.com/ory/pop/v6" "github.com/ory/x/contextx" - - "github.com/gobuffalo/pop/v6" - - "github.com/ory/hydra/oauth2/trust" - 
"github.com/ory/x/errorsx" - "github.com/ory/x/networkx" + "github.com/ory/x/dbal" + "github.com/ory/x/healthx" + "github.com/ory/x/httprouterx" + "github.com/ory/x/httpx" + "github.com/ory/x/logrusx" + "github.com/ory/x/otelx" "github.com/ory/x/popx" + prometheus "github.com/ory/x/prometheusx" + "github.com/ory/x/resilience" + "github.com/ory/x/sqlcon" +) - "github.com/luna-duclos/instrumentedsql" +type RegistrySQL struct { + l *logrusx.Logger + conf *config.DefaultProvider + fh fosite.Hasher + cv *client.Validator + ctxer contextx.Contextualizer + hh *healthx.Handler + kc *aead.AESGCM + flowc *aead.XChaCha20Poly1305 + cos consent.Strategy + writer herodot.Writer + hsm hsm.Context + forv *openid.OpenIDConnectRequestValidator + fop fosite.OAuth2Provider + trc *otelx.Tracer + tracerWrapper func(*otelx.Tracer) *otelx.Tracer + arhs []oauth2.AccessRequestHook + basePersister *sql.BasePersister + accessTokenStorage foauth2.AccessTokenStorage + authorizeCodeStorage foauth2.AuthorizeCodeStorage + oc fosite.Configurator + oidcs jwk.JWTSigner + ats jwk.JWTSigner + hmacs foauth2.CoreStrategy + jwtStrategy foauth2.AccessTokenStrategy + enigmaHMAC *hmac.HMACStrategy + deviceHmac *rfc8628.DefaultDeviceStrategy + fc *fositex.Config + publicCORS *cors.Cors + kratos kratos.Client + fositeFactories []fositex.Factory + migrator *sql.MigrationManager + dbOptsModifier []func(details *pop.ConnectionDetails) + + keyManager jwk.Manager + consentManager consent.Manager + + initialPing func(ctx context.Context, l *logrusx.Logger, p *sql.BasePersister) error + middlewares []negroni.Handler +} - "github.com/ory/x/resilience" +var ( + _ contextx.Provider = (*RegistrySQL)(nil) + _ registry = (*RegistrySQL)(nil) +) - _ "github.com/jackc/pgx/v4/stdlib" +func (m *RegistrySQL) FositeClientManager() fosite.ClientManager { + return m.OAuth2Storage() +} - "github.com/ory/hydra/persistence/sql" +// AuthorizeCodeStorage implements foauth2.AuthorizeCodeStorageProvider +func (m *RegistrySQL) AuthorizeCodeStorage() foauth2.AuthorizeCodeStorage { + if m.authorizeCodeStorage != nil { + return m.authorizeCodeStorage + } + return m.OAuth2Storage() +} - "github.com/jmoiron/sqlx" +// AccessTokenStorage implements foauth2.AccessTokenStorageProvider +func (m *RegistrySQL) AccessTokenStorage() foauth2.AccessTokenStorage { + if m.accessTokenStorage != nil { + return m.accessTokenStorage + } + return m.OAuth2Storage() +} - "github.com/ory/x/dbal" - otelsql "github.com/ory/x/otelx/sql" - "github.com/ory/x/sqlcon" +// RefreshTokenStorage implements foauth2.RefreshTokenStorageProvider +func (m *RegistrySQL) RefreshTokenStorage() foauth2.RefreshTokenStorage { + return m.OAuth2Storage() +} - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/x" -) +// TokenRevocationStorage implements foauth2.TokenRevocationStorageProvider +func (m *RegistrySQL) TokenRevocationStorage() foauth2.TokenRevocationStorage { + return m.OAuth2Storage() +} -type RegistrySQL struct { - *RegistryBase - db *sqlx.DB - defaultKeyManager jwk.Manager - initialPing func(r *RegistrySQL) error +// ResourceOwnerPasswordCredentialsGrantStorage implements 
foauth2.ResourceOwnerPasswordCredentialsGrantStorage +func (m *RegistrySQL) ResourceOwnerPasswordCredentialsGrantStorage() foauth2.ResourceOwnerPasswordCredentialsGrantStorage { + return m.OAuth2Storage() } -var _ Registry = new(RegistrySQL) +// OpenIDConnectRequestStorage implements openid.OIDCRequestStorageProvider +func (m *RegistrySQL) OpenIDConnectRequestStorage() openid.OpenIDConnectRequestStorage { + return m.OAuth2Storage() +} -// defaultInitialPing is the default function that will be called within RegistrySQL.Init to make sure -// the database is reachable. It can be injected for test purposes by changing the value -// of RegistrySQL.initialPing. -var defaultInitialPing = func(m *RegistrySQL) error { - if err := resilience.Retry(m.l, 5*time.Second, 5*time.Minute, m.Ping); err != nil { - m.Logger().Print("Could not ping database: ", err) - return errorsx.WithStack(err) - } - return nil +// PKCERequestStorage implements pkce.PKCERequestStorageProvider +func (m *RegistrySQL) PKCERequestStorage() pkce.PKCERequestStorage { + return m.OAuth2Storage() } -func init() { - dbal.RegisterDriver( - func() dbal.Driver { - return NewRegistrySQL() - }, - ) +// DeviceAuthStorage implements rfc8628.DeviceAuthStorageProvider +func (m *RegistrySQL) DeviceAuthStorage() rfc8628.DeviceAuthStorage { + return m.OAuth2Storage() } -func NewRegistrySQL() *RegistrySQL { - r := &RegistrySQL{ - RegistryBase: new(RegistryBase), - initialPing: defaultInitialPing, - } - r.RegistryBase.with(r) - return r +// RFC7523KeyStorage implements rfc7523.RFC7523KeyStorageProvider +func (m *RegistrySQL) RFC7523KeyStorage() rfc7523.RFC7523KeyStorage { + return m.OAuth2Storage() } -func (m *RegistrySQL) determineNetwork(c *pop.Connection, ctx context.Context) (*networkx.Network, error) { - mb, err := popx.NewMigrationBox(networkx.Migrations, popx.NewMigrator(c, m.Logger(), m.Tracer(ctx), 0)) - if err != nil { - return nil, err - } - s, err := mb.Status(ctx) - if err != nil { - return nil, err - } - if s.HasPending() { - return nil, errors.WithStack(errors.New("some migrations are pending")) - } +// NonceManager implements verifiable.NonceManager +func (m *RegistrySQL) NonceManager() verifiable.NonceManager { + return m.OAuth2Storage() +} - return networkx.NewManager(c, m.Logger(), m.Tracer(ctx)).Determine(ctx) +// defaultInitialPing is the default function that will be called within RegistrySQL.Init to make sure +// the database is reachable. It can be injected for test purposes by changing the value +// of RegistrySQL.initialPing. 
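The rewritten `defaultInitialPing` (directly below) takes the context, logger, and base persister explicitly; as the comment above notes, tests replace it through the unexported `initialPing` field. The tests later in this diff reference a `failedPing` helper whose definition is not part of this excerpt, so the following is only a plausible shape for such a stub, compatible with the new signature.

```go
// Sketch only: a test stub for RegistrySQL.initialPing. Must live in package
// driver because the field is unexported.
package driver

import (
	"context"

	"github.com/ory/hydra/v2/persistence/sql"
	"github.com/ory/x/logrusx"
)

func failedPing(err error) func(context.Context, *logrusx.Logger, *sql.BasePersister) error {
	return func(context.Context, *logrusx.Logger, *sql.BasePersister) error {
		return err // simulate an unreachable database without retrying
	}
}
```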
+func defaultInitialPing(ctx context.Context, l *logrusx.Logger, p *sql.BasePersister) error { + return errors.WithStack(resilience.Retry(l, 5*time.Second, 5*time.Minute, func() error { + return p.Ping(ctx) + })) } func (m *RegistrySQL) Init( - ctx context.Context, skipNetworkInit bool, migrate bool, ctxer contextx.Contextualizer, + ctx context.Context, + skipNetworkInit bool, + migrate bool, + extraMigrations []fs.FS, + goMigrations []popx.Migration, ) error { - if m.persister == nil { - m.WithContextualizer(ctxer) - var opts []instrumentedsql.Opt - if m.Tracer(ctx).IsLoaded() { - opts = []instrumentedsql.Opt{ - instrumentedsql.WithTracer(otelsql.NewTracer()), + if m.basePersister == nil { + if m.Config().CGroupsV1AutoMaxProcsEnabled() { + _, err := maxprocs.Set(maxprocs.Logger(m.Logger().Infof)) + if err != nil { + return fmt.Errorf("could not set GOMAXPROCS: %w", err) } } @@ -109,113 +192,455 @@ func (m *RegistrySQL) Init( pool, idlePool, connMaxLifetime, connMaxIdleTime, cleanedDSN := sqlcon.ParseConnectionOptions( m.l, m.Config().DSN(), ) - c, err := pop.NewConnection( - &pop.ConnectionDetails{ - URL: sqlcon.FinalizeDSN(m.l, cleanedDSN), - IdlePool: idlePool, - ConnMaxLifetime: connMaxLifetime, - ConnMaxIdleTime: connMaxIdleTime, - Pool: pool, - UseInstrumentedDriver: m.Tracer(ctx).IsLoaded(), - InstrumentedDriverOptions: opts, - Unsafe: m.Config().DbIgnoreUnknownTableColumns(), - }, - ) - if err != nil { - return errorsx.WithStack(err) + + opts := &pop.ConnectionDetails{ + URL: sqlcon.FinalizeDSN(m.l, cleanedDSN), + IdlePool: idlePool, + ConnMaxLifetime: connMaxLifetime, + ConnMaxIdleTime: connMaxIdleTime, + Pool: pool, + TracerProvider: m.Tracer(ctx).Provider(), + Unsafe: m.Config().DbIgnoreUnknownTableColumns(), } - if err := resilience.Retry(m.l, 5*time.Second, 5*time.Minute, c.Open); err != nil { - return errorsx.WithStack(err) + + for _, f := range m.dbOptsModifier { + f(opts) } - p, err := sql.NewPersister(ctx, c, m, m.Config(), m.l) + c, err := pop.NewConnection(opts) if err != nil { - return err + return errors.WithStack(err) } - m.persister = p - if err := m.initialPing(m); err != nil { - return err + if err := resilience.Retry(m.l, 5*time.Second, 5*time.Minute, c.Open); err != nil { + return errors.WithStack(err) } - if m.Config().HSMEnabled() { - hardwareKeyManager := hsm.NewKeyManager(m.HSMContext(), m.Config()) - m.defaultKeyManager = jwk.NewManagerStrategy(hardwareKeyManager, m.persister) - } else { - m.defaultKeyManager = m.persister + m.basePersister = sql.NewBasePersister(c, m) + if err := m.initialPing(ctx, m.Logger(), m.basePersister); err != nil { + m.Logger().Print("Could not ping database: ", err) + return err } + m.migrator = sql.NewMigrationManager(c, m, extraMigrations, goMigrations) + // if dsn is memory we have to run the migrations on every start // use case - such as // - just in memory // - shared connection // - shared but unique in the same process // see: https://sqlite.org/inmemorydb.html - if dbal.IsMemorySQLite(m.Config().DSN()) { - m.Logger().Print("Hydra is running migrations on every startup as DSN is memory.\n") - m.Logger().Print("This means your data is lost when Hydra terminates.\n") - if err := p.MigrateUp(context.Background()); err != nil { - return err - } - } else if migrate { - if err := p.MigrateUp(context.Background()); err != nil { + switch { + case dbal.IsMemorySQLite(m.Config().DSN()): + m.Logger().Println("Hydra is running migrations on every startup as DSN is memory.") + m.Logger().Println("This means your data is lost when Hydra 
terminates.") + fallthrough + case migrate: + if err := m.migrator.MigrateUp(ctx); err != nil { return err } } - if skipNetworkInit { - m.persister = p - } else { - net, err := p.DetermineNetwork(ctx) - if err != nil { - m.Logger().WithError(err).Warnf("Unable to determine network, retrying.") + if !skipNetworkInit { + if err := m.InitNetwork(ctx); err != nil { return err } - - m.persister = p.WithFallbackNetworkID(net.ID) } + } + + return nil +} + +func (m *RegistrySQL) InitNetwork(ctx context.Context) error { + net, err := m.basePersister.DetermineNetwork(ctx) + if err != nil { + m.Logger().WithError(err).Warnf("Unable to determine network, retrying.") + return err + } + + m.basePersister = m.basePersister.WithFallbackNetworkID(net.ID) + return nil +} + +func (m *RegistrySQL) PingContext(ctx context.Context) error { return m.basePersister.Ping(ctx) } + +func (m *RegistrySQL) BasePersister() *sql.BasePersister { return m.basePersister } +func (m *RegistrySQL) ClientManager() client.Manager { return m.Persister() } +func (m *RegistrySQL) ConsentManager() consent.Manager { + if m.consentManager != nil { + return m.consentManager + } + return &sql.ConsentPersister{BasePersister: m.basePersister} +} +func (m *RegistrySQL) ObfuscatedSubjectManager() consent.ObfuscatedSubjectManager { + return m.Persister() +} +func (m *RegistrySQL) LoginManager() consent.LoginManager { return m.Persister() } +func (m *RegistrySQL) LogoutManager() consent.LogoutManager { return m.Persister() } +func (m *RegistrySQL) OAuth2Storage() x.FositeStorer { return m.Persister() } +func (m *RegistrySQL) KeyManager() jwk.Manager { + if m.keyManager == nil { + softwareKeyManager := &sql.JWKPersister{D: m} if m.Config().HSMEnabled() { hardwareKeyManager := hsm.NewKeyManager(m.HSMContext(), m.Config()) - m.defaultKeyManager = jwk.NewManagerStrategy(hardwareKeyManager, m.persister) + m.keyManager = jwk.NewManagerStrategy(hardwareKeyManager, softwareKeyManager) } else { - m.defaultKeyManager = m.persister + m.keyManager = softwareKeyManager } + } + return m.keyManager +} + +func (m *RegistrySQL) GrantManager() trust.GrantManager { return m.Persister() } +func (m *RegistrySQL) Contextualizer() contextx.Contextualizer { + if m.ctxer == nil { + panic("registry Contextualizer not set") } + return m.ctxer +} - return nil +func (m *RegistrySQL) addPublicCORSOnHandler(ctx context.Context) func(http.Handler) http.Handler { + corsConfig, corsEnabled := m.Config().CORSPublic(ctx) + if !corsEnabled { + return func(h http.Handler) http.Handler { + return h + } + } + if m.publicCORS == nil { + m.publicCORS = cors.New(corsConfig) + } + return func(h http.Handler) http.Handler { + return m.publicCORS.Handler(h) + } } -func (m *RegistrySQL) alwaysCanHandle(dsn string) bool { - scheme := strings.Split(dsn, "://")[0] - s := dbal.Canonicalize(scheme) - return s == dbal.DriverMySQL || s == dbal.DriverPostgreSQL || s == dbal.DriverCockroachDB +func (m *RegistrySQL) RegisterPublicRoutes(ctx context.Context, public *httprouterx.RouterPublic) { + m.HealthHandler().SetHealthRoutes(public, false, healthx.WithMiddleware(m.addPublicCORSOnHandler(ctx))) + + corsMW := oauth2cors.Middleware(m) + jwk.NewHandler(m).SetPublicRoutes(public, corsMW) + client.NewHandler(m).SetPublicRoutes(public) + oauth2.NewHandler(m).SetPublicRoutes(public, corsMW) } -func (m *RegistrySQL) Ping() error { - return m.Persister().Ping() +func (m *RegistrySQL) RegisterAdminRoutes(admin *httprouterx.RouterAdmin) { + m.HealthHandler().SetHealthRoutes(admin, true) + 
m.HealthHandler().SetVersionRoutes(admin) + admin.Handler("GET", prometheus.MetricsPrometheusPath, promhttp.Handler()) + + consent.NewHandler(m).SetRoutes(admin) + jwk.NewHandler(m).SetAdminRoutes(admin) + client.NewHandler(m).SetAdminRoutes(admin) + oauth2.NewHandler(m).SetAdminRoutes(admin) + trust.NewHandler(m).SetRoutes(admin) } -func (m *RegistrySQL) ClientManager() client.Manager { - return m.Persister() +func (m *RegistrySQL) Writer() herodot.Writer { + if m.writer == nil { + h := herodot.NewJSONWriter(m.Logger()) + h.ErrorEnhancer = x.ErrorEnhancer + m.writer = h + } + return m.writer } -func (m *RegistrySQL) ConsentManager() consent.Manager { - return m.Persister() +func (m *RegistrySQL) Logger() *logrusx.Logger { + if m.l == nil { + m.l = logrusx.New("Ory Hydra", config.Version) + } + return m.l } -func (m *RegistrySQL) OAuth2Storage() x.FositeStorer { - return m.Persister() +func (m *RegistrySQL) ClientHasher() fosite.Hasher { + if m.fh == nil { + m.fh = x.NewHasher(m, m.Config()) + } + return m.fh } -func (m *RegistrySQL) KeyManager() jwk.Manager { - return m.defaultKeyManager +func (m *RegistrySQL) ClientValidator() *client.Validator { + if m.cv == nil { + m.cv = client.NewValidator(m) + } + return m.cv } -func (m *RegistrySQL) SoftwareKeyManager() jwk.Manager { - return m.Persister() +func (m *RegistrySQL) HealthHandler() *healthx.Handler { + if m.hh == nil { + m.hh = healthx.NewHandler(m.Writer(), config.Version, healthx.ReadyCheckers{ + "database": func(r *http.Request) error { + return m.PingContext(r.Context()) + }, + "migrations": func(r *http.Request) error { + status, err := m.migrator.MigrationStatus(r.Context()) + if err != nil { + return err + } + + if status.HasPending() { + var notApplied []string + for _, s := range status { + if s.State != "Applied" { + notApplied = append(notApplied, s.Version) + } + } + err := errors.Errorf("migrations have not yet been fully applied: %+v", notApplied) + m.Logger().WithField("not_applied", fmt.Sprintf("%+v", notApplied)).WithError(err).Warn("Instance is not yet ready because migrations have not yet been fully applied.") + return err + } + return nil + }, + }) + } + + return m.hh } -func (m *RegistrySQL) GrantManager() trust.GrantManager { - return m.Persister() +func (m *RegistrySQL) ConsentStrategy() consent.Strategy { + if m.cos == nil { + m.cos = consent.NewStrategy(m) + } + return m.cos +} + +func (m *RegistrySQL) KeyCipher() *aead.AESGCM { + if m.kc == nil { + m.kc = aead.NewAESGCM(m.Config()) + } + return m.kc +} + +func (m *RegistrySQL) FlowCipher() *aead.XChaCha20Poly1305 { + if m.flowc == nil { + m.flowc = aead.NewXChaCha20Poly1305(m.Config()) + } + return m.flowc +} + +func (m *RegistrySQL) CookieStore(ctx context.Context) (sessions.Store, error) { + var keys [][]byte + secrets, err := m.conf.GetCookieSecrets(ctx) + if err != nil { + return nil, err + } + + for _, k := range secrets { + encrypt := sha256.Sum256(k) + keys = append(keys, k, encrypt[:]) + } + + cs := sessions.NewCookieStore(keys...) + cs.Options.Secure = m.Config().CookieSecure(ctx) + cs.Options.HttpOnly = true + + // CookieStore MaxAge is set to 86400 * 30 by default. This prevents secure cookies retrieval with expiration > 30 days. 
+ // MaxAge(0) disables internal MaxAge check by SecureCookie, see: + // + // https://github.com/ory/hydra/pull/2488#discussion_r618992698 + cs.MaxAge(0) + + if domain := m.Config().CookieDomain(ctx); domain != "" { + cs.Options.Domain = domain + } + + cs.Options.Path = "/" + if sameSite := m.Config().CookieSameSiteMode(ctx); sameSite != 0 { + cs.Options.SameSite = sameSite + } + + return cs, nil +} + +func (m *RegistrySQL) HTTPClient(_ context.Context, opts ...httpx.ResilientOptions) *retryablehttp.Client { + opts = append(opts, + httpx.ResilientClientWithLogger(m.Logger()), + httpx.ResilientClientWithMaxRetry(2), + httpx.ResilientClientWithConnectionTimeout(30*time.Second)) + + if m.Config().ClientHTTPNoPrivateIPRanges() { + opts = append( + opts, + httpx.ResilientClientDisallowInternalIPs(), + httpx.ResilientClientAllowInternalIPRequestsTo(m.Config().ClientHTTPPrivateIPExceptionURLs()...), + ) + } + return httpx.NewResilientClient(opts...) +} + +func (m *RegistrySQL) OAuth2Provider() fosite.OAuth2Provider { + if m.fop == nil { + m.fop = fosite.NewOAuth2Provider(m, m.OAuth2ProviderConfig()) + } + return m.fop +} + +func (m *RegistrySQL) OpenIDJWTSigner() jwk.JWTSigner { + if m.oidcs == nil { + m.oidcs = jwk.NewDefaultJWTSigner(m, x.OpenIDConnectKeyName) + } + return m.oidcs +} + +func (m *RegistrySQL) AccessTokenJWTSigner() jwk.JWTSigner { + if m.ats == nil { + m.ats = jwk.NewDefaultJWTSigner(m, x.OAuth2JWTKeyName) + } + return m.ats +} + +func (m *RegistrySQL) OAuth2EnigmaStrategy() *hmac.HMACStrategy { + if m.enigmaHMAC == nil { + m.enigmaHMAC = &hmac.HMACStrategy{Config: m.OAuth2Config()} + } + return m.enigmaHMAC +} + +func (m *RegistrySQL) OAuth2HMACStrategy() foauth2.CoreStrategy { + if m.hmacs == nil { + m.hmacs = foauth2.NewHMACSHAStrategy(m.OAuth2EnigmaStrategy(), m.OAuth2Config()) + } + return m.hmacs +} + +func (m *RegistrySQL) OAuth2JWTStrategy() foauth2.AccessTokenStrategy { + if m.jwtStrategy == nil { + m.jwtStrategy = &foauth2.DefaultJWTStrategy{ + Signer: m.AccessTokenJWTSigner(), + Config: m.OAuth2Config(), + } + } + return m.jwtStrategy +} + +// rfc8628HMACStrategy returns the rfc8628 strategy +func (m *RegistrySQL) rfc8628HMACStrategy() *rfc8628.DefaultDeviceStrategy { + if m.deviceHmac == nil { + m.deviceHmac = compose.NewDeviceStrategy(m.OAuth2Config()) + } + return m.deviceHmac +} + +// DeviceRateLimitStrategy implements rfc8628.DeviceRateLimitStrategyProvider +func (m *RegistrySQL) DeviceRateLimitStrategy() rfc8628.DeviceRateLimitStrategy { + return m.rfc8628HMACStrategy() +} + +// DeviceCodeStrategy implements rfc8628.DeviceCodeStrategyProvider +func (m *RegistrySQL) DeviceCodeStrategy() rfc8628.DeviceCodeStrategy { + return m.rfc8628HMACStrategy() +} + +// UserCodeStrategy implements rfc8628.UserCodeStrategyProvider +func (m *RegistrySQL) UserCodeStrategy() rfc8628.UserCodeStrategy { + return m.rfc8628HMACStrategy() +} + +func (m *RegistrySQL) OAuth2Config() *fositex.Config { + if m.fc == nil { + m.fc = fositex.NewConfig(m) + } + return m.fc +} + +func (m *RegistrySQL) ExtraFositeFactories() []fositex.Factory { + return m.fositeFactories +} + +func (m *RegistrySQL) OAuth2ProviderConfig() fosite.Configurator { + if m.oc != nil { + return m.oc + } + + conf := m.OAuth2Config() + deviceHmacAtStrategy := m.rfc8628HMACStrategy() + oidcSigner := m.OpenIDJWTSigner() + + conf.LoadDefaultHandlers(m, &compose.CommonStrategyProvider{ + CoreStrategy: fositex.NewTokenStrategy(m), + DeviceStrategy: deviceHmacAtStrategy, + OIDCTokenStrategy: 
&openid.DefaultStrategy{ + Config: conf, + Signer: oidcSigner, + }, + Signer: oidcSigner, + }) + + m.oc = conf + return m.oc +} + +func (m *RegistrySQL) OpenIDConnectRequestValidator() *openid.OpenIDConnectRequestValidator { + if m.forv == nil { + m.forv = openid.NewOpenIDConnectRequestValidator(&openid.DefaultStrategy{ + Config: m.OAuth2ProviderConfig(), + Signer: m.OpenIDJWTSigner(), + }, m.OAuth2ProviderConfig()) + } + return m.forv +} + +func (m *RegistrySQL) Networker() x.Networker { return m.basePersister } + +func (m *RegistrySQL) Tracer(_ context.Context) *otelx.Tracer { + if m.trc == nil { + t, err := otelx.New("Ory Hydra", m.l, m.conf.Tracing()) + if err != nil { + m.Logger().WithError(err).Error("Unable to initialize Tracer.") + } else { + // Wrap the tracer if required + if m.tracerWrapper != nil { + t = m.tracerWrapper(t) + } + + m.trc = t + } + } + if m.trc == nil || m.trc.Tracer() == nil { + m.trc = otelx.NewNoop(m.l, m.Config().Tracing()) + } + + return m.trc +} + +func (m *RegistrySQL) Persister() persistence.Persister { return sql.NewPersister(m.basePersister, m) } +func (m *RegistrySQL) Config() *config.DefaultProvider { return m.conf } + +// WithConsentStrategy forces a consent strategy which is only used for testing. +func (m *RegistrySQL) WithConsentStrategy(c consent.Strategy) { m.cos = c } + +func (m *RegistrySQL) AccessRequestHooks() []oauth2.AccessRequestHook { + if m.arhs == nil { + m.arhs = []oauth2.AccessRequestHook{ + oauth2.RefreshTokenHook(m), + oauth2.TokenHook(m), + } + } + return m.arhs +} + +func (m *RegistrySQL) HSMContext() hsm.Context { + if m.hsm == nil { + m.hsm = hsm.NewContext(m.Config(), m.l) + } + return m.hsm +} + +func (m *RegistrySQL) Kratos() kratos.Client { + if m.kratos == nil { + m.kratos = kratos.New(m) + } + return m.kratos +} + +func (m *RegistrySQL) HTTPMiddlewares() []negroni.Handler { + return m.middlewares +} + +func (m *RegistrySQL) Migrator() *sql.MigrationManager { + return m.migrator +} + +func (m *RegistrySQL) Transaction(ctx context.Context, fn func(ctx context.Context) error) error { + return m.basePersister.Transaction(ctx, func(ctx context.Context, _ *pop.Connection) error { return fn(ctx) }) } diff --git a/driver/registry_sql_test.go b/driver/registry_sql_test.go index 3cd7cd33742..8a0044199cf 100644 --- a/driver/registry_sql_test.go +++ b/driver/registry_sql_test.go @@ -5,88 +5,197 @@ package driver import ( "context" + "errors" + "fmt" + "io" "math/rand" + "net/http" + "net/http/httptest" "strconv" "testing" - "github.com/stretchr/testify/require" - + "github.com/gorilla/sessions" + pkgerr "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/persistence/sql" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/persistence/sql" "github.com/ory/x/configx" - "github.com/ory/x/contextx" - "github.com/ory/x/errorsx" + "github.com/ory/x/dbal" + "github.com/ory/x/httpx" "github.com/ory/x/logrusx" - "github.com/ory/x/sqlcon/dockertest" + "github.com/ory/x/popx" + 
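+	// popx provides popx.DBColumns, used below to build an explicit column list for the raw SELECT; randx generates a throwaway system secret for the cookie-store test.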
"github.com/ory/x/randx" ) -func TestDefaultKeyManager_HsmDisabled(t *testing.T) { - l := logrusx.New("", "") - c := config.MustNew(context.Background(), l, configx.SkipValidation()) - c.MustSet(context.Background(), config.KeyDSN, "postgres://user:password@127.0.0.1:9999/postgres") - c.MustSet(context.Background(), config.HSMEnabled, "false") - reg, err := NewRegistryWithoutInit(c, l) - r := reg.(*RegistrySQL) - r.initialPing = sussessfulPing() - if err := r.Init(context.Background(), true, false, &contextx.Default{}); err != nil { - t.Fatalf("unable to init registry: %s", err) +func init() { + sql.SilenceMigrations = true +} + +func TestGetJWKSFetcherStrategyHostEnforcement(t *testing.T) { + t.Parallel() + + r, err := New(t.Context(), WithConfigOptions( + configx.WithValues(map[string]any{ + config.KeyDSN: "memory", + config.HSMEnabled: "false", + config.KeyClientHTTPNoPrivateIPRanges: true, + }), + configx.WithConfigFiles("../internal/.hydra.yaml"), + )) + require.NoError(t, err) + + _, err = r.OAuth2Config().GetJWKSFetcherStrategy(t.Context()).Resolve(t.Context(), "http://localhost:8080", true) + require.ErrorAs(t, err, new(httpx.ErrPrivateIPAddressDisallowed)) +} + +func TestRegistrySQL_newKeyStrategy_handlesNetworkError(t *testing.T) { + t.Parallel() + + // Test ensures any network specific error is logged with a + // specific message when attempting to create a new key strategy: issue #2338 + + hook := test.Hook{} // Test hook for asserting log messages + + l := logrusx.New("", "", logrusx.WithHook(&hook)) + l.Logrus().SetOutput(io.Discard) + + // Create a config and set a valid but unresolvable DSN + c := config.MustNew(t, l, + configx.WithConfigFiles("../internal/.hydra.yaml"), + configx.WithValues(map[string]any{ + config.KeyDSN: "postgres://user:password@127.0.0.1:9999/postgres", + config.HSMEnabled: false, + }), + ) + + r, err := newRegistryWithoutInit(c, l) + if err != nil { + t.Errorf("Failed to create registry: %s", err) + return } - assert.NoError(t, err) - assert.IsType(t, &sql.Persister{}, reg.KeyManager()) - assert.IsType(t, &sql.Persister{}, reg.SoftwareKeyManager()) + + r.initialPing = failedPing(errors.New("snizzles")) + + assert.ErrorContains(t, + r.Init(t.Context(), true, false, nil, nil), + "snizzles", + ) + + assert.Equal(t, logrus.InfoLevel, hook.LastEntry().Level) + assert.Contains(t, hook.LastEntry().Message, "snizzles") +} + +func TestRegistrySQL_CookieStore_MaxAgeZero(t *testing.T) { + t.Parallel() + + // Test ensures that CookieStore MaxAge option is equal to zero after initialization + + r, err := New(t.Context(), SkipNetworkInit(), DisableValidation(), WithConfigOptions( + configx.WithValues(map[string]any{ + config.KeyDSN: dbal.NewSQLiteTestDatabase(t), + config.KeyGetSystemSecret: []string{randx.MustString(32, randx.AlphaNum)}, + }), + )) + require.NoError(t, err) + + s, err := r.CookieStore(t.Context()) + require.NoError(t, err) + cs := s.(*sessions.CookieStore) + + assert.Equal(t, cs.Options.MaxAge, 0) +} + +func TestRegistrySQL_HTTPClient(t *testing.T) { + t.Parallel() + + ts := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, _ *http.Request) { + writer.WriteHeader(http.StatusOK) + })) + defer ts.Close() + + r, err := New(t.Context(), SkipNetworkInit(), DisableValidation(), WithConfigOptions(configx.WithValues(map[string]interface{}{ + config.KeyDSN: dbal.NewSQLiteTestDatabase(t), + config.KeyClientHTTPNoPrivateIPRanges: true, + config.KeyClientHTTPPrivateIPExceptionURLs: []string{ts.URL + "/exception/*"}, + }))) + 
require.NoError(t, err) + + t.Run("case=matches exception glob", func(t *testing.T) { + res, err := r.HTTPClient(t.Context()).Get(ts.URL + "/exception/foo") + require.NoError(t, err) + assert.Equal(t, 200, res.StatusCode) + }) + + t.Run("case=does not match exception glob", func(t *testing.T) { + _, err := r.HTTPClient(t.Context()).Get(ts.URL + "/foo") + assert.ErrorContains(t, err, "prohibited IP address") + }) +} + +func TestDefaultKeyManager_HsmDisabled(t *testing.T) { + t.Parallel() + + r, err := New(t.Context(), + SkipNetworkInit(), + DisableValidation(), + WithConfigOptions( + configx.SkipValidation(), + configx.WithValues(map[string]any{ + config.KeyDSN: "memory", + config.HSMEnabled: false, + }), + ), + ) + require.NoError(t, err) + assert.IsType(t, &sql.JWKPersister{}, r.KeyManager()) } func TestDbUnknownTableColumns(t *testing.T) { - ctx := context.Background() - l := logrusx.New("", "") - c := config.MustNew(ctx, l, configx.SkipValidation()) - postgresDsn := dockertest.RunTestPostgreSQL(t) - c.MustSet(ctx, config.KeyDSN, postgresDsn) - reg, err := NewRegistryFromDSN(ctx, c, l, false, true, &contextx.Default{}) + t.Parallel() + + dsn := dbal.NewSQLiteTestDatabase(t) + reg, err := New(t.Context(), WithConfigOptions(configx.WithValue("dsn", dsn)), WithAutoMigrate()) require.NoError(t, err) - statement := "ALTER TABLE \"hydra_client\" ADD COLUMN \"temp_column\" VARCHAR(128) NOT NULL DEFAULT '';" - require.NoError(t, reg.Persister().Connection(ctx).RawQuery(statement).Exec()) + statement := `ALTER TABLE "hydra_client" ADD COLUMN "temp_column" VARCHAR(128) NOT NULL DEFAULT '';` + require.NoError(t, reg.Persister().Connection(t.Context()).RawQuery(statement).Exec()) cl := &client.Client{ - LegacyClientID: strconv.Itoa(rand.Int()), + ID: strconv.Itoa(rand.Int()), } - require.NoError(t, reg.Persister().CreateClient(ctx, cl)) - getClients := func(reg Registry) ([]client.Client, error) { + require.NoError(t, reg.Persister().CreateClient(t.Context(), cl)) + getClients := func(ctx context.Context, reg *RegistrySQL) ([]client.Client, error) { readClients := make([]client.Client, 0) - return readClients, reg.Persister().Connection(ctx).RawQuery("SELECT * FROM \"hydra_client\"").All(&readClients) + conn := reg.Persister().Connection(ctx) + cols := popx.DBColumns[client.Client](conn.Dialect) + return readClients, conn.RawQuery(fmt.Sprintf(`SELECT %s, temp_column FROM "hydra_client"`, cols)).All(&readClients) } t.Run("with ignore disabled (default behavior)", func(t *testing.T) { - _, err := getClients(reg) - require.Error(t, err) - assert.Contains(t, err.Error(), "missing destination name temp_column") + _, err := getClients(t.Context(), reg) + assert.ErrorContains(t, err, "missing destination name temp_column") }) t.Run("with ignore enabled", func(t *testing.T) { - c.MustSet(ctx, config.KeyDBIgnoreUnknownTableColumns, true) - reg, err := NewRegistryFromDSN(ctx, c, l, false, true, &contextx.Default{}) + reg, err := New(t.Context(), WithConfigOptions( + configx.WithValue("dsn", dsn), + configx.WithValue(config.KeyDBIgnoreUnknownTableColumns, true), + )) require.NoError(t, err) - actual, err := getClients(reg) + actual, err := getClients(t.Context(), reg) require.NoError(t, err) assert.Len(t, actual, 1) }) } -func sussessfulPing() func(r *RegistrySQL) error { - return func(r *RegistrySQL) error { - // fake that ping is successful - return nil - } -} - -func failedPing(err error) func(r *RegistrySQL) error { - return func(r *RegistrySQL) error { - r.Logger().Fatalf(err.Error()) - return 
errorsx.WithStack(err) +func failedPing(err error) func(context.Context, *logrusx.Logger, *sql.BasePersister) error { + return func(context.Context, *logrusx.Logger, *sql.BasePersister) error { + return pkgerr.WithStack(err) } } diff --git a/driver/registry_sqlite.go b/driver/registry_sqlite.go deleted file mode 100644 index bb39281b481..00000000000 --- a/driver/registry_sqlite.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -//go:build sqlite -// +build sqlite - -package driver - -import ( - "strings" -) - -func (m *RegistrySQL) CanHandle(dsn string) bool { - scheme := strings.Split(dsn, "://")[0] - return scheme == "sqlite" || scheme == "sqlite3" || m.alwaysCanHandle(dsn) -} diff --git a/flow/.snapshots/TestAcceptOAuth2ConsentRequestSession_MarshalJSON.json b/flow/.snapshots/TestAcceptOAuth2ConsentRequestSession_MarshalJSON.json new file mode 100644 index 00000000000..773d747f8c7 --- /dev/null +++ b/flow/.snapshots/TestAcceptOAuth2ConsentRequestSession_MarshalJSON.json @@ -0,0 +1,4 @@ +{ + "access_token": {}, + "id_token": {} +} diff --git a/flow/.snapshots/TestAcceptOAuth2ConsentRequest_MarshalJSON.json b/flow/.snapshots/TestAcceptOAuth2ConsentRequest_MarshalJSON.json new file mode 100644 index 00000000000..b6ebede3286 --- /dev/null +++ b/flow/.snapshots/TestAcceptOAuth2ConsentRequest_MarshalJSON.json @@ -0,0 +1,8 @@ +{ + "grant_scope": [], + "grant_access_token_audience": [], + "session": null, + "remember": false, + "remember_for": 0, + "context": {} +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-consent_error.json b/flow/.snapshots/TestCanUseLegacyChallenges-consent_error.json new file mode 100644 index 00000000000..0eb8a7f13ba --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-consent_error.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": 
"a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.551687+02:00", + "q": 129, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-consent_initialized.json b/flow/.snapshots/TestCanUseLegacyChallenges-consent_initialized.json new file mode 100644 index 00000000000..9097751407c --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-consent_initialized.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.551041+02:00", + "q": 4, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { 
+ "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-consent_unused.json b/flow/.snapshots/TestCanUseLegacyChallenges-consent_unused.json new file mode 100644 index 00000000000..75fe0582431 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-consent_unused.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.551431+02:00", + "q": 5, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-consent_used.json b/flow/.snapshots/TestCanUseLegacyChallenges-consent_used.json new file mode 100644 index 00000000000..1fc76cf601b --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-consent_used.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + 
"ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.553175+02:00", + "q": 6, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-device_error.json b/flow/.snapshots/TestCanUseLegacyChallenges-device_error.json new file mode 100644 index 00000000000..a7eaa304869 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-device_error.json @@ -0,0 +1,116 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + 
"authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lv": "login-verifier", + "lc": "login-csrf", + "li": "2025-10-09T12:51:00Z", + "ia": "2025-10-09T14:04:36.552604+02:00", + "q": 127, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dv": "device-verifier", + "dc": "device-csrf", + "du": false, + "dh": null, + "cc": "consent-request", + "cs": true, + "cv": "consent-verifier", + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-device_initialized.json b/flow/.snapshots/TestCanUseLegacyChallenges-device_initialized.json new file mode 100644 index 00000000000..15cf9775ec7 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-device_initialized.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, 
+ "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.553365+02:00", + "q": 7, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-device_unused.json b/flow/.snapshots/TestCanUseLegacyChallenges-device_unused.json new file mode 100644 index 00000000000..fabc7a90ee1 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-device_unused.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.552222+02:00", + "q": 8, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" 
+ ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-device_used.json b/flow/.snapshots/TestCanUseLegacyChallenges-device_used.json new file mode 100644 index 00000000000..b4df2d3d653 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-device_used.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.552433+02:00", + "q": 9, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-login_error.json b/flow/.snapshots/TestCanUseLegacyChallenges-login_error.json new file mode 100644 index 00000000000..fea668a491d --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-login_error.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": 
true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.55082+02:00", + "q": 128, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-login_initialized.json b/flow/.snapshots/TestCanUseLegacyChallenges-login_initialized.json new file mode 100644 index 00000000000..3ce62ac6540 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-login_initialized.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": 
"0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.550095+02:00", + "q": 1, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-login_unused.json b/flow/.snapshots/TestCanUseLegacyChallenges-login_unused.json new file mode 100644 index 00000000000..085831bef1b --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-login_unused.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + 
"device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.552799+02:00", + "q": 2, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestCanUseLegacyChallenges-login_used.json b/flow/.snapshots/TestCanUseLegacyChallenges-login_used.json new file mode 100644 index 00000000000..d16752162b8 --- /dev/null +++ b/flow/.snapshots/TestCanUseLegacyChallenges-login_used.json @@ -0,0 +1,111 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "n": "34b4dd42-f02b-4448-b066-8e4e6655c0bb", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "ia": "2025-10-09T14:04:36.552994+02:00", + "q": 3, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 
3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestDecodeAndInvalidateConsentVerifier-case=successful_decode_and_invalidate_with_valid_consent_verifier.json b/flow/.snapshots/TestDecodeAndInvalidateConsentVerifier-case=successful_decode_and_invalidate_with_valid_consent_verifier.json new file mode 100644 index 00000000000..883f64f54c2 --- /dev/null +++ b/flow/.snapshots/TestDecodeAndInvalidateConsentVerifier-case=successful_decode_and_invalidate_with_valid_consent_verifier.json @@ -0,0 +1,109 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "q": 6, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestDecodeAndInvalidateDeviceVerifier-case=successful_decode_and_invalidate_with_valid_device_verifier.json b/flow/.snapshots/TestDecodeAndInvalidateDeviceVerifier-case=successful_decode_and_invalidate_with_valid_device_verifier.json new file mode 100644 index 00000000000..7ca27b794ab --- /dev/null +++ 
b/flow/.snapshots/TestDecodeAndInvalidateDeviceVerifier-case=successful_decode_and_invalidate_with_valid_device_verifier.json @@ -0,0 +1,109 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "q": 9, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestDecodeAndInvalidateLoginVerifier-case=successful_decode_and_invalidate_with_valid_login_verifier.json b/flow/.snapshots/TestDecodeAndInvalidateLoginVerifier-case=successful_decode_and_invalidate_with_valid_login_verifier.json new file mode 100644 index 00000000000..6fe30db4a2d --- /dev/null +++ b/flow/.snapshots/TestDecodeAndInvalidateLoginVerifier-case=successful_decode_and_invalidate_with_valid_login_verifier.json @@ -0,0 +1,109 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + 
"redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "q": 3, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestDecodeFromConsentChallenge-case=successful_decode_with_valid_consent_challenge.json b/flow/.snapshots/TestDecodeFromConsentChallenge-case=successful_decode_with_valid_consent_challenge.json new file mode 100644 index 00000000000..6bab0bdd3ef --- /dev/null +++ b/flow/.snapshots/TestDecodeFromConsentChallenge-case=successful_decode_with_valid_consent_challenge.json @@ -0,0 +1,109 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + 
"implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "q": 5, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestDecodeFromDeviceChallenge-case=successful_decode_with_valid_device_challenge.json b/flow/.snapshots/TestDecodeFromDeviceChallenge-case=successful_decode_with_valid_device_challenge.json new file mode 100644 index 00000000000..3a8753db1ae --- /dev/null +++ b/flow/.snapshots/TestDecodeFromDeviceChallenge-case=successful_decode_with_valid_device_challenge.json @@ -0,0 +1,109 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "q": 8, + "lr": true, + "lf": 3000, + "ll": 
true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestDecodeFromLoginChallenge-case=successful_decode_with_valid_login_challenge.json b/flow/.snapshots/TestDecodeFromLoginChallenge-case=successful_decode_with_valid_login_challenge.json new file mode 100644 index 00000000000..d7f2dcf4e04 --- /dev/null +++ b/flow/.snapshots/TestDecodeFromLoginChallenge-case=successful_decode_with_valid_login_challenge.json @@ -0,0 +1,109 @@ +{ + "i": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "rs": [ + "openid", + "profile" + ], + "ra": [ + "https://api.example.org" + ], + "ls": true, + "s": "test-subject", + "oc": { + "acr_values": [ + "http://acrvalues.example.org" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "email": "user@example.org" + }, + "login_hint": "login-hint" + }, + "c": { + "client_id": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "client_name": "", + "redirect_uris": null, + "grant_types": null, + "response_types": null, + "scope": "", + "audience": null, + "owner": "", + "policy_uri": "", + "allowed_cors_origins": null, + "tos_uri": "", + "client_uri": "", + "logo_uri": "", + "contacts": null, + "client_secret_expires_at": 0, + "subject_type": "", + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z", + "skip_consent": false, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "ci": "a12bf95e-ccfc-45fc-b10d-1358790772c7", + "r": "https://example.org/oauth2/auth?client_id=test", + "si": "session-123", + "is": "session-id", + "lc": "login-csrf", + "q": 2, + "lr": true, + "lf": 3000, + "ll": true, + "a": "http://acrvalues.example.org", + "am": [ + "pwd" + ], + "fs": "forced-subject", + "ct": { + "foo": "bar" + }, + "la": "2025-10-09T12:52:00Z", + "di": "device-challenge", + "dr": "device-code-request", + "dc": "device-csrf", + "dh": null, + "cc": "consent-request", + "cs": true, + "cr": "consent-csrf", + "gs": [ + "openid" + ], + "ga": [ + "https://api.example.org" + ], + "ce": true, + "cf": 3000, + "ch": null, + "cx": null, + "st": { + "foo": "bar", + "sub": "test-subject" + }, + "sa": { + "aud": [ + "https://api.example.org" + ], + "scp": [ + "openid", + "profile" + ] + } +} diff --git a/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-consent_challenge.json 
b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-consent_challenge.json new file mode 100644 index 00000000000..10f524626df --- /dev/null +++ b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-consent_challenge.json @@ -0,0 +1,123 @@ +{ + "i": "test-flow-id", + "rs": [ + "scope1", + "scope2" + ], + "ra": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "s": "some-subject@some-idp-somewhere.com", + "oc": { + "acr_values": [ + "acr1", + "acr2" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "claim1": "value1", + "claim2": "value2" + }, + "login_hint": "some-login-hint" + }, + "c": { + "client_id": "test-client-id", + "client_name": "some-client-name", + "client_secret": "some-supersafe-secret", + "redirect_uris": [ + "https://redirect1.example.org/callback", + "https://redirect2.example.org/callback" + ], + "grant_types": [ + "authorization_code", + "refresh_token" + ], + "response_types": [ + "code" + ], + "scope": "scope1 scope2", + "audience": [ + "https://api.example.org/v1 https://api.example.org/v2" + ], + "owner": "some-owner", + "policy_uri": "https://policy.example.org", + "allowed_cors_origins": [ + "https://cors1.example.org", + "https://cors2.example.org" + ], + "tos_uri": "https://tos.example.org", + "client_uri": "https://client.example.org", + "logo_uri": "https://logo.example.org", + "contacts": [ + "contact1", + "contact2" + ], + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks_uri": "https://jwks.example.org", + "token_endpoint_auth_method": "client_secret_basic", + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", + "metadata": { + "client-metadata-key1": "val1" + }, + "access_token_strategy": "jwt", + "skip_consent": true, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "r": "https://auth.hydra.local/oauth2/auth?client_id=some-client-id\u0026response_type=code\u0026scope=scope1+scope2\u0026redirect_uri=https%3A%2F%2Fredirect1.example.org%2Fcallback\u0026state=some-state\u0026nonce=some-nonce", + "si": "some-session-id", + "lc": "test-login-csrf", + "q": 1, + "lr": true, + "lf": 3600, + "ct": { + "context-key1": "val1" + }, + "la": null, + "dh": null, + "gs": [ + "scope1", + "scope2" + ], + "ga": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "ce": true, + "cf": 3600, + "ch": "2025-01-01T00:00:00Z", + "cx": null, + "st": { + "session-id-token-key1": "val1", + "session-id-token-key2": "val2", + "session-id-token-key3": "val3", + "session-id-token-key4": "val4", + "session-id-token-key5": "val5" + }, + "sa": { + "session-access-token-key1": "val1", + "session-access-token-key2": "val2", + "session-access-token-key3": "val3", + "session-access-token-key4": "val4", + "session-access-token-key5": "val5" + } +} diff --git 
a/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-consent_verifier.json b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-consent_verifier.json new file mode 100644 index 00000000000..10f524626df --- /dev/null +++ b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-consent_verifier.json @@ -0,0 +1,123 @@ +{ + "i": "test-flow-id", + "rs": [ + "scope1", + "scope2" + ], + "ra": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "s": "some-subject@some-idp-somewhere.com", + "oc": { + "acr_values": [ + "acr1", + "acr2" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "claim1": "value1", + "claim2": "value2" + }, + "login_hint": "some-login-hint" + }, + "c": { + "client_id": "test-client-id", + "client_name": "some-client-name", + "client_secret": "some-supersafe-secret", + "redirect_uris": [ + "https://redirect1.example.org/callback", + "https://redirect2.example.org/callback" + ], + "grant_types": [ + "authorization_code", + "refresh_token" + ], + "response_types": [ + "code" + ], + "scope": "scope1 scope2", + "audience": [ + "https://api.example.org/v1 https://api.example.org/v2" + ], + "owner": "some-owner", + "policy_uri": "https://policy.example.org", + "allowed_cors_origins": [ + "https://cors1.example.org", + "https://cors2.example.org" + ], + "tos_uri": "https://tos.example.org", + "client_uri": "https://client.example.org", + "logo_uri": "https://logo.example.org", + "contacts": [ + "contact1", + "contact2" + ], + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks_uri": "https://jwks.example.org", + "token_endpoint_auth_method": "client_secret_basic", + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", + "metadata": { + "client-metadata-key1": "val1" + }, + "access_token_strategy": "jwt", + "skip_consent": true, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "r": "https://auth.hydra.local/oauth2/auth?client_id=some-client-id\u0026response_type=code\u0026scope=scope1+scope2\u0026redirect_uri=https%3A%2F%2Fredirect1.example.org%2Fcallback\u0026state=some-state\u0026nonce=some-nonce", + "si": "some-session-id", + "lc": "test-login-csrf", + "q": 1, + "lr": true, + "lf": 3600, + "ct": { + "context-key1": "val1" + }, + "la": null, + "dh": null, + "gs": [ + "scope1", + "scope2" + ], + "ga": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "ce": true, + "cf": 3600, + "ch": "2025-01-01T00:00:00Z", + "cx": null, + "st": { + "session-id-token-key1": "val1", + "session-id-token-key2": "val2", + "session-id-token-key3": "val3", + "session-id-token-key4": "val4", + "session-id-token-key5": "val5" + }, + "sa": { + "session-access-token-key1": "val1", + "session-access-token-key2": "val2", + "session-access-token-key3": "val3", + 
"session-access-token-key4": "val4", + "session-access-token-key5": "val5" + } +} diff --git a/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-login_challenge.json b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-login_challenge.json new file mode 100644 index 00000000000..10f524626df --- /dev/null +++ b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-login_challenge.json @@ -0,0 +1,123 @@ +{ + "i": "test-flow-id", + "rs": [ + "scope1", + "scope2" + ], + "ra": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "s": "some-subject@some-idp-somewhere.com", + "oc": { + "acr_values": [ + "acr1", + "acr2" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "claim1": "value1", + "claim2": "value2" + }, + "login_hint": "some-login-hint" + }, + "c": { + "client_id": "test-client-id", + "client_name": "some-client-name", + "client_secret": "some-supersafe-secret", + "redirect_uris": [ + "https://redirect1.example.org/callback", + "https://redirect2.example.org/callback" + ], + "grant_types": [ + "authorization_code", + "refresh_token" + ], + "response_types": [ + "code" + ], + "scope": "scope1 scope2", + "audience": [ + "https://api.example.org/v1 https://api.example.org/v2" + ], + "owner": "some-owner", + "policy_uri": "https://policy.example.org", + "allowed_cors_origins": [ + "https://cors1.example.org", + "https://cors2.example.org" + ], + "tos_uri": "https://tos.example.org", + "client_uri": "https://client.example.org", + "logo_uri": "https://logo.example.org", + "contacts": [ + "contact1", + "contact2" + ], + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks_uri": "https://jwks.example.org", + "token_endpoint_auth_method": "client_secret_basic", + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", + "metadata": { + "client-metadata-key1": "val1" + }, + "access_token_strategy": "jwt", + "skip_consent": true, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "r": "https://auth.hydra.local/oauth2/auth?client_id=some-client-id\u0026response_type=code\u0026scope=scope1+scope2\u0026redirect_uri=https%3A%2F%2Fredirect1.example.org%2Fcallback\u0026state=some-state\u0026nonce=some-nonce", + "si": "some-session-id", + "lc": "test-login-csrf", + "q": 1, + "lr": true, + "lf": 3600, + "ct": { + "context-key1": "val1" + }, + "la": null, + "dh": null, + "gs": [ + "scope1", + "scope2" + ], + "ga": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "ce": true, + "cf": 3600, + "ch": "2025-01-01T00:00:00Z", + "cx": null, + "st": { + "session-id-token-key1": "val1", + "session-id-token-key2": "val2", + "session-id-token-key3": "val3", + "session-id-token-key4": "val4", + "session-id-token-key5": "val5" + }, + "sa": { + "session-access-token-key1": "val1", + 
"session-access-token-key2": "val2", + "session-access-token-key3": "val3", + "session-access-token-key4": "val4", + "session-access-token-key5": "val5" + } +} diff --git a/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-login_verifier.json b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-login_verifier.json new file mode 100644 index 00000000000..10f524626df --- /dev/null +++ b/flow/.snapshots/TestEncoding-encode_and_decode_with_snapshots-login_verifier.json @@ -0,0 +1,123 @@ +{ + "i": "test-flow-id", + "rs": [ + "scope1", + "scope2" + ], + "ra": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "s": "some-subject@some-idp-somewhere.com", + "oc": { + "acr_values": [ + "acr1", + "acr2" + ], + "ui_locales": [ + "en-US", + "en-GB" + ], + "display": "page", + "id_token_hint_claims": { + "claim1": "value1", + "claim2": "value2" + }, + "login_hint": "some-login-hint" + }, + "c": { + "client_id": "test-client-id", + "client_name": "some-client-name", + "client_secret": "some-supersafe-secret", + "redirect_uris": [ + "https://redirect1.example.org/callback", + "https://redirect2.example.org/callback" + ], + "grant_types": [ + "authorization_code", + "refresh_token" + ], + "response_types": [ + "code" + ], + "scope": "scope1 scope2", + "audience": [ + "https://api.example.org/v1 https://api.example.org/v2" + ], + "owner": "some-owner", + "policy_uri": "https://policy.example.org", + "allowed_cors_origins": [ + "https://cors1.example.org", + "https://cors2.example.org" + ], + "tos_uri": "https://tos.example.org", + "client_uri": "https://client.example.org", + "logo_uri": "https://logo.example.org", + "contacts": [ + "contact1", + "contact2" + ], + "client_secret_expires_at": 0, + "subject_type": "public", + "jwks_uri": "https://jwks.example.org", + "token_endpoint_auth_method": "client_secret_basic", + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", + "metadata": { + "client-metadata-key1": "val1" + }, + "access_token_strategy": "jwt", + "skip_consent": true, + "skip_logout_consent": null, + "authorization_code_grant_access_token_lifespan": null, + "authorization_code_grant_id_token_lifespan": null, + "authorization_code_grant_refresh_token_lifespan": null, + "client_credentials_grant_access_token_lifespan": null, + "implicit_grant_access_token_lifespan": null, + "implicit_grant_id_token_lifespan": null, + "jwt_bearer_grant_access_token_lifespan": null, + "refresh_token_grant_id_token_lifespan": null, + "refresh_token_grant_access_token_lifespan": null, + "refresh_token_grant_refresh_token_lifespan": null, + "device_authorization_grant_id_token_lifespan": null, + "device_authorization_grant_access_token_lifespan": null, + "device_authorization_grant_refresh_token_lifespan": null + }, + "r": "https://auth.hydra.local/oauth2/auth?client_id=some-client-id\u0026response_type=code\u0026scope=scope1+scope2\u0026redirect_uri=https%3A%2F%2Fredirect1.example.org%2Fcallback\u0026state=some-state\u0026nonce=some-nonce", + "si": "some-session-id", + "lc": "test-login-csrf", + "q": 1, + "lr": true, + "lf": 3600, + "ct": { + "context-key1": "val1" + }, + "la": null, + "dh": null, + "gs": [ + "scope1", + "scope2" + ], + "ga": [ + "https://api.example.org/v1", + "https://api.example.org/v2" + ], + "ce": true, + "cf": 3600, + "ch": "2025-01-01T00:00:00Z", + "cx": null, + "st": { + "session-id-token-key1": "val1", + "session-id-token-key2": "val2", + "session-id-token-key3": "val3", + "session-id-token-key4": "val4", + 
"session-id-token-key5": "val5" + }, + "sa": { + "session-access-token-key1": "val1", + "session-access-token-key2": "val2", + "session-access-token-key3": "val3", + "session-access-token-key4": "val4", + "session-access-token-key5": "val5" + } +} diff --git a/flow/.snapshots/TestHandledLoginRequest_MarshalJSON.json b/flow/.snapshots/TestHandledLoginRequest_MarshalJSON.json new file mode 100644 index 00000000000..ae233234cfc --- /dev/null +++ b/flow/.snapshots/TestHandledLoginRequest_MarshalJSON.json @@ -0,0 +1,10 @@ +{ + "remember": false, + "remember_for": 0, + "extend_session_lifespan": false, + "acr": "", + "amr": [], + "subject": "", + "force_subject_identifier": "", + "context": {} +} diff --git a/flow/.snapshots/TestLoginRequest_MarshalJSON.json b/flow/.snapshots/TestLoginRequest_MarshalJSON.json new file mode 100644 index 00000000000..00aa71a553f --- /dev/null +++ b/flow/.snapshots/TestLoginRequest_MarshalJSON.json @@ -0,0 +1,11 @@ +{ + "challenge": "", + "requested_scope": [], + "requested_access_token_audience": [], + "skip": false, + "subject": "", + "oidc_context": null, + "client": null, + "request_url": "", + "session_id": "" +} diff --git a/flow/.snapshots/TestLogoutRequest_MarshalJSON.json b/flow/.snapshots/TestLogoutRequest_MarshalJSON.json new file mode 100644 index 00000000000..312a294f4df --- /dev/null +++ b/flow/.snapshots/TestLogoutRequest_MarshalJSON.json @@ -0,0 +1,9 @@ +{ + "challenge": "", + "subject": "", + "request_url": "", + "rp_initiated": false, + "expires_at": null, + "requested_at": null, + "client": null +} diff --git a/flow/.snapshots/TestOAuth2ConsentRequestOpenIDConnectContext_MarshalJSON.json b/flow/.snapshots/TestOAuth2ConsentRequestOpenIDConnectContext_MarshalJSON.json new file mode 100644 index 00000000000..0967ef424bc --- /dev/null +++ b/flow/.snapshots/TestOAuth2ConsentRequestOpenIDConnectContext_MarshalJSON.json @@ -0,0 +1 @@ +{} diff --git a/flow/.snapshots/TestOAuth2ConsentRequest_MarshalJSON.json b/flow/.snapshots/TestOAuth2ConsentRequest_MarshalJSON.json new file mode 100644 index 00000000000..3d7701950ca --- /dev/null +++ b/flow/.snapshots/TestOAuth2ConsentRequest_MarshalJSON.json @@ -0,0 +1,15 @@ +{ + "challenge": "", + "consent_request_id": "", + "requested_scope": [], + "requested_access_token_audience": [], + "skip": false, + "subject": "", + "oidc_context": null, + "client": null, + "request_url": "", + "login_challenge": "", + "login_session_id": "", + "acr": "", + "amr": [] +} diff --git a/flow/.snapshots/TestOAuth2ConsentSession_MarshalJSON.json b/flow/.snapshots/TestOAuth2ConsentSession_MarshalJSON.json new file mode 100644 index 00000000000..5a50e8805c5 --- /dev/null +++ b/flow/.snapshots/TestOAuth2ConsentSession_MarshalJSON.json @@ -0,0 +1,11 @@ +{ + "consent_request_id": "", + "grant_scope": [], + "grant_access_token_audience": [], + "session": null, + "remember": false, + "remember_for": 0, + "handled_at": null, + "context": {}, + "consent_request": null +} diff --git a/consent/types.go b/flow/consent_types.go similarity index 72% rename from consent/types.go rename to flow/consent_types.go index 80fa2c6e88b..eabac4dfba1 100644 --- a/consent/types.go +++ b/flow/consent_types.go @@ -1,30 +1,26 @@ // Copyright © 2022 Ory Corp // SPDX-License-Identifier: Apache-2.0 -package consent +package flow import ( "database/sql" "database/sql/driver" "encoding/json" - "fmt" "net/http" - "time" - "github.com/gobuffalo/pop/v6" "github.com/gofrs/uuid" - "github.com/ory/x/errorsx" - - 
"github.com/ory/fosite" - "github.com/ory/hydra/client" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/pop/v6" "github.com/ory/x/sqlcon" "github.com/ory/x/sqlxx" ) const ( - consentRequestDeniedErrorName = "consent request denied" - loginRequestDeniedErrorName = "login request denied" + ConsentRequestDeniedErrorName = "consent request denied" + LoginRequestDeniedErrorName = "login request denied" ) // OAuth 2.0 Redirect Browser To @@ -42,14 +38,16 @@ type OAuth2RedirectTo struct { // swagger:ignore type LoginSession struct { - ID string `db:"id"` - NID uuid.UUID `db:"nid"` - AuthenticatedAt sqlxx.NullTime `db:"authenticated_at"` - Subject string `db:"subject"` - Remember bool `db:"remember"` + ID string `db:"id"` + NID uuid.UUID `db:"nid"` + AuthenticatedAt sqlxx.NullTime `db:"authenticated_at"` + Subject string `db:"subject"` + IdentityProviderSessionID sqlxx.NullString `db:"identity_provider_session_id"` + Remember bool `db:"remember"` + ExpiresAt sqlxx.NullTime `db:"expires_at"` } -func (_ LoginSession) TableName() string { +func (LoginSession) TableName() string { return "hydra_oauth2_authentication_session" } @@ -77,11 +75,12 @@ type RequestDeniedError struct { // to the public but only in the server logs. Debug string `json:"error_debug"` - valid bool + // swagger:ignore + Valid bool `json:"valid"` } func (e *RequestDeniedError) IsError() bool { - return e != nil && e.valid + return e != nil && e.Valid } func (e *RequestDeniedError) SetDefaults(name string) { @@ -94,7 +93,7 @@ func (e *RequestDeniedError) SetDefaults(name string) { } } -func (e *RequestDeniedError) toRFCError() *fosite.RFC6749Error { +func (e *RequestDeniedError) ToRFCError() *fosite.RFC6749Error { if e.Name == "" { e.Name = "request_denied" } @@ -112,40 +111,10 @@ func (e *RequestDeniedError) toRFCError() *fosite.RFC6749Error { } } -func (e *RequestDeniedError) Scan(value interface{}) error { - v := fmt.Sprintf("%s", value) - if len(v) == 0 || v == "{}" { - return nil - } - - if err := json.Unmarshal([]byte(v), e); err != nil { - return errorsx.WithStack(err) - } - - e.valid = true - return nil -} - -func (e *RequestDeniedError) Value() (driver.Value, error) { - if !e.IsError() { - return "{}", nil - } - - value, err := json.Marshal(e) - if err != nil { - return nil, errorsx.WithStack(err) - } - - return string(value), nil -} - // The request payload used to accept a consent request. // // swagger:model acceptOAuth2ConsentRequest type AcceptOAuth2ConsentRequest struct { - // ID instead of Challenge because of pop - ID string `json:"-"` - // GrantScope sets the scope the user authorized the client to use. Should be a subset of `requested_scope`. GrantedScope sqlxx.StringSliceJSONFormat `json:"grant_scope"` @@ -163,32 +132,35 @@ type AcceptOAuth2ConsentRequest struct { // authorization will be remembered indefinitely. RememberFor int `json:"remember_for"` - // HandledAt contains the timestamp the consent request was handled. - HandledAt sqlxx.NullTime `json:"handled_at"` + // Context is an optional object which can hold arbitrary data. The data will be made available when fetching the + // consent request under the "context" field. This is useful in scenarios where login and consent endpoints share + // data. + Context sqlxx.JSONRawMessage `json:"context"` +} - // If set to true means that the request was already handled. This - // can happen on form double-submit or other errors. 
If this is set - // we recommend redirecting the user to `request_url` to re-initiate - // the flow. - WasHandled bool `json:"-"` +func (r *AcceptOAuth2ConsentRequest) MarshalJSON() ([]byte, error) { + type Alias AcceptOAuth2ConsentRequest + alias := Alias(*r) + + if alias.Context == nil { + alias.Context = []byte("{}") + } - ConsentRequest *OAuth2ConsentRequest `json:"-"` - Error *RequestDeniedError `json:"-"` - RequestedAt time.Time `json:"-"` - AuthenticatedAt sqlxx.NullTime `json:"-"` + if alias.GrantedScope == nil { + alias.GrantedScope = []string{} + } - SessionIDToken sqlxx.MapStringInterface `json:"-" faker:"-"` - SessionAccessToken sqlxx.MapStringInterface `json:"-" faker:"-"` -} + if alias.GrantedAudience == nil { + alias.GrantedAudience = []string{} + } -func (r *AcceptOAuth2ConsentRequest) HasError() bool { - return r.Error.IsError() + return json.Marshal(alias) } // List of OAuth 2.0 Consent Sessions // // swagger:model oAuth2ConsentSessions -type oAuth2ConsentSessions []OAuth2ConsentSession +type _ []OAuth2ConsentSession // OAuth 2.0 Consent Session // @@ -196,68 +168,77 @@ type oAuth2ConsentSessions []OAuth2ConsentSession // // swagger:model oAuth2ConsentSession type OAuth2ConsentSession struct { - ID string `json:"-" db:"challenge"` + // ConsentRequestID is the identifier of the consent request that initiated this consent session. + ConsentRequestID string `json:"consent_request_id"` // Scope Granted // // GrantScope sets the scope the user authorized the client to use. Should be a subset of `requested_scope`. - GrantedScope sqlxx.StringSliceJSONFormat `json:"grant_scope" db:"granted_scope"` + GrantedScope sqlxx.StringSliceJSONFormat `json:"grant_scope"` // Audience Granted // // GrantedAudience sets the audience the user authorized the client to use. Should be a subset of `requested_access_token_audience`. - GrantedAudience sqlxx.StringSliceJSONFormat `json:"grant_access_token_audience" db:"granted_at_audience"` + GrantedAudience sqlxx.StringSliceJSONFormat `json:"grant_access_token_audience"` // Session Details // // Session allows you to set (optional) session data for access and ID tokens. - Session *AcceptOAuth2ConsentRequestSession `json:"session" db:"-"` + Session *AcceptOAuth2ConsentRequestSession `json:"session"` // Remember Consent // // Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same // client asks the same user for the same, or a subset of, scope. - Remember bool `json:"remember" db:"remember"` + Remember bool `json:"remember"` // Remember Consent For // // RememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the // authorization will be remembered indefinitely. - RememberFor int `json:"remember_for" db:"remember_for"` + RememberFor int `json:"remember_for"` // Consent Handled At // // HandledAt contains the timestamp the consent request was handled. - HandledAt sqlxx.NullTime `json:"handled_at" db:"handled_at"` + HandledAt sqlxx.NullTime `json:"handled_at"` - // If set to true means that the request was already handled. This - // can happen on form double-submit or other errors. If this is set - // we recommend redirecting the user to `request_url` to re-initiate - // the flow. - WasHandled bool `json:"-" db:"was_used"` + // Context is an optional object which can hold arbitrary data. The data will be made available when fetching the + // consent request under the "context" field. 
This is useful in scenarios where login and consent endpoints share + // data. + Context sqlxx.JSONRawMessage `json:"context"` // Consent Request // // The consent request that lead to this consent session. - ConsentRequest *OAuth2ConsentRequest `json:"consent_request" db:"-"` + ConsentRequest *OAuth2ConsentRequest `json:"consent_request"` +} + +func (r *OAuth2ConsentSession) MarshalJSON() ([]byte, error) { + type Alias OAuth2ConsentSession + alias := Alias(*r) - Error *RequestDeniedError `json:"-" db:"error"` - RequestedAt time.Time `json:"-" db:"requested_at"` - AuthenticatedAt sqlxx.NullTime `json:"-" db:"authenticated_at"` + if alias.Context == nil { + alias.Context = []byte("{}") + } + + if alias.GrantedScope == nil { + alias.GrantedScope = []string{} + } - SessionIDToken sqlxx.MapStringInterface `db:"session_id_token" json:"-"` - SessionAccessToken sqlxx.MapStringInterface `db:"session_access_token" json:"-"` + if alias.GrantedAudience == nil { + alias.GrantedAudience = []string{} + } + + return json.Marshal(alias) } // HandledLoginRequest is the request payload used to accept a login request. // // swagger:model acceptOAuth2LoginRequest type HandledLoginRequest struct { - // ID instead of challenge for pop - ID string `json:"-"` - - // Remember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store - // a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she + // Remember, if set to true, tells Ory Hydra to remember this user by telling the user agent (browser) to store + // a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, they // will not be asked to log in again. Remember bool `json:"remember"` @@ -265,13 +246,22 @@ type HandledLoginRequest struct { // authorization will be remembered for the duration of the browser session (using a session cookie). RememberFor int `json:"remember_for"` + // Extend OAuth2 authentication session lifespan + // + // If set to `true`, the OAuth2 authentication cookie lifespan is extended. This is for example useful if you want the user to be able to use `prompt=none` continuously. + // + // This value can only be set to `true` if the user has an authentication, which is the case if the `skip` value is `true`. + // + // required: false + ExtendSessionLifespan bool `json:"extend_session_lifespan"` + // ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it - // to express that, for example, a user authenticated using two factor authentication. + // to express that, for example, a user authenticated using two-factor authentication. ACR string `json:"acr"` // AMR sets the Authentication Methods References value for this // authentication session. You can use it to specify the method a user used to - // authenticate. For example, if the acr indicates a user used two factor + // authenticate. For example, if the acr indicates a user used two-factor // authentication, the amr can express they used a software-secured key. AMR sqlxx.StringSliceJSONFormat `json:"amr"` @@ -280,6 +270,12 @@ type HandledLoginRequest struct { // required: true Subject string `json:"subject"` + // IdentityProviderSessionID is the session ID of the end-user that authenticated. + // If specified, we will use this value to propagate the logout. 
+ // + // required: false + IdentityProviderSessionID string `json:"identity_provider_session_id,omitempty"` + // ForceSubjectIdentifier forces the "pairwise" user ID of the end-user that authenticated. The "pairwise" user ID refers to the // (Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID // Connect specification. It allows you to set an obfuscated subject ("user") identifier that is unique to the client. @@ -303,21 +299,20 @@ type HandledLoginRequest struct { // consent request under the "context" field. This is useful in scenarios where login and consent endpoints share // data. Context sqlxx.JSONRawMessage `json:"context"` +} - // If set to true means that the request was already handled. This - // can happen on form double-submit or other errors. If this is set - // we recommend redirecting the user to `request_url` to re-initiate - // the flow. - WasHandled bool `json:"-"` +func (r *HandledLoginRequest) MarshalJSON() ([]byte, error) { + type Alias HandledLoginRequest + alias := Alias(*r) + if alias.Context == nil { + alias.Context = []byte("{}") + } - LoginRequest *LoginRequest `json:"-" faker:"-"` - Error *RequestDeniedError `json:"-"` - RequestedAt time.Time `json:"-"` - AuthenticatedAt sqlxx.NullTime `json:"-"` -} + if alias.AMR == nil { + alias.AMR = []string{} + } -func (r *HandledLoginRequest) HasError() bool { - return r.Error.IsError() + return json.Marshal(alias) } // Contains optional information about the OpenID Connect request. @@ -363,25 +358,37 @@ type OAuth2ConsentRequestOpenIDConnectContext struct { LoginHint string `json:"login_hint,omitempty"` } -func (n *OAuth2ConsentRequestOpenIDConnectContext) Scan(value interface{}) error { - v := fmt.Sprintf("%s", value) - if len(v) == 0 { - return nil +func (n *OAuth2ConsentRequestOpenIDConnectContext) MarshalJSON() ([]byte, error) { + type Alias OAuth2ConsentRequestOpenIDConnectContext + alias := Alias(*n) + if alias.IDTokenHintClaims == nil { + alias.IDTokenHintClaims = map[string]interface{}{} + } + + if alias.ACRValues == nil { + alias.ACRValues = []string{} + } + + if alias.UILocales == nil { + alias.UILocales = []string{} } - return errorsx.WithStack(json.Unmarshal([]byte(v), n)) + + return json.Marshal(alias) +} + +func (n *OAuth2ConsentRequestOpenIDConnectContext) Scan(value interface{}) error { + return sqlxx.JSONScan(n, value) } func (n *OAuth2ConsentRequestOpenIDConnectContext) Value() (driver.Value, error) { - value, err := json.Marshal(n) - return value, errorsx.WithStack(err) + return json.Marshal(n) } // Contains information about an ongoing logout request. // // swagger:model oAuth2LogoutRequest type LogoutRequest struct { - // Challenge is the identifier ("logout challenge") of the logout authentication request. It is used to - // identify the session. + // Challenge is the identifier of the logout authentication request. 
ID string `json:"challenge" db:"challenge"` NID uuid.UUID `json:"-" db:"nid"` @@ -408,10 +415,12 @@ type LogoutRequest struct { Accepted bool `json:"-" db:"accepted"` Rejected bool `db:"rejected" json:"-"` ClientID sql.NullString `json:"-" db:"client_id"` + ExpiresAt sqlxx.NullTime `json:"expires_at" db:"expires_at"` + RequestedAt sqlxx.NullTime `json:"requested_at" db:"requested_at"` Client *client.Client `json:"client" db:"-"` } -func (_ LogoutRequest) TableName() string { +func (LogoutRequest) TableName() string { return "hydra_oauth2_logout_request" } @@ -441,24 +450,59 @@ type LogoutResult struct { FrontChannelLogoutURLs []string } -// Contains information on an ongoing login request. +// Contains information on an ongoing device grant request. // -// swagger:model oAuth2LoginRequest -type LoginRequest struct { - // ID is the identifier ("login challenge") of the login request. It is used to +// swagger:model DeviceUserAuthRequest +type DeviceUserAuthRequest struct { + // ID is the identifier ("device challenge") of the device grant request. It is used to // identify the session. // // required: true ID string `json:"challenge"` + // Client is the OAuth 2.0 Client that initiated the request. + Client *client.Client `json:"client"` + // RequestURL is the original Device Authorization URL requested. + RequestURL string `json:"request_url"` + // RequestedScope contains the OAuth 2.0 Scope requested by the OAuth 2.0 Client. - // - // required: true RequestedScope sqlxx.StringSliceJSONFormat `json:"requested_scope"` + // RequestedAudience contains the access token audience as requested by the OAuth 2.0 Client. + RequestedAudience sqlxx.StringSliceJSONFormat `json:"requested_access_token_audience"` + HandledAt sqlxx.NullTime `json:"handled_at"` +} + +// HandledDeviceUserAuthRequest is the request payload used to accept a device user_code. +// +// swagger:model verifyUserCodeRequest +type HandledDeviceUserAuthRequest struct { + // RequestURL is the original Device Authorization URL requested. + RequestURL string `json:"request_url"` + // RequestedScope contains the OAuth 2.0 Scope requested by the OAuth 2.0 Client. + RequestedScope sqlxx.StringSliceJSONFormat `json:"requested_scope"` // RequestedAudience contains the access token audience as requested by the OAuth 2.0 Client. + RequestedAudience sqlxx.StringSliceJSONFormat `json:"requested_access_token_audience"` + + DeviceCodeRequestID string `json:"device_code_request_id"` + + // Client is the OAuth 2.0 Client that initiated the request. + Client *client.Client `json:"client"` +} + +// Contains information on an ongoing login request. +// +// swagger:model oAuth2LoginRequest +type LoginRequest struct { + // ID is the identifier of the login request. // // required: true + ID string `json:"challenge"` + + // RequestedScope contains the OAuth 2.0 Scope requested by the OAuth 2.0 Client. + RequestedScope sqlxx.StringSliceJSONFormat `json:"requested_scope"` + + // RequestedAudience contains the access token audience as requested by the OAuth 2.0 Client. RequestedAudience sqlxx.StringSliceJSONFormat `json:"requested_access_token_audience"` // Skip, if true, implies that the client has requested the same scopes from the same user previously. @@ -485,8 +529,6 @@ type LoginRequest struct { // required: true Client *client.Client `json:"client"` - ClientID string `json:"-"` - // RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. 
It is the URL which + // initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but + // might come in handy if you want to deal with additional request parameters. @@ -499,30 +541,40 @@ type LoginRequest struct { // this will be a new random value. This value is used as the "sid" parameter in the ID Token and in OIDC Front-/Back- // channel logout. It's value can generally be used to associate consecutive login requests by a certain user. SessionID sqlxx.NullString `json:"session_id"` +} - // If set to true means that the request was already handled. This - // can happen on form double-submit or other errors. If this is set - // we recommend redirecting the user to `request_url` to re-initiate - // the flow. - WasHandled bool `json:"-"` +func (r *LoginRequest) MarshalJSON() ([]byte, error) { + type Alias LoginRequest + alias := Alias(*r) + if alias.RequestedScope == nil { + alias.RequestedScope = []string{} + } - ForceSubjectIdentifier string `json:"-"` // this is here but has no meaning apart from sql_helper working properly. - Verifier string `json:"-"` - CSRF string `json:"-"` + if alias.RequestedAudience == nil { + alias.RequestedAudience = []string{} + } + + return json.Marshal(alias) +} - AuthenticatedAt sqlxx.NullTime `json:"-"` - RequestedAt time.Time `json:"-"` +// Contains information on a device verification +// +// swagger:model acceptDeviceUserCodeRequest +type AcceptDeviceUserCodeRequest struct { + UserCode string `json:"user_code"` +} // Contains information on an ongoing consent request. // // swagger:model oAuth2ConsentRequest type OAuth2ConsentRequest struct { - // ID is the identifier ("authorization challenge") of the consent authorization request. It is used to - // identify the session. + // Challenge is used to retrieve/accept/deny the consent request. // // required: true - ID string `json:"challenge"` + Challenge string `json:"challenge"` + + // ConsentRequestID is the ID of the consent request. + ConsentRequestID string `json:"consent_request_id"` // RequestedScope contains the OAuth 2.0 Scope requested by the OAuth 2.0 Client. RequestedScope sqlxx.StringSliceJSONFormat `json:"requested_scope"` @@ -544,8 +596,7 @@ type OAuth2ConsentRequest struct { OpenIDConnectContext *OAuth2ConsentRequestOpenIDConnectContext `json:"oidc_context"` // Client is the OAuth 2.0 Client that initiated the request. - Client *client.Client `json:"client"` - ClientID string `json:"-"` + Client *client.Client `json:"client"` // RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. It is the URL which // initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but // might come in handy if you want to deal with additional request parameters. @@ -574,19 +625,24 @@ type OAuth2ConsentRequest struct { // Context contains arbitrary information set by the login endpoint or is empty if not set. Context sqlxx.JSONRawMessage `json:"context,omitempty"` +} - // If set to true means that the request was already handled. This - // can happen on form double-submit or other errors. If this is set - // we recommend redirecting the user to `request_url` to re-initiate - // the flow. - WasHandled bool `json:"-"` - - // ForceSubjectIdentifier is the value from authentication (if set). 
- ForceSubjectIdentifier string `json:"-"` - Verifier string `json:"-"` - CSRF string `json:"-"` - AuthenticatedAt sqlxx.NullTime `json:"-"` - RequestedAt time.Time `json:"-"` +func (r *OAuth2ConsentRequest) MarshalJSON() ([]byte, error) { + type Alias OAuth2ConsentRequest + alias := Alias(*r) + if alias.RequestedScope == nil { + alias.RequestedScope = []string{} + } + + if alias.RequestedAudience == nil { + alias.RequestedAudience = []string{} + } + + if alias.AMR == nil { + alias.AMR = []string{} + } + + return json.Marshal(alias) } // Pass session data to a consent request. @@ -604,10 +660,15 @@ type AcceptOAuth2ConsentRequestSession struct { IDToken map[string]interface{} `json:"id_token"` } -// NewConsentRequestSessionData creates a new AcceptOAuth2ConsentRequestSession. -func NewConsentRequestSessionData() *AcceptOAuth2ConsentRequestSession { - return &AcceptOAuth2ConsentRequestSession{ - AccessToken: map[string]interface{}{}, - IDToken: map[string]interface{}{}, +func (r *AcceptOAuth2ConsentRequestSession) MarshalJSON() ([]byte, error) { + type Alias AcceptOAuth2ConsentRequestSession + alias := Alias(*r) + if alias.AccessToken == nil { + alias.AccessToken = map[string]interface{}{} + } + + if alias.IDToken == nil { + alias.IDToken = map[string]interface{}{} } + return json.Marshal(alias) } diff --git a/flow/consent_types_test.go b/flow/consent_types_test.go new file mode 100644 index 00000000000..8588e5ab769 --- /dev/null +++ b/flow/consent_types_test.go @@ -0,0 +1,94 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow + +import ( + "fmt" + "testing" + + "github.com/ory/x/snapshotx" + + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" +) + +func TestToRFCError(t *testing.T) { + for k, tc := range []struct { + input *RequestDeniedError + expect *fosite.RFC6749Error + }{ + { + input: &RequestDeniedError{ + Name: "not empty", + Valid: true, + }, + expect: &fosite.RFC6749Error{ + ErrorField: "not empty", + DescriptionField: "", + CodeField: fosite.ErrInvalidRequest.CodeField, + DebugField: "", + }, + }, + { + input: &RequestDeniedError{ + Name: "", + Description: "not empty", + Valid: true, + }, + expect: &fosite.RFC6749Error{ + ErrorField: "request_denied", + DescriptionField: "not empty", + CodeField: fosite.ErrInvalidRequest.CodeField, + DebugField: "", + }, + }, + { + input: &RequestDeniedError{Valid: true}, + expect: &fosite.RFC6749Error{ + ErrorField: "request_denied", + DescriptionField: "", + HintField: "", + CodeField: fosite.ErrInvalidRequest.CodeField, + DebugField: "", + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + require.EqualValues(t, tc.input.ToRFCError(), tc.expect) + }) + } +} + +func TestAcceptOAuth2ConsentRequest_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(AcceptOAuth2ConsentRequest)) +} + +func TestOAuth2ConsentSession_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(OAuth2ConsentSession)) +} + +func TestHandledLoginRequest_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(HandledLoginRequest)) +} + +func TestOAuth2ConsentRequestOpenIDConnectContext_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(OAuth2ConsentRequestOpenIDConnectContext)) +} + +func TestLogoutRequest_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(LogoutRequest)) +} + +func TestLoginRequest_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(LoginRequest)) +} + +func TestOAuth2ConsentRequest_MarshalJSON(t *testing.T) { 
+ snapshotx.SnapshotT(t, new(OAuth2ConsentRequest)) +} + +func TestAcceptOAuth2ConsentRequestSession_MarshalJSON(t *testing.T) { + snapshotx.SnapshotT(t, new(AcceptOAuth2ConsentRequestSession)) +} diff --git a/flow/encoding.go b/flow/encoding.go new file mode 100644 index 00000000000..52ccbe9bff3 --- /dev/null +++ b/flow/encoding.go @@ -0,0 +1,118 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow + +import ( + "bytes" + "compress/gzip" + "context" + "encoding/json" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/aead" +) + +type ( + data struct { + Purpose purpose `json:"p,omitempty"` + } + purpose int + CodecOption func(ad *data) +) + +const ( + loginChallenge purpose = iota + loginVerifier + deviceChallenge + deviceVerifier + consentChallenge + consentVerifier +) + +func (p purpose) RequestType() string { + switch p { + case loginChallenge, loginVerifier: + return "login" + case deviceChallenge, deviceVerifier: + return "device" + case consentChallenge, consentVerifier: + return "consent" + default: + return "unknown" + } +} + +func withPurpose(purpose purpose) CodecOption { return func(ad *data) { ad.Purpose = purpose } } + +var ( + AsLoginChallenge = withPurpose(loginChallenge) + AsLoginVerifier = withPurpose(loginVerifier) + AsDeviceChallenge = withPurpose(deviceChallenge) + AsDeviceVerifier = withPurpose(deviceVerifier) + AsConsentChallenge = withPurpose(consentChallenge) + AsConsentVerifier = withPurpose(consentVerifier) +) + +func additionalDataFromOpts(opts ...CodecOption) []byte { + if len(opts) == 0 { + return nil + } + ad := &data{} + for _, o := range opts { + o(ad) + } + b, err := json.Marshal(ad) + if err != nil { + // Panic is OK here because the struct and the parameters are all known. + panic("failed to marshal additional data: " + errors.WithStack(err).Error()) + } + + return b +} + +// Decode decodes the given string to a value. +func Decode[T any](ctx context.Context, cipher aead.Cipher, encoded string, opts ...CodecOption) (*T, error) { + plaintext, err := cipher.Decrypt(ctx, encoded, additionalDataFromOpts(opts...)) + if err != nil { + return nil, err + } + + rawBytes, err := gzip.NewReader(bytes.NewReader(plaintext)) + if err != nil { + return nil, err + } + defer func() { _ = rawBytes.Close() }() + + var val T + if err = json.NewDecoder(rawBytes).Decode(&val); err != nil { + return nil, err + } + + return &val, nil +} + +// Encode encodes the given value to a string. +func Encode(ctx context.Context, cipher aead.Cipher, val any, opts ...CodecOption) (s string, err error) { + // Steps: + // 1. Encode to JSON + // 2. GZIP + // 3. 
Encrypt with AEAD (XChaCha20-Poly1305) + Base64 URL-encode + var b bytes.Buffer + + gz, err := gzip.NewWriterLevel(&b, gzip.BestCompression) + if err != nil { + return "", err + } + + if err = json.NewEncoder(gz).Encode(val); err != nil { + return "", err + } + + if err = gz.Close(); err != nil { + return "", err + } + + return cipher.Encrypt(ctx, b.Bytes(), additionalDataFromOpts(opts...)) +} diff --git a/flow/encoding_test.go b/flow/encoding_test.go new file mode 100644 index 00000000000..f73130a78d2 --- /dev/null +++ b/flow/encoding_test.go @@ -0,0 +1,183 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow_test + +import ( + "context" + "encoding/json" + "testing" + "time" + + "github.com/gofrs/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/flow" + + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/client" + "github.com/ory/x/pointerx" + "github.com/ory/x/snapshotx" + "github.com/ory/x/sqlxx" +) + +func TestEncoding(t *testing.T) { + f := flow.Flow{ + ID: "test-flow-id", + NID: uuid.FromStringOrNil("735c9c15-3d07-4501-9800-4e5e0599e57b"), + RequestedScope: []string{"scope1", "scope2"}, + RequestedAudience: []string{"https://api.example.org/v1", "https://api.example.org/v2"}, + LoginSkip: false, + Subject: "some-subject@some-idp-somewhere.com", + OpenIDConnectContext: &flow.OAuth2ConsentRequestOpenIDConnectContext{ + ACRValues: []string{"acr1", "acr2"}, + UILocales: []string{"en-US", "en-GB"}, + Display: "page", + IDTokenHintClaims: map[string]interface{}{"claim1": "value1", "claim2": "value2"}, + LoginHint: "some-login-hint", + }, + Client: &client.Client{ + ID: "test-client-id", + NID: uuid.FromStringOrNil("735c9c15-3d07-4501-9800-4e5e0599e57b"), + Name: "some-client-name", + Secret: "some-supersafe-secret", + RedirectURIs: []string{ + "https://redirect1.example.org/callback", + "https://redirect2.example.org/callback", + }, + GrantTypes: []string{"authorization_code", "refresh_token"}, + ResponseTypes: []string{"code"}, + Scope: "scope1 scope2", + Audience: sqlxx.StringSliceJSONFormat{"https://api.example.org/v1 https://api.example.org/v2"}, + Owner: "some-owner", + TermsOfServiceURI: "https://tos.example.org", + PolicyURI: "https://policy.example.org", + ClientURI: "https://client.example.org", + LogoURI: "https://logo.example.org", + Contacts: []string{"contact1", "contact2"}, + SubjectType: "public", + JSONWebKeysURI: "https://jwks.example.org", + JSONWebKeys: nil, // TODO? 
+ TokenEndpointAuthMethod: "client_secret_basic", + CreatedAt: time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), + AllowedCORSOrigins: []string{"https://cors1.example.org", "https://cors2.example.org"}, + Metadata: sqlxx.JSONRawMessage(`{"client-metadata-key1": "val1"}`), + AccessTokenStrategy: "jwt", + SkipConsent: true, + }, + RequestURL: "https://auth.hydra.local/oauth2/auth?client_id=some-client-id&response_type=code&scope=scope1+scope2&redirect_uri=https%3A%2F%2Fredirect1.example.org%2Fcallback&state=some-state&nonce=some-nonce", + SessionID: sqlxx.NullString("some-session-id"), + LoginCSRF: "test-login-csrf", + RequestedAt: time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), + State: 1, + LoginRemember: true, + LoginRememberFor: 3600, + Context: sqlxx.JSONRawMessage(`{"context-key1": "val1"}`), + GrantedScope: []string{"scope1", "scope2"}, + GrantedAudience: []string{"https://api.example.org/v1", "https://api.example.org/v2"}, + ConsentRemember: true, + ConsentRememberFor: pointerx.Int(3600), + ConsentHandledAt: sqlxx.NullTime(time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC)), + SessionIDToken: sqlxx.MapStringInterface{ + "session-id-token-key1": "val1", + "session-id-token-key2": "val2", + "session-id-token-key3": "val3", + "session-id-token-key4": "val4", + "session-id-token-key5": "val5", + }, + SessionAccessToken: sqlxx.MapStringInterface{ + "session-access-token-key1": "val1", + "session-access-token-key2": "val2", + "session-access-token-key3": "val3", + "session-access-token-key4": "val4", + "session-access-token-key5": "val5", + }, + } + + ctx := context.Background() + cp := new(cipherProvider) + + t.Run("encode and decode with snapshots", func(t *testing.T) { + testCases := []struct { + name string + purpose flow.CodecOption + }{ + {"login challenge", flow.AsLoginChallenge}, + {"login verifier", flow.AsLoginVerifier}, + {"consent challenge", flow.AsConsentChallenge}, + {"consent verifier", flow.AsConsentVerifier}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + encoded, err := flow.Encode(ctx, cp.FlowCipher(), f, tc.purpose) + require.NoError(t, err) + + decoded, err := flow.Decode[flow.Flow](ctx, cp.FlowCipher(), encoded, tc.purpose) + require.NoError(t, err) + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + }) + } + }) + + t.Run("purpose validation", func(t *testing.T) { + testCases := []struct { + name string + encodePurpose flow.CodecOption + decodePurpose flow.CodecOption + }{ + {"login challenge decoded as login verifier", flow.AsLoginChallenge, flow.AsLoginVerifier}, + {"login verifier decoded as login challenge", flow.AsLoginVerifier, flow.AsLoginChallenge}, + {"consent challenge decoded as consent verifier", flow.AsConsentChallenge, flow.AsConsentVerifier}, + {"consent verifier decoded as consent challenge", flow.AsConsentVerifier, flow.AsConsentChallenge}, + {"login challenge decoded as consent challenge", flow.AsLoginChallenge, flow.AsConsentChallenge}, + {"consent challenge decoded as login challenge", flow.AsConsentChallenge, flow.AsLoginChallenge}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + encoded, err := flow.Encode(ctx, cp.FlowCipher(), f, tc.encodePurpose) + require.NoError(t, err) + + _, err = flow.Decode[flow.Flow](ctx, cp.FlowCipher(), encoded, tc.decodePurpose) + assert.Error(t, err, "decoding with wrong purpose should fail") + }) + } + }) + + t.Run("with client", func(t *testing.T) { + j, err := json.Marshal(f) + 
require.NoError(t, err) + t.Logf("Length (JSON): %d", len(j)) + consentVerifier, err := flow.Encode(ctx, cp.FlowCipher(), f, flow.AsConsentVerifier) + require.NoError(t, err) + t.Logf("Length (JSON+GZIP+AEAD): %d", len(consentVerifier)) + }) + + t.Run("without client", func(t *testing.T) { + f := f + f.Client = nil + j, err := json.Marshal(f) + require.NoError(t, err) + t.Logf("Length (JSON): %d", len(j)) + consentVerifier, err := f.ToConsentVerifier(ctx, cp) + require.NoError(t, err) + t.Logf("Length (JSON+GZIP+AEAD): %d", len(consentVerifier)) + }) +} + +type cipherProvider struct{} + +func (c *cipherProvider) FlowCipher() *aead.XChaCha20Poly1305 { + return aead.NewXChaCha20Poly1305(c) +} + +func (c *cipherProvider) GetGlobalSecret(context.Context) ([]byte, error) { + return []byte("supersecret123456789123456789012"), nil +} + +func (c *cipherProvider) GetRotatedGlobalSecrets(ctx context.Context) ([][]byte, error) { + return nil, nil +} diff --git a/flow/error.go b/flow/error.go new file mode 100644 index 00000000000..644f03268ec --- /dev/null +++ b/flow/error.go @@ -0,0 +1,8 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow + +import "github.com/ory/hydra/v2/fosite" + +var ErrorLogoutFlowExpired = fosite.ErrRequestUnauthorized.WithHint("The logout request has expired, please try the flow again.") diff --git a/flow/fixtures/legacy_challenges/consent_error.txt b/flow/fixtures/legacy_challenges/consent_error.txt new file mode 100644 index 00000000000..defc910c370 --- /dev/null +++ b/flow/fixtures/legacy_challenges/consent_error.txt @@ -0,0 +1 @@ +B_atsO-XqZJYYADpWAdHaDGCuTlJNXmidk_nPlM4dsRCHmUpipxElBmyangAENB21FEH09AnrAsTmdmM0mYVsu-RQV_X4xW1d2GpPH43amSdi_bM7VTQCcOnqMihv3wGrvVFOil_e-ZydsJPbY0wU7BmDjDZpL7geLBTn8yzMQmp1VNFd9G6vkZkpJINs_6Uq8aXz8msAMOTHb8BAi7u-5-BFLVXxqoOduNpBwNLK38ziqJK3X-uZbP3A4g4xTpk2_gtICKYtWw88WN5CsQFN2ZFxh5F4lI3kVtEU0JFqw_KgBy2a47WTfP0xiIAbbkj36DyZD3aMw67uX4ecq1MnxqefCP4znAE1Yl_uq427rqZIhD5KP2eky5sU1j-lC3UAVhz15F8nIHg06oLDqhcXzAB2ZmnhCq_3WmCh4Km1YJ26SrM9-UojRyCuO33oujiSJUNPJ6mnpj6dQLcfZO-14fuckjvJraNyam-BtAo_9EwBLW-Sy2_af-tO41uJZ1Xx-IBNV5M_KSWpCjGFe6xz6FGL0grkYOuFJHLiSefzFrKwdwnITlxcXrB84rb8klmRP_fv-zOnnWcIMS9RN5XpaaA7Yf2VHYn6z0ONrnw3icu_t6KyVZlWo5cTcWae4SNIHKwf1FxQ_zbvGQBpRyCl_2W4gjXvCsM7iub2-Nc87z07AXi8nJ59IJh6BXBZ7eUEAPDr0k9xDmgS7Ypa0G7D4bdT99_2q8FNEkJF9xAsjsBXLxy7Bh19XwDJhUpjAZki6OHrUbTUCeEoydehkbAc4f87_jHRuL_GWwl_Al7DUxUGzaDI4YG4KjV_c-2Ra-uo1geGmrhB_v0Jum_HPmEFSLBmk_GNTDrkDNZieWzpp-AD0NoM-_Fh8bJ7laz2aE2qgAq7WExXmAlbdTX37XieqhyqkOzCs66izg5sk_r3CW6B6R5SHKpaGbeMJJoqHwQOmlL-WiO5mTq0UuLZA_z7e68Locw4NVv5dsqGnTWyKCb0KSJXkkYEYB10GitMhVgU-LK42hmlryfo59igIcQltUxQwEyisXtqHt0K2K7IGv2VGiaE-ucUNHMJXHpkd_CLxOiL0LKCaM5H1CIgGEQ0dXqFokCSJpgmiuI5EHD-G6QU7bmMrPl \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/consent_initialized.txt b/flow/fixtures/legacy_challenges/consent_initialized.txt new file mode 100644 index 00000000000..b26322e58a5 --- /dev/null +++ b/flow/fixtures/legacy_challenges/consent_initialized.txt @@ -0,0 +1 @@ 
+ckW2wLJ_gr8WZBEh4X45YRpikPpzA4ITt2O1RL39935ZRFRaQncGWfAheJPQU6mEp3uDftwW_Qiy2b3ROTtfWwRbMvUY36VhJ0ycdTcRyvpz8c8xkjuGLtu5_nh3NFcUUuN8BHOhBwa5iJFzGnQxS1VRGsMk1_D5IBwZWWnvgYOWjqf2taL4wsULprILAmvbQdhgCOIW299gS7Y3k7D_i2ZqPdDJieq4FK-rqNMMgJZbKWjxgzQPvc-MxfOrGnv2xDyQRHJOuILjPs1CQADFaoT2osfNmAhHHekOZO6ZJMNcwC3pSQjhYfvs_DDKldneMt8iz3DOKnAj9VgN1Up6Je-5v_pxZCf-YCONSzAwj7P6xKrj4C9SPXcDdyb7qbzpdH6cu5r6tr3b-jhoLyQ1Vr2sTyCQ08qYG9SU6xM8YDi95YKoQP_8_3HCaNI-j4H6ZibY3MXsfc0wLKw1QaN_qVZ9I99V3YRvEKK-4ri1iyWstYEDJhgQTEfRmgceZJg1rWdhQNIUdl9Ja9DhiDAqxyunWq3HqzpdQiAO0qv8eenk5iC8DNOOgvT6ADwM9-GdRsGFIm-vxZ4x7JpV_TwhaGtWziZX_y_zwBMmqTaoK2D0AuhL0Dbul5m8dtT4Of8tdcJkokW5hyesq7sh3qVAqbwpAqu09Mekuu8-u6qFWzmzeSMwcYKSvSKMbBh62hR6QtYS-XAox4aeM8nGHRahq9J476uxpHL0EggOnq-HZHU2u_Zj7xkYwp7tX4wmmA5fcJXL9oQHVS0O8Vt0GN57aIPXKoZUDbkJlnfZlJghCWg49PKEGCFp0hJp175pGSEKggw7sRlNV9pSh7Nfr37HUsiw2suJn7aW6mXLwSrk5tjgRb6MMpj4kl78yRXPylbHtY4A160wtSqhwBDXM2fKQkiK5UBgQFqh-Tr7KPdzLCITc2PlMwU4lkVF9NARCuX86rh3cooMRxZqaLbnET3lSDqS7iQRu4tFezzeyF1RBKmWnt-L5O7eXPs85bEFyaN5yr-izKjuFJHk_ZwbzLY4XcJvRMPIXOHddrAtZQslQlm_WD9EWXcc3gFNnUvllldX1ZB-f0jcfYMswCuHhPMG5LjTR0NY6IfCloNPp5GjCBSkIW5Tcg== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/consent_unused.txt b/flow/fixtures/legacy_challenges/consent_unused.txt new file mode 100644 index 00000000000..ca1153ccda1 --- /dev/null +++ b/flow/fixtures/legacy_challenges/consent_unused.txt @@ -0,0 +1 @@ +loEDZz1uvTOiiRmRsWWtVD-owUJp6B58P_YD4Jz7o9ZXHyhypRAPEaOf7Pe4_87qUEpfmvfNREjXfanuxClb5ss6ejrC3omi4GBkN11YLYAMMPnJgokKN-L7SfQiB_JNFN__hwh8Se-Ef_9rz7U6olVzN-4ZzoJXAJy_CG2Nn6c0Nwo3i_h5gqJWV0wDTVuG0zxu_E5IUbrXieY5i3XGQIflKJq2xapX-DJrfEQgS_Od9cmhdwTfN4gdcop0MurBK8O6rRWM1akY3_v0Z84vTlz4wXs5CWmB_OLcjsUPXuJEXkqPv5J9zVXyq-tsx9YWrQN_EStEHpYzSHR66ZFpmikEdII5hY80e-pnZWnHVh3VcTPoEOsAcY45aZ4qX3aq9NayK6U6vkw-gIYRdL4HwYSt5dQVoAKzfpBjML1Vs5-To7107omiP0jip02SXJIn7sVVDTiRVlTGKBVJ9olmBM8BLtOuIx6XIqMhrmFKxciD2Hk2xQgSw4yvVbBHnPNnYzZ3pa0ToWZ3USC7zeWlwoH4q_cVgnUL4bNpme2068h9igKNkTNmZy8W8qcE1_MOvFUMPArFiQ8YiP1xPlDJjJd4GuzNnmsSsENXDasInNebb7zGfJ9unYfIkcFccHHiZTIAiVhnmnWKE9Yp7F9b1rvakeW0XfdsJ8d-A8urKvnBGW8ADbja2oY10LenJNvxQ0KiUibjGcjvBPoBzf41nAy0frRN9_qW0H1eyZo0DfzIJ9CPNTWX0CisgUvogc3Hr-fFE_oRc_se3E3S4zy4KEexhpHgnJ_lvoIa0Ni8-Oo_x2yRRa7HyEMp4nLX571bT4YAffuUBFCFV3lXgn4Fb15rG4glQ81AnLtKLsh4DMqX12y3Icdkmb_KqPjy3Btx-UKVaOaoD9tcE0SD9XHCJGwgCxrJBTsD4D5uceNhehmBemdXEC1R_I03DOV3P1ixg-ddqzrLzSAi-MeeFXFDfHiypLVnEg3_RUoMDR1b4RLvQoG44ARWEGfrWcTgFaSOX--J6JyudWRUEJI-pJRwf-kt11w6JCVvf2U_iCizk2wrDYnfiqsyDSWfnMncRBytFmCAhdmvnu2dza4ZM_AzrFR_EtYJ2x9blBQ5VM9BeuqeDX2UbQ== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/consent_used.txt b/flow/fixtures/legacy_challenges/consent_used.txt new file mode 100644 index 00000000000..5c47f26ce72 --- /dev/null +++ b/flow/fixtures/legacy_challenges/consent_used.txt @@ -0,0 +1 @@ 
+G7FG3f-oN9gszoRodRrGYo3N7znuyJMI33MBE8kRRf-XdtphXtN-jorahAP1_a_J_dB1p2B2bSm6k2OhA5A4GNrgW5L3FzajZV717kMqlb0F0P7nJ2RocbJfDUP5ZU7pJ5Unvp90aNW84QkvNARKybUFHO3JlHZx7I3AwS7H8M3LJ-MPyYZPTW4h8Cdmcsx0Bv0tplTzcJVHbYT6LevdbMEKJVnUNiV2Vjt63Df8Mcw5Y058sI5NRoZ-s7eJOlh1k77ivscDJsVBspamYCTwmza__ZoognnxNkzu4tWff48ZC8AJ2Sas8YJO0eqoUPPfjGf1hE53VEEA848MA39vAfUcuoZiOxO3S3ytDcEQ6y-IJji-88Kutnh52DuDsMyq8DC-on3x_lXIb6gSGQR7TrBshzQeTJgB-0YCY2SseiDybc8srz82nL-RA7SjS_fZZndENHs2sLER91fI3fmP-enQPzs40dViOD8tylZBQx7x1tehrPBJSc5JFLBNxbI4OzKwtXfPx8_qlWk9I5d0w50vnKDKSEHUdw-9tLR7bASI3yLt1rATyrJD-dtJjGe9jc7ERS0q-DA5DF-EDmsTINVnpbpXzKl6_L0HQsEylzoRvFO3gNTb4Jsj6d-EoxRamSF46pA5htvQRd_-7NyFjBXbgTJaQtmjarOAQQMGhz-kCsnBNP-rDaxc4WQv_rUYYDV_HQJbiMaL145Mrl25fqug2u2-kxMmltRhIoBNzipq1GzZ1f_13RIm9qulFmlxUuE1kvPKb0QRJ1CokqAFpXT1d-AKA4pyVoSZGUKOtKJppg_YEkY3YeTBTrsJgU5l__KM1pgrgUOSPao65Yk6r2t31gAUu1WqQ01Hh320bFWpaGJ0NNuOGsOp5-l-JnNdaYzMGzdeV_1FQQBdodlPbY8oLK9gVQwvRAAMN2Fz1g-liNj60dV5Ub5x2AQT7TfgWDJNf1be1K1MGebUNtzrofKC2bqdK5WSvfJ6oCrx9BDtHOcAwBGMGWPf0CxD0iibBTPNCVpKPqWcj8wVOjkj9LW4mVQmNuAxYcf_Qm1Jwaq6YiuaKzP2sjkX951O_kvrJhYDHOIfOd4adb1W42223KVYZxBNAoU1BZP6SarbG7MPNpvjonIx8Q== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/device_initialized.txt b/flow/fixtures/legacy_challenges/device_initialized.txt new file mode 100644 index 00000000000..ae993720c99 --- /dev/null +++ b/flow/fixtures/legacy_challenges/device_initialized.txt @@ -0,0 +1 @@ +gS3DRXNjShk75GF_UJdViyhb66ekwgTPHsELgWy361dWUmnwso5EujkvRZnlq0VzC8Liz0dk6cqpfxzM_wOVcx2pgvmBBppygzYzwIi82VB9ThjCgpaXthAdyvAgap35Dh6izI8UxWEVsVeRbevPPd7NKCB7GwODWEW_3jgrVay86TJnZ6nhkOA42Iw9Qwzt_JUeIh9Qwvd30fFNPuIno4h4P5vzDwd9RwT7EYUjpva-iGNFeUy_s-w4PlPMtcYmvqb8CQlpOveXXvAYl_NsyR9cRlfnbqKnRwoLSw4-vdzB9Txdsp4hagb7W7tstEjzmNXf5-vKkIIWSY8hzKyMHup_M_UYEHueLQBjob-LqH9kj-O73pErOwuKalsXxEz6W0Qc1Ude5hJUbTdG5Y_3-H3eOFyfPd-v2dHpBIFvE6LCxqllRYrohK8T4VcZa3ezCNpDPU1nTGUYfvBbksW1_tb06S0Z1rDOKNMAoM3hgtaxOntZJg6k_c6P-In-h8oYyyS8sgXoDsuDj9OhbEzNB27ZQdZZN4iQJSFUbAieqzRIxhBDvCQd5kKeQIHlEL_1BuErKpqb-lW18AkUwH9_npRJrq8eagpB1pwASAr-F_lW2vFRpkBJ2PPvC_We-4TLmpAqS4H3vzZbTMK9XTIFKHCXNCHeO-hlkipeNubUYsQ3Aqo2jKFkdinE-Kdg3U8GwaE2lgIEAyVqI2tOKkgoTYQ92dtZ1Gg0tpfusNNUsiQsNRi4xI7T9CD4mBnavIke9jIIvrDy8F-DOjoMSU6buiFYUzmdcYr4x7DParoMkuiI68kzpCc3slcbpjx4WGPuUr-kJr-c-fN4LPKl06W2n0LITkKxS96vajdyksgB11hck-izUj-qYiS4N_M0ot9Mc42rv5l0ScCS7SMriloyCLyHDP6nd-GdQKHbRwE0TY9kqldfozQbXdyDpN9-5Ez0Mfrkx0r4RFOgYAN_r-WweyTSO2NZPvurVr1w-E3Mg5Eo6dBMM8BdW_gY848Ju__0RD9YokDDshbhpcAZ2ZSt7Rzx_O4gKt03uZxB1WxjQyreKGSqcYXgb1sFtxraVygN40NsSoKyxWpkxeMJ7hH0W4nfEH_p-xfEa1S0vvFMY1swo_PdDw== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/device_unused.txt b/flow/fixtures/legacy_challenges/device_unused.txt new file mode 100644 index 00000000000..547f87c6b74 --- /dev/null +++ b/flow/fixtures/legacy_challenges/device_unused.txt @@ -0,0 +1 @@ 
+PX7NyRjrlNLxs4ZqEjxaPJJyP68HzQcBZF5Te8BXwdJ00Y4XhaLq1UAOmO3cYiT4RrafRUX84FU26juBlXZOM-GwLhfwbF22gDbXWB8goyrwbO1ltTX-jwveeC57lPsZ086iU8K5zBwFkdRqHXmM3PBX2dlN-SuSN1OWK1yzOJG8t1v3hnnFRFtV5YiHOuYmt-t-dbx4VF59Tz4Gc1gGc5S1mbqIlawb0SBBveHmonKfbsIHultr-6nizka8o7QuiipmRAvo9pNJaTOIE1krpFcuCbiQGW5rsh1v3W2_aqrWkYjy2xnztU92xb0CGLNKzD_jUEDT85caCPp7-rG21c9eZYb3YXAIhKpSfWl5naZxtxNDVgSW2fnZjH3PoVSONvZ-5m8i64OnPhTbTuxhXRRdSKNTKGw9-F-bm6Vy-dhoRvwd6MQxAdzEbtx5sRqgcI8mODIjyVVPVGDlsoSCDhXJ20lORJBPfXM2Wr8_pSoCmJEDZO2UQFADl2R2Odt9XfXJfq9VJYYXCgPTt71MlBZPOzEsaIr4HIxdRUKgkOBPKB7_u7HKxGT1eZFuxr-cd5VefZokxEjAjyV4wXfeKvAg_E-BScv8X337r3SaAngRBm-1dp0Co6NHQwXmk2eZlPfFqlWKDiEsz16KFH2_Bgozgla0N50AodcqnCtYxjJbg5N1uNr1fmjgbM3mNgV2N7Hu5xNwY3-XfmIq6VHj95B64UYDbwptwHOLlpBqGL5itlQ9u51fdon2DjE2JC6xE3WnO5MAEvWvwue-23fiJNdhWnjN0876FPlR-AnmXoU99KxjTYor2Rz2NZ0xNtPO0rBUTRyTbwM113uvSgNgC2C8bDY9bZWxhZ_r1qkhaDy-shWz89Zp9C1seOSRj0iZ5JBkItWUXAmlgq8koTRyuqXCKlJl5IlSNPD00p1d6iWbHP-JSaRF6mWqEjWuTEaAwPPem0ifvGEWQK29KAMne1aFt5MSkyMf0j4klBqrKdtwGNdztKwlid7FW3UQBz2I7DmG6q7TE3a-qelvp-5cWXrhRnL71F6dXPX3fsPdZfV-KSOTpe6PM7XrbEMCBhYlqPE_eG_3JeR5hM_APEg4WXbKvzA0q3dYbXKvC44K4KYHIvUQXw== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/device_used.txt b/flow/fixtures/legacy_challenges/device_used.txt new file mode 100644 index 00000000000..3ae9c638ab0 --- /dev/null +++ b/flow/fixtures/legacy_challenges/device_used.txt @@ -0,0 +1 @@ +iAcHHa-2TcVArmf0SE2tAIF17axgNAFCxc5tbmPWP6e1DgOlfQkgteK6-eLF3dmKqzraojbBZwYpCgTXg-CNvFnbXp1R5CWbpNTKVBxUrUfZcTtMucKEdhzqBc7ekkBpwqe7cvfVyojV5au38cIOhORXCLmGF7Ahg1GNIMKut1ow5hf3GDv33TXntQAdL2vOeM25N3XICCRm4wEvw7zc67tYQbTxKrzg6WeyqPmFNOt5YC39Xe12DuhnrqF3wbxlyYGGKcJb6TKTfbpnro64_6eJHIX34zAA7uPIlG-Pn2_wAqTu2dBYWfcxnklQyctpY_bsAZ1Aq5fCoQxzlHKv8Mwm5-rs6AZ1P0TQgOuiGyD0gyEbfrfhASDtSmx0dnmf9ANgielUPqRCgA7gYJGYCbNUHfV5xlwjKQ1u19LMOEn0yEnBbPhWa5XgLI2R_g4EGGfHmBBEKCnxgKAK7COV2R3obQHEh6Eim0_xgfLy-FfKRyqB68I1b0727iMSNzMQy5oYrt77hT8urCANaWte42846HOSW3dEPdomjmMO70EuDqCSyffdHpJZWQoqLAmZAEal7pSvYVK8-jiWnDsiNu7I8KHY0mcot-AOHpFGhlNhvwi2b_uKvlZoYTyKBRMDUVJ2zY7c849Q28GKt6Nu_jdhJAFjPuNC6KhxrethlVD2MYG9eFQxj8nRQqd3QV4zsHiy9eG4NWCxQLgFb5kP346jROVaZyS_KltNAFrRXx7v6RGBriNM8BRSpFKIj2rGm0QHWNwfNV4fS4Z3rwIuQvmbXY-nZ_Zrthtf0LAq8dzl3gtThsATB89gVmUFfUTi8Bc7G72iVnoO2rDFxfCmhT8u9Lx4yh79FGLTkFig_76RWbwqbxk_1P84EHj4MdtdKMtOH5eAxFUgc4Jzrh3W2pYmH6F1XZ2sHY1eukYl-hj2DFZmWFlenqwos1cc7Re5vta80FLxqlmPvPTa7SJS6_eOlAArkVPY59NnuidCo608sNkizHvlfN5B4t7pjf3EmgNuXwXjQ_nMBH48Xg5cZrRB03GWkzI6IuIOlOjyhyHdJYbRz6kLTf10GNC4yBoJajJYFTqBc8RPGOCTGvVaIXW86twGIpysnXuksFX3DaetXEM20w== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/login_error.txt b/flow/fixtures/legacy_challenges/login_error.txt new file mode 100644 index 00000000000..6a9e0b55392 --- /dev/null +++ b/flow/fixtures/legacy_challenges/login_error.txt @@ -0,0 +1 @@ 
+CEpZ1AsnTTzD3S8aNvmjpIukLX_0809hLPwNhAduWp9O6Xnhu2i73LBxHymjkuQHafj6EO1A3YWji04W0gSOlMI15X499W3l2yot60Cdq0QEaXgCo5mQNbA7YtU80feGAvITgA98K0-XTckWp0fcUhUIfg8HvBU6zSIRSUjEycFo79UIPVBGsSUscUIseBpeG1_4IUHZDnSFeZu890Ra8hUoEuG1ivR1bRp5FeWkWljR7PMRZTVqf7CtNQuhIFlPt8E9IeuvyQnHhnIYu17MSGDe-gYo70rNa0o-TxxwjQz3nFKnRscZ5iVY7VuGM93eGozqEqAU8XiE1_rEKAfni9vJyT_l-YjNoZTDlNr9VzNrZjr6qACEU55Is_r8tzbBefJgw8KsnZ8yniJLnr12DEmgJ1wCbzXyfncj4oBEOL8h_r1fg3hS_nkiwL9_XMbUur_jHejQivRWAujUxDnSPnBzI_MesF3qrjzxOE9I7xzzLLTfoEk-8L0RbGa-P9HOnXbROBFUWu_h7fWyOZL04t0M41ncPrQdvTr2zWxBt6TP_v4WZwbdDluj_xrZwHtBmCOHth6pbBgBA3N-T3fJSY3_z69iqyQ4yWdndl2udU5AUsGm2quoO9VKKIKbz9kwQW3nhknXA5D2errhsGq-_yQQBx3dv58XxTM-ocI8SrKNT0psU6TXQq9UJoCDKw-pVwSwLv9h6AWWDMm4UpTdhgd_ol6_JmZO76XZoKcXOp2Tx7JsQVpp3MnAOVr77Fg52zGpXKhOJ9jCRzn5CqioWTLzsork9aFvHD5j69BuIzdHvLf5u7Ffkekpw8jqKqyaxW-PH7SxoSJ58rKHOZrd5sa8XsH8OfAE59KonzSUxyq33J8zPKAr6otM4OA1T3V9X1k-8H7arLw0qzz4sin74Pn00k7N0WqZxZZiEst3uF7ce1P8C3ZCFaXp-yY5ceW9vZlizeWeEXAWEnBLiF7MbncAsUzq-2tP-2GXJxOx7uPZG3dyUF-VkjmAykO15uGxp8FNgS6HFwd69oeUWBa1tuNbmkXO3mKwrn3GSHdEweLQXF21IsAkTWtGwVvDfoH4mw-K2ut5aJ1LqsY25iTh_A-35o0ehIZtRBjpdrOVtjULYmBU1NQ= \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/login_initialized.txt b/flow/fixtures/legacy_challenges/login_initialized.txt new file mode 100644 index 00000000000..5905435377a --- /dev/null +++ b/flow/fixtures/legacy_challenges/login_initialized.txt @@ -0,0 +1 @@ +V2C1GmIjRkgP0eMM2FhmAnLIGxpamxa6vbyuiNoJQyKRJ6gjI93jek6MDElfQRJQcFAEgB8NkdMK3dhK7-P_TFJVnkG9mv5yjzbyZs_VV_3i_HCm2-_GhvC7Xotee5nVn_iBLlCmqv7mkoa5AUD9H2XAurzPJaigQL_obkFRMlRFbppbFZ-Km4XI1ilGJNCebG6t5t0x8HgFHydBPcabqyBtfbUmLQDOusS-GJ3xF-TtItxND6PTwi6VY9idrvtc3qUWBvRECPIHn9NOFLBhIuOABBbi3Ps7OsToYKN0_HtHBwonSANmDXVtgDNBnfMyPCe3FRDgwKt2TMTlBQ-K31E3fL6KKjJHnAqwdflKbs4Ihaym-x-uj4mGxeJ-O8lC3398-yS1zPkGPi0U0v4yALKg_O7rySzXBAOwnmInfn4hwp2PNYYLpo5yS6vROaZWQgrfayonPnf2f4L-RdCcAr0V5-aa65XOEFjrKcidTNXctKJ9vlxB6jgMqVrGd7fsUWL5yHE_fi03uoLSmPJXAakqUZmZ2oUCY5CfqI_t7Z739vVh2rssFyXTyRqfQWLogiwVnATQSIJdFunR5q67qxgDwkg9GOY_n3i1DR2o82P9E6aLPOLVnOcoalsWKoOdERznIrPid7cvVOc6eDHK_-Fuaa06I6F9SxikGxvPjhyYdqhq7xPOP26u2Bms8OXafgLxnr8mCbaw09AOkM1KoGRkf7L1-AyCG_nX0oBOMnmuz903S27ZJInMxbiFH9-LEi-1D-4Vkm09juiV_FpFkQQJNyVk_hmhyQft6c02aXVmy1n6TK0JHWJ12es1BIrs3T12SU0yVAu3tWqPxFaKILv3sMWdf4lxkxXnlO07h2ZODHjx8VwmeX4iaP0c4zptzRtCc2ZseNHxOr4WiIkh0UQ3AEAU0FhothDV38kCVJfQNE1tI-TTUb35TV0R1rnKYvUMnFiWbR2v4r412CV6tTazyQH4EXCLpVlWVitSgbnXFclKYcBaLH2C2A299xgrUlZz8nZ8Js0JnxfA4OcU2fmOnm0MN1hvfpHuO2Y7tSGPU7XUgmxdy5qNdSPDnoL-zAzm9k1Aq88G7VNmYpSaA_Ij55Jm7U6mlFso6gFxsstvfaUD6g== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/login_unused.txt b/flow/fixtures/legacy_challenges/login_unused.txt new file mode 100644 index 00000000000..8d180bd377c --- /dev/null +++ b/flow/fixtures/legacy_challenges/login_unused.txt @@ -0,0 +1 @@ 
+W4GKBor1H1_XmdD2tsK3g2QR0txC4byI2-s0L26G4NDIwgHJegI0_4nyeRc-ZteDRsaSQBfy9_R32abhyPLFYEOre9LRh44-34i4D6D4mLXzflNVJE_PiL2o3QCQajnGJZhQagIT1cb1q9hVexxd0-n45jgTyuKfNN6noCLgxfYZN3mgZo-_uRAd9u2pon2msRS17ep13CgLQ7Fhz7o_yidhkHKbmFggvBdA-xQEU3TMVOVCSMQ4LBIpUv4tEfjdPCVvzITMfednt7yC7NVF4VUe4OrvoHAIk5ILDHfAMhoPxTUReTzMBAubf3DbdIzifBS23lOlXCwoRziWzpKZqsXgDXuOsZ1SRvympdKPrfVe4foRyuSJ5lm3n1jD_1rTSnoDOJw6jbijnxaF-IM3h8PYbHIbFogvvGZrLCpbnICLuJ8WnGfwLHqthScn-npHs4vii-aEYpBl8_soWvb97SY9wGSvR78voWTpKF5R-Ly112HVHVFdfisDILNuQyKuzSilnGWNnClGiKYCp4QRusfzqxFuxgG6B20Q1rsOu5VaPH1VGLCRCNYxVH5fmmyLwcNFcpTStJH-WntT5vYlXab8mWSu75MrITP0lVZHRk9EUStQr5ZA-B7eUgBZZbNyqcE3DFfLaUuaRqxpg_Hb2TCzf92IqrugNisg_8yXuv6ZtdykLdnjte4la2O3IAx4GZJ1s7-tgafQfCEgIRI8_FwU74QkKA7Bj12Q7wYDL-g61_F0xPtJ_rlOFlE707rgpKX4L6r6429wUbisRQW7Inn8xvqd8Nf7SFoSb657VEAro26EZKYDMe0AxVdjO3zsJJRBkiDpge5YsiEYfYzTnG6ttotcD7AaLmAvPQGleBAUMy2uLzICf9BrwnKOJU1KqX66E7NJFfV4C9X3EGQcSRJy6Fh1gqq_aI1OjSB0G19S6kJtg2jBI_ft2XbD-zddOhW6a4IIVlHOUaiji-XSLNrhvuXJ08Zggne4enDXWZjZgoWdO8bkD4Q0bHZbCbRjF8jM6lpS7_x-TBSDynxdP9l3LqzXXpwmQ9BczAXlbSqOIjICf6JeyaCuF6n1TB7v5t2-Bw21-1Xu_PrgTkwMLDyjt-LVkOhkZmsDOrk1rX9QA0Vidw== \ No newline at end of file diff --git a/flow/fixtures/legacy_challenges/login_used.txt b/flow/fixtures/legacy_challenges/login_used.txt new file mode 100644 index 00000000000..c12c268e1f2 --- /dev/null +++ b/flow/fixtures/legacy_challenges/login_used.txt @@ -0,0 +1 @@ +c4ktauBVvmjCCJAkwzppt2422l5fUr5VvTyUdouevdY3_8ZiHswXNF-08A3l9sNz9cgcffVOX33ZdxJDKeNuGqSKuRpALPU-fVwjTt0an9PPBW_2eBBvfMbasqZJQhRxbCL_3z7VrA6U5_nUHf8kGX0so7kHkNnvsR-cSgdmqdq1TPrE01IuU-PMNivjtKvEihvWucOSEO4DtERoQkGCJJ8IEfGZP5RIo9RrFQ-TJswD9QQtk6Gw1oD2Gko80zoIozxWResDSuoZuikzjyd7VLRZYUS7A0y8bxxR-I5b8A_2priqMEYYAhuC39A4QNgNEvMnHBV9JWIky6u4rStwWaHjDNdahnYl7tumLO4r7iH_C2ZTpxcVoaJzxar1_xERrQ2OE2hehVaSO3tY9MoYPae4lvu8OnwSFHTBhUyd1gDxuSoi38G3Hw78ZHyTc29GAA_Ir3ZASMU3sVWAlZ8jZY8jG85Npd0OzkxhzkUz_M_PExF4I45EF9s73g8G-nHEjfcOR78LrddIIKsZGzldtWF_u00cAoSHzyWjy1HTF1Yf60qbVm2rDmAcPd33JqI11RjYUvNv30-9Sasqs2w4BccW5iaZC0sKTj-WD6CZzFKYz8VGDox1symv748-pEct2V51CYQR95o9mQplxqJdfaKD6SmHwthU-4yZ5txX6-MFaRA1dNfn--0VZD0Iih6VtankV4jqJGni1GkvDO22wX1XEfdlO3HgjeUxLq7tnyGXtFYn5XI9vkbefWAJyoPUedmsZnIZ4bdyUg_elwGpkhWqdoAMPqVmo1lqTJXG1Di3LX8phUdIVCGQQRe8K3BVg7YvUiriZ5-BXJdGOwy67_oNo0QZK2JLM9FGtE8_HqNq5sPVuBsgJ32ZeLWYku28QlafWrXKFo8JWMTrqJ743hRzPWsHDqAsAQjJr7e4K8yqkuLK_Hhd7raetVWw2yjV1YG9DOTnnQ14-YFrZeqzYZ5CagLPmftwQUmk7nP-VLEB_3wmV7sW39TwSIDK1UDLNS9eurOBBS9rQe4G1-_eUqO-H4IBFGJ-cDEYFv61XJGbxKzOFfAx-nsHej816WYMQ1pbaUe5zLONbMfeMvlgPDQMn1G0oakadK8VtdjWgx7rTD6CC-4CJg== \ No newline at end of file diff --git a/flow/flow.go b/flow/flow.go index 9915ed40719..ed0dc8460be 100644 --- a/flow/flow.go +++ b/flow/flow.go @@ -4,16 +4,16 @@ package flow import ( + "context" "time" "github.com/gofrs/uuid" "github.com/pkg/errors" - "github.com/gobuffalo/pop/v6" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/client" + "github.com/ory/pop/v6" + "github.com/ory/x/pointerx" "github.com/ory/x/sqlcon" "github.com/ory/x/sqlxx" ) @@ -21,37 +21,59 @@ import ( // FlowState* constants enumerate the states of a flow. The below graph // describes possible flow state transitions. 
// -// graph TD -// -// LOGIN_INITIALIZED --> LOGIN_UNUSED -// LOGIN_UNUSED --> LOGIN_USED -// LOGIN_UNUSED --> LOGIN_ERROR -// LOGIN_USED --> CONSENT_INITIALIZED -// CONSENT_INITIALIZED --> CONSENT_UNUSED -// CONSENT_UNUSED --> CONSENT_UNUSED -// CONSENT_UNUSED --> CONSENT_USED -// CONSENT_UNUSED --> CONSENT_ERROR +// stateDiagram-v2 +// [*] --> DEVICE_UNUSED: GET /oauth2/device/verify +// DEVICE_UNUSED --> DEVICE_USED: submit user code +// DEVICE_USED --> LOGIN_UNUSED: to verifier +// [*] --> LOGIN_UNUSED: GET /oauth2/auth +// LOGIN_UNUSED --> LOGIN_UNUSED: accept login +// LOGIN_UNUSED --> LOGIN_USED: submit login verifier +// LOGIN_UNUSED --> LOGIN_ERROR: reject login +// LOGIN_ERROR --> [*] +// LOGIN_USED --> CONSENT_UNUSED +// CONSENT_UNUSED --> CONSENT_UNUSED: accept consent +// CONSENT_UNUSED --> CONSENT_USED: submit consent verifier +// CONSENT_UNUSED --> CONSENT_ERROR: reject consent +// CONSENT_ERROR --> [*] +// CONSENT_USED --> [*] + +type State int16 + const ( - // FlowStateLoginInitialized applies before the login app either - // accepts or rejects the login request. - FlowStateLoginInitialized = int16(1) + // FlowStateLoginInitialized is not used anymore, but is kept for + // backwards compatibility. New flows start at FlowStateLoginUnused. + FlowStateLoginInitialized = State(1) // FlowStateLoginUnused indicates that the login has been authenticated, but // the User Agent hasn't picked up the result yet. - FlowStateLoginUnused = int16(2) + FlowStateLoginUnused = State(2) // FlowStateLoginUsed indicates that the User Agent is requesting consent and // Hydra has invalidated the login request. This is a short-lived state // because the transition to FlowStateConsentInitialized should happen while // handling the request that triggered the transition to FlowStateLoginUsed. - FlowStateLoginUsed = int16(3) + FlowStateLoginUsed = State(3) + + // FlowStateConsentInitialized is not used anymore, but is kept for + // backwards compatibility. New flows start at FlowStateConsentUnused. + FlowStateConsentInitialized = State(4) + + FlowStateConsentUnused = State(5) + FlowStateConsentUsed = State(6) + + // DeviceFlowStateInitialized is not used anymore, but is kept for + // backwards compatibility. New flows start at DeviceFlowStateUnused. + DeviceFlowStateInitialized = State(7) - // FlowStateConsentInitialized applies while Hydra waits for a consent request - // to be accepted or rejected. - FlowStateConsentInitialized = int16(4) + // DeviceFlowStateUnused indicates that the login has been authenticated, but + // the User Agent hasn't picked up the result yet. + DeviceFlowStateUnused = State(8) - FlowStateConsentUnused = int16(5) - FlowStateConsentUsed = int16(6) + // DeviceFlowStateUsed indicates that the User Agent is requesting consent and + // Hydra has invalidated the login request. This is a short-lived state + // because the transition to DeviceFlowStateConsentInitialized should happen while + // handling the request that triggered the transition to DeviceFlowStateUsed. + DeviceFlowStateUsed = State(9) // TODO: Refactor error handling to persist error codes instead of JSON // strings. 
Currently we persist errors as JSON strings in the LoginError @@ -64,10 +86,22 @@ const ( // If the above is implemented, merge the LoginError and ConsentError fields // and use the following FlowStates when converting to/from // [Handled]{Login|Consent}Request: - FlowStateLoginError = int16(128) - FlowStateConsentError = int16(129) + FlowStateLoginError = State(128) + FlowStateConsentError = State(129) ) +func (s State) ConsentWasUsed() bool { return s == FlowStateConsentUsed || s == FlowStateConsentError } +func (s State) LoginWasUsed() bool { return s == FlowStateLoginUsed || s == FlowStateLoginError } + +func (s State) IsAny(expected ...State) error { + for _, e := range expected { + if s == e { + return nil + } + } + return errors.Errorf("invalid flow state: expected one of %v, got %d", expected, s) +} + // Flow is an abstraction used in the persistence layer to unify LoginRequest, // HandledLoginRequest, ConsentRequest, and AcceptOAuth2ConsentRequest. // @@ -79,22 +113,27 @@ const ( // using the original structs in the API in order to minimize the impact of the // database refactoring on the API. type Flow struct { - // ID is the identifier ("login challenge") of the login request. It is used to - // identify the session. + // ID is the identifier of the login request. // - // required: true - ID string `db:"login_challenge"` - NID uuid.UUID `db:"nid"` + // The struct field is named ID for compatibility with gobuffalo/pop, and is + // the primary key in the database. + // + // The database column should be named `login_challenge_id`, but is not for + // historical reasons. + // + // This is not the same as the login session ID. + ID string `db:"login_challenge" json:"i"` + NID uuid.UUID `db:"nid" json:"n"` // RequestedScope contains the OAuth 2.0 Scope requested by the OAuth 2.0 Client. // // required: true - RequestedScope sqlxx.StringSliceJSONFormat `db:"requested_scope"` + RequestedScope sqlxx.StringSliceJSONFormat `db:"requested_scope" json:"rs,omitempty"` // RequestedAudience contains the access token audience as requested by the OAuth 2.0 Client. // // required: true - RequestedAudience sqlxx.StringSliceJSONFormat `db:"requested_at_audience"` + RequestedAudience sqlxx.StringSliceJSONFormat `db:"requested_at_audience" json:"ra,omitempty"` // LoginSkip, if true, implies that the client has requested the same scopes from the same user previously. // If true, you can skip asking the user to grant the requested scopes, and simply forward the user to the redirect URL. @@ -102,65 +141,70 @@ type Flow struct { // This feature allows you to update / set session information. // // required: true - LoginSkip bool `db:"login_skip"` + LoginSkip bool `db:"-" json:"ls,omitempty"` // Subject is the user ID of the end-user that authenticated. Now, that end user needs to grant or deny the scope // requested by the OAuth 2.0 client. If this value is set and `skip` is true, you MUST include this subject type // when accepting the login request, or the request will fail. // // required: true - Subject string `db:"subject"` + Subject string `db:"subject" json:"s,omitempty"` // OpenIDConnectContext provides context for the (potential) OpenID Connect context. Implementation of these // values in your app are optional but can be useful if you want to be fully compliant with the OpenID Connect spec. 
- OpenIDConnectContext *consent.OAuth2ConsentRequestOpenIDConnectContext `db:"oidc_context"` + OpenIDConnectContext *OAuth2ConsentRequestOpenIDConnectContext `db:"oidc_context" json:"oc"` // Client is the OAuth 2.0 Client that initiated the request. // // required: true - Client *client.Client `db:"-"` - - ClientID string `db:"client_id"` + Client *client.Client `db:"-" json:"c,omitempty"` + ClientID string `db:"client_id" json:"ci,omitempty"` // RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. It is the URL which // initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but // might come in handy if you want to deal with additional request parameters. // // required: true - RequestURL string `db:"request_url"` + RequestURL string `db:"request_url" json:"r,omitempty"` // SessionID is the login session ID. If the user-agent reuses a login session (via cookie / remember flag) // this ID will remain the same. If the user-agent did not have an existing authentication session (e.g. remember is false) // this will be a new random value. This value is used as the "sid" parameter in the ID Token and in OIDC Front-/Back- // channel logout. Its value can generally be used to associate consecutive login requests by a certain user. - SessionID sqlxx.NullString `db:"login_session_id"` + SessionID sqlxx.NullString `db:"login_session_id" json:"si,omitempty"` + + // IdentityProviderSessionID is the session ID of the end-user that authenticated. + // If specified, we will use this value to propagate the logout. + IdentityProviderSessionID sqlxx.NullString `db:"-" json:"is,omitempty"` - LoginVerifier string `db:"login_verifier"` - LoginCSRF string `db:"login_csrf"` + LoginCSRF string `db:"-" json:"lc,omitempty"` - LoginInitializedAt sqlxx.NullTime `db:"login_initialized_at"` - RequestedAt time.Time `db:"requested_at"` + RequestedAt time.Time `db:"requested_at" json:"ia,omitempty"` - State int16 `db:"state"` + State State `db:"-" json:"q,omitempty"` // LoginRemember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store // a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she // will not be asked to log in again. - LoginRemember bool `db:"login_remember"` + LoginRemember bool `db:"-" json:"lr,omitempty"` // LoginRememberFor sets how long the authentication should be remembered for in seconds. If set to `0`, the // authorization will be remembered for the duration of the browser session (using a session cookie). - LoginRememberFor int `db:"login_remember_for"` + LoginRememberFor int `db:"-" json:"lf,omitempty"` + + // LoginExtendSessionLifespan, if set to true, session cookie expiry time will be updated when session is + // refreshed (login skip=true). + LoginExtendSessionLifespan bool `db:"-" json:"ll,omitempty"` // ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it // to express that, for example, a user authenticated using two factor authentication. - ACR string `db:"acr"` + ACR string `db:"acr" json:"a,omitempty"` // AMR sets the Authentication Methods References value for this // authentication session. You can use it to specify the method a user used to // authenticate. For example, if the acr indicates a user used two factor // authentication, the amr can express they used a software-secured key. 
- AMR sqlxx.StringSliceJSONFormat `db:"amr"` + AMR sqlxx.StringSliceJSONFormat `db:"amr" json:"am,omitempty"` // ForceSubjectIdentifier forces the "pairwise" user ID of the end-user that authenticated. The "pairwise" user ID refers to the // (Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID @@ -179,93 +223,87 @@ type Flow struct { // other unique value). // // If you fail to compute the proper value, then authentication processes which have id_token_hint set might fail. - ForceSubjectIdentifier string `db:"forced_subject_identifier"` + ForceSubjectIdentifier string `db:"-" json:"fs,omitempty"` // Context is an optional object which can hold arbitrary data. The data will be made available when fetching the // consent request under the "context" field. This is useful in scenarios where login and consent endpoints share // data. - Context sqlxx.JSONRawMessage `db:"context"` - - // LoginWasUsed set to true means that the login request was already handled. - // This can happen on form double-submit or other errors. If this is set we - // recommend redirecting the user to `request_url` to re-initiate the flow. - LoginWasUsed bool `db:"login_was_used"` - - LoginError *consent.RequestDeniedError `db:"login_error"` - LoginAuthenticatedAt sqlxx.NullTime `db:"login_authenticated_at"` - - // ConsentChallengeID is the identifier ("authorization challenge") of the consent authorization request. It is used to - // identify the session. - // - // required: true - ConsentChallengeID sqlxx.NullString `db:"consent_challenge_id"` - + Context sqlxx.JSONRawMessage `db:"context" json:"ct"` + + LoginError *RequestDeniedError `db:"-" json:"le,omitempty"` + LoginAuthenticatedAt sqlxx.NullTime `db:"-" json:"la,omitempty"` + + // DeviceChallengeID is the device request's challenge ID + DeviceChallengeID sqlxx.NullString `db:"device_challenge_id" json:"di,omitempty"` + // DeviceCodeRequestID is the device request's ID + DeviceCodeRequestID sqlxx.NullString `db:"device_code_request_id" json:"dr,omitempty"` + // DeviceCSRF is the device request's CSRF + DeviceCSRF sqlxx.NullString `db:"-" json:"dc,omitempty"` + // DeviceHandledAt contains the timestamp the device user_code verification request was handled + DeviceHandledAt sqlxx.NullTime `db:"-" json:"dh,omitempty"` + + // ConsentRequestID is the identifier of the consent request. + // The database column should be named `consent_request_id`, but is not for historical reasons. + ConsentRequestID sqlxx.NullString `db:"consent_challenge_id" json:"cc,omitempty"` // ConsentSkip, if true, implies that the client has requested the same scopes from the same user previously. // If true, you must not ask the user to grant the requested scopes. You must however either allow or deny the // consent request using the usual API call. - ConsentSkip bool `db:"consent_skip"` - ConsentVerifier sqlxx.NullString `db:"consent_verifier"` - ConsentCSRF sqlxx.NullString `db:"consent_csrf"` + ConsentSkip bool `db:"consent_skip" json:"cs,omitempty"` + ConsentCSRF sqlxx.NullString `db:"-" json:"cr,omitempty"` // GrantedScope sets the scope the user authorized the client to use. Should be a subset of `requested_scope`. - GrantedScope sqlxx.StringSliceJSONFormat `db:"granted_scope"` + GrantedScope sqlxx.StringSliceJSONFormat `db:"granted_scope" json:"gs,omitempty"` // GrantedAudience sets the audience the user authorized the client to use. Should be a subset of `requested_access_token_audience`. 
- GrantedAudience sqlxx.StringSliceJSONFormat `db:"granted_at_audience"` + GrantedAudience sqlxx.StringSliceJSONFormat `db:"granted_at_audience" json:"ga,omitempty"` // ConsentRemember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same // client asks the same user for the same, or a subset of, scope. - ConsentRemember bool `db:"consent_remember"` + ConsentRemember bool `db:"consent_remember" json:"ce,omitempty"` // ConsentRememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the // authorization will be remembered indefinitely. - ConsentRememberFor *int `db:"consent_remember_for"` + ConsentRememberFor *int `db:"consent_remember_for" json:"cf"` // ConsentHandledAt contains the timestamp the consent request was handled. - ConsentHandledAt sqlxx.NullTime `db:"consent_handled_at"` - - // ConsentWasHandled set to true means that the request was already handled. - // This can happen on form double-submit or other errors. If this is set we - // recommend redirecting the user to `request_url` to re-initiate the flow. - ConsentWasHandled bool `db:"consent_was_used"` - ConsentError *consent.RequestDeniedError `db:"consent_error"` - SessionIDToken sqlxx.MapStringInterface `db:"session_id_token" faker:"-"` - SessionAccessToken sqlxx.MapStringInterface `db:"session_access_token" faker:"-"` -} + ConsentHandledAt sqlxx.NullTime `db:"consent_handled_at" json:"ch,omitempty"` -func NewFlow(r *consent.LoginRequest) *Flow { - return &Flow{ - ID: r.ID, - RequestedScope: r.RequestedScope, - RequestedAudience: r.RequestedAudience, - LoginSkip: r.Skip, - Subject: r.Subject, - OpenIDConnectContext: r.OpenIDConnectContext, - Client: r.Client, - ClientID: r.ClientID, - RequestURL: r.RequestURL, - SessionID: r.SessionID, - LoginWasUsed: r.WasHandled, - ForceSubjectIdentifier: r.ForceSubjectIdentifier, - LoginVerifier: r.Verifier, - LoginCSRF: r.CSRF, - LoginAuthenticatedAt: r.AuthenticatedAt, - RequestedAt: r.RequestedAt, - State: FlowStateLoginInitialized, - } + ConsentError *RequestDeniedError `db:"-" json:"cx"` + SessionIDToken sqlxx.MapStringInterface `db:"session_id_token" faker:"-" json:"st"` + SessionAccessToken sqlxx.MapStringInterface `db:"session_access_token" faker:"-" json:"sa"` } -func (f *Flow) HandleLoginRequest(h *consent.HandledLoginRequest) error { - if f.LoginWasUsed { - return errors.WithStack(x.ErrConflict.WithHint("The login request was already used and can no longer be changed.")) +// HandleDeviceUserAuthRequest updates the flows fields from a handled request. +func (f *Flow) HandleDeviceUserAuthRequest(h *HandledDeviceUserAuthRequest) error { + if err := f.State.IsAny(DeviceFlowStateInitialized, DeviceFlowStateUnused); err != nil { + return err } - if f.State != FlowStateLoginInitialized && f.State != FlowStateLoginUnused && f.State != FlowStateLoginError { - return errors.Errorf("invalid flow state: expected %d/%d/%d, got %d", FlowStateLoginInitialized, FlowStateLoginUnused, FlowStateLoginError, f.State) + f.State = DeviceFlowStateUnused + + f.Client = h.Client + f.ClientID = h.Client.GetID() + f.DeviceCodeRequestID = sqlxx.NullString(h.DeviceCodeRequestID) + f.DeviceHandledAt = sqlxx.NullTime(time.Now().UTC()) + f.RequestedScope = h.RequestedScope + f.RequestedAudience = h.RequestedAudience + + return nil +} + +// InvalidateDeviceRequest shifts the flow state to DeviceFlowStateUsed. This +// transition is executed upon device completion. 
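+// It is called when the device verifier is consumed (see
+// DecodeAndInvalidateDeviceVerifier in flow_encoding.go).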
+func (f *Flow) InvalidateDeviceRequest() error { + if err := f.State.IsAny(DeviceFlowStateUnused); err != nil { + return err } + f.State = DeviceFlowStateUsed + return nil +} - if f.ID != h.ID { - return errors.Errorf("flow ID %s does not match HandledLoginRequest ID %s", f.ID, h.ID) +func (f *Flow) HandleLoginRequest(h *HandledLoginRequest) error { + if err := f.State.IsAny(FlowStateLoginInitialized, FlowStateLoginUnused, FlowStateLoginError); err != nil { + return err } if f.Subject != "" && h.Subject != "" && f.Subject != h.Subject { @@ -276,111 +314,92 @@ func (f *Flow) HandleLoginRequest(h *consent.HandledLoginRequest) error { return errors.Errorf("flow ForceSubjectIdentifier %s does not match the HandledLoginRequest ForceSubjectIdentifier %s", f.ForceSubjectIdentifier, h.ForceSubjectIdentifier) } - if h.Error != nil { - f.State = FlowStateLoginError - } else { - f.State = FlowStateLoginUnused + f.State = FlowStateLoginUnused + + if f.Context != nil { + f.Context = h.Context } - f.ID = h.ID + f.Subject = h.Subject f.ForceSubjectIdentifier = h.ForceSubjectIdentifier - f.LoginError = h.Error + f.IdentityProviderSessionID = sqlxx.NullString(h.IdentityProviderSessionID) f.LoginRemember = h.Remember f.LoginRememberFor = h.RememberFor + f.LoginExtendSessionLifespan = h.ExtendSessionLifespan f.ACR = h.ACR f.AMR = h.AMR - f.Context = h.Context - f.LoginWasUsed = h.WasHandled - f.LoginAuthenticatedAt = h.AuthenticatedAt return nil } -func (f *Flow) GetHandledLoginRequest() consent.HandledLoginRequest { - return consent.HandledLoginRequest{ - ID: f.ID, - Remember: f.LoginRemember, - RememberFor: f.LoginRememberFor, - ACR: f.ACR, - AMR: f.AMR, - Subject: f.Subject, - ForceSubjectIdentifier: f.ForceSubjectIdentifier, - Context: f.Context, - WasHandled: f.LoginWasUsed, - Error: f.LoginError, - LoginRequest: f.GetLoginRequest(), - RequestedAt: f.RequestedAt, - AuthenticatedAt: f.LoginAuthenticatedAt, +func (f *Flow) HandleLoginError(er *RequestDeniedError) error { + if err := f.State.IsAny(FlowStateLoginInitialized, FlowStateLoginUnused, FlowStateLoginError); err != nil { + return err } + + f.State = FlowStateLoginError + + f.LoginError = er + + // force-reset values + f.Subject = "" + f.ForceSubjectIdentifier = "" + f.LoginAuthenticatedAt = sqlxx.NullTime{} + f.IdentityProviderSessionID = "" + f.LoginRemember = false + f.LoginRememberFor = 0 + f.LoginExtendSessionLifespan = false + f.ACR = "" + f.AMR = nil + + return nil } -func (f *Flow) GetLoginRequest() *consent.LoginRequest { - return &consent.LoginRequest{ - ID: f.ID, - RequestedScope: f.RequestedScope, - RequestedAudience: f.RequestedAudience, - Skip: f.LoginSkip, - Subject: f.Subject, - OpenIDConnectContext: f.OpenIDConnectContext, - Client: f.Client, - ClientID: f.ClientID, - RequestURL: f.RequestURL, - SessionID: f.SessionID, - WasHandled: f.LoginWasUsed, - ForceSubjectIdentifier: f.ForceSubjectIdentifier, - Verifier: f.LoginVerifier, - CSRF: f.LoginCSRF, - AuthenticatedAt: f.LoginAuthenticatedAt, - RequestedAt: f.RequestedAt, +func (f *Flow) GetLoginRequest() *LoginRequest { + return &LoginRequest{ + ID: f.ID, + RequestedScope: f.RequestedScope, + RequestedAudience: f.RequestedAudience, + Skip: f.LoginSkip, + Subject: f.Subject, + OpenIDConnectContext: f.OpenIDConnectContext, + Client: f.Client, + RequestURL: f.RequestURL, + SessionID: f.SessionID, } } // InvalidateLoginRequest shifts the flow state to FlowStateLoginUsed. This // transition is executed upon login completion. 
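+// It is called when the login verifier is consumed (see
+// DecodeAndInvalidateLoginVerifier in flow_encoding.go).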
func (f *Flow) InvalidateLoginRequest() error { - if f.State != FlowStateLoginUnused && f.State != FlowStateLoginError { - return errors.Errorf("invalid flow state: expected %d or %d, got %d", FlowStateLoginUnused, FlowStateLoginError, f.State) + if err := f.State.IsAny(FlowStateLoginUnused, FlowStateLoginError); err != nil { + return err } - if f.LoginWasUsed { - return errors.New("login verifier has already been used") + + if f.State == FlowStateLoginUnused { + f.State = FlowStateLoginUsed + } else { + // FlowStateLoginError is already a terminal state, so we don't need to do anything here. } - f.LoginWasUsed = true - f.State = FlowStateLoginUsed return nil } -func (f *Flow) HandleConsentRequest(r *consent.AcceptOAuth2ConsentRequest) error { - if time.Time(r.HandledAt).IsZero() { - return errors.New("refusing to handle a consent request with null HandledAt") - } - - if f.ConsentWasHandled { - return x.ErrConflict.WithHint("The consent request was already used and can no longer be changed.") - } - - if f.State != FlowStateConsentInitialized && f.State != FlowStateConsentUnused && f.State != FlowStateConsentError { - return errors.Errorf("invalid flow state: expected %d/%d/%d, got %d", FlowStateConsentInitialized, FlowStateConsentUnused, FlowStateConsentError, f.State) - } - - if f.ConsentChallengeID.String() != r.ID { - return errors.Errorf("flow.ConsentChallengeID %s doesn't match AcceptOAuth2ConsentRequest.ID %s", f.ConsentChallengeID.String(), r.ID) +func (f *Flow) HandleConsentRequest(r *AcceptOAuth2ConsentRequest) error { + if err := f.State.IsAny(FlowStateConsentInitialized, FlowStateConsentUnused, FlowStateConsentError); err != nil { + return err } - if r.Error != nil { - f.State = FlowStateConsentError - } else if r.WasHandled { - f.State = FlowStateConsentUsed - } else { - f.State = FlowStateConsentUnused - } + f.State = FlowStateConsentUnused f.GrantedScope = r.GrantedScope f.GrantedAudience = r.GrantedAudience f.ConsentRemember = r.Remember f.ConsentRememberFor = &r.RememberFor - f.ConsentHandledAt = r.HandledAt - f.ConsentWasHandled = r.WasHandled - f.ConsentError = r.Error + f.ConsentHandledAt = sqlxx.NullTime(time.Now().UTC()) + f.ConsentError = nil + if r.Context != nil { + f.Context = r.Context + } if r.Session != nil { f.SessionIDToken = r.Session.IDToken @@ -389,68 +408,66 @@ func (f *Flow) HandleConsentRequest(r *consent.AcceptOAuth2ConsentRequest) error return nil } -func (f *Flow) InvalidateConsentRequest() error { - if f.ConsentWasHandled { - return errors.New("consent verifier has already been used") - } - if f.State != FlowStateConsentUnused && f.State != FlowStateConsentError { - return errors.Errorf("unexpected flow state: expected %d or %d, got %d", FlowStateConsentUnused, FlowStateConsentError, f.State) +func (f *Flow) HandleConsentError(er *RequestDeniedError) error { + if err := f.State.IsAny(FlowStateConsentInitialized, FlowStateConsentUnused, FlowStateConsentError); err != nil { + return err } - f.ConsentWasHandled = true - f.State = FlowStateConsentUsed + f.State = FlowStateConsentError + + f.ConsentError = er + f.ConsentHandledAt = sqlxx.NullTime(time.Now().UTC()) + + // force-reset values + f.GrantedScope = nil + f.GrantedAudience = nil + f.ConsentRemember = false + f.ConsentRememberFor = nil + return nil } -func (f *Flow) GetConsentRequest() *consent.OAuth2ConsentRequest { - return &consent.OAuth2ConsentRequest{ - ID: f.ConsentChallengeID.String(), - RequestedScope: f.RequestedScope, - RequestedAudience: f.RequestedAudience, - Skip: f.ConsentSkip, - 
Subject: f.Subject, - OpenIDConnectContext: f.OpenIDConnectContext, - Client: f.Client, - ClientID: f.ClientID, - RequestURL: f.RequestURL, - LoginChallenge: sqlxx.NullString(f.ID), - LoginSessionID: f.SessionID, - ACR: f.ACR, - AMR: f.AMR, - Context: f.Context, - WasHandled: f.ConsentWasHandled, - ForceSubjectIdentifier: f.ForceSubjectIdentifier, - Verifier: f.ConsentVerifier.String(), - CSRF: f.ConsentCSRF.String(), - AuthenticatedAt: f.LoginAuthenticatedAt, - RequestedAt: f.RequestedAt, +func (f *Flow) InvalidateConsentRequest() error { + if err := f.State.IsAny(FlowStateConsentUnused, FlowStateConsentError); err != nil { + return err + } + + if f.State == FlowStateConsentUnused { + f.State = FlowStateConsentUsed + } else { + // FlowStateConsentError is already a terminal state, so we don't need to do anything here. } + return nil } -func (f *Flow) GetHandledConsentRequest() *consent.AcceptOAuth2ConsentRequest { - crf := 0 - if f.ConsentRememberFor != nil { - crf = *f.ConsentRememberFor +func (f *Flow) GetConsentRequest(challenge string) *OAuth2ConsentRequest { + cs := OAuth2ConsentRequest{ + Challenge: challenge, + ConsentRequestID: f.ConsentRequestID.String(), + RequestedScope: f.RequestedScope, + RequestedAudience: f.RequestedAudience, + Skip: f.ConsentSkip, + Subject: f.Subject, + OpenIDConnectContext: f.OpenIDConnectContext, + Client: f.Client, + RequestURL: f.RequestURL, + LoginChallenge: sqlxx.NullString(f.ID), + LoginSessionID: f.SessionID, + ACR: f.ACR, + AMR: f.AMR, + Context: f.Context, + } + // set some defaults for the API + if cs.RequestedAudience == nil { + cs.RequestedAudience = []string{} } - return &consent.AcceptOAuth2ConsentRequest{ - ID: f.ConsentChallengeID.String(), - GrantedScope: f.GrantedScope, - GrantedAudience: f.GrantedAudience, - Session: &consent.AcceptOAuth2ConsentRequestSession{AccessToken: f.SessionAccessToken, IDToken: f.SessionIDToken}, - Remember: f.ConsentRemember, - RememberFor: crf, - HandledAt: f.ConsentHandledAt, - WasHandled: f.ConsentWasHandled, - ConsentRequest: f.GetConsentRequest(), - Error: f.ConsentError, - RequestedAt: f.RequestedAt, - AuthenticatedAt: f.LoginAuthenticatedAt, - SessionIDToken: f.SessionIDToken, - SessionAccessToken: f.SessionAccessToken, + if cs.AMR == nil { + cs.AMR = []string{} } + return &cs } -func (_ Flow) TableName() string { +func (Flow) TableName() string { return "hydra_oauth2_flow" } @@ -464,15 +481,18 @@ func (f *Flow) BeforeSave(_ *pop.Connection) error { return nil } -// TODO Populate the client field in FindInDB and FindByConsentChallengeID in -// order to avoid accessing the database twice. func (f *Flow) AfterFind(c *pop.Connection) error { + // TODO Populate the client field in FindInDB and FindByConsentChallengeID in + // order to avoid accessing the database twice. + if f.ClientID == "" { + return nil + } f.AfterSave(c) f.Client = &client.Client{} return sqlcon.HandleError(c.Where("id = ? AND nid = ?", f.ClientID, f.NID).First(f.Client)) } -func (f *Flow) AfterSave(c *pop.Connection) { +func (f *Flow) AfterSave(_ *pop.Connection) { if f.SessionAccessToken == nil { f.SessionAccessToken = make(map[string]interface{}) } @@ -480,3 +500,69 @@ func (f *Flow) AfterSave(c *pop.Connection) { f.SessionIDToken = make(map[string]interface{}) } } + +type CipherProvider interface { + FlowCipher() *aead.XChaCha20Poly1305 +} + +// ToDeviceChallenge converts the flow into a device challenge. 
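+//
+// Challenges and verifiers are stateless: the flow is JSON-encoded, gzip-compressed,
+// and AEAD-encrypted by Encode, bound to a purpose. A minimal sketch, assuming a
+// CipherProvider implementation cp:
+//
+//	challenge, _ := f.ToDeviceChallenge(ctx, cp)
+//	decoded, err := Decode[Flow](ctx, cp.FlowCipher(), challenge, AsDeviceChallenge)
+//
+// Decoding the same string with a different CodecOption (for example
+// AsDeviceVerifier) fails, which binds each encoded flow to its purpose.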
+func (f *Flow) ToDeviceChallenge(ctx context.Context, cipherProvider CipherProvider) (string, error) { + return Encode(ctx, cipherProvider.FlowCipher(), f, AsDeviceChallenge) +} + +// ToDeviceVerifier converts the flow into a device verifier. +func (f *Flow) ToDeviceVerifier(ctx context.Context, cipherProvider CipherProvider) (string, error) { + return Encode(ctx, cipherProvider.FlowCipher(), f, AsDeviceVerifier) +} + +// ToLoginChallenge converts the flow into a login challenge. +func (f Flow) ToLoginChallenge(ctx context.Context, cipherProvider CipherProvider) (challenge string, err error) { + if f.Client != nil { + f.ClientID = f.Client.GetID() + } + return Encode(ctx, cipherProvider.FlowCipher(), f, AsLoginChallenge) +} + +// ToLoginVerifier converts the flow into a login verifier. +func (f Flow) ToLoginVerifier(ctx context.Context, cipherProvider CipherProvider) (verifier string, err error) { + if f.Client != nil { + f.ClientID = f.Client.GetID() + } + return Encode(ctx, cipherProvider.FlowCipher(), f, AsLoginVerifier) +} + +// ToConsentChallenge converts the flow into a consent challenge. +func (f Flow) ToConsentChallenge(ctx context.Context, cipherProvider CipherProvider) (challenge string, err error) { + if f.Client != nil { + f.ClientID = f.Client.GetID() + } + return Encode(ctx, cipherProvider.FlowCipher(), f, AsConsentChallenge) +} + +// ToConsentVerifier converts the flow into a consent verifier. +func (f Flow) ToConsentVerifier(ctx context.Context, cipherProvider CipherProvider) (verifier string, err error) { + if f.Client != nil { + f.ClientID = f.Client.GetID() + } + return Encode(ctx, cipherProvider.FlowCipher(), f, AsConsentVerifier) +} + +func (f Flow) ToListConsentSessionResponse() *OAuth2ConsentSession { + s := &OAuth2ConsentSession{ + ConsentRequestID: f.ConsentRequestID.String(), + GrantedScope: f.GrantedScope, + GrantedAudience: f.GrantedAudience, + RememberFor: pointerx.Deref(f.ConsentRememberFor), + Session: &AcceptOAuth2ConsentRequestSession{AccessToken: f.SessionAccessToken, IDToken: f.SessionIDToken}, + Remember: f.ConsentRemember, + HandledAt: f.ConsentHandledAt, + Context: f.Context, + ConsentRequest: f.GetConsentRequest( /* No longer available and no longer needed: challenge = */ ""), + } + s.ConsentRequest.Client.Secret = "" // do not leak client secret in response + // set some defaults for the API + if s.GrantedAudience == nil { + s.GrantedAudience = []string{} + } + return s +} diff --git a/flow/flow_encoding.go b/flow/flow_encoding.go new file mode 100644 index 00000000000..de64e0a9d05 --- /dev/null +++ b/flow/flow_encoding.go @@ -0,0 +1,121 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow + +import ( + "context" + "time" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/otelx" +) + +type decodeDependencies interface { + CipherProvider + x.NetworkProvider + config.Provider + x.TracingProvider +} + +func decodeFlow(ctx context.Context, d decodeDependencies, enc string, p purpose) (_ *Flow, err error) { + f, err := Decode[Flow](ctx, d.FlowCipher(), enc, withPurpose(p)) + if err != nil { + return nil, errors.WithStack(x.ErrNotFound.WithWrap(err)) + } + + if f.NID != d.Networker().NetworkID(ctx) { + return nil, errors.WithStack(x.ErrNotFound.WithDescription("Network IDs are not matching.")) + } + + if 
f.RequestedAt.Add(d.Config().ConsentRequestMaxAge(ctx)).Before(time.Now()) { + return nil, errors.WithStack(fosite.ErrRequestUnauthorized.WithHintf("The %s request has expired, please try again.", p.RequestType())) + } + + return f, nil +} + +func DecodeFromLoginChallenge(ctx context.Context, d decodeDependencies, challenge string) (_ *Flow, err error) { + ctx, span := d.Tracer(ctx).Tracer().Start(ctx, "flow.DecodeFromLoginChallenge") + defer otelx.End(span, &err) + + return decodeFlow(ctx, d, challenge, loginChallenge) +} + +func DecodeFromConsentChallenge(ctx context.Context, d decodeDependencies, challenge string) (_ *Flow, err error) { + ctx, span := d.Tracer(ctx).Tracer().Start(ctx, "flow.DecodeFromConsentChallenge") + defer otelx.End(span, &err) + + return decodeFlow(ctx, d, challenge, consentChallenge) +} + +func DecodeFromDeviceChallenge(ctx context.Context, d decodeDependencies, challenge string) (_ *Flow, err error) { + ctx, span := d.Tracer(ctx).Tracer().Start(ctx, "flow.DecodeFromDeviceChallenge") + defer otelx.End(span, &err) + + return decodeFlow(ctx, d, challenge, deviceChallenge) +} + +func decodeVerifier(ctx context.Context, d decodeDependencies, verifier string, p purpose) (_ *Flow, err error) { + f, err := decodeFlow(ctx, d, verifier, p) + if err != nil { + if errors.Is(err, x.ErrNotFound) { + return nil, errors.WithStack(fosite.ErrAccessDenied.WithHintf("The %s verifier has already been used, has not been granted, or is invalid.", p.RequestType())) + } + return nil, err + } + + return f, nil +} + +func DecodeAndInvalidateLoginVerifier(ctx context.Context, d decodeDependencies, verifier string) (_ *Flow, err error) { + ctx, span := d.Tracer(ctx).Tracer().Start(ctx, "flow.DecodeAndInvalidateLoginVerifier") + defer otelx.End(span, &err) + + f, err := decodeVerifier(ctx, d, verifier, loginVerifier) + if err != nil { + return nil, err + } + + if err := f.InvalidateLoginRequest(); err != nil { + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithDebug(err.Error())) + } + + return f, nil +} + +func DecodeAndInvalidateDeviceVerifier(ctx context.Context, d decodeDependencies, verifier string) (_ *Flow, err error) { + ctx, span := d.Tracer(ctx).Tracer().Start(ctx, "flow.DecodeAndInvalidateDeviceVerifier") + defer otelx.End(span, &err) + + f, err := decodeVerifier(ctx, d, verifier, deviceVerifier) + if err != nil { + return nil, err + } + + if err = f.InvalidateDeviceRequest(); err != nil { + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithDebug(err.Error())) + } + + return f, nil +} + +func DecodeAndInvalidateConsentVerifier(ctx context.Context, d decodeDependencies, verifier string) (_ *Flow, err error) { + ctx, span := d.Tracer(ctx).Tracer().Start(ctx, "flow.DecodeAndInvalidateLoginVerifier") + defer otelx.End(span, &err) + + f, err := decodeVerifier(ctx, d, verifier, consentVerifier) + if err != nil { + return nil, err + } + + if err = f.InvalidateConsentRequest(); err != nil { + return nil, errors.WithStack(fosite.ErrInvalidRequest.WithDebug(err.Error())) + } + + return f, nil +} diff --git a/flow/flow_encoding_test.go b/flow/flow_encoding_test.go new file mode 100644 index 00000000000..74b1873ffb7 --- /dev/null +++ b/flow/flow_encoding_test.go @@ -0,0 +1,661 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow_test + +import ( + "context" + "embed" + "errors" + "fmt" + "io/fs" + "os" + "strings" + "testing" + "time" + + "github.com/gofrs/uuid" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/contextx" + "github.com/ory/x/pointerx" + "github.com/ory/x/servicelocatorx" + "github.com/ory/x/snapshotx" + "github.com/ory/x/sqlxx" +) + +func createTestFlow(nid uuid.UUID, state flow.State) *flow.Flow { + return &flow.Flow{ + ID: "a12bf95e-ccfc-45fc-b10d-1358790772c7", + NID: nid, + RequestedScope: []string{"openid", "profile"}, + RequestedAudience: []string{"https://api.example.org"}, + LoginSkip: true, + Subject: "test-subject", + OpenIDConnectContext: &flow.OAuth2ConsentRequestOpenIDConnectContext{ + ACRValues: []string{"http://acrvalues.example.org"}, + UILocales: []string{"en-US", "en-GB"}, + Display: "page", + IDTokenHintClaims: map[string]interface{}{"email": "user@example.org"}, + LoginHint: "login-hint", + }, + Client: &client.Client{ + ID: "a12bf95e-ccfc-45fc-b10d-1358790772c7", + NID: nid, + }, + ClientID: "a12bf95e-ccfc-45fc-b10d-1358790772c7", + RequestURL: "https://example.org/oauth2/auth?client_id=test", + SessionID: "session-123", + IdentityProviderSessionID: "session-id", + LoginCSRF: "login-csrf", + RequestedAt: time.Now(), + State: state, + LoginRemember: true, + LoginRememberFor: 3000, + LoginExtendSessionLifespan: true, + ACR: "http://acrvalues.example.org", + AMR: []string{"pwd"}, + ForceSubjectIdentifier: "forced-subject", + Context: sqlxx.JSONRawMessage(`{"foo":"bar"}`), + LoginAuthenticatedAt: sqlxx.NullTime(time.Date(2025, 10, 9, 12, 52, 0, 0, time.UTC)), + DeviceChallengeID: "device-challenge", + DeviceCodeRequestID: "device-code-request", + DeviceCSRF: "device-csrf", + DeviceHandledAt: sqlxx.NullTime{}, + ConsentRequestID: "consent-request", + ConsentSkip: true, + ConsentCSRF: "consent-csrf", + GrantedScope: []string{"openid"}, + GrantedAudience: []string{"https://api.example.org"}, + ConsentRemember: true, + ConsentRememberFor: pointerx.Ptr(3000), + ConsentHandledAt: sqlxx.NullTime{}, + SessionIDToken: map[string]interface{}{"sub": "test-subject", "foo": "bar"}, + SessionAccessToken: map[string]interface{}{"scp": []string{"openid", "profile"}, "aud": []string{"https://api.example.org"}}, + } +} + +func TestDecodeFromLoginChallenge(t *testing.T) { + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValue(config.KeyConsentRequestMaxAge, time.Hour), + )) + + nid := reg.Networker().NetworkID(ctx) + testFlow := createTestFlow(nid, flow.FlowStateLoginUnused) + + t.Run("case=successful decode with valid login challenge", func(t *testing.T) { + loginChallenge, err := testFlow.ToLoginChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginChallenge) + + decoded, err := flow.DecodeFromLoginChallenge(ctx, reg, loginChallenge) + require.NoError(t, err) + require.NotNil(t, decoded) + + assert.Equal(t, testFlow.ID, decoded.ID) + assert.Equal(t, testFlow.NID, decoded.NID) + assert.Equal(t, testFlow.RequestedScope, decoded.RequestedScope) + assert.Equal(t, testFlow.Subject, decoded.Subject) + + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + + 
t.Run("decodes deterministically", func(t *testing.T) { + second, err := flow.DecodeFromLoginChallenge(ctx, reg, loginChallenge) + require.NoError(t, err) + assert.Equal(t, decoded, second) + }) + }) + + t.Run("case=fails with wrong purpose (consent challenge instead of login)", func(t *testing.T) { + consentChallenge, err := testFlow.ToConsentChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, consentChallenge) + + decoded, err := flow.DecodeFromLoginChallenge(ctx, reg, consentChallenge) + assert.Error(t, err) + assert.Nil(t, decoded) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with different network ID", func(t *testing.T) { + flowWithDifferentNID := createTestFlow(uuid.Must(uuid.NewV4()), flow.FlowStateLoginUnused) + + loginChallenge, err := flow.Encode(ctx, reg.FlowCipher(), flowWithDifferentNID, flow.AsLoginChallenge) + require.NoError(t, err) + require.NotEmpty(t, loginChallenge) + + _, err = flow.DecodeFromLoginChallenge(ctx, reg, loginChallenge) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with expired request", func(t *testing.T) { + expiredFlow := createTestFlow(nid, flow.FlowStateLoginUnused) + expiredFlow.RequestedAt = time.Now().Add(-2 * time.Hour) + + loginChallenge, err := expiredFlow.ToLoginChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginChallenge) + + _, err = flow.DecodeFromLoginChallenge(ctx, reg, loginChallenge) + assert.ErrorIs(t, err, fosite.ErrRequestUnauthorized) + }) + + t.Run("case=fails with invalid challenge format", func(t *testing.T) { + _, err := flow.DecodeFromLoginChallenge(ctx, reg, "invalid-challenge") + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with empty challenge", func(t *testing.T) { + _, err := flow.DecodeFromLoginChallenge(ctx, reg, "") + assert.ErrorIs(t, err, x.ErrNotFound) + }) +} + +func TestDecodeFromConsentChallenge(t *testing.T) { + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValue(config.KeyConsentRequestMaxAge, time.Hour), + )) + + nid := reg.Networker().NetworkID(ctx) + testFlow := createTestFlow(nid, flow.FlowStateConsentUnused) + + t.Run("case=successful decode with valid consent challenge", func(t *testing.T) { + consentChallenge, err := testFlow.ToConsentChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, consentChallenge) + + decoded, err := flow.DecodeFromConsentChallenge(ctx, reg, consentChallenge) + require.NoError(t, err) + require.NotNil(t, decoded) + + assert.Equal(t, testFlow.ID, decoded.ID) + assert.Equal(t, testFlow.NID, decoded.NID) + assert.Equal(t, testFlow.RequestedScope, decoded.RequestedScope) + assert.Equal(t, testFlow.Subject, decoded.Subject) + + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + + t.Run("decodes deterministically", func(t *testing.T) { + second, err := flow.DecodeFromConsentChallenge(ctx, reg, consentChallenge) + require.NoError(t, err) + assert.Equal(t, decoded, second) + }) + }) + + t.Run("case=fails with wrong purpose (login challenge instead of consent)", func(t *testing.T) { + loginChallenge, err := testFlow.ToLoginChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginChallenge) + + decoded, err := flow.DecodeFromConsentChallenge(ctx, reg, loginChallenge) + assert.Error(t, err) + assert.Nil(t, decoded) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with different network ID", func(t *testing.T) { + flowWithDifferentNID := 
createTestFlow(uuid.Must(uuid.NewV4()), flow.FlowStateConsentUnused) + + consentChallenge, err := flow.Encode(ctx, reg.FlowCipher(), flowWithDifferentNID, flow.AsConsentChallenge) + require.NoError(t, err) + require.NotEmpty(t, consentChallenge) + + _, err = flow.DecodeFromConsentChallenge(ctx, reg, consentChallenge) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with expired request", func(t *testing.T) { + expiredFlow := createTestFlow(nid, flow.FlowStateConsentUnused) + expiredFlow.RequestedAt = time.Now().Add(-2 * time.Hour) + + consentChallenge, err := expiredFlow.ToConsentChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, consentChallenge) + + _, err = flow.DecodeFromConsentChallenge(ctx, reg, consentChallenge) + assert.ErrorIs(t, err, fosite.ErrRequestUnauthorized) + }) + + t.Run("case=fails with invalid challenge format", func(t *testing.T) { + _, err := flow.DecodeFromConsentChallenge(ctx, reg, "invalid-challenge") + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with empty challenge", func(t *testing.T) { + _, err := flow.DecodeFromConsentChallenge(ctx, reg, "") + assert.ErrorIs(t, err, x.ErrNotFound) + }) +} + +func TestDecodeAndInvalidateLoginVerifier(t *testing.T) { + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValue(config.KeyConsentRequestMaxAge, time.Hour), + )) + + nid := reg.Networker().NetworkID(ctx) + + t.Run("case=successful decode and invalidate with valid login verifier", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateLoginUnused) + + loginVerifier, err := testFlow.ToLoginVerifier(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginVerifier) + + decoded, err := flow.DecodeAndInvalidateLoginVerifier(ctx, reg, loginVerifier) + require.NoError(t, err) + + // Verify that InvalidateLoginRequest was called + assert.Equal(t, flow.FlowStateLoginUsed, decoded.State, "State should be FlowStateLoginUsed after invalidation") + + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + }) + + t.Run("case=fails when flow has already been used", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateLoginUsed) + + loginVerifier, err := testFlow.ToLoginVerifier(ctx, reg) + require.NoError(t, err) + + _, err = flow.DecodeAndInvalidateLoginVerifier(ctx, reg, loginVerifier) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) + + t.Run("case=fails with invalid flow state", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateConsentUnused) + + loginVerifier, err := testFlow.ToLoginVerifier(ctx, reg) + require.NoError(t, err) + + _, err = flow.DecodeAndInvalidateLoginVerifier(ctx, reg, loginVerifier) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) + + t.Run("case=fails with wrong purpose (login challenge instead of verifier)", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateLoginUnused) + + loginChallenge, err := testFlow.ToLoginChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginChallenge) + + _, err = flow.DecodeAndInvalidateLoginVerifier(ctx, reg, loginChallenge) + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with different network ID", func(t *testing.T) { + differentNID := uuid.Must(uuid.NewV4()) + flowWithDifferentNID := createTestFlow(differentNID, flow.FlowStateLoginUnused) + + loginVerifier, err := flow.Encode(ctx, reg.FlowCipher(), flowWithDifferentNID, flow.AsLoginVerifier) + require.NoError(t, err) + 
require.NotEmpty(t, loginVerifier) + + _, err = flow.DecodeAndInvalidateLoginVerifier(ctx, reg, loginVerifier) + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with invalid verifier format", func(t *testing.T) { + _, err := flow.DecodeAndInvalidateLoginVerifier(ctx, reg, "invalid-verifier") + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with empty verifier", func(t *testing.T) { + _, err := flow.DecodeAndInvalidateLoginVerifier(ctx, reg, "") + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=works with FlowStateLoginError", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateLoginError) + + loginVerifier, err := testFlow.ToLoginVerifier(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginVerifier) + + decoded, err := flow.DecodeAndInvalidateLoginVerifier(ctx, reg, loginVerifier) + require.NoError(t, err) + require.NotNil(t, decoded) + + assert.Equal(t, flow.FlowStateLoginError, decoded.State) + }) +} + +func TestDecodeFromDeviceChallenge(t *testing.T) { + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValue(config.KeyConsentRequestMaxAge, time.Hour), + )) + + nid := reg.Networker().NetworkID(ctx) + testFlow := createTestFlow(nid, flow.DeviceFlowStateUnused) + + t.Run("case=successful decode with valid device challenge", func(t *testing.T) { + deviceChallenge, err := testFlow.ToDeviceChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, deviceChallenge) + + decoded, err := flow.DecodeFromDeviceChallenge(ctx, reg, deviceChallenge) + require.NoError(t, err) + require.NotNil(t, decoded) + + assert.Equal(t, testFlow.ID, decoded.ID) + assert.Equal(t, testFlow.NID, decoded.NID) + assert.Equal(t, testFlow.RequestedScope, decoded.RequestedScope) + assert.Equal(t, testFlow.Subject, decoded.Subject) + + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + + t.Run("decodes deterministically", func(t *testing.T) { + second, err := flow.DecodeFromDeviceChallenge(ctx, reg, deviceChallenge) + require.NoError(t, err) + assert.Equal(t, decoded, second) + }) + }) + + t.Run("case=fails with wrong purpose (login challenge instead of device)", func(t *testing.T) { + loginChallenge, err := testFlow.ToLoginChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, loginChallenge) + + decoded, err := flow.DecodeFromDeviceChallenge(ctx, reg, loginChallenge) + assert.Error(t, err) + assert.Nil(t, decoded) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with different network ID", func(t *testing.T) { + flowWithDifferentNID := createTestFlow(uuid.Must(uuid.NewV4()), flow.DeviceFlowStateUnused) + + deviceChallenge, err := flow.Encode(ctx, reg.FlowCipher(), flowWithDifferentNID, flow.AsDeviceChallenge) + require.NoError(t, err) + require.NotEmpty(t, deviceChallenge) + + _, err = flow.DecodeFromDeviceChallenge(ctx, reg, deviceChallenge) + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with expired request", func(t *testing.T) { + expiredFlow := createTestFlow(nid, flow.DeviceFlowStateUnused) + expiredFlow.RequestedAt = time.Now().Add(-2 * time.Hour) + + deviceChallenge, err := expiredFlow.ToDeviceChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, deviceChallenge) + + _, err = flow.DecodeFromDeviceChallenge(ctx, reg, deviceChallenge) + assert.ErrorIs(t, err, fosite.ErrRequestUnauthorized) + }) + + t.Run("case=fails with invalid challenge format", func(t *testing.T) { + _, err 
:= flow.DecodeFromDeviceChallenge(ctx, reg, "invalid-challenge") + assert.ErrorIs(t, err, x.ErrNotFound) + }) + + t.Run("case=fails with empty challenge", func(t *testing.T) { + _, err := flow.DecodeFromDeviceChallenge(ctx, reg, "") + assert.ErrorIs(t, err, x.ErrNotFound) + }) +} + +func TestDecodeAndInvalidateDeviceVerifier(t *testing.T) { + ctx := context.Background() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValue(config.KeyConsentRequestMaxAge, time.Hour), + )) + + nid := reg.Networker().NetworkID(ctx) + + t.Run("case=successful decode and invalidate with valid device verifier", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.DeviceFlowStateUnused) + + deviceVerifier, err := testFlow.ToDeviceVerifier(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, deviceVerifier) + + decoded, err := flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, deviceVerifier) + require.NoError(t, err) + require.NotNil(t, decoded) + + assert.Equal(t, flow.DeviceFlowStateUsed, decoded.State, "State should be DeviceFlowStateUsed after invalidation") + + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + }) + + t.Run("case=fails when flow has already been used", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.DeviceFlowStateUsed) + + deviceVerifier, err := testFlow.ToDeviceVerifier(ctx, reg) + require.NoError(t, err) + + _, err = flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, deviceVerifier) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) + + t.Run("case=fails with invalid flow state", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateLoginUnused) + + deviceVerifier, err := testFlow.ToDeviceVerifier(ctx, reg) + require.NoError(t, err) + + _, err = flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, deviceVerifier) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) + + t.Run("case=fails with wrong purpose (device challenge instead of verifier)", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.DeviceFlowStateUnused) + + deviceChallenge, err := testFlow.ToDeviceChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, deviceChallenge) + + _, err = flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, deviceChallenge) + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with different network ID", func(t *testing.T) { + differentNID := uuid.Must(uuid.NewV4()) + flowWithDifferentNID := createTestFlow(differentNID, flow.DeviceFlowStateUnused) + + deviceVerifier, err := flow.Encode(ctx, reg.FlowCipher(), flowWithDifferentNID, flow.AsDeviceVerifier) + require.NoError(t, err) + require.NotEmpty(t, deviceVerifier) + + _, err = flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, deviceVerifier) + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with invalid verifier format", func(t *testing.T) { + _, err := flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, "invalid-verifier") + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with empty verifier", func(t *testing.T) { + _, err := flow.DecodeAndInvalidateDeviceVerifier(ctx, reg, "") + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) +} + +func TestDecodeAndInvalidateConsentVerifier(t *testing.T) { + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValue(config.KeyConsentRequestMaxAge, time.Hour), + )) + + nid := reg.Networker().NetworkID(ctx) + + t.Run("case=successful decode and invalidate with valid consent verifier", 
func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateConsentUnused) + + consentVerifier, err := testFlow.ToConsentVerifier(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, consentVerifier) + + decoded, err := flow.DecodeAndInvalidateConsentVerifier(ctx, reg, consentVerifier) + require.NoError(t, err) + + // Verify that InvalidateConsentRequest was called + assert.Equal(t, flow.FlowStateConsentUsed, decoded.State, "State should be FlowStateConsentUsed after invalidation") + + snapshotx.SnapshotT(t, decoded, snapshotx.ExceptPaths("n", "ia")) + }) + + t.Run("case=fails when flow has already been used", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateConsentUsed) + + consentVerifier, err := testFlow.ToConsentVerifier(ctx, reg) + require.NoError(t, err) + + _, err = flow.DecodeAndInvalidateConsentVerifier(ctx, reg, consentVerifier) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) + + t.Run("case=fails with invalid flow state", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateLoginUnused) + + consentVerifier, err := testFlow.ToConsentVerifier(ctx, reg) + require.NoError(t, err) + + _, err = flow.DecodeAndInvalidateConsentVerifier(ctx, reg, consentVerifier) + assert.ErrorIs(t, err, fosite.ErrInvalidRequest) + }) + + t.Run("case=fails with wrong purpose (consent challenge instead of verifier)", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateConsentUnused) + + consentChallenge, err := testFlow.ToConsentChallenge(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, consentChallenge) + + _, err = flow.DecodeAndInvalidateConsentVerifier(ctx, reg, consentChallenge) + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with different network ID", func(t *testing.T) { + differentNID := uuid.Must(uuid.NewV4()) + flowWithDifferentNID := createTestFlow(differentNID, flow.FlowStateConsentUnused) + + consentVerifier, err := flow.Encode(ctx, reg.FlowCipher(), flowWithDifferentNID, flow.AsConsentVerifier) + require.NoError(t, err) + require.NotEmpty(t, consentVerifier) + + _, err = flow.DecodeAndInvalidateConsentVerifier(ctx, reg, consentVerifier) + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with invalid verifier format", func(t *testing.T) { + _, err := flow.DecodeAndInvalidateConsentVerifier(ctx, reg, "invalid-verifier") + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=fails with empty verifier", func(t *testing.T) { + _, err := flow.DecodeAndInvalidateConsentVerifier(ctx, reg, "") + assert.ErrorIs(t, err, fosite.ErrAccessDenied) + }) + + t.Run("case=works with FlowStateConsentError", func(t *testing.T) { + testFlow := createTestFlow(nid, flow.FlowStateConsentError) + + consentVerifier, err := testFlow.ToConsentVerifier(ctx, reg) + require.NoError(t, err) + require.NotEmpty(t, consentVerifier) + + decoded, err := flow.DecodeAndInvalidateConsentVerifier(ctx, reg, consentVerifier) + require.NoError(t, err) + require.NotNil(t, decoded) + + assert.Equal(t, flow.FlowStateConsentError, decoded.State) + }) +} + +var ( + //go:embed fixtures/legacy_challenges/*.txt + LegacyChallenges embed.FS + legacyChallengesNID = uuid.Must(uuid.FromString("34b4dd42-f02b-4448-b066-8e4e6655c0bb")) +) + +func TestCanUseLegacyChallenges(t *testing.T) { + reg := testhelpers.NewRegistryMemory(t, + driver.WithConfigOptions( + configx.WithValue(config.KeyGetSystemSecret, []string{"well-known-fixture-secret"}), + configx.WithValue(config.KeyConsentRequestMaxAge, 
100*365*24*time.Hour), // 100 years, effectively disabling expiration + ), + driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.Static{NID: legacyChallengesNID})), + ) + + require.NoError(t, fs.WalkDir(LegacyChallenges, "fixtures/legacy_challenges", func(path string, d fs.DirEntry, err error) error { + require.NoError(t, err) + if d.IsDir() { + return nil + } + t.Run(strings.TrimSuffix(d.Name(), ".txt"), func(t *testing.T) { + content, err := fs.ReadFile(LegacyChallenges, path) + require.NoError(t, err) + + var f *flow.Flow + switch { + case strings.Contains(d.Name(), "login"): + f, err = flow.DecodeFromLoginChallenge(t.Context(), reg, string(content)) + case strings.Contains(d.Name(), "consent"): + f, err = flow.DecodeFromConsentChallenge(t.Context(), reg, string(content)) + case strings.Contains(d.Name(), "device"): + f, err = flow.DecodeFromDeviceChallenge(t.Context(), reg, string(content)) + default: + t.Fatalf("unknown challenge type in file name: %s", d.Name()) + } + require.NoErrorf(t, err, "failed to decode challenge from file: %s\n%+v", d.Name(), errors.Unwrap(errors.Unwrap(err))) + + snapshotx.SnapshotT(t, f) + }) + return nil + })) +} + +func TestUpdateLegacyChallenges(t *testing.T) { + t.Skip("this test is used to update the fixtures only, they should not be updated unless we have a breaking change (so probably never)") + + reg := testhelpers.NewRegistryMemory(t, + driver.WithConfigOptions(configx.WithValue(config.KeyGetSystemSecret, []string{"well-known-fixture-secret"})), + driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.Static{NID: legacyChallengesNID})), + ) + + for name, flowState := range map[string]flow.State{ + "login_initialized": flow.FlowStateLoginInitialized, + "login_unused": flow.FlowStateLoginUnused, + "login_used": flow.FlowStateLoginUsed, + "login_error": flow.FlowStateLoginError, + "consent_initialized": flow.FlowStateConsentInitialized, + "consent_unused": flow.FlowStateConsentUnused, + "consent_used": flow.FlowStateConsentUsed, + "consent_error": flow.FlowStateConsentError, + "device_initialized": flow.DeviceFlowStateInitialized, + "device_unused": flow.DeviceFlowStateUnused, + "device_used": flow.DeviceFlowStateUsed, + } { + f := createTestFlow(legacyChallengesNID, flowState) + var challenge string + var err error + switch flowState { + case flow.FlowStateLoginInitialized, flow.FlowStateLoginUnused, flow.FlowStateLoginUsed, flow.FlowStateLoginError: + challenge, err = f.ToLoginChallenge(t.Context(), reg) + case flow.FlowStateConsentInitialized, flow.FlowStateConsentUnused, flow.FlowStateConsentUsed, flow.FlowStateConsentError: + challenge, err = f.ToConsentChallenge(t.Context(), reg) + case flow.DeviceFlowStateInitialized, flow.DeviceFlowStateUnused, flow.DeviceFlowStateUsed: + challenge, err = f.ToDeviceChallenge(t.Context(), reg) + default: + t.Fatalf("unknown flow state: %d", flowState) + } + require.NoError(t, err) + + require.NoError(t, os.WriteFile(fmt.Sprintf("fixtures/legacy_challenges/%s.txt", name), []byte(challenge), 0644)) + } +} diff --git a/flow/flow_test.go b/flow/flow_test.go index 86a0c757afd..4093f9894d1 100644 --- a/flow/flow_test.go +++ b/flow/flow_test.go @@ -7,17 +7,15 @@ import ( "testing" "time" - "github.com/instana/testify/require" + "github.com/go-faker/faker/v4" "github.com/mohae/deepcopy" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" - 
"github.com/bxcodec/faker/v3" - - "github.com/ory/hydra/consent" "github.com/ory/x/sqlxx" ) -func (f *Flow) setLoginRequest(r *consent.LoginRequest) { +func (f *Flow) setLoginRequest(r *LoginRequest) { f.ID = r.ID f.RequestedScope = r.RequestedScope f.RequestedAudience = r.RequestedAudience @@ -25,170 +23,129 @@ func (f *Flow) setLoginRequest(r *consent.LoginRequest) { f.Subject = r.Subject f.OpenIDConnectContext = r.OpenIDConnectContext f.Client = r.Client - f.ClientID = r.ClientID f.RequestURL = r.RequestURL f.SessionID = r.SessionID - f.LoginWasUsed = r.WasHandled - f.ForceSubjectIdentifier = r.ForceSubjectIdentifier - f.LoginVerifier = r.Verifier - f.LoginCSRF = r.CSRF - f.LoginAuthenticatedAt = r.AuthenticatedAt - f.RequestedAt = r.RequestedAt -} - -func (f *Flow) setHandledLoginRequest(r *consent.HandledLoginRequest) { - f.ID = r.ID - f.LoginRemember = r.Remember - f.LoginRememberFor = r.RememberFor - f.ACR = r.ACR - f.AMR = r.AMR - f.Subject = r.Subject - f.ForceSubjectIdentifier = r.ForceSubjectIdentifier - f.Context = r.Context - f.LoginWasUsed = r.WasHandled - f.LoginError = r.Error - f.RequestedAt = r.RequestedAt - f.LoginAuthenticatedAt = r.AuthenticatedAt } -func (f *Flow) setConsentRequest(r consent.OAuth2ConsentRequest) { - f.ConsentChallengeID = sqlxx.NullString(r.ID) +func (f *Flow) setConsentRequest(r OAuth2ConsentRequest) { + f.ConsentRequestID = sqlxx.NullString(r.ConsentRequestID) f.RequestedScope = r.RequestedScope f.RequestedAudience = r.RequestedAudience f.ConsentSkip = r.Skip f.Subject = r.Subject f.OpenIDConnectContext = r.OpenIDConnectContext f.Client = r.Client - f.ClientID = r.ClientID f.RequestURL = r.RequestURL f.ID = r.LoginChallenge.String() f.SessionID = r.LoginSessionID f.ACR = r.ACR f.AMR = r.AMR f.Context = r.Context - f.ConsentWasHandled = r.WasHandled - f.ForceSubjectIdentifier = r.ForceSubjectIdentifier - f.ConsentVerifier = sqlxx.NullString(r.Verifier) - f.ConsentCSRF = sqlxx.NullString(r.CSRF) - f.LoginAuthenticatedAt = r.AuthenticatedAt - f.RequestedAt = r.RequestedAt } -func (f *Flow) setHandledConsentRequest(r consent.AcceptOAuth2ConsentRequest) { - f.ConsentChallengeID = sqlxx.NullString(r.ID) - f.GrantedScope = r.GrantedScope - f.GrantedAudience = r.GrantedAudience - f.ConsentRemember = r.Remember - f.ConsentRememberFor = &r.RememberFor - f.ConsentHandledAt = r.HandledAt - f.ConsentWasHandled = r.WasHandled - f.ConsentError = r.Error - f.RequestedAt = r.RequestedAt - f.LoginAuthenticatedAt = r.AuthenticatedAt - f.SessionIDToken = r.SessionIDToken - f.SessionAccessToken = r.SessionAccessToken +func TestFlow_HandleDeviceUserAuthRequest(t *testing.T) { + for _, state := range []State{DeviceFlowStateUnused, DeviceFlowStateInitialized} { + t.Run("HandleDeviceUserAuthRequest should ignore RequestedAt in its argument and copy the other fields", func(t *testing.T) { + f := Flow{} + assert.NoError(t, faker.FakeData(&f)) + f.State = state + + r := HandledDeviceUserAuthRequest{} + assert.NoError(t, faker.FakeData(&r)) + f.RequestURL = r.RequestURL + + assert.NoError(t, f.HandleDeviceUserAuthRequest(&r)) + + assert.WithinDuration(t, time.Time(f.DeviceHandledAt), time.Now(), time.Second) + assert.Equal(t, r.Client, f.Client) + assert.EqualValues(t, r.DeviceCodeRequestID, f.DeviceCodeRequestID) + }) + } + + t.Run("should fail with invalid state", func(t *testing.T) { + f := Flow{State: FlowStateLoginUnused} + r := HandledDeviceUserAuthRequest{} + assert.ErrorContains(t, f.HandleDeviceUserAuthRequest(&r), "invalid 
flow state") + }) + + t.Run("should fail when in used state", func(t *testing.T) { + f := Flow{State: DeviceFlowStateUsed} + r := HandledDeviceUserAuthRequest{} + assert.ErrorContains(t, f.HandleDeviceUserAuthRequest(&r), "invalid flow state") + }) } func TestFlow_GetLoginRequest(t *testing.T) { t.Run("GetLoginRequest should set all fields on its return value", func(t *testing.T) { - f := Flow{} - expected := consent.LoginRequest{} + expected := LoginRequest{} assert.NoError(t, faker.FakeData(&expected)) + f := Flow{State: FlowStateLoginUsed} f.setLoginRequest(&expected) + actual := f.GetLoginRequest() assert.Equal(t, expected, *actual) }) } -func TestFlow_GetHandledLoginRequest(t *testing.T) { - t.Run("GetHandledLoginRequest should set all fields on its return value", func(t *testing.T) { - f := Flow{} - expected := consent.HandledLoginRequest{} - assert.NoError(t, faker.FakeData(&expected)) - f.setHandledLoginRequest(&expected) - actual := f.GetHandledLoginRequest() - assert.NotNil(t, actual.LoginRequest) - expected.LoginRequest = nil - actual.LoginRequest = nil - assert.Equal(t, expected, actual) - }) -} - -func TestFlow_NewFlow(t *testing.T) { - t.Run("NewFlow and GetLoginRequest should use all LoginRequest fields", func(t *testing.T) { - expected := &consent.LoginRequest{} - assert.NoError(t, faker.FakeData(expected)) - actual := NewFlow(expected).GetLoginRequest() - assert.Equal(t, expected, actual) - }) -} - -func TestFlow_HandleLoginRequest(t *testing.T) { +func TestFlow_UpdateFlowWithHandledLoginRequest(t *testing.T) { t.Run( "HandleLoginRequest should ignore RequestedAt in its argument and copy the other fields", func(t *testing.T) { f := Flow{} assert.NoError(t, faker.FakeData(&f)) - f.State = FlowStateLoginInitialized + f.State = FlowStateLoginUnused - r := consent.HandledLoginRequest{} + r := HandledLoginRequest{} assert.NoError(t, faker.FakeData(&r)) - r.ID = f.ID r.Subject = f.Subject r.ForceSubjectIdentifier = f.ForceSubjectIdentifier - f.LoginWasUsed = false assert.NoError(t, f.HandleLoginRequest(&r)) - actual := f.GetHandledLoginRequest() - assert.NotEqual(t, r.RequestedAt, actual.RequestedAt) - r.LoginRequest = f.GetLoginRequest() - actual.RequestedAt = r.RequestedAt - assert.Equal(t, r, actual) + assert.Equal(t, r.Subject, f.Subject) + assert.Equal(t, r.ForceSubjectIdentifier, f.ForceSubjectIdentifier) + assert.Equal(t, r.Remember, f.LoginRemember) + assert.Equal(t, r.RememberFor, f.LoginRememberFor) + assert.Equal(t, r.ExtendSessionLifespan, f.LoginExtendSessionLifespan) + assert.Equal(t, r.ACR, f.ACR) + assert.Equal(t, r.AMR, f.AMR) + assert.Equal(t, r.IdentityProviderSessionID, f.IdentityProviderSessionID.String()) + assert.Equal(t, r.Context, f.Context) }, ) } func TestFlow_InvalidateLoginRequest(t *testing.T) { - t.Run("InvalidateLoginRequest should transition the flow into FlowStateLoginUsed", func(t *testing.T) { - f := NewFlow(&consent.LoginRequest{ - ID: "t3-id", - Subject: "t3-sub", - WasHandled: false, + for _, state := range []State{FlowStateLoginUnused, FlowStateLoginInitialized} { + t.Run("InvalidateLoginRequest should transition the flow into FlowStateLoginUsed", func(t *testing.T) { + f := Flow{ + ID: "t3-id", + Subject: "t3-sub", + State: state, + } + assert.NoError(t, f.HandleLoginRequest(&HandledLoginRequest{ + Subject: "t3-sub", + })) + assert.NoError(t, f.InvalidateLoginRequest()) + assert.Equal(t, FlowStateLoginUsed, f.State) }) - assert.NoError(t, f.HandleLoginRequest(&consent.HandledLoginRequest{ - ID: "t3-id", - Subject: "t3-sub", - WasHandled: 
false, - })) - assert.NoError(t, f.InvalidateLoginRequest()) - assert.Equal(t, FlowStateLoginUsed, f.State) - assert.Equal(t, true, f.LoginWasUsed) - }) - t.Run("InvalidateLoginRequest should fail when flow.LoginWasUsed is true", func(t *testing.T) { - f := NewFlow(&consent.LoginRequest{ - ID: "t3-id", - Subject: "t3-sub", - WasHandled: false, - }) - assert.NoError(t, f.HandleLoginRequest(&consent.HandledLoginRequest{ - ID: "t3-id", - Subject: "t3-sub", - WasHandled: true, - })) - err := f.InvalidateLoginRequest() - assert.Error(t, err) - assert.Contains(t, err.Error(), "verifier has already been used") + } + t.Run("InvalidateLoginRequest should fail when flow is in used state", func(t *testing.T) { + f := Flow{ + Subject: "t3-sub", + State: FlowStateLoginUsed, + } + assert.ErrorContains(t, f.InvalidateLoginRequest(), "invalid flow state") }) } func TestFlow_GetConsentRequest(t *testing.T) { t.Run("GetConsentRequest should set all fields on its return value", func(t *testing.T) { f := Flow{} - expected := consent.OAuth2ConsentRequest{} + expected := OAuth2ConsentRequest{} assert.NoError(t, faker.FakeData(&expected)) f.setConsentRequest(expected) - actual := f.GetConsentRequest() + actual := f.GetConsentRequest(expected.Challenge) assert.Equal(t, expected, *actual) }) } @@ -197,74 +154,72 @@ func TestFlow_HandleConsentRequest(t *testing.T) { f := Flow{} require.NoError(t, faker.FakeData(&f)) - expected := consent.AcceptOAuth2ConsentRequest{} + expected := AcceptOAuth2ConsentRequest{} require.NoError(t, faker.FakeData(&expected)) - expected.ID = string(f.ConsentChallengeID) - expected.HandledAt = sqlxx.NullTime(time.Now()) - expected.RequestedAt = f.RequestedAt - expected.Session = &consent.AcceptOAuth2ConsentRequestSession{ + expected.Session = &AcceptOAuth2ConsentRequestSession{ IDToken: sqlxx.MapStringInterface{"claim1": "value1", "claim2": "value2"}, AccessToken: sqlxx.MapStringInterface{"claim3": "value3", "claim4": "value4"}, } - expected.SessionIDToken = expected.Session.IDToken - expected.SessionAccessToken = expected.Session.AccessToken - f.State = FlowStateConsentInitialized - f.ConsentWasHandled = false + f.State = FlowStateConsentUnused + f.ConsentHandledAt = sqlxx.NullTime(time.Now()) fGood := deepcopy.Copy(f).(Flow) - eGood := deepcopy.Copy(expected).(consent.AcceptOAuth2ConsentRequest) require.NoError(t, f.HandleConsentRequest(&expected)) t.Run("HandleConsentRequest should fail when already handled", func(t *testing.T) { fBad := deepcopy.Copy(fGood).(Flow) - fBad.ConsentWasHandled = true - require.Error(t, fBad.HandleConsentRequest(&expected)) + fBad.State = FlowStateConsentUsed + assert.ErrorContains(t, fBad.HandleConsentRequest(&expected), "invalid flow state") }) t.Run("HandleConsentRequest should fail when State is FlowStateLoginUsed", func(t *testing.T) { fBad := deepcopy.Copy(fGood).(Flow) fBad.State = FlowStateLoginUsed - require.Error(t, fBad.HandleConsentRequest(&expected)) + require.ErrorContains(t, fBad.HandleConsentRequest(&expected), "invalid flow state") }) - t.Run("HandleConsentRequest should fail when HandledAt in its argument is zero", func(t *testing.T) { + t.Run("HandleConsentRequest should pass with legacy FlowStateConsentInitialized", func(t *testing.T) { f := deepcopy.Copy(fGood).(Flow) - eBad := deepcopy.Copy(eGood).(consent.AcceptOAuth2ConsentRequest) - eBad.HandledAt = sqlxx.NullTime(time.Time{}) - require.Error(t, f.HandleConsentRequest(&eBad)) + f.State = FlowStateConsentInitialized + require.NoError(t, f.HandleConsentRequest(&expected)) + + 
assert.Equal(t, expected.GrantedScope, f.GrantedScope) + assert.Equal(t, expected.GrantedAudience, f.GrantedAudience) + assert.WithinDuration(t, time.Now(), time.Time(f.ConsentHandledAt), 5*time.Second) + assert.Nil(t, f.ConsentError) + assert.EqualValues(t, expected.Session.IDToken, f.SessionIDToken) + assert.EqualValues(t, expected.Session.AccessToken, f.SessionAccessToken) }) require.NoError(t, fGood.HandleConsentRequest(&expected)) - actual := f.GetHandledConsentRequest() - require.NotNil(t, actual.ConsentRequest) - expected.ConsentRequest = nil - actual.ConsentRequest = nil - require.Equal(t, &expected, actual) + assert.Equal(t, expected.GrantedScope, fGood.GrantedScope) + assert.Equal(t, expected.GrantedAudience, fGood.GrantedAudience) + assert.WithinDuration(t, time.Now(), time.Time(fGood.ConsentHandledAt), 5*time.Second) + assert.Nil(t, fGood.ConsentError) + assert.EqualValues(t, expected.Session.IDToken, fGood.SessionIDToken) + assert.EqualValues(t, expected.Session.AccessToken, fGood.SessionAccessToken) } -func TestFlow_GetHandledConsentRequest(t *testing.T) { - t.Run("GetHandledConsentRequest should set all fields on its return value", func(t *testing.T) { +func TestFlow_HandleConsentError(t *testing.T) { + for _, state := range []State{FlowStateConsentInitialized, FlowStateConsentUnused, FlowStateConsentError} { f := Flow{} - expected := consent.AcceptOAuth2ConsentRequest{} - - assert.NoError(t, faker.FakeData(&expected)) - expected.ConsentRequest = nil - expected.Session = &consent.AcceptOAuth2ConsentRequestSession{ - IDToken: sqlxx.MapStringInterface{"claim1": "value1", "claim2": "value2"}, - AccessToken: sqlxx.MapStringInterface{"claim3": "value3", "claim4": "value4"}, - } - expected.SessionIDToken = expected.Session.IDToken - expected.SessionAccessToken = expected.Session.AccessToken + require.NoError(t, faker.FakeData(&f)) + f.State = state - f.setHandledConsentRequest(expected) - actual := f.GetHandledConsentRequest() + expected := RequestDeniedError{} + require.NoError(t, faker.FakeData(&expected)) - assert.NotNil(t, actual.ConsentRequest) - actual.ConsentRequest = nil + require.NoError(t, f.HandleConsentError(&expected)) + assert.Equal(t, FlowStateConsentError, f.State) + assert.WithinDuration(t, time.Now(), time.Time(f.ConsentHandledAt), 5*time.Second) + assert.Equal(t, &expected, f.ConsentError) - assert.Equal(t, expected, *actual) - }) + assert.Zero(t, f.ConsentRemember) + assert.Zero(t, f.ConsentRememberFor) + assert.Zero(t, f.GrantedScope) + assert.Zero(t, f.GrantedAudience) + } } diff --git a/flow/state_transition.go b/flow/state_transition.go new file mode 100644 index 00000000000..2facf9f2fd8 --- /dev/null +++ b/flow/state_transition.go @@ -0,0 +1,40 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow + +import "github.com/ory/x/sqlxx" + +type StateTransitionOption func(*Flow) + +func WithConsentRequestID(id string) StateTransitionOption { + return func(f *Flow) { + f.ConsentRequestID = sqlxx.NullString(id) + } +} + +func WithConsentSkip(skip bool) StateTransitionOption { + return func(f *Flow) { + f.ConsentSkip = skip + } +} + +func WithConsentCSRF(csrf string) StateTransitionOption { + return func(f *Flow) { + f.ConsentCSRF = sqlxx.NullString(csrf) + } +} + +func WithID(id string) StateTransitionOption { + return func(f *Flow) { + f.ID = id + } +} + +func (f *Flow) ToStateConsentUnused(opts ...StateTransitionOption) { + f.State = FlowStateConsentUnused + + for _, opt := range opts { + opt(f) + } +} diff --git 
a/flow/state_transition_test.go b/flow/state_transition_test.go new file mode 100644 index 00000000000..7a6cbb42107 --- /dev/null +++ b/flow/state_transition_test.go @@ -0,0 +1,126 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flow + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/client" + "github.com/ory/x/sqlxx" +) + +func TestStateTransition(t *testing.T) { + t.Run("case=ToStateConsentUnused", func(t *testing.T) { + testCases := []struct { + name string + flowID string + opts []StateTransitionOption + expected *Flow + }{ + { + name: "with all options", + flowID: "test-flow-1", + opts: []StateTransitionOption{ + WithConsentRequestID("consent-req-123"), + WithConsentSkip(true), + WithConsentCSRF("csrf-789"), + WithID("new-flow-id"), + }, + expected: &Flow{ + ID: "new-flow-id", + State: FlowStateConsentUnused, + ConsentRequestID: sqlxx.NullString("consent-req-123"), + ConsentSkip: true, + ConsentCSRF: sqlxx.NullString("csrf-789"), + }, + }, + { + name: "with partial options", + flowID: "test-flow-2", + opts: []StateTransitionOption{ + WithConsentRequestID("consent-req-456"), + }, + expected: &Flow{ + ID: "test-flow-2", + State: FlowStateConsentUnused, + ConsentRequestID: sqlxx.NullString("consent-req-456"), + ConsentSkip: false, + ConsentCSRF: sqlxx.NullString(""), + }, + }, + { + name: "with no options", + flowID: "test-flow-3", + opts: []StateTransitionOption{}, + expected: &Flow{ + ID: "test-flow-3", + State: FlowStateConsentUnused, + ConsentRequestID: sqlxx.NullString(""), + ConsentSkip: false, + ConsentCSRF: sqlxx.NullString(""), + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + f := &Flow{ + ID: tc.flowID, + } + + f.ToStateConsentUnused(tc.opts...) 
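+ // Only State and the fields set via the provided options are expected to change here;
+ // fields not covered by an option should keep their zero values (see tc.expected below).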
+ + assert.Equal(t, tc.expected.ID, f.ID) + assert.Equal(t, tc.expected.State, f.State) + assert.Equal(t, tc.expected.ConsentRequestID, f.ConsentRequestID) + assert.Equal(t, tc.expected.ConsentSkip, f.ConsentSkip) + assert.Equal(t, tc.expected.ConsentCSRF, f.ConsentCSRF) + }) + } + }) + + t.Run("case=functional_options_work_independently", func(t *testing.T) { + f := &Flow{ID: "test-flow"} + + // Test WithConsentRequestID + WithConsentRequestID("test-consent-id")(f) + assert.Equal(t, sqlxx.NullString("test-consent-id"), f.ConsentRequestID) + + // Test WithConsentSkip + WithConsentSkip(true)(f) + assert.True(t, f.ConsentSkip) + + // Test WithConsentCSRF + WithConsentCSRF("test-csrf")(f) + assert.Equal(t, sqlxx.NullString("test-csrf"), f.ConsentCSRF) + + // Test WithID + WithID("new-id")(f) + assert.Equal(t, "new-id", f.ID) + }) + + t.Run("case=state_transition_preserves_existing_fields", func(t *testing.T) { + f := &Flow{ + ID: "original-id", + Subject: "test-subject", + Client: &client.Client{ID: "test-client"}, + } + + f.ToStateConsentUnused( + WithConsentRequestID("new-consent-id"), + ) + + // State should be updated + assert.Equal(t, FlowStateConsentUnused, f.State) + assert.Equal(t, sqlxx.NullString("new-consent-id"), f.ConsentRequestID) + + // Other fields should be preserved + assert.Equal(t, "original-id", f.ID) + assert.Equal(t, "test-subject", f.Subject) + assert.NotNil(t, f.Client) + assert.Equal(t, "test-client", f.Client.ID) + }) +} diff --git a/fosite/.github/CODEOWNERS b/fosite/.github/CODEOWNERS new file mode 100644 index 00000000000..ef90d000d7f --- /dev/null +++ b/fosite/.github/CODEOWNERS @@ -0,0 +1 @@ +* @aeneasr @ory/product-development diff --git a/fosite/.github/FUNDING.yml b/fosite/.github/FUNDING.yml new file mode 100644 index 00000000000..c44036054b6 --- /dev/null +++ b/fosite/.github/FUNDING.yml @@ -0,0 +1,8 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/FUNDING.yml + +# These are supported funding model platforms + +# github: +patreon: _ory +open_collective: ory diff --git a/fosite/.github/ISSUE_TEMPLATE/BUG-REPORT.yml b/fosite/.github/ISSUE_TEMPLATE/BUG-REPORT.yml new file mode 100644 index 00000000000..a8f5c03b84e --- /dev/null +++ b/fosite/.github/ISSUE_TEMPLATE/BUG-REPORT.yml @@ -0,0 +1,122 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/ISSUE_TEMPLATE/BUG-REPORT.yml + +description: "Create a bug report" +labels: + - bug +name: "Bug Report" +body: + - attributes: + value: "Thank you for taking the time to fill out this bug report!\n" + type: markdown + - attributes: + label: "Preflight checklist" + options: + - label: + "I could not find a solution in the existing issues, docs, nor + discussions." + required: true + - label: + "I agree to follow this project's [Code of + Conduct](https://github.com/ory/fosite/blob/master/CODE_OF_CONDUCT.md)." + required: true + - label: + "I have read and am following this repository's [Contribution + Guidelines](https://github.com/ory/fosite/blob/master/CONTRIBUTING.md)." + required: true + - label: + "I have joined the [Ory Community Slack](https://slack.ory.sh)." + - label: + "I am signed up to the [Ory Security Patch + Newsletter](https://www.ory.sh/l/sign-up-newsletter)." 
+ id: checklist + type: checkboxes + - attributes: + description: + "Enter the slug or API URL of the affected Ory Network project. Leave + empty when you are self-hosting." + label: "Ory Network Project" + placeholder: "https://.projects.oryapis.com" + id: ory-network-project + type: input + - attributes: + description: "A clear and concise description of what the bug is." + label: "Describe the bug" + placeholder: "Tell us what you see!" + id: describe-bug + type: textarea + validations: + required: true + - attributes: + description: | + Clear, formatted, and easy to follow steps to reproduce the behavior: + placeholder: | + Steps to reproduce the behavior: + + 1. Run `docker run ....` + 2. Make API Request to with `curl ...` + 3. Request fails with response: `{"some": "error"}` + label: "Reproducing the bug" + id: reproduce-bug + type: textarea + validations: + required: true + - attributes: + description: + "Please copy and paste any relevant log output. This will be + automatically formatted into code, so no need for backticks. Please + redact any sensitive information" + label: "Relevant log output" + render: shell + placeholder: | + log=error .... + id: logs + type: textarea + - attributes: + description: + "Please copy and paste any relevant configuration. This will be + automatically formatted into code, so no need for backticks. Please + redact any sensitive information!" + label: "Relevant configuration" + render: yml + placeholder: | + server: + admin: + port: 1234 + id: config + type: textarea + - attributes: + description: "What version of our software are you running?" + label: Version + id: version + type: input + validations: + required: true + - attributes: + label: "On which operating system are you observing this issue?" + options: + - Ory Network + - macOS + - Linux + - Windows + - FreeBSD + - Other + id: operating-system + type: dropdown + - attributes: + label: "In which environment are you deploying?" + options: + - Ory Network + - Docker + - "Docker Compose" + - "Kubernetes with Helm" + - Kubernetes + - Binary + - Other + id: deployment + type: dropdown + - attributes: + description: "Add any other context about the problem here." + label: Additional Context + id: additional + type: textarea diff --git a/fosite/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml b/fosite/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml new file mode 100644 index 00000000000..cc941ffd560 --- /dev/null +++ b/fosite/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml @@ -0,0 +1,125 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/ISSUE_TEMPLATE/DESIGN-DOC.yml + +description: + "A design document is needed for non-trivial changes to the code base." +labels: + - rfc +name: "Design Document" +body: + - attributes: + value: | + Thank you for writing this design document. + + One of the key elements of Ory's software engineering culture is the use of defining software designs through design docs. These are relatively informal documents that the primary author or authors of a software system or application create before they embark on the coding project. The design doc documents the high level implementation strategy and key design decisions with emphasis on the trade-offs that were considered during those decisions. + + Ory is leaning heavily on [Google's design docs process](https://www.industrialempathy.com/posts/design-docs-at-google/) + and [Golang Proposals](https://github.com/golang/proposal). 
+ + Writing a design doc before contributing your change ensures that your ideas are checked with + the community and maintainers. It will save you a lot of time developing things that might need to be changed + after code reviews, and your pull requests will be merged faster. + type: markdown + - attributes: + label: "Preflight checklist" + options: + - label: + "I could not find a solution in the existing issues, docs, nor + discussions." + required: true + - label: + "I agree to follow this project's [Code of + Conduct](https://github.com/ory/fosite/blob/master/CODE_OF_CONDUCT.md)." + required: true + - label: + "I have read and am following this repository's [Contribution + Guidelines](https://github.com/ory/fosite/blob/master/CONTRIBUTING.md)." + required: true + - label: + "I have joined the [Ory Community Slack](https://slack.ory.sh)." + - label: + "I am signed up to the [Ory Security Patch + Newsletter](https://www.ory.sh/l/sign-up-newsletter)." + id: checklist + type: checkboxes + - attributes: + description: + "Enter the slug or API URL of the affected Ory Network project. Leave + empty when you are self-hosting." + label: "Ory Network Project" + placeholder: "https://.projects.oryapis.com" + id: ory-network-project + type: input + - attributes: + description: | + This section gives the reader a very rough overview of the landscape in which the new system is being built and what is actually being built. This isn’t a requirements doc. Keep it succinct! The goal is that readers are brought up to speed but some previous knowledge can be assumed and detailed info can be linked to. This section should be entirely focused on objective background facts. + label: "Context and scope" + id: scope + type: textarea + validations: + required: true + + - attributes: + description: | + A short list of bullet points of what the goals of the system are, and, sometimes more importantly, what non-goals are. Note, that non-goals aren’t negated goals like “The system shouldn’t crash”, but rather things that could reasonably be goals, but are explicitly chosen not to be goals. A good example would be “ACID compliance”; when designing a database, you’d certainly want to know whether that is a goal or non-goal. And if it is a non-goal you might still select a solution that provides it, if it doesn’t introduce trade-offs that prevent achieving the goals. + label: "Goals and non-goals" + id: goals + type: textarea + validations: + required: true + + - attributes: + description: | + This section should start with an overview and then go into details. + The design doc is the place to write down the trade-offs you made in designing your software. Focus on those trade-offs to produce a useful document with long-term value. That is, given the context (facts), goals and non-goals (requirements), the design doc is the place to suggest solutions and show why a particular solution best satisfies those goals. + + The point of writing a document over a more formal medium is to provide the flexibility to express the problem at hand in an appropriate manner. Because of this, there is no explicit guidance on how to actually describe the design. + label: "The design" + id: design + type: textarea + validations: + required: true + + - attributes: + description: | + If the system under design exposes an API, then sketching out that API is usually a good idea. 
In most cases, however, one should withstand the temptation to copy-paste formal interface or data definitions into the doc as these are often verbose, contain unnecessary detail and quickly get out of date. Instead, focus on the parts that are relevant to the design and its trade-offs. + label: "APIs" + id: apis + type: textarea + + - attributes: + description: | + Systems that store data should likely discuss how and in what rough form this happens. Similar to the advice on APIs, and for the same reasons, copy-pasting complete schema definitions should be avoided. Instead, focus on the parts that are relevant to the design and its trade-offs. + label: "Data storage" + id: persistence + type: textarea + + - attributes: + description: | + Design docs should rarely contain code, or pseudo-code except in situations where novel algorithms are described. As appropriate, link to prototypes that show the feasibility of the design. + label: "Code and pseudo-code" + id: pseudocode + type: textarea + + - attributes: + description: | + One of the primary factors that would influence the shape of a software design and hence the design doc, is the degree of constraint of the solution space. + + On one end of the extreme is the “greenfield software project”, where all we know are the goals, and the solution can be whatever makes the most sense. Such a document may be wide-ranging, but it also needs to quickly define a set of rules that allow zooming in on a manageable set of solutions. + + On the other end are systems where the possible solutions are very well defined, but it isn't at all obvious how they could even be combined to achieve the goals. This may be a legacy system that is difficult to change and wasn't designed to do what you want it to do or a library design that needs to operate within the constraints of the host programming language. + + In this situation, you may be able to enumerate all the things you can do relatively easily, but you need to creatively put those things together to achieve the goals. There may be multiple solutions, and none of them are great, and hence such a document should focus on selecting the best way given all identified trade-offs. + label: "Degree of constraint" + id: constrait + type: textarea + + - attributes: + description: | + This section lists alternative designs that would have reasonably achieved similar outcomes. The focus should be on the trade-offs that each respective design makes and how those trade-offs led to the decision to select the design that is the primary topic of the document. + + While it is fine to be succinct about a solution that ended up not being selected, this section is one of the most important ones as it shows very explicitly why the selected solution is the best given the project goals and how other solutions, that the reader may be wondering about, introduce trade-offs that are less desirable given the goals. + + label: Alternatives considered + id: alternatives + type: textarea diff --git a/fosite/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml b/fosite/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml new file mode 100644 index 00000000000..e3445f1d701 --- /dev/null +++ b/fosite/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml @@ -0,0 +1,86 @@ +# AUTO-GENERATED, DO NOT EDIT! 
+# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml + +description: + "Suggest an idea for this project without a plan for implementation" +labels: + - feat +name: "Feature Request" +body: + - attributes: + value: | + Thank you for suggesting an idea for this project! + + If you already have a plan to implement a feature or a change, please create a [design document](https://github.com/aeneasr/gh-template-test/issues/new?assignees=&labels=rfc&template=DESIGN-DOC.yml) instead if the change is non-trivial! + type: markdown + - attributes: + label: "Preflight checklist" + options: + - label: + "I could not find a solution in the existing issues, docs, nor + discussions." + required: true + - label: + "I agree to follow this project's [Code of + Conduct](https://github.com/ory/fosite/blob/master/CODE_OF_CONDUCT.md)." + required: true + - label: + "I have read and am following this repository's [Contribution + Guidelines](https://github.com/ory/fosite/blob/master/CONTRIBUTING.md)." + required: true + - label: + "I have joined the [Ory Community Slack](https://slack.ory.sh)." + - label: + "I am signed up to the [Ory Security Patch + Newsletter](https://www.ory.sh/l/sign-up-newsletter)." + id: checklist + type: checkboxes + - attributes: + description: + "Enter the slug or API URL of the affected Ory Network project. Leave + empty when you are self-hosting." + label: "Ory Network Project" + placeholder: "https://.projects.oryapis.com" + id: ory-network-project + type: input + - attributes: + description: + "Is your feature request related to a problem? Please describe." + label: "Describe your problem" + placeholder: + "A clear and concise description of what the problem is. Ex. I'm always + frustrated when [...]" + id: problem + type: textarea + validations: + required: true + - attributes: + description: | + Describe the solution you'd like + placeholder: | + A clear and concise description of what you want to happen. + label: "Describe your ideal solution" + id: solution + type: textarea + validations: + required: true + - attributes: + description: "Describe alternatives you've considered" + label: "Workarounds or alternatives" + id: alternatives + type: textarea + validations: + required: true + - attributes: + description: "What version of our software are you running?" + label: Version + id: version + type: input + validations: + required: true + - attributes: + description: + "Add any other context or screenshots about the feature request here." + label: Additional Context + id: additional + type: textarea diff --git a/fosite/.github/ISSUE_TEMPLATE/config.yml b/fosite/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000..ef7611c08fe --- /dev/null +++ b/fosite/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,14 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/ISSUE_TEMPLATE/config.yml + +blank_issues_enabled: false +contact_links: + - name: Ory Fosite Forum + url: https://github.com/orgs/ory/discussions + about: + Please ask and answer questions here, show your implementations and + discuss ideas. + - name: Ory Chat + url: https://www.ory.sh/chat + about: + Hang out with other Ory community members to ask and answer questions. 
diff --git a/fosite/.github/auto_assign.yml b/fosite/.github/auto_assign.yml new file mode 100644 index 00000000000..c6cf23b781f --- /dev/null +++ b/fosite/.github/auto_assign.yml @@ -0,0 +1,16 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/auto_assign.yml + +# Set to true to add reviewers to pull requests +addReviewers: true + +# Set to true to add assignees to pull requests +addAssignees: true + +# A list of reviewers to be added to pull requests (GitHub user name) +assignees: + - ory/maintainers + +# A number of reviewers added to the pull request +# Set 0 to add all the reviewers (default: 0) +numberOfReviewers: 0 diff --git a/fosite/.github/config.yml b/fosite/.github/config.yml new file mode 100644 index 00000000000..4fed11851b3 --- /dev/null +++ b/fosite/.github/config.yml @@ -0,0 +1,6 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/config.yml + +todo: + keyword: "@todo" + label: todo diff --git a/fosite/.github/pull_request_template.md b/fosite/.github/pull_request_template.md new file mode 100644 index 00000000000..bd001c31df9 --- /dev/null +++ b/fosite/.github/pull_request_template.md @@ -0,0 +1,57 @@ + + +## Related Issue or Design Document + + + +## Checklist + + + +- [ ] I have read the [contributing guidelines](../blob/master/CONTRIBUTING.md) + and signed the CLA. +- [ ] I have referenced an issue containing the design document if my change + introduces a new feature. +- [ ] I have read the [security policy](../security/policy). +- [ ] I confirm that this pull request does not address a security + vulnerability. If this pull request addresses a security vulnerability, I + confirm that I got approval (please contact + [security@ory.sh](mailto:security@ory.sh)) from the maintainers to push + the changes. +- [ ] I have added tests that prove my fix is effective or that my feature + works. +- [ ] I have added the necessary documentation within the code base (if + appropriate). + +## Further comments + + diff --git a/fosite/.github/renovate.json b/fosite/.github/renovate.json new file mode 100644 index 00000000000..a1953a67d3c --- /dev/null +++ b/fosite/.github/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": ["config:base"], + "prHourlyLimit": 1, + "prConcurrentLimit": 2 +} diff --git a/fosite/.github/workflows/closed_references.yml b/fosite/.github/workflows/closed_references.yml new file mode 100644 index 00000000000..9a1b48350a8 --- /dev/null +++ b/fosite/.github/workflows/closed_references.yml @@ -0,0 +1,30 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/workflows/closed_references.yml + +name: Closed Reference Notifier + +on: + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + inputs: + issueLimit: + description: Max. 
number of issues to create + required: true + default: "5" + +jobs: + find_closed_references: + if: github.repository_owner == 'ory' + runs-on: ubuntu-latest + name: Find closed references + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2-beta + with: + node-version: "14" + - uses: ory/closed-reference-notifier@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + issueLabels: upstream,good first issue,help wanted + issueLimit: ${{ github.event.inputs.issueLimit || '5' }} diff --git a/fosite/.github/workflows/conventional_commits.yml b/fosite/.github/workflows/conventional_commits.yml new file mode 100644 index 00000000000..c4d39051176 --- /dev/null +++ b/fosite/.github/workflows/conventional_commits.yml @@ -0,0 +1,59 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/workflows/conventional_commits.yml + +name: Conventional commits + +# This GitHub CI Action enforces that pull request titles follow conventional commits. +# More info at https://www.conventionalcommits.org. +# +# The Ory-wide defaults for commit titles and scopes are below. +# Your repository can add/replace elements via a configuration file at the path below. +# More info at https://github.com/ory/ci/blob/master/conventional_commit_config/README.md + +on: + pull_request_target: + types: + - edited + - opened + - ready_for_review + - reopened + # pull_request: # for debugging, uses config in local branch but supports only Pull Requests from this repo + +jobs: + main: + name: Validate PR title + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - id: config + uses: ory/ci/conventional_commit_config@master + with: + config_path: .github/conventional_commits.json + default_types: | + feat + fix + revert + docs + style + refactor + test + build + autogen + security + ci + chore + default_scopes: | + deps + docs + default_require_scope: false + - uses: amannn/action-semantic-pull-request@v4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: ${{ steps.config.outputs.types }} + scopes: ${{ steps.config.outputs.scopes }} + requireScope: ${{ steps.config.outputs.requireScope }} + subjectPattern: ^(?![A-Z]).+$ + subjectPatternError: | + The subject should start with a lowercase letter, yours is uppercase: + "{subject}" diff --git a/fosite/.github/workflows/format.yml b/fosite/.github/workflows/format.yml new file mode 100644 index 00000000000..b59c85d31b2 --- /dev/null +++ b/fosite/.github/workflows/format.yml @@ -0,0 +1,17 @@ +name: Format + +on: + pull_request: + push: + +jobs: + format: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v3 + with: + go-version: "1.21" + - run: make format + - name: Indicate formatting issues + run: git diff HEAD --exit-code --color diff --git a/fosite/.github/workflows/labels.yml b/fosite/.github/workflows/labels.yml new file mode 100644 index 00000000000..e903667d45c --- /dev/null +++ b/fosite/.github/workflows/labels.yml @@ -0,0 +1,25 @@ +# AUTO-GENERATED, DO NOT EDIT! 
+# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/workflows/labels.yml + +name: Synchronize Issue Labels + +on: + workflow_dispatch: + push: + branches: + - master + +jobs: + milestone: + if: github.repository_owner == 'ory' + name: Synchronize Issue Labels + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Synchronize Issue Labels + uses: ory/label-sync-action@v0 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + dry: false + forced: true diff --git a/fosite/.github/workflows/licenses.yml b/fosite/.github/workflows/licenses.yml new file mode 100644 index 00000000000..4d996501097 --- /dev/null +++ b/fosite/.github/workflows/licenses.yml @@ -0,0 +1,35 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/workflows/licenses.yml + +name: Licenses + +on: + pull_request: + push: + branches: + - main + - v3 + - master + +jobs: + licenses: + name: License compliance + runs-on: ubuntu-latest + steps: + - name: Install script + uses: ory/ci/licenses/setup@master + with: + token: ${{ secrets.ORY_BOT_PAT || secrets.GITHUB_TOKEN }} + - name: Check licenses + uses: ory/ci/licenses/check@master + - name: Write, commit, push licenses + uses: ory/ci/licenses/write@master + if: + ${{ github.ref == 'refs/heads/main' || github.ref == + 'refs/heads/master' || github.ref == 'refs/heads/v3' }} + with: + author-email: + ${{ secrets.ORY_BOT_PAT && + '60093411+ory-bot@users.noreply.github.com' || + format('{0}@users.noreply.github.com', github.actor) }} + author-name: ${{ secrets.ORY_BOT_PAT && 'ory-bot' || github.actor }} diff --git a/fosite/.github/workflows/oidc-conformity-master.yml b/fosite/.github/workflows/oidc-conformity-master.yml new file mode 100644 index 00000000000..954bb072087 --- /dev/null +++ b/fosite/.github/workflows/oidc-conformity-master.yml @@ -0,0 +1,28 @@ +name: "OpenID Connect Conformity Tests on Master Branch" + +on: + push: + branches: + - master + +jobs: + oidc-conformity: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + fetch-depth: 2 + repository: ory/hydra + ref: master + - uses: actions/setup-go@v2 + with: + go-version: "1.21" + - name: Update fosite + run: | + go mod edit -replace github.com/ory/fosite=github.com/ory/fosite@${{ github.sha }} + go mod tidy + - name: Start service + run: ./test/conformance/start.sh + - name: Run tests + run:
./test/conformance/test.sh -v -short -parallel 16 diff --git a/fosite/.github/workflows/stale.yml b/fosite/.github/workflows/stale.yml new file mode 100644 index 00000000000..ac48a5e509b --- /dev/null +++ b/fosite/.github/workflows/stale.yml @@ -0,0 +1,47 @@ +# AUTO-GENERATED, DO NOT EDIT! +# Please edit the original at https://github.com/ory/meta/blob/master/templates/repository/common/.github/workflows/stale.yml + +name: "Close Stale Issues" +on: + workflow_dispatch: + schedule: + - cron: "0 0 * * *" + +jobs: + stale: + if: github.repository_owner == 'ory' + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v4 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: | + Hello contributors! + + I am marking this issue as stale as it has not received any engagement from the community or maintainers for a year. That does not imply that the issue has no merit! If you feel strongly about this issue + + - open a PR referencing and resolving the issue; + - leave a comment on it and discuss ideas on how you could contribute towards resolving it; + - leave a comment and describe in detail why this issue is critical for your use case; + - open a new issue with updated details and a plan for resolving the issue. + + Throughout its lifetime, Ory has received over 10.000 issues and PRs. To sustain that growth, we need to prioritize and focus on issues that are important to the community. A good indication of importance, and thus priority, is activity on a topic. + + Unfortunately, [burnout](https://www.jeffgeerling.com/blog/2016/why-i-close-prs-oss-project-maintainer-notes) has become a [topic](https://opensource.guide/best-practices/#its-okay-to-hit-pause) of [concern](https://docs.brew.sh/Maintainers-Avoiding-Burnout) amongst open-source projects. + + It can lead to severe personal and health issues as well as [opening](https://haacked.com/archive/2019/05/28/maintainer-burnout/) catastrophic [attack vectors](https://www.gradiant.org/en/blog/open-source-maintainer-burnout-as-an-attack-surface/). + + The motivation for this automation is to help prioritize issues in the backlog and not ignore, reject, or belittle anyone. + + If this issue was marked as stale erroneously you can exempt it by adding the `backlog` label, assigning someone, or setting a milestone for it. + + Thank you for your understanding and to anyone who participated in the conversation! And as written above, please do participate in the conversation if this topic is important to you! 
+ + Thank you 🙏✌️ + stale-issue-label: "stale" + exempt-issue-labels: "bug,blocking,docs,backlog" + days-before-stale: 365 + days-before-close: 30 + exempt-milestones: true + exempt-assignees: true + only-pr-labels: "stale" diff --git a/fosite/.github/workflows/test.yml b/fosite/.github/workflows/test.yml new file mode 100644 index 00000000000..24f9fb236ce --- /dev/null +++ b/fosite/.github/workflows/test.yml @@ -0,0 +1,15 @@ +name: Unit tests + +on: + pull_request: + push: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v3 + with: + go-version: "1.21" + - run: make test diff --git a/fosite/.gitignore b/fosite/.gitignore new file mode 100644 index 00000000000..25bca000d0d --- /dev/null +++ b/fosite/.gitignore @@ -0,0 +1,9 @@ +.bin +.idea +*.iml +.cover +*.log +*.exe +cover.out +vendor/ +node_modules/ diff --git a/fosite/.golangci.yml b/fosite/.golangci.yml new file mode 100644 index 00000000000..41a57ab92be --- /dev/null +++ b/fosite/.golangci.yml @@ -0,0 +1,16 @@ +linters: + enable: + - gosec + disable: + - ineffassign + - deadcode + - unused + - structcheck + - gosimple + - bodyclose + - staticcheck + +run: + skip-files: + - ".+_test.go" + - ".+_test_.+.go" diff --git a/fosite/.nancy-ignore b/fosite/.nancy-ignore new file mode 100644 index 00000000000..23ee728dda4 --- /dev/null +++ b/fosite/.nancy-ignore @@ -0,0 +1,32 @@ +# etcd issues - can be ignored because etcd is not used. +CVE-2020-15114 +CVE-2020-15136 +CVE-2020-15115 +# end + +# dockertest issues - can be ignored because only used for testing +CVE-2020-8911 +sonatype-2020-1759 +CVE-2020-8912 +CVE-2021-41103 +CVE-2022-23648 +CVE-2021-21334 +CVE-2021-32760 +CVE-2021-41190 +sonatype-2021-0853 +sonatype-2021-0853 +CVE-2021-41190 +CVE-2021-30465 +CVE-2021-43784 +sonatype-2020-0569 +CVE-2022-29162 +sonatype-2019-0890 +CVE-2022-21698 +sonatype-2021-148 +CVE-2021-20329 +CVE-2021-22133 +sonatype-2021-1485 +sonatype-2021-4899 +sonatype-2020-1759 +CVE-2021-221333 +CVE-2022-31030 \ No newline at end of file diff --git a/fosite/.prettierignore b/fosite/.prettierignore new file mode 100644 index 00000000000..1e816b997df --- /dev/null +++ b/fosite/.prettierignore @@ -0,0 +1,7 @@ +.github/ISSUE_TEMPLATE/BUG-REPORT.yml +.github/ISSUE_TEMPLATE/DESIGN-DOC.yml +.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml +.github/ISSUE_TEMPLATE/config.yml +.github/pull_request_template.md +CHANGELOG.md +CONTRIBUTING.md diff --git a/fosite/.reference-ignore b/fosite/.reference-ignore new file mode 100644 index 00000000000..eee2a89c2ed --- /dev/null +++ b/fosite/.reference-ignore @@ -0,0 +1,3 @@ +**/node_modules +docs +CHANGELOG.md diff --git a/fosite/.reports/dep-licenses.csv b/fosite/.reports/dep-licenses.csv new file mode 100644 index 00000000000..262a26dd821 --- /dev/null +++ b/fosite/.reports/dep-licenses.csv @@ -0,0 +1,101 @@ + +"github.com/asaskevich/govalidator","MIT" +"github.com/cespare/xxhash/v2","MIT" +"github.com/dgraph-io/ristretto","Apache-2.0" +"github.com/dgraph-io/ristretto/z","MIT" +"github.com/dustin/go-humanize","MIT" +"github.com/pkg/errors","BSD-2-Clause" +"golang.org/x/sys/unix","BSD-3-Clause" +"github.com/google/uuid","BSD-3-Clause" +"github.com/gorilla/mux","BSD-3-Clause" +"github.com/gorilla/websocket","BSD-2-Clause" +"github.com/hashicorp/go-cleanhttp","MPL-2.0" +"github.com/hashicorp/go-retryablehttp","MPL-2.0" 
+"github.com/magiconair/properties","BSD-2-Clause" +"github.com/mattn/goveralls","MIT" +"golang.org/x/mod","BSD-3-Clause" +"golang.org/x/tools","BSD-3-Clause" +"github.com/mohae/deepcopy","MIT" +"github.com/oleiade/reflections","MIT" +"github.com/asaskevich/govalidator","MIT" +"github.com/cenkalti/backoff/v4","MIT" +"github.com/cespare/xxhash/v2","MIT" +"github.com/dgraph-io/ristretto","Apache-2.0" +"github.com/dgraph-io/ristretto/z","MIT" +"github.com/dustin/go-humanize","MIT" +"github.com/felixge/httpsnoop","MIT" +"github.com/go-jose/go-jose/v3","Apache-2.0" +"github.com/go-jose/go-jose/v3/json","BSD-3-Clause" +"github.com/go-logr/logr","Apache-2.0" +"github.com/go-logr/stdr","Apache-2.0" +"github.com/gobuffalo/pop/v6/logging","MIT" +"github.com/gogo/protobuf","BSD-3-Clause" +"github.com/google/uuid","BSD-3-Clause" +"github.com/grpc-ecosystem/grpc-gateway/v2","BSD-3-Clause" +"github.com/hashicorp/go-cleanhttp","MPL-2.0" +"github.com/hashicorp/go-retryablehttp","MPL-2.0" +"github.com/mohae/deepcopy","MIT" +"github.com/openzipkin/zipkin-go/model","Apache-2.0" +"github.com/ory/fosite","Apache-2.0" +"github.com/ory/go-convenience/stringslice","MIT" +"github.com/ory/x","Apache-2.0" +"github.com/pkg/errors","BSD-2-Clause" +"github.com/seatgeek/logrus-gelf-formatter","BSD-3-Clause" +"github.com/sirupsen/logrus","MIT" +"go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace","Apache-2.0" +"go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp","Apache-2.0" +"go.opentelemetry.io/contrib/propagators/b3","Apache-2.0" +"go.opentelemetry.io/contrib/propagators/jaeger","Apache-2.0" +"go.opentelemetry.io/contrib/samplers/jaegerremote","Apache-2.0" +"go.opentelemetry.io/otel","Apache-2.0" +"go.opentelemetry.io/otel/exporters/jaeger","Apache-2.0" +"go.opentelemetry.io/otel/exporters/jaeger/internal/third_party/thrift/lib/go/thrift","Apache-2.0" +"go.opentelemetry.io/otel/exporters/jaeger/internal/third_party/thrift/lib/go/thrift","GNU-All-permissive-Copying-License" +"go.opentelemetry.io/otel/exporters/jaeger/internal/third_party/thrift/lib/go/thrift","BSD-3-Clause" +"go.opentelemetry.io/otel/exporters/otlp/otlptrace","Apache-2.0" +"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp","Apache-2.0" +"go.opentelemetry.io/otel/exporters/zipkin","Apache-2.0" +"go.opentelemetry.io/otel/metric","Apache-2.0" +"go.opentelemetry.io/otel/sdk","Apache-2.0" +"go.opentelemetry.io/otel/trace","Apache-2.0" +"go.opentelemetry.io/proto/otlp","Apache-2.0" +"golang.org/x/crypto","BSD-3-Clause" +"golang.org/x/net","BSD-3-Clause" +"golang.org/x/sys/unix","BSD-3-Clause" +"golang.org/x/text","BSD-3-Clause" +"google.golang.org/genproto/googleapis/api","Apache-2.0" +"google.golang.org/genproto/googleapis/rpc/status","Apache-2.0" +"google.golang.org/grpc","Apache-2.0" +"google.golang.org/protobuf","BSD-3-Clause" +"github.com/fsnotify/fsnotify","BSD-3-Clause" +"github.com/hashicorp/hcl","MPL-2.0" +"github.com/magiconair/properties","BSD-2-Clause" +"github.com/mitchellh/mapstructure","MIT" 
+"github.com/ory/go-acc","Apache-2.0" +"github.com/pelletier/go-toml/v2","MIT" +"github.com/spf13/afero","Apache-2.0" +"github.com/spf13/cast","MIT" +"github.com/spf13/cobra","Apache-2.0" +"github.com/spf13/jwalterweatherman","MIT" +"github.com/spf13/pflag","BSD-3-Clause" +"github.com/spf13/viper","MIT" +"github.com/subosito/gotenv","MIT" +"golang.org/x/sys/unix","BSD-3-Clause" +"golang.org/x/text","BSD-3-Clause" +"gopkg.in/ini.v1","Apache-2.0" +"gopkg.in/yaml.v3","MIT" +"github.com/ory/x","Apache-2.0" +"github.com/moul/http2curl","MIT" +"github.com/parnurzeal/gorequest","MIT" +"github.com/pkg/errors","BSD-2-Clause" +"golang.org/x/net/publicsuffix","BSD-3-Clause" +"github.com/pkg/errors","BSD-2-Clause" +"github.com/stretchr/testify","MIT" +"github.com/tidwall/gjson","MIT" +"github.com/tidwall/match","MIT" +"github.com/tidwall/pretty","MIT" +"go.opentelemetry.io/otel","Apache-2.0" +"go.opentelemetry.io/otel/trace","Apache-2.0" +"golang.org/x/oauth2","BSD-3-Clause" +"golang.org/x/text","BSD-3-Clause" + diff --git a/fosite/.travis.yml b/fosite/.travis.yml new file mode 100644 index 00000000000..f6ec97b90b4 --- /dev/null +++ b/fosite/.travis.yml @@ -0,0 +1,18 @@ +language: go + +go_import_path: github.com/ory/fosite + +go: + - "1.14" + +install: + - go install github.com/mattn/goveralls + - go install github.com/ory/go-acc + - curl -sSfL + https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | + sh -s -- -b $(go env GOPATH)/bin v1.24.0 + +script: + - golangci-lint run + - go-acc -o coverage.txt ./... -- -failfast -timeout=20m + - goveralls -coverprofile="coverage.txt" diff --git a/fosite/CHANGELOG.md b/fosite/CHANGELOG.md new file mode 100644 index 00000000000..7ed5dd3070e --- /dev/null +++ b/fosite/CHANGELOG.md @@ -0,0 +1,2827 @@ +**This file is no longer being updated and kept for historical reasons. 
Please check +the [GitHub releases](https://github.com/ory/fosite/releases) instead!** + + + + +**Table of Contents** + +- [0.0.0 (2022-09-22)](#000-2022-09-22) + - [Breaking Changes](#breaking-changes) + - [Bug Fixes](#bug-fixes) + - [Code Refactoring](#code-refactoring) + - [Features](#features) + - [Tests](#tests) + - [Unclassified](#unclassified) +- [0.42.2 (2022-04-17)](#0422-2022-04-17) + - [Bug Fixes](#bug-fixes-1) + - [Code Generation](#code-generation) + - [Documentation](#documentation) + - [Features](#features-1) +- [0.42.1 (2022-02-03)](#0421-2022-02-03) + - [Code Generation](#code-generation-1) + - [Features](#features-2) +- [0.42.0 (2022-01-06)](#0420-2022-01-06) + - [Code Generation](#code-generation-2) + - [Features](#features-3) +- [0.41.0 (2021-11-13)](#0410-2021-11-13) + - [Bug Fixes](#bug-fixes-2) + - [Code Generation](#code-generation-3) + - [Code Refactoring](#code-refactoring-1) + - [Documentation](#documentation-1) + - [Features](#features-4) +- [0.40.2 (2021-05-28)](#0402-2021-05-28) + - [Features](#features-5) +- [0.40.1 (2021-05-23)](#0401-2021-05-23) + - [Bug Fixes](#bug-fixes-3) +- [0.40.0 (2021-05-21)](#0400-2021-05-21) + - [Bug Fixes](#bug-fixes-4) + - [Code Refactoring](#code-refactoring-2) + - [Documentation](#documentation-2) + - [Features](#features-6) + - [Tests](#tests-1) +- [0.39.0 (2021-03-08)](#0390-2021-03-08) + - [Features](#features-7) +- [0.38.0 (2021-02-23)](#0380-2021-02-23) + - [Breaking Changes](#breaking-changes-1) + - [Bug Fixes](#bug-fixes-5) + - [Features](#features-8) +- [0.37.0 (2021-02-05)](#0370-2021-02-05) + - [Bug Fixes](#bug-fixes-6) + - [Features](#features-9) +- [0.36.1 (2021-01-11)](#0361-2021-01-11) + - [Bug Fixes](#bug-fixes-7) + - [Chores](#chores) + - [Code Refactoring](#code-refactoring-3) +- [0.36.0 (2020-11-16)](#0360-2020-11-16) + - [Breaking Changes](#breaking-changes-2) + - [Bug Fixes](#bug-fixes-8) + - [Code Refactoring](#code-refactoring-4) + - [Documentation](#documentation-3) + - [Features](#features-10) +- [0.35.1 (2020-10-11)](#0351-2020-10-11) + - [Bug Fixes](#bug-fixes-9) + - [Code Generation](#code-generation-4) + - [Documentation](#documentation-4) + - [Features](#features-11) +- [0.35.0 (2020-10-06)](#0350-2020-10-06) + - [Breaking Changes](#breaking-changes-3) + - [Bug Fixes](#bug-fixes-10) + - [Code Generation](#code-generation-5) +- [0.34.1 (2020-10-02)](#0341-2020-10-02) + - [Bug Fixes](#bug-fixes-11) + - [Documentation](#documentation-5) +- [0.34.0 (2020-09-24)](#0340-2020-09-24) + - [Breaking Changes](#breaking-changes-4) + - [Bug Fixes](#bug-fixes-12) + - [Chores](#chores-1) + - [Features](#features-12) + - [Unclassified](#unclassified-1) +- [0.33.0 (2020-09-16)](#0330-2020-09-16) + - [Breaking Changes](#breaking-changes-5) + - [Features](#features-13) +- [0.32.4 (2020-09-15)](#0324-2020-09-15) + - [Code Generation](#code-generation-6) + - [Code Refactoring](#code-refactoring-5) + - [Documentation](#documentation-6) +- [0.32.3 (2020-09-12)](#0323-2020-09-12) + - [Bug Fixes](#bug-fixes-13) + - [Code Refactoring](#code-refactoring-6) + - [Documentation](#documentation-7) + - [Features](#features-14) +- [0.32.2 (2020-06-22)](#0322-2020-06-22) + - [Features](#features-15) +- [0.32.1 (2020-06-05)](#0321-2020-06-05) + - [Bug Fixes](#bug-fixes-14) + - [Features](#features-16) +- [0.32.0 (2020-05-28)](#0320-2020-05-28) + - [Bug Fixes](#bug-fixes-15) + - [Documentation](#documentation-8) + - [Features](#features-17) +- [0.31.3 (2020-05-09)](#0313-2020-05-09) + - [Bug Fixes](#bug-fixes-16) + - 
[Features](#features-18) +- [0.31.2 (2020-04-16)](#0312-2020-04-16) + - [Bug Fixes](#bug-fixes-17) +- [0.31.1 (2020-04-16)](#0311-2020-04-16) + - [Bug Fixes](#bug-fixes-18) + - [Documentation](#documentation-9) +- [0.31.0 (2020-03-29)](#0310-2020-03-29) + - [Unclassified](#unclassified-2) +- [0.30.6 (2020-03-26)](#0306-2020-03-26) + - [Bug Fixes](#bug-fixes-19) + - [Documentation](#documentation-10) +- [0.30.5 (2020-03-25)](#0305-2020-03-25) + - [Bug Fixes](#bug-fixes-20) +- [0.30.4 (2020-03-17)](#0304-2020-03-17) + - [Bug Fixes](#bug-fixes-21) +- [0.30.3 (2020-03-04)](#0303-2020-03-04) + - [Bug Fixes](#bug-fixes-22) + - [Documentation](#documentation-11) + - [Features](#features-19) +- [0.30.2 (2019-11-21)](#0302-2019-11-21) + - [Unclassified](#unclassified-3) +- [0.30.1 (2019-09-23)](#0301-2019-09-23) + - [Unclassified](#unclassified-4) +- [0.30.0 (2019-09-16)](#0300-2019-09-16) + - [Unclassified](#unclassified-5) +- [0.29.8 (2019-08-29)](#0298-2019-08-29) + - [Documentation](#documentation-12) + - [Unclassified](#unclassified-6) +- [0.29.7 (2019-08-06)](#0297-2019-08-06) + - [Documentation](#documentation-13) + - [Unclassified](#unclassified-7) +- [0.29.6 (2019-04-26)](#0296-2019-04-26) + - [Unclassified](#unclassified-8) +- [0.29.5 (2019-04-25)](#0295-2019-04-25) + - [Unclassified](#unclassified-9) +- [0.29.3 (2019-04-17)](#0293-2019-04-17) + - [Unclassified](#unclassified-10) +- [0.29.2 (2019-04-11)](#0292-2019-04-11) + - [Unclassified](#unclassified-11) +- [0.29.1 (2019-03-27)](#0291-2019-03-27) + - [Unclassified](#unclassified-12) +- [0.29.0 (2018-12-23)](#0290-2018-12-23) + - [Unclassified](#unclassified-13) +- [0.28.1 (2018-12-04)](#0281-2018-12-04) + - [Unclassified](#unclassified-14) +- [0.28.0 (2018-11-16)](#0280-2018-11-16) + - [Unclassified](#unclassified-15) +- [0.27.4 (2018-11-12)](#0274-2018-11-12) + - [Documentation](#documentation-14) + - [Unclassified](#unclassified-16) +- [0.27.3 (2018-11-08)](#0273-2018-11-08) + - [Unclassified](#unclassified-17) +- [0.27.2 (2018-11-07)](#0272-2018-11-07) + - [Unclassified](#unclassified-18) +- [0.27.1 (2018-11-03)](#0271-2018-11-03) + - [Unclassified](#unclassified-19) +- [0.27.0 (2018-10-31)](#0270-2018-10-31) + - [Unclassified](#unclassified-20) +- [0.26.1 (2018-10-25)](#0261-2018-10-25) + - [Unclassified](#unclassified-21) +- [0.26.0 (2018-10-24)](#0260-2018-10-24) + - [Unclassified](#unclassified-22) +- [0.25.1 (2018-10-23)](#0251-2018-10-23) + - [Documentation](#documentation-15) + - [Unclassified](#unclassified-23) +- [0.25.0 (2018-10-08)](#0250-2018-10-08) + - [Unclassified](#unclassified-24) +- [0.24.0 (2018-09-27)](#0240-2018-09-27) + - [Unclassified](#unclassified-25) +- [0.23.0 (2018-09-22)](#0230-2018-09-22) + - [Unclassified](#unclassified-26) +- [0.22.0 (2018-09-19)](#0220-2018-09-19) + - [Unclassified](#unclassified-27) +- [0.21.5 (2018-08-31)](#0215-2018-08-31) + - [Unclassified](#unclassified-28) +- [0.21.4 (2018-08-26)](#0214-2018-08-26) + - [Unclassified](#unclassified-29) +- [0.21.3 (2018-08-22)](#0213-2018-08-22) + - [Unclassified](#unclassified-30) +- [0.21.2 (2018-08-07)](#0212-2018-08-07) + - [Unclassified](#unclassified-31) +- [0.21.1 (2018-07-22)](#0211-2018-07-22) + - [Unclassified](#unclassified-32) +- [0.21.0 (2018-06-23)](#0210-2018-06-23) + - [Documentation](#documentation-16) + - [Unclassified](#unclassified-33) +- [0.20.3 (2018-06-07)](#0203-2018-06-07) + - [Unclassified](#unclassified-34) +- [0.20.2 (2018-05-29)](#0202-2018-05-29) + - [Unclassified](#unclassified-35) +- [0.20.1 
(2018-05-29)](#0201-2018-05-29) + - [Unclassified](#unclassified-36) +- [0.20.0 (2018-05-28)](#0200-2018-05-28) + - [Unclassified](#unclassified-37) +- [0.19.8 (2018-05-24)](#0198-2018-05-24) + - [Unclassified](#unclassified-38) +- [0.19.7 (2018-05-24)](#0197-2018-05-24) + - [Unclassified](#unclassified-39) +- [0.19.6 (2018-05-24)](#0196-2018-05-24) + - [Unclassified](#unclassified-40) +- [0.19.5 (2018-05-23)](#0195-2018-05-23) + - [Unclassified](#unclassified-41) +- [0.19.4 (2018-05-20)](#0194-2018-05-20) + - [Unclassified](#unclassified-42) +- [0.19.3 (2018-05-20)](#0193-2018-05-20) + - [Unclassified](#unclassified-43) +- [0.19.2 (2018-05-19)](#0192-2018-05-19) + - [Unclassified](#unclassified-44) +- [0.19.1 (2018-05-19)](#0191-2018-05-19) + - [Unclassified](#unclassified-45) +- [0.19.0 (2018-05-17)](#0190-2018-05-17) + - [Unclassified](#unclassified-46) +- [0.18.1 (2018-05-01)](#0181-2018-05-01) + - [Unclassified](#unclassified-47) +- [0.18.0 (2018-04-30)](#0180-2018-04-30) + - [Unclassified](#unclassified-48) +- [0.17.2 (2018-04-26)](#0172-2018-04-26) + - [Unclassified](#unclassified-49) +- [0.17.1 (2018-04-22)](#0171-2018-04-22) + - [Unclassified](#unclassified-50) +- [0.17.0 (2018-04-08)](#0170-2018-04-08) + - [Documentation](#documentation-17) + - [Unclassified](#unclassified-51) +- [0.16.5 (2018-03-17)](#0165-2018-03-17) + - [Documentation](#documentation-18) + - [Unclassified](#unclassified-52) +- [0.16.4 (2018-02-07)](#0164-2018-02-07) + - [Unclassified](#unclassified-53) +- [0.16.3 (2018-02-07)](#0163-2018-02-07) + - [Unclassified](#unclassified-54) +- [0.16.2 (2018-01-25)](#0162-2018-01-25) + - [Unclassified](#unclassified-55) +- [0.16.1 (2017-12-23)](#0161-2017-12-23) + - [Unclassified](#unclassified-56) +- [0.16.0 (2017-12-23)](#0160-2017-12-23) + - [Unclassified](#unclassified-57) +- [0.15.6 (2017-12-21)](#0156-2017-12-21) + - [Unclassified](#unclassified-58) +- [0.15.5 (2017-12-17)](#0155-2017-12-17) + - [Unclassified](#unclassified-59) +- [0.15.4 (2017-12-17)](#0154-2017-12-17) + - [Unclassified](#unclassified-60) +- [0.15.3 (2017-12-17)](#0153-2017-12-17) + - [Unclassified](#unclassified-61) +- [0.15.2 (2017-12-10)](#0152-2017-12-10) + - [Unclassified](#unclassified-62) +- [0.15.1 (2017-12-10)](#0151-2017-12-10) + - [Unclassified](#unclassified-63) +- [0.15.0 (2017-12-09)](#0150-2017-12-09) + - [Documentation](#documentation-19) + - [Unclassified](#unclassified-64) +- [0.14.2 (2017-12-06)](#0142-2017-12-06) + - [Unclassified](#unclassified-65) +- [0.14.1 (2017-12-06)](#0141-2017-12-06) + - [Unclassified](#unclassified-66) +- [0.14.0 (2017-12-06)](#0140-2017-12-06) + - [Unclassified](#unclassified-67) +- [0.13.1 (2017-12-04)](#0131-2017-12-04) + - [Unclassified](#unclassified-68) +- [0.13.0 (2017-10-25)](#0130-2017-10-25) + - [Unclassified](#unclassified-69) +- [0.12.0 (2017-10-25)](#0120-2017-10-25) + - [Unclassified](#unclassified-70) +- [0.11.4 (2017-10-10)](#0114-2017-10-10) + - [Documentation](#documentation-20) + - [Unclassified](#unclassified-71) +- [0.11.3 (2017-08-21)](#0113-2017-08-21) + - [Documentation](#documentation-21) + - [Unclassified](#unclassified-72) +- [0.11.2 (2017-07-09)](#0112-2017-07-09) + - [Unclassified](#unclassified-73) +- [0.11.1 (2017-07-09)](#0111-2017-07-09) + - [Unclassified](#unclassified-74) +- [0.11.0 (2017-07-09)](#0110-2017-07-09) + - [Unclassified](#unclassified-75) +- [0.10.0 (2017-07-06)](#0100-2017-07-06) + - [Unclassified](#unclassified-76) +- [0.9.7 (2017-06-28)](#097-2017-06-28) + - [Unclassified](#unclassified-77) +- [0.9.6 
(2017-06-21)](#096-2017-06-21) + - [Documentation](#documentation-22) + - [Unclassified](#unclassified-78) +- [0.9.5 (2017-06-08)](#095-2017-06-08) + - [Unclassified](#unclassified-79) +- [0.9.4 (2017-06-05)](#094-2017-06-05) + - [Unclassified](#unclassified-80) +- [0.9.3 (2017-06-05)](#093-2017-06-05) + - [Unclassified](#unclassified-81) +- [0.9.2 (2017-06-05)](#092-2017-06-05) + - [Unclassified](#unclassified-82) +- [0.9.1 (2017-06-04)](#091-2017-06-04) + - [Unclassified](#unclassified-83) +- [0.9.0 (2017-06-03)](#090-2017-06-03) + - [Documentation](#documentation-23) + - [Unclassified](#unclassified-84) +- [0.8.0 (2017-05-18)](#080-2017-05-18) + - [Documentation](#documentation-24) + - [Unclassified](#unclassified-85) +- [0.7.0 (2017-05-03)](#070-2017-05-03) + - [Documentation](#documentation-25) + - [Unclassified](#unclassified-86) +- [0.6.19 (2017-05-03)](#0619-2017-05-03) + - [Unclassified](#unclassified-87) +- [0.6.18 (2017-04-14)](#0618-2017-04-14) + - [Unclassified](#unclassified-88) +- [0.6.17 (2017-02-24)](#0617-2017-02-24) + - [Unclassified](#unclassified-89) +- [0.6.15 (2017-02-11)](#0615-2017-02-11) + - [Unclassified](#unclassified-90) +- [0.6.14 (2017-01-08)](#0614-2017-01-08) + - [Unclassified](#unclassified-91) +- [0.6.13 (2017-01-08)](#0613-2017-01-08) + - [Unclassified](#unclassified-92) +- [0.6.12 (2017-01-02)](#0612-2017-01-02) + - [Unclassified](#unclassified-93) +- [0.6.11 (2017-01-02)](#0611-2017-01-02) + - [Unclassified](#unclassified-94) +- [0.6.10 (2016-12-29)](#0610-2016-12-29) + - [Unclassified](#unclassified-95) +- [0.6.9 (2016-12-29)](#069-2016-12-29) + - [Documentation](#documentation-26) + - [Unclassified](#unclassified-96) +- [0.6.8 (2016-12-20)](#068-2016-12-20) + - [Unclassified](#unclassified-97) +- [0.6.7 (2016-12-06)](#067-2016-12-06) + - [Unclassified](#unclassified-98) +- [0.6.6 (2016-12-06)](#066-2016-12-06) + - [Unclassified](#unclassified-99) +- [0.6.5 (2016-12-04)](#065-2016-12-04) + - [Unclassified](#unclassified-100) +- [0.6.4 (2016-11-29)](#064-2016-11-29) + - [Unclassified](#unclassified-101) +- [0.6.2 (2016-11-25)](#062-2016-11-25) + - [Unclassified](#unclassified-102) +- [0.6.1 (2016-11-17)](#061-2016-11-17) + - [Unclassified](#unclassified-103) +- [0.6.0 (2016-11-17)](#060-2016-11-17) + - [Unclassified](#unclassified-104) +- [0.5.1 (2016-10-22)](#051-2016-10-22) + - [Unclassified](#unclassified-105) +- [0.5.0 (2016-10-17)](#050-2016-10-17) + - [Unclassified](#unclassified-106) +- [0.4.0 (2016-10-16)](#040-2016-10-16) + - [Documentation](#documentation-27) + - [Unclassified](#unclassified-107) +- [0.3.6 (2016-10-07)](#036-2016-10-07) + - [Unclassified](#unclassified-108) +- [0.3.5 (2016-10-06)](#035-2016-10-06) + - [Unclassified](#unclassified-109) +- [0.3.4 (2016-10-04)](#034-2016-10-04) + - [Unclassified](#unclassified-110) +- [0.3.3 (2016-10-03)](#033-2016-10-03) + - [Documentation](#documentation-28) + - [Unclassified](#unclassified-111) +- [0.3.2 (2016-09-22)](#032-2016-09-22) + - [Unclassified](#unclassified-112) +- [0.3.1 (2016-09-22)](#031-2016-09-22) + - [Unclassified](#unclassified-113) +- [0.3.0 (2016-08-22)](#030-2016-08-22) + - [Unclassified](#unclassified-114) +- [0.2.4 (2016-08-09)](#024-2016-08-09) + - [Unclassified](#unclassified-115) +- [0.2.3 (2016-08-08)](#023-2016-08-08) + - [Unclassified](#unclassified-116) +- [0.2.2 (2016-08-08)](#022-2016-08-08) + - [Unclassified](#unclassified-117) +- [0.2.1 (2016-08-08)](#021-2016-08-08) + - [Unclassified](#unclassified-118) +- [0.2.0 (2016-08-06)](#020-2016-08-06) + - 
[Unclassified](#unclassified-119) +- [0.1.0 (2016-08-01)](#010-2016-08-01) + - [Code Refactoring](#code-refactoring-7) + - [Documentation](#documentation-29) + - [Unclassified](#unclassified-120) + + + +# [0.0.0](https://github.com/ory/fosite/compare/v0.42.2...v0.0.0) (2022-09-22) + +## Breaking Changes + +Please be aware that several internal APIs have changed, as well as public methods. Most notably, we added the context to all `Write*` methods. + +```patch + type OAuth2Provider interface { +- WriteAuthorizeError(rw http.ResponseWriter, requester AuthorizeRequester, err error) ++ WriteAuthorizeError(ctx context.Context, rw http.ResponseWriter, requester AuthorizeRequester, err error) + +- WriteAuthorizeResponse(rw http.ResponseWriter, requester AuthorizeRequester, responder AuthorizeResponder) ++ WriteAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, requester AuthorizeRequester, responder AuthorizeResponder) + +- WriteAccessError(rw http.ResponseWriter, requester AccessRequester, err error) ++ WriteAccessError(ctx context.Context, rw http.ResponseWriter, requester AccessRequester, err error) + +- WriteAccessResponse(rw http.ResponseWriter, requester AccessRequester, responder AccessResponder) ++ WriteAccessResponse(ctx context.Context, rw http.ResponseWriter, requester AccessRequester, responder AccessResponder) + +- WriteRevocationResponse(rw http.ResponseWriter, err error) ++ WriteRevocationResponse(ctx context.Context, rw http.ResponseWriter, err error) + +- WriteIntrospectionError(rw http.ResponseWriter, err error) ++ WriteIntrospectionError(ctx context.Context, rw http.ResponseWriter, err error) + +- WriteIntrospectionResponse(rw http.ResponseWriter, r IntrospectionResponder) ++ WriteIntrospectionResponse(ctx context.Context, rw http.ResponseWriter, r IntrospectionResponder) + } +``` + +The default config struct has moved from package `github.com/ory/fosite/compose.Config` to `github.com/ory/fosite.Config`. Struct `github.com/ory/fosite.Fosite` no longer has any configuration parameters +itself. + +Please note that the HMAC / global secret no longer has to be set in the compose call, but in the config initialization: + +```patch +-compose.ComposeAllEnabled(&compose.Config{}, store, secret, privateKey) ++compose.ComposeAllEnabled(&fosite.Config{GlobalSecret: secret}, store, privateKey) +``` + +Many internal interfaces have been changed, usually adding `ctx context.Context` as the first parameter.
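For orientation, here is a minimal caller-side sketch of what a token endpoint might look like after these changes. It is not taken from the fosite repository or its release notes: the wiring (in-memory store, generated RSA key, the `tokenEndpoint` handler and variable names) is assumed purely for illustration, and only the `ComposeAllEnabled` call and the context-aware `Write*` calls reflect the interface changes listed above.

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"net/http"

	"github.com/ory/fosite"
	"github.com/ory/fosite/compose"
	"github.com/ory/fosite/handler/openid"
	"github.com/ory/fosite/storage"
)

// Illustrative wiring only: the secret, key, and in-memory store are placeholders.
var (
	secret        = []byte("some-cool-secret-that-is-32bytes")
	privateKey, _ = rsa.GenerateKey(rand.Reader, 2048)
	store         = storage.NewMemoryStore()

	// The global secret now lives in fosite.Config instead of being passed to compose.
	provider = compose.ComposeAllEnabled(&fosite.Config{GlobalSecret: secret}, store, privateKey)
)

// tokenEndpoint shows the caller-side change: every Write* call now takes the
// request context as its first argument.
func tokenEndpoint(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()
	session := new(openid.DefaultSession)

	accessRequest, err := provider.NewAccessRequest(ctx, req, session)
	if err != nil {
		// was: provider.WriteAccessError(rw, accessRequest, err)
		provider.WriteAccessError(ctx, rw, accessRequest, err)
		return
	}

	response, err := provider.NewAccessResponse(ctx, accessRequest)
	if err != nil {
		provider.WriteAccessError(ctx, rw, accessRequest, err)
		return
	}

	// was: provider.WriteAccessResponse(rw, accessRequest, response)
	provider.WriteAccessResponse(ctx, rw, accessRequest, response)
}

func main() {
	http.HandleFunc("/oauth2/token", tokenEndpoint)
	_ = http.ListenAndServe(":8080", nil)
}
```

The same pattern (adding `ctx` as the first argument) applies to the authorize, revocation, and introspection writers listed in the interface above.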
+ +### Bug Fixes + +- Bump dependencies ([5dab818](https://github.com/ory/fosite/commit/5dab818f9707e364dcfe56bc6fd2245049417cc1)) +- Cves in deps ([f5782c3](https://github.com/ory/fosite/commit/f5782c33814ec738ea188b0ffac50ef45e7f3eb8)) +- Include `at_hash` claim in authcode flow's ID token ([#679](https://github.com/ory/fosite/issues/679)) ([c3b7bab](https://github.com/ory/fosite/commit/c3b7bab41db24b000f8e1416e1475e0aae4c310c)) +- Linting ([222ca97](https://github.com/ory/fosite/commit/222ca97805edfb52a655969841c2ac2958cc6d36)) +- **rfc7523:** Comment mentioned incorrect granttype ([#668](https://github.com/ory/fosite/issues/668)) ([b41f187](https://github.com/ory/fosite/commit/b41f187703bc1c8dc43ac0ec1ea23569779974bb)) +- State check for hybrid flow ([#670](https://github.com/ory/fosite/issues/670)) ([37f8a0a](https://github.com/ory/fosite/commit/37f8a0ac12e47893459528cabb38b9879600286d)) + +### Code Refactoring + +- **config:** Support hot reloading ([1661401](https://github.com/ory/fosite/commit/16614014a42b3905d065188c8f1f45433c4353f9)), closes [#666](https://github.com/ory/fosite/issues/666): + + This patch updates the config system to be replacable and uses functions instead of struct fields. This allows implementing hot reloading mechanisms easily. + +- Move to go 1.17 ([d9d0fed](https://github.com/ory/fosite/commit/d9d0fedaad87044e4d38ba82e01d9e430d09514c)) + +### Features + +- Add `ory_at|pt|ac` prefixes to HMAC tokens ([b652335](https://github.com/ory/fosite/commit/b652335c965d5cc523faebad9c9792c4135cfb75)): + + See https://github.com/ory/hydra/issues/2845 + +- Add json mappings to default session and its contents ([#688](https://github.com/ory/fosite/issues/688)) ([d8ecac4](https://github.com/ory/fosite/commit/d8ecac4077c446b71842372169abc37a02f9e1b7)) +- Add json mappings to generic session to match openid session ([#690](https://github.com/ory/fosite/issues/690)) ([2386b25](https://github.com/ory/fosite/commit/2386b259837ab89983f6d0ee37b147b36b171f5b)) +- Implement client token lifespan customization ([#684](https://github.com/ory/fosite/issues/684)) ([cfffe8c](https://github.com/ory/fosite/commit/cfffe8cec67a986e2abc736b940f9f0bab9ad7d9)): + + This change introduces the ability to control the lifespan of tokens for each valid combination of Client, GrantType, and TokenType. + +- Introduce cache strategy for JWKS fetcher ([452f377](https://github.com/ory/fosite/commit/452f37728890c68524b9aa190e1cdb279414f802)) +- Make http source contextualized ([9fc89e9](https://github.com/ory/fosite/commit/9fc89e9007c71354f7fe2d036ea6e175a2e5860b)) +- PAR implementation ([#660](https://github.com/ory/fosite/issues/660)) ([3de78db](https://github.com/ory/fosite/commit/3de78db805fe1c69b0fc5b853bfabeb19433feba)), closes [#628](https://github.com/ory/fosite/issues/628): + + Implements [RFC9126 - Pushed Authorization Request](https://www.rfc-editor.org/rfc/rfc9126.html). 
+ +- Support variety of JWT formats when `jose.JSONWebKey` is used ([2590eb8](https://github.com/ory/fosite/commit/2590eb83d1e66df998053bc2fb7381b9043c232e)) + +### Tests + +- Fix assertions ([#683](https://github.com/ory/fosite/issues/683)) ([551b8b8](https://github.com/ory/fosite/commit/551b8b827cf0b7033aac80818a516ee3c5b8523e)) +- Fix panic ([fe60766](https://github.com/ory/fosite/commit/fe60766cdb1f0d22df7d9c4543b06cfd6dc7aea1)) + +### Unclassified + +- Revert "chore: delete .circleci folder (#699)" (#705) ([ef753d5](https://github.com/ory/fosite/commit/ef753d550d59b077f6ae349d6c795e2c142ec676)), closes [#699](https://github.com/ory/fosite/issues/699) [#705](https://github.com/ory/fosite/issues/705): + + This reverts commit 2eea63bddcbdf50771adf670391e495e339f619f since CircleCI is still used here. + +# [0.42.2](https://github.com/ory/fosite/compare/v0.42.1...v0.42.2) (2022-04-17) + +autogen(docs): regenerate and update changelog + +### Bug Fixes + +- Always rollback ([#638](https://github.com/ory/fosite/issues/638)) ([7edf673](https://github.com/ory/fosite/commit/7edf673f20aece260f9ba677a07086c48835fba8)), closes [#637](https://github.com/ory/fosite/issues/637) +- Empty client secret via basic auth header means "none" authn ([#655](https://github.com/ory/fosite/issues/655)) ([7a2d972](https://github.com/ory/fosite/commit/7a2d9721f4b6da0e3b2b829ec4312de1e3d66b6f)), closes [/github.com/golang/oauth2/blob/ee480838109b20d468babcb00b7027c82f962065/internal/token.go#L174-L176](https://github.com//github.com/golang/oauth2/blob/ee480838109b20d468babcb00b7027c82f962065/internal/token.go/issues/L174-L176): + + The existing client authentication code treats an empty client_secret + query parameter to be equivalent to "none" authentication instead of + "client_secret_post." + + This change updates the basic auth check to be consistent with this. + That is, an empty secret via the basic auth header is considered to + mean "none" instead of "client_secret_basic." + + The "golang.org/x/oauth2" library probes for both methods of + authentication, starting with the basic auth header approach first. + + As required, both client ID and secret are encoded in one header: + +- Handle invalid_token error for refresh_token is expired ([#664](https://github.com/ory/fosite/issues/664)) ([76bb274](https://github.com/ory/fosite/commit/76bb274e95585d4552789abbd1c1f123463ff47e)) +- Handle token_inactive error for multiple concurrent refresh requests ([#652](https://github.com/ory/fosite/issues/652)) ([7c8f4ae](https://github.com/ory/fosite/commit/7c8f4ae49550c61ff43d1a86adace4ed08c71e3e)): + + See https://github.com/ory/hydra/issues/3004 + +- Url-encode the fragment in the redirect URL of the authorize response ([#649](https://github.com/ory/fosite/issues/649)) ([beec138](https://github.com/ory/fosite/commit/beec13889c431ff06348c032dd260d00db253dd2)), closes [#648](https://github.com/ory/fosite/issues/648): + + This patch reverts the encoding logic for the fragment of the redirect URL returned as part of the authorize response to what was the one before version `0.36.0`. In that version, the code was refactored and the keys and values of the fragment ceased to be url-encoded. 
This in turn reflected on all Ory Hydra versions starting from `1.9.0` and provoked a breaking change that made the parsing of the fragment impossible if any of the params contain a character like `&` or `=` because they get treated as separators instead of as text + +- Use the correct algorithm for at_hash and c_hash ([#659](https://github.com/ory/fosite/issues/659)) ([8cb4b4b](https://github.com/ory/fosite/commit/8cb4b4b0c57be8944e403a0f3ec588b19f49f6f7)), closes [#630](https://github.com/ory/fosite/issues/630) + +### Code Generation + +- **docs:** Regenerate and update changelog ([5dbfa9a](https://github.com/ory/fosite/commit/5dbfa9a56d36061d5bf80149e1801c36a371bafd)) + +### Documentation + +- Add deprecation to communicate ropc discouragement ([#665](https://github.com/ory/fosite/issues/665)) ([df491be](https://github.com/ory/fosite/commit/df491beb5e82ca66bf5c5825c91ded0ca9d67b57)): + + This adds godoc deprecations to the compose.OAuth2ResourceOwnerPasswordCredentialsFactory and oauth2.ResourceOwnerPasswordCredentialsGrantHandler in order to clearly communicate the discouragement of the ROPC grant type to users implementing this library. + +### Features + +- Use custom hash.Hash in hmac.HMACStrategy ([#663](https://github.com/ory/fosite/issues/663)) ([d09a8c3](https://github.com/ory/fosite/commit/d09a8c39284fecce47933ff3b53d90d35b646b0c)), closes [#654](https://github.com/ory/fosite/issues/654) + +# [0.42.1](https://github.com/ory/fosite/compare/v0.42.0...v0.42.1) (2022-02-03) + +autogen(docs): regenerate and update changelog + +### Code Generation + +- **docs:** Regenerate and update changelog ([dcc6550](https://github.com/ory/fosite/commit/dcc6550b807980faca740b261790b3be339632c7)) + +### Features + +- Support FormPostHTMLTemplate config for fosite ([#647](https://github.com/ory/fosite/issues/647)) ([570ce3f](https://github.com/ory/fosite/commit/570ce3f6e3bf4e54781a6bfffc2ce777f0ac5194)), closes [#646](https://github.com/ory/fosite/issues/646) + +# [0.42.0](https://github.com/ory/fosite/compare/v0.41.0...v0.42.0) (2022-01-06) + +autogen(docs): regenerate and update changelog + +### Code Generation + +- **docs:** Regenerate and update changelog ([cf2c545](https://github.com/ory/fosite/commit/cf2c545540c12bfa5cfbf752bc84c03a8a515ecc)) + +### Features + +- Add new function to TokenRevocationStorage to support refresh token grace-period ([#635](https://github.com/ory/fosite/issues/635)) ([9b40d03](https://github.com/ory/fosite/commit/9b40d036e6494dfe9942b513b8bc4a50c7c9f730)) + +# [0.41.0](https://github.com/ory/fosite/compare/v0.40.2...v0.41.0) (2021-11-13) + +autogen(docs): regenerate and update changelog + +### Bug Fixes + +- Force HTTP GET for redirect responses ([#636](https://github.com/ory/fosite/issues/636)) ([f6c6523](https://github.com/ory/fosite/commit/f6c6523a09e7733d5ca263bccb7fd4fdb80172b2)) +- Include `typ` in jwt header ([#607](https://github.com/ory/fosite/issues/607)) ([7644a74](https://github.com/ory/fosite/commit/7644a74bd48accb46d8578f6846b3e509dfd4b03)), closes [#606](https://github.com/ory/fosite/issues/606) +- Make `amr` claim an array to match the OIDC spec ([#625](https://github.com/ory/fosite/issues/625)) 
([8a6f66a](https://github.com/ory/fosite/commit/8a6f66ab5d9f74140f4ce94210f09ccb0e27f56d)) +- Resolve nancy warning ([b6cf0a6](https://github.com/ory/fosite/commit/b6cf0a641d1169595ceb3110f76be0788e778521)) + +### Code Generation + +- **docs:** Regenerate and update changelog ([1777ad5](https://github.com/ory/fosite/commit/1777ad52e68b20ce57ed7f2f7d085895c3c157c6)) + +### Code Refactoring + +- Upgrade go-jose to decode JSON numbers into int64 ([#603](https://github.com/ory/fosite/issues/603)) ([c02d327](https://github.com/ory/fosite/commit/c02d3273e30ca9b29285d1641b252e6c29598ea5)), closes [#602](https://github.com/ory/fosite/issues/602) + +### Documentation + +- Add missing word ([#626](https://github.com/ory/fosite/issues/626)) ([c7a553b](https://github.com/ory/fosite/commit/c7a553bb4945013be17d2bbd2ec126ae93113a72)) +- Document that DeleteOpenIDConnectSession is deprecated ([#634](https://github.com/ory/fosite/issues/634)) ([4e2c03d](https://github.com/ory/fosite/commit/4e2c03d3f6dcb3a3b50e7ea245128edde7ebf959)) + +### Features + +- Add client secret rotation support ([#608](https://github.com/ory/fosite/issues/608)) ([a4ce354](https://github.com/ory/fosite/commit/a4ce3544c2996a99b65350d4b200967df9fc0d45)), closes [#590](https://github.com/ory/fosite/issues/590) +- Add prettier and format ([d682bdf](https://github.com/ory/fosite/commit/d682bdf51c22c211ee1aceb06fb7c4a7e43db326)) +- Add ResponseModeHandler to support custom response modes ([#592](https://github.com/ory/fosite/issues/592)) ([10ec003](https://github.com/ory/fosite/commit/10ec003fb414fd3fcbd3e2e6d250cb2da51a0304)), closes [#591](https://github.com/ory/fosite/issues/591) +- I18n support added ([#627](https://github.com/ory/fosite/issues/627)) ([cf02af9](https://github.com/ory/fosite/commit/cf02af977681fd667b33f8e131891f6746d0b9da)), closes [#615](https://github.com/ory/fosite/issues/615) +- Support jose.opaquesigner for JWTs ([#611](https://github.com/ory/fosite/issues/611)) ([1121a0a](https://github.com/ory/fosite/commit/1121a0aa4155e9216abb989ab008df8cff67830d)) +- Use bitwise comparison for jwt validation errors ([#633](https://github.com/ory/fosite/issues/633)) ([52ee93f](https://github.com/ory/fosite/commit/52ee93fe976152457482870b4ebb487560ca93e0)) + +# [0.40.2](https://github.com/ory/fosite/compare/v0.40.1...v0.40.2) (2021-05-28) + +feat: use int64 type for claims with timestamps (#600) + +Co-authored-by: Nestor + +### Features + +- Use int64 type for claims with timestamps ([#600](https://github.com/ory/fosite/issues/600)) ([c370994](https://github.com/ory/fosite/commit/c370994c007be101a388f825f1a4d6b38393756e)) + +# [0.40.1](https://github.com/ory/fosite/compare/v0.40.0...v0.40.1) (2021-05-23) + +fix: revert float64 auth_time claim (#599) + +Closes #598 + +### Bug Fixes + +- Revert float64 auth_time claim ([#599](https://github.com/ory/fosite/issues/599)) ([e609d91](https://github.com/ory/fosite/commit/e609d9196070050adf39b9bdb3cbfbba2edda0d5)), closes [#598](https://github.com/ory/fosite/issues/598) + +# [0.40.0](https://github.com/ory/fosite/compare/v0.39.0...v0.40.0) 
(2021-05-21) + +feat: transit from jwt-go to go-jose (#593) + +Closes #514 + +Co-authored-by: hackerman <3372410+aeneasr@users.noreply.github.com> + +### Bug Fixes + +- 582memory store authentication error code ([#583](https://github.com/ory/fosite/issues/583)) ([51b4424](https://github.com/ory/fosite/commit/51b44248275128ca83e1899522f2cd412e5c466e)) +- Do not include nonce in ID tokens when not used ([#570](https://github.com/ory/fosite/issues/570)) ([795dee2](https://github.com/ory/fosite/commit/795dee246f26c1fef16dcd52da37e3df75e73772)) +- Sha alg name in error message and go doc ([#571](https://github.com/ory/fosite/issues/571)) ([0f2e289](https://github.com/ory/fosite/commit/0f2e289973ad22d14c5d5bedd4fc9bb886134354)) +- Upgrade gogo protubuf ([#573](https://github.com/ory/fosite/issues/573)) ([9a9467a](https://github.com/ory/fosite/commit/9a9467a20391059534df859b2b295711918bfd08)) + +### Code Refactoring + +- Generate claims in the same way ([#595](https://github.com/ory/fosite/issues/595)) ([4c7b13f](https://github.com/ory/fosite/commit/4c7b13f2f1234128c53e8fc3e6cc3981e10d3069)) + +### Documentation + +- Add client credentials grant how-to ([#589](https://github.com/ory/fosite/issues/589)) ([893aae4](https://github.com/ory/fosite/commit/893aae4348cfef78cb3d7f9aa70568e2137b4b3f)), closes [#566](https://github.com/ory/fosite/issues/566) + +### Features + +- Allow extra fields in introspect response ([#579](https://github.com/ory/fosite/issues/579)) ([294a0bf](https://github.com/ory/fosite/commit/294a0bf7f4cb01739a560480364403118d1408bf)), closes [#441](https://github.com/ory/fosite/issues/441) +- Allow omitting scope in authorization redirect uri ([#588](https://github.com/ory/fosite/issues/588)) ([6ad9264](https://github.com/ory/fosite/commit/6ad92642f0f01ff4d3662f3680a825db22594366)) +- Pass requests through context ([#596](https://github.com/ory/fosite/issues/596)) ([2f96bb8](https://github.com/ory/fosite/commit/2f96bb8a2623fe7b4abb31db870582b555df6db8)), closes [#537](https://github.com/ory/fosite/issues/537) +- Transit from jwt-go to go-jose ([#593](https://github.com/ory/fosite/issues/593)) ([d022bbc](https://github.com/ory/fosite/commit/d022bbc2b45fd603cb12575e28bbe884170bf788)), closes [#514](https://github.com/ory/fosite/issues/514) + +### Tests + +- Change sha algorithm name acc to standard naming ([#572](https://github.com/ory/fosite/issues/572)) ([a3594a3](https://github.com/ory/fosite/commit/a3594a3cb0eb70e912a7268d2d396d19a45116c6)) + +# [0.39.0](https://github.com/ory/fosite/compare/v0.38.0...v0.39.0) (2021-03-08) + +feat: token reuse detection (#567) + +See https://github.com/ory/hydra/issues/2022 + +### Features + +- Token reuse detection ([#567](https://github.com/ory/fosite/issues/567)) ([db7f981](https://github.com/ory/fosite/commit/db7f9817ee19878c4bf650e97b49be7e3b268ee0)): + + See https://github.com/ory/hydra/issues/2022 + +# [0.38.0](https://github.com/ory/fosite/compare/v0.37.0...v0.38.0) (2021-02-23) + +feat: add ClientAuthenticationStrategy extension point (#565) + +Closes #564 + +## Breaking Changes + 
+Replaces `token_expired` error ID with `invalid_token` which is the correct value according to https://tools.ietf.org/html/rfc6750#section-3.1 + +### Bug Fixes + +- Use correct error code for expired token ([#562](https://github.com/ory/fosite/issues/562)) ([56a71e5](https://github.com/ory/fosite/commit/56a71e5f9797abe35a9566c86f9ce9c1f485c11a)) + +### Features + +- Add ClientAuthenticationStrategy extension point ([#565](https://github.com/ory/fosite/issues/565)) ([ec0bec2](https://github.com/ory/fosite/commit/ec0bec2d8462bae2dc545defbd21190dfe832024)), closes [#564](https://github.com/ory/fosite/issues/564) + +# [0.37.0](https://github.com/ory/fosite/compare/v0.36.1...v0.37.0) (2021-02-05) + +feat: add support for urn:ietf:params:oauth:grant-type:jwt-bearer grant type RFC 7523 (#560) + +Closes #546 +Closes #305 + +Co-authored-by: Vladimir Kalugin +Co-authored-by: i.seliverstov + +### Bug Fixes + +- Resolve regression ([#561](https://github.com/ory/fosite/issues/561)) ([173d60e](https://github.com/ory/fosite/commit/173d60e5324c19c2323d2b8a731e201bf26845ce)) + +### Features + +- Add support for urn:ietf:params:oauth:grant-type:jwt-bearer grant type RFC 7523 ([#560](https://github.com/ory/fosite/issues/560)) ([9720241](https://github.com/ory/fosite/commit/9720241c57e2154ed9fdb44fcf25e8c6b50410ee)), closes [#546](https://github.com/ory/fosite/issues/546) [#305](https://github.com/ory/fosite/issues/305) + +# [0.36.1](https://github.com/ory/fosite/compare/v0.36.0...v0.36.1) (2021-01-11) + +chore: bump deps + +### Bug Fixes + +- Broken dependency to reflection package ([#555](https://github.com/ory/fosite/issues/555)) ([a103222](https://github.com/ory/fosite/commit/a1032221363726bdcdc2f9b1c1898f99c62e8932)) + +### Chores + +- Bump deps ([c2375de](https://github.com/ory/fosite/commit/c2375de6ff3229493b6a6ad628bf4e4961c8d989)) + +### Code Refactoring + +- Use constructor ([#535](https://github.com/ory/fosite/issues/535)) ([2da54e3](https://github.com/ory/fosite/commit/2da54e3620a467e20d67ae05d0d3885a2383e4d4)) +- Use provided context ([#536](https://github.com/ory/fosite/issues/536)) ([35d4f13](https://github.com/ory/fosite/commit/35d4f133faa87076c7eb1c5e8384f3653643de9e)) + +# [0.36.0](https://github.com/ory/fosite/compare/v0.35.1...v0.36.0) (2020-11-16) + +fix: be more permissive in time checks + +Time equality should not cause failures in OpenID Connect validation. + +## Breaking Changes + +This patch removes fields `error_hint`, `error_debug` from error responses. To use the legacy error format where these fields are included, set `UseLegacyErrorFormat` to true in your compose config or directly on the `Fosite` struct. If `UseLegacyErrorFormat` is set, the `error_description` no longer merges `error_hint` nor `error_debug` messages which reverts a change introduced in `v0.33.0`. Instead, `error_hint` and `error_debug` are included and the merged message can be constructed from those fields. + +As part of this change, the error interface and its fields have changed: + +- `RFC6749Error.Name` was renamed to `RFC6749Error.ErrorField`. +- `RFC6749Error.Description` was renamed to `RFC6749Error.DescriptionField`. +- `RFC6749Error.Hint` was renamed to `RFC6749Error.HintField`. 
+- `RFC6749Error.Code` was renamed to `RFC6749Error.CodeField`. +- `RFC6749Error.Hint` was renamed to `RFC6749Error.HintField`. +- `RFC6749Error.WithCause()` was renamed to `RFC6749Error.WithWrap() *RFC6749Error` and alternatively to `RFC6749Error.Wrap()` (without return value) to standardize naming conventions around the new Go 1.14+ error interfaces. + +As part of this change, methods `GetResponseMode`, `SetDefaultResponseMode`, `GetDefaultResponseMode ` where added to interface `AuthorizeRequester`. Also, methods `GetQuery`, `AddQuery`, and `GetFragment` were merged into one function `GetParameters` and `AddParameter` on the `AuthorizeResponder` interface. Methods on `AuthorizeRequest` and `AuthorizeResponse` changed accordingly and will need to be updated in your codebase. Additionally, the field `Debug` was renamed to `DebugField` and a new method `Debug() string` was added to `RFC6749Error`. + +Co-authored-by: hackerman <3372410+aeneasr@users.noreply.github.com> + +### Bug Fixes + +- Allow all request object algs when client value is unset ([1d14636](https://github.com/ory/fosite/commit/1d14636e61b2047e5eee6d1d740249b819fc0794)): + + Allows all request object signing algorithms when the client has not explicitly allowed a certain algorithm. This follows the spec: + + > \*request_object_signing_alg - OPTIONAL. JWS [JWS] alg algorithm [JWA] that MUST be used for signing Request Objects sent to the OP. All Request Objects from this Client MUST be rejected, if not signed with this algorithm. Request Objects are described in Section 6.1 of OpenID Connect Core 1.0 [OpenID.Core]. This algorithm MUST be used both when the Request Object is passed by value (using the request parameter) and when it is passed by reference (using the request_uri parameter). Servers SHOULD support RS256. The value none MAY be used. The default, if omitted, is that any algorithm supported by the OP and the RP MAY be used. + +- Always return non-error response for inactive tokens ([#517](https://github.com/ory/fosite/issues/517)) ([5f2cae3](https://github.com/ory/fosite/commit/5f2cae3eabb83da898e1b5515176e65dda4da862)) +- Be more permissive in time checks ([839d000](https://github.com/ory/fosite/commit/839d00093a2ed8c590d910f113186cd96fad9185)): + + Time equality should not cause failures in OpenID Connect validation. + +- Do not accidentally leak jwks fetching errors ([6d2092d](https://github.com/ory/fosite/commit/6d2092da1e8699e43fd6dccb4c3a33b885cec7f8)), closes [/github.com/ory/fosite/pull/526#discussion_r517491738](https://github.com//github.com/ory/fosite/pull/526/issues/discussion_r517491738) +- Do not require nonce for hybrid flows ([de5c8f9](https://github.com/ory/fosite/commit/de5c8f90e8ccae0849fa6426d53563ef7520880d)): + + This patch resolves an issue where nonce was required for hybrid flows, which does not comply with the OpenID Connect conformity test suite, specifically the `oidcc-ensure-request-without-nonce-succeeds-for-code-flow` test. 
+ +- Guess default response mode in `NewAuthorizeRequest` ([a2952d7](https://github.com/ory/fosite/commit/a2952d7ad09fbd83a354b22dbcc0cef8a15f50f7)) +- Improve claims handling for jwts ([a72ca9a](https://github.com/ory/fosite/commit/a72ca9a978e60d7c4b000c41357719f0e2b61f8e)) +- Improve error stack wrapping ([620d4c1](https://github.com/ory/fosite/commit/620d4c148307f7be7b2674fe420141b33aef6075)) +- Kid header is not required for key lookup ([27cc5c0](https://github.com/ory/fosite/commit/27cc5c0e935ecb8bca23dd8c2670c8a93f7b829d)) +- Modernized JWT stateless introspection ([#519](https://github.com/ory/fosite/issues/519)) ([a6bfb92](https://github.com/ory/fosite/commit/a6bfb921ebc746ba7a1215e32fb42a2c0530a2bf)) +- Only use allowed characters in error_description ([431f9a5](https://github.com/ory/fosite/commit/431f9a56ed03648ea4ef637fe6c2b6d74e765dad)), closes [#525](https://github.com/ory/fosite/issues/525): + + Replace LF and quotes with `.` and `'` to match allowed and recommended character set defined in various RFCs. + +- Prevent debug details from leaking during key lookup ([c0598fb](https://github.com/ory/fosite/commit/c0598fb8d8ce75b7f0ad645420caea641e64a4d2)), closes [/github.com/ory/fosite/pull/526#discussion_r517490461](https://github.com//github.com/ory/fosite/pull/526/issues/discussion_r517490461) +- Reset jti and hash ID token claims on refresh ([#523](https://github.com/ory/fosite/issues/523)) ([ce2de73](https://github.com/ory/fosite/commit/ce2de73ff979b02be32d850c1c695067a35576c7)) +- Use state from request object ([8cac1a0](https://github.com/ory/fosite/commit/8cac1a00a6f87523b88fea6962ab1194049cbacd)): + + Resolves failing OIDC conformity test "oidcc-request-uri-unsigned". + +### Code Refactoring + +- Use rfc compliant error formating ([edbbda3](https://github.com/ory/fosite/commit/edbbda3c4cf70a77cdcd1383c55762c73613f87e)) + +### Documentation + +- Document Session interface methods ([#512](https://github.com/ory/fosite/issues/512)) ([11a95ba](https://github.com/ory/fosite/commit/11a95ba00f562b3864fc0d6878c9d93943cc4273)) +- Updates banner in readme.md ([#529](https://github.com/ory/fosite/issues/529)) ([9718eb6](https://github.com/ory/fosite/commit/9718eb6ce63983ade0689908b5cce3e27c8838bc)) + +### Features + +- Add support for response_mode=form_post ([#509](https://github.com/ory/fosite/issues/509)) ([3e3290f](https://github.com/ory/fosite/commit/3e3290f811f849881f1c6bafabc1c765d9a42ac7)): + + This patch introduces support for `response_mode=form_post` as well as `response_mode` of `none` and `query` and `fragment`. + + To support this new feature your OAuth2 Client must implement the `fosite.ResponseModeClient` interface. 
We suggest to always return all response modes there unless you want to explicitly disable one of the response modes: + + ```go + func (c *Client) GetResponseModes() []fosite.ResponseModeType { + return []fosite.ResponseModeType{ + fosite.ResponseModeDefault, + fosite.ResponseModeFormPost, + fosite.ResponseModeQuery, + fosite.ResponseModeFragment, + } + } + ``` + +- Improve error messages ([#513](https://github.com/ory/fosite/issues/513)) ([fcac5a6](https://github.com/ory/fosite/commit/fcac5a6457c92d1eb1a389192cd0c7fb590ab8b3)) +- Introduce WithExposeDebug to error interface ([625a521](https://github.com/ory/fosite/commit/625a5214c4a002b4d0f86e49555edf8755703968)) +- Support passing repeated audience parameter in URL query ([#518](https://github.com/ory/fosite/issues/518)) ([47f2a31](https://github.com/ory/fosite/commit/47f2a31fbed137b58e4866f78ec8b9f591134f98)), closes [#504](https://github.com/ory/fosite/issues/504): + + Added `GetAudiences` helper function which tries to have current behavior and also support multiple/repeated audience parameters. If there are parameter is repeated, then it is not split by space. If there is only one then it is split by space. I think this is the best balance between standard/backwards behavior and allowing repeated parameter and allowing also URIs/audiences with spaces in them (which we probably all agree is probably not something anyone should be doing). + + Also added `ExactAudienceMatchingStrategy` which is slightly more suitable to use for audiences which are not URIs. In [OIDC spec](https://openid.net/specs/openid-connect-core-1_0.html) audience is described as: + + > Audience(s) that this ID Token is intended for. It MUST contain the OAuth 2.0 client_id of the Relying Party as an audience value. It MAY also contain identifiers for other audiences. In the general case, the aud value is an array of case sensitive strings. In the common special case when there is one audience, the aud value MAY be a single case sensitive string. + + `client_id` is generally not an URI, but some UUID or some other random string. 
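The audience handling described in the last feature entry above can be written out as a small standalone sketch. This is not fosite's `GetAudiences` implementation; it is only the rule described there (split a single `audience` value on spaces, keep repeated parameters as-is) expressed with `net/url` for illustration:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// parseAudiences mirrors the rule described above: a single audience value is
// split on whitespace, while repeated audience parameters are kept untouched,
// which is what allows audiences that themselves contain spaces.
func parseAudiences(form url.Values) []string {
	values := form["audience"]
	if len(values) == 1 {
		return strings.Fields(values[0])
	}
	return values
}

func main() {
	single, _ := url.ParseQuery("audience=https://api.example.org+https://other.example.org")
	repeated, _ := url.ParseQuery("audience=my+audience+with+spaces&audience=another-client-id")

	fmt.Println(parseAudiences(single))   // [https://api.example.org https://other.example.org]
	fmt.Println(parseAudiences(repeated)) // [my audience with spaces another-client-id]
}
```

Repeating the parameter is therefore the way to pass an audience that itself contains spaces, while a single space-delimited value keeps the previous behaviour.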
+ +# [0.35.1](https://github.com/ory/fosite/compare/v0.35.0...v0.35.1) (2020-10-11) + +autogen(docs): regenerate and update changelog + +### Bug Fixes + +- Uniform audience parsing ([#505](https://github.com/ory/fosite/issues/505)) ([e3f331d](https://github.com/ory/fosite/commit/e3f331d0d8e4470eef3dd7ecb46e66eeebfbe4c7)) + +### Code Generation + +- **docs:** Regenerate and update changelog ([c598cc7](https://github.com/ory/fosite/commit/c598cc7fae17e70db2bad555cff94e97b2ca185b)) + +### Documentation + +- Improved test descriptions ([#507](https://github.com/ory/fosite/issues/507)) ([29e9336](https://github.com/ory/fosite/commit/29e9336be5673530ae00e735c3dc7d191f4b03a6)) + +### Features + +- Allow configuring redirect secure checker everywhere ([#489](https://github.com/ory/fosite/issues/489)) ([e87d091](https://github.com/ory/fosite/commit/e87d0910f3ee960dbc7b1bc0fef124c9b928a55c)) +- Scope can now be space delimited in access tokens ([#482](https://github.com/ory/fosite/issues/482)) ([8225935](https://github.com/ory/fosite/commit/8225935276d40a24da400d46ee7e7b63976488a1)), closes [#362](https://github.com/ory/fosite/issues/362) + +# [0.35.0](https://github.com/ory/fosite/compare/v0.34.1...v0.35.0) (2020-10-06) + +autogen(docs): regenerate and update changelog + +## Breaking Changes + +Type `fosite.TokenType` has been renamed to `fosite.TokenUse`. + +### Bug Fixes + +- Redirct_url with query escape character outside of query is failing ([#480](https://github.com/ory/fosite/issues/480)) ([6e49c57](https://github.com/ory/fosite/commit/6e49c57c8f7a46a78eda4d3091765d631f427845)): + + See https://github.com/ory/hydra/issues/2055 + + Co-authored-by: ajanthan + +- Rename TokenType to TokenUse in introspection ([#486](https://github.com/ory/fosite/issues/486)) ([4b81316](https://github.com/ory/fosite/commit/4b81316a1dbb0c5246bac39ecbaff749b00e4efa)), closes [ory/hydra#1762](https://github.com/ory/hydra/issues/1762) +- Return allowed redirect url with preference ([f0badc4](https://github.com/ory/fosite/commit/f0badc4919e00fa179dd54edcbd7385fac14fa19)) + +### Code Generation + +- **docs:** Regenerate and update changelog ([3f0bc87](https://github.com/ory/fosite/commit/3f0bc875af230342d161de8516b7c0050f89d648)) + +# [0.34.1](https://github.com/ory/fosite/compare/v0.34.0...v0.34.1) (2020-10-02) + +fix: make redirect URL checking more strict + +The OAuth 2.0 Client's Redirect URL and the Redirect URL used in the OAuth 2.0 flow do not check if the query string is equal: + +1. Registering a client with allowed redirect URL `https://example.com/callback` +2. Performing OAuth2 flow and requesting redirect URL `https://example.com/callback?bar=foo` +3. Instead of an error, the browser is redirected to `https://example.com/callback?bar=foo` with a potentially successful OAuth2 response. + +Additionally, matching Redirect URLs used `strings.ToLower` normalization: + +1. Registering a client with allowed redirect URL `https://example.com/callback` +2. Performing OAuth2 flow and requesting redirect URL `https://example.com/CALLBACK` +3. Instead of an error, the browser is redirected to `https://example.com/CALLBACK ` with a potentially successful OAuth2 response. 
+ +This patch addresses all of these issues and adds regression tests to keep the implementation secure in future releases. + +### Bug Fixes + +- Make redirect URL checking more strict ([cdee51e](https://github.com/ory/fosite/commit/cdee51ebe721bfc8acca0fd0b86b030ca70867bf)): + + The OAuth 2.0 Client's Redirect URL and the Redirect URL used in the OAuth 2.0 flow do not check if the query string is equal: + + 1. Registering a client with allowed redirect URL `https://example.com/callback` + 2. Performing OAuth2 flow and requesting redirect URL `https://example.com/callback?bar=foo` + 3. Instead of an error, the browser is redirected to `https://example.com/callback?bar=foo` with a potentially successful OAuth2 response. + + Additionally, matching Redirect URLs used `strings.ToLower` normalization: + + 1. Registering a client with allowed redirect URL `https://example.com/callback` + 2. Performing OAuth2 flow and requesting redirect URL `https://example.com/CALLBACK` + 3. Instead of an error, the browser is redirected to `https://example.com/CALLBACK ` with a potentially successful OAuth2 response. + + This patch addresses all of these issues and adds regression tests to keep the implementation secure in future releases. + +### Documentation + +- Added missing dot ([#487](https://github.com/ory/fosite/issues/487)) ([a822244](https://github.com/ory/fosite/commit/a82224430292b2f209d011f107998273d568912b)) + +# [0.34.0](https://github.com/ory/fosite/compare/v0.33.0...v0.34.0) (2020-09-24) + +chore: fix unused const linter error (#484) + +## Breaking Changes + +`fosite.ErrRevocationClientMismatch` was removed because it is not part of [RFC 6749](https://tools.ietf.org/html/rfc6749#section-5.2). Instead, `fosite.ErrUnauthorizedClient` will be returned when calling `RevokeToken` with an OAuth2 Client which does not match the Access or Refresh Token to be revoked. + +### Bug Fixes + +- Full JSON escaping ([#481](https://github.com/ory/fosite/issues/481)) ([0943a10](https://github.com/ory/fosite/commit/0943a1095a209fdfb2f8a29524b676ee9c9650a1)) +- Ignore x/net false positives ([#483](https://github.com/ory/fosite/issues/483)) ([aead149](https://github.com/ory/fosite/commit/aead1499deb8b08f48bcc196a88e5715702b5431)) + +### Chores + +- Fix unused const linter error ([#484](https://github.com/ory/fosite/issues/484)) ([3540462](https://github.com/ory/fosite/commit/354046265cd4ffcbff8465e4b7a7ea7b6741c5e4)) + +### Features + +- Errors now wrap underlying errors ([#479](https://github.com/ory/fosite/issues/479)) ([b53f8f5](https://github.com/ory/fosite/commit/b53f8f58f0b9889d044cf9a8e2604316f0559ff6)), closes [#458](https://github.com/ory/fosite/issues/458) + +### Unclassified + +- Merge pull request from GHSA-7mqr-2v3q-v2wm ([03dd558](https://github.com/ory/fosite/commit/03dd55813f5521985f7dd64277b7ba0cf1441319)) + +# [0.33.0](https://github.com/ory/fosite/compare/v0.32.4...v0.33.0) (2020-09-16) + +feat: error_hint and error_debug are now exposed through error_description (#460) + +BREAKING CHANGE: Merges the error description with error hint and error debug, making it easier to consume error messages in standardized OAuth2 clients. + +## Breaking Changes + +Merges the error description with error hint and error debug, making it easier to consume error messages in standardized OAuth2 clients. 
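+
+As a rough illustration of why the merge helps, a standards-only OAuth2 client usually decodes nothing beyond the two RFC 6749 error fields, so details that are not folded into `error_description` never reach it (a generic sketch, not fosite code):
+
+```go
+package client
+
+import "encoding/json"
+
+// rfc6749Error holds the only error fields a generic OAuth2 client tends to
+// read; hint and debug details are visible to it only when they are merged
+// into error_description.
+type rfc6749Error struct {
+	Name        string `json:"error"`
+	Description string `json:"error_description"`
+}
+
+func parseOAuth2Error(body []byte) (*rfc6749Error, error) {
+	var e rfc6749Error
+	if err := json.Unmarshal(body, &e); err != nil {
+		return nil, err
+	}
+	return &e, nil
+}
+```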
+ +### Features + +- Error_hint and error_debug are now exposed through error_description ([#460](https://github.com/ory/fosite/issues/460)) ([8daab21](https://github.com/ory/fosite/commit/8daab21f97c513101d224a7ad7a44b871440be57)) + +# [0.32.4](https://github.com/ory/fosite/compare/v0.32.3...v0.32.4) (2020-09-15) + +autogen(docs): regenerate and update changelog + +### Code Generation + +- **docs:** Regenerate and update changelog ([1f16df0](https://github.com/ory/fosite/commit/1f16df0862bbcdfba98644d1c8fce8a9f92bbbec)) + +### Code Refactoring + +- Fix inconsistent spelling of revocation ([#477](https://github.com/ory/fosite/issues/477)) ([7a55edb](https://github.com/ory/fosite/commit/7a55edbb67738a721c5f1a8f58d2db67f6738f65)) + +### Documentation + +- Fix minor typos ([#475](https://github.com/ory/fosite/issues/475)) ([23cc9c1](https://github.com/ory/fosite/commit/23cc9c1d29f35a73acbf05fe6b505b692f6fe49c)) + +# [0.32.3](https://github.com/ory/fosite/compare/v0.32.2...v0.32.3) (2020-09-12) + +fix: add missing OAuth2TokenRevocationFactory to ComposeAllEnabled (#472) + +### Bug Fixes + +- Add missing OAuth2TokenRevocationFactory to ComposeAllEnabled ([#472](https://github.com/ory/fosite/issues/472)) ([88587fd](https://github.com/ory/fosite/commit/88587fde8fc92137660383c401250e492716c396)) +- Align error returned when a grant_type was requested that's not allowed for a client ([#467](https://github.com/ory/fosite/issues/467)) ([3c30c0d](https://github.com/ory/fosite/commit/3c30c0d9f1e62b237acc845d5b3a42d1ea9a80c0)), closes [/tools.ietf.org/html/rfc6749#section-5](https://github.com//tools.ietf.org/html/rfc6749/issues/section-5): + + Returned error was 'invalid_grant'. 
+ +- All responses now contain headers to not cache them ([#465](https://github.com/ory/fosite/issues/465)) ([2012cb7](https://github.com/ory/fosite/commit/2012cb7ec6feb504d1faa6e393fce8d25edafebb)) +- No cache headers followup ([#466](https://github.com/ory/fosite/issues/466)) ([1627c6a](https://github.com/ory/fosite/commit/1627c6ab31cb151f01671cd3403bc3c7de6fcfbd)) + +### Code Refactoring + +- Copy all values when sanitizing ([#455](https://github.com/ory/fosite/issues/455)) ([c80d0d4](https://github.com/ory/fosite/commit/c80d0d42a34f8cf664d44c687d7cfea576a0b232)) + +### Documentation + +- Add empty session example explanation ([#450](https://github.com/ory/fosite/issues/450)) ([36d65cb](https://github.com/ory/fosite/commit/36d65cbc061ff6cae38e90b0a6954646c8daf5d7)) +- Better section reference for GetRedirectURIFromRequestValues ([#463](https://github.com/ory/fosite/issues/463)) ([48a3daf](https://github.com/ory/fosite/commit/48a3daf45bd1885c4412eeb9b2bc3117b6075de9)) +- Deprecate history.md ([b0d5fea](https://github.com/ory/fosite/commit/b0d5feacfcbeedf609563fa8567bd0e031b179b5)), closes [/github.com/ory/fosite/issues/414#issuecomment-662538622](https://github.com//github.com/ory/fosite/issues/414/issues/issuecomment-662538622) + +### Features + +- Add locking to memory storage ([#471](https://github.com/ory/fosite/issues/471)) ([4687147](https://github.com/ory/fosite/commit/46871476b1f47cefc09888615f70dd9fdd5af8b3)) +- Make MinParameterEntropy configurable ([#461](https://github.com/ory/fosite/issues/461)) ([2c793e6](https://github.com/ory/fosite/commit/2c793e6c010ac6cbc552200197ae1262d91c2bda)), closes [#267](https://github.com/ory/fosite/issues/267) +- New compose strategies for ES256 ([#446](https://github.com/ory/fosite/issues/446)) ([39053ee](https://github.com/ory/fosite/commit/39053eedaa687fe1d8dbe8b928fb98cd5ce8c021)) + +# [0.32.2](https://github.com/ory/fosite/compare/v0.32.1...v0.32.2) (2020-06-22) + +feat: new factory with default issuer for JWT tokens (#444) + +### Features + +- New factory with default issuer for JWT tokens ([#444](https://github.com/ory/fosite/issues/444)) ([901e206](https://github.com/ory/fosite/commit/901e206d03b615c189e12f94607d92c10d6909fa)) + +# [0.32.1](https://github.com/ory/fosite/compare/v0.32.0...v0.32.1) (2020-06-05) + +feat: makeRemoveEmpty public (#443) + +### Bug Fixes + +- Improved error messages in client authentication ([#440](https://github.com/ory/fosite/issues/440)) ([c06e560](https://github.com/ory/fosite/commit/c06e5608c7ae6a0243428252e6ec80bc37ae33ca)), closes [#436](https://github.com/ory/fosite/issues/436) + +### Features + +- MakeRemoveEmpty public ([#443](https://github.com/ory/fosite/issues/443)) ([17b0756](https://github.com/ory/fosite/commit/17b075688f9a012b09e650e90d765de6d4d538cf)) + +# [0.32.0](https://github.com/ory/fosite/compare/v0.31.3...v0.32.0) (2020-05-28) + +feat: added support for ES256 token strategy and client authentication (#439) + +I added to `DefaultOpenIDConnectClient` a field `TokenEndpointAuthSigningAlgorithm` to be able to configure what `GetTokenEndpointAuthSigningAlgorithm` 
returns. I also cleaned some other places where there were assumptions about only RSA keys. + +Closes #429 + +### Bug Fixes + +- **arguments:** Fixes a logic bug in MatchesExact and adds documentation ([#433](https://github.com/ory/fosite/issues/433)) ([10fd67b](https://github.com/ory/fosite/commit/10fd67bf84118affc9269ca0c0dbc8da4b0bf2cd)): + +- Double-decoding of client credentials in request body ([#434](https://github.com/ory/fosite/issues/434)) ([48c9b41](https://github.com/ory/fosite/commit/48c9b41ea2dc89ec2bf58ba918c45c8430bb0ccd)): + + I noticed that client credentials are URL-decoded after being extracted from the POST body form, which was already URL-decoded by Go. The accompanying error message suggests this was copied and pasted from the HTTP basic authorization header handling, which is the only place where the extra URL-decoding was needed (as per the OAuth 2.0 spec). The result is that client credentials containing %-prefixed sequences, whether valid sequences or not, are going to fail validation. + + Remove the extra URL decoding. Add tests that ensure client credentials work with special characters in both the HTTP basic auth header and in the request body. + +### Documentation + +- Update github templates ([#432](https://github.com/ory/fosite/issues/432)) ([b393832](https://github.com/ory/fosite/commit/b393832765e0c97661bb5495e3a3d51a8019afd7)) +- Update repository templates ([a840a62](https://github.com/ory/fosite/commit/a840a62e401b4111f8304fa8b963006a866a20f8)) + +### Features + +- Added support for ES256 token strategy and client authentication ([#439](https://github.com/ory/fosite/issues/439)) ([36eb661](https://github.com/ory/fosite/commit/36eb661cc8b609877d8e81c849c34631bbab245a)), closes [#429](https://github.com/ory/fosite/issues/429): + + I added to `DefaultOpenIDConnectClient` a field `TokenEndpointAuthSigningAlgorithm` to be able to configure what `GetTokenEndpointAuthSigningAlgorithm` returns. I also cleaned some other places where there were assumptions about only RSA keys. + +# [0.31.3](https://github.com/ory/fosite/compare/v0.31.2...v0.31.3) (2020-05-09) + +feat(pkce): add EnforcePKCEForPublicClients config flag (#431) + +Alternative proposal for the issue discussed in #389 and #391, where enforcement of PKCE is wanted only for certain clients. + +Add a new flag EnforcePKCEForPublicClients which enforces PKCE only for public clients. The error hint is slightly different, as it mentions PKCE is enforced for "this client" rather than "clients". (It intentionally does not mention why it's enforced, as I think basing it on public clients is an implementation detail that servers may want to change without adding to the error hints). 
+ +Closes #389 +Closes #391 + +### Bug Fixes + +- Do not issue refresh tokens to clients who cannot use it ([#430](https://github.com/ory/fosite/issues/430)) ([792670d](https://github.com/ory/fosite/commit/792670d0e81ff83f2b345502ea7adadf99bcaa9b)), closes [#370](https://github.com/ory/fosite/issues/370) + +### Features + +- **pkce:** Add EnforcePKCEForPublicClients config flag ([#431](https://github.com/ory/fosite/issues/431)) ([9f53c84](https://github.com/ory/fosite/commit/9f53c843e4a72d0ff34acb084e5a920d7114278f)), closes [#389](https://github.com/ory/fosite/issues/389) [#391](https://github.com/ory/fosite/issues/391) [#389](https://github.com/ory/fosite/issues/389) [#391](https://github.com/ory/fosite/issues/391) + +# [0.31.2](https://github.com/ory/fosite/compare/v0.31.1...v0.31.2) (2020-04-16) + +fix: introduce better linting pipeline and resolve Go issues (#428) + +### Bug Fixes + +- Introduce better linting pipeline and resolve Go issues ([#428](https://github.com/ory/fosite/issues/428)) ([e02f731](https://github.com/ory/fosite/commit/e02f731a41fb82ac8d6b62ea3f6fd8a915526090)) + +# [0.31.1](https://github.com/ory/fosite/compare/v0.31.0...v0.31.1) (2020-04-16) + +fix: return invalid_grant instead of invalid_request in refresh flow (#427) + +Return invalid_grant instead of invalid_request when in authorization code flow when the user is not the owner of the authorization code or if the redirect uri doesn't match from the authorization request. + +Co-authored-by: Damien Bravin + +### Bug Fixes + +- List all response types in example memory store ([#413](https://github.com/ory/fosite/issues/413)) ([427d40d](https://github.com/ory/fosite/commit/427d40dcaadab6933a4e571def7d9729fd442581)), closes [#304](https://github.com/ory/fosite/issues/304) +- Return invalid_grant instead of invalid_request in refresh flow ([#427](https://github.com/ory/fosite/issues/427)) ([f5a0e96](https://github.com/ory/fosite/commit/f5a0e9696750e3f1d67bd919a6588b175e7cc2bb)): + + Return invalid_grant instead of invalid_request when in authorization code flow when the user is not the owner of the authorization code or if the redirect uri doesn't match from the authorization request. 
+ +- **storage:** Remove unused field ([#422](https://github.com/ory/fosite/issues/422)) ([d2eb3b9](https://github.com/ory/fosite/commit/d2eb3b9ff5f52810067ac59969a3c4272772bdb3)), closes [#417](https://github.com/ory/fosite/issues/417) +- **storage:** Remove unused methods ([#417](https://github.com/ory/fosite/issues/417)) ([023bdcf](https://github.com/ory/fosite/commit/023bdcf1217b8f86de250f53391ad3b1e356949d)) + +### Documentation + +- Fix various typos ([#415](https://github.com/ory/fosite/issues/415)) ([719aaa0](https://github.com/ory/fosite/commit/719aaa0b695f02556167f02fc94133a380ccfa16)) +- Replace Discord with Slack ([#412](https://github.com/ory/fosite/issues/412)) ([d8591bb](https://github.com/ory/fosite/commit/d8591bba33d16b61e6c611b7042d695166bd94e5)) +- Update github templates ([#424](https://github.com/ory/fosite/issues/424)) ([d37fc4b](https://github.com/ory/fosite/commit/d37fc4babe43b52c92eb081b9ea78c0fa9f51865)) +- Update github templates ([#425](https://github.com/ory/fosite/issues/425)) ([0399871](https://github.com/ory/fosite/commit/039987119ea78d69fe991bbb0edb6735b88b16cc)) +- Update SetSession comment ([#423](https://github.com/ory/fosite/issues/423)) ([32951ab](https://github.com/ory/fosite/commit/32951ab56fb3400ff6980519c2e6e20802292f2f)) +- Updates issue and pull request templates ([#419](https://github.com/ory/fosite/issues/419)) ([d804da1](https://github.com/ory/fosite/commit/d804da1e3dfda46872d358d2987bd19462c03e98)) + +# [0.31.0](https://github.com/ory/fosite/compare/v0.30.6...v0.31.0) (2020-03-29) + +Merge pull request from GHSA-v3q9-2p3m-7g43 + +- u + +- u + +### Unclassified + +- Merge pull request from GHSA-v3q9-2p3m-7g43 ([0c9e0f6](https://github.com/ory/fosite/commit/0c9e0f6d654913ad57c507dd9a36631e1858a3e9)): + + - u + + - u + +# [0.30.6](https://github.com/ory/fosite/compare/v0.30.5...v0.30.6) (2020-03-26) + +fix: handle serialization errors that can be thrown by call to 'Commit' (#403) + +### Bug Fixes + +- Handle serialization errors that can be thrown by call to 'Commit' ([#403](https://github.com/ory/fosite/issues/403)) ([35a1558](https://github.com/ory/fosite/commit/35a1558d8d845ac15bc6ec99fb4be062716b231a)) + +### Documentation + +- Update forum and chat links ([b1ba04e](https://github.com/ory/fosite/commit/b1ba04e447d6dfdaf9f0c84336d3bacab41b2c8d)) + +# [0.30.5](https://github.com/ory/fosite/compare/v0.30.4...v0.30.5) (2020-03-25) + +fix: handle concurrent transactional errors in the refresh token grant handler (#402) + +This commit provides the functionality required to address https://github.com/ory/hydra/issues/1719 & https://github.com/ory/hydra/issues/1735 by adding error checking to the RefreshTokenGrantHandler's PopulateTokenEndpointResponse method so it can deal with errors due to concurrent access. This will allow the authorization server to render a better error to the user-agent. + +No longer returns fosite.ErrServerError in the event the storage. Instead a wrapped fosite.ErrNotFound is returned when fetching the refresh token fails due to it no longer being present. 
This scenario is caused when the user sends two or more request to refresh using the same token and one request gets into the handler just after the prior request finished and successfully committed its transaction. + +Adds unit test coverage for transaction error handling logic added to the RefreshTokenGrantHandler's PopulateTokenEndpointResponse method + +### Bug Fixes + +- Handle concurrent transactional errors in the refresh token grant handler ([#402](https://github.com/ory/fosite/issues/402)) ([b17190b](https://github.com/ory/fosite/commit/b17190b4964e911d6f94379873139cdfc3def5bd)): + + This commit provides the functionality required to address https://github.com/ory/hydra/issues/1719 & https://github.com/ory/hydra/issues/1735 by adding error checking to the RefreshTokenGrantHandler's PopulateTokenEndpointResponse method so it can deal with errors due to concurrent access. This will allow the authorization server to render a better error to the user-agent. + + No longer returns fosite.ErrServerError in the event the storage. Instead a wrapped fosite.ErrNotFound is returned when fetching the refresh token fails due to it no longer being present. This scenario is caused when the user sends two or more request to refresh using the same token and one request gets into the handler just after the prior request finished and successfully committed its transaction. + + Adds unit test coverage for transaction error handling logic added to the RefreshTokenGrantHandler's PopulateTokenEndpointResponse method + +# [0.30.4](https://github.com/ory/fosite/compare/v0.30.3...v0.30.4) (2020-03-17) + +fix: add ability to specify amr values natively in id_token payload (#401) + +See ory/hydra#1756 + +### Bug Fixes + +- Add ability to specify amr values natively in id_token payload ([#401](https://github.com/ory/fosite/issues/401)) ([f99bb80](https://github.com/ory/fosite/commit/f99bb8012a583b25fd591718a51308c208cb9a55)), closes [ory/hydra#1756](https://github.com/ory/hydra/issues/1756) + +# [0.30.3](https://github.com/ory/fosite/compare/v0.30.2...v0.30.3) (2020-03-04) + +fix: Support RFC8252#section-7.3 Loopback Interface Redirection (#400) + +Closes #284 + +### Bug Fixes + +- Merge request ID as well ([#398](https://github.com/ory/fosite/issues/398)) ([67c081c](https://github.com/ory/fosite/commit/67c081cb5cb650e7095d7343a618484103cf8bb5)), closes [#386](https://github.com/ory/fosite/issues/386) +- Support RFC8252#section-7.3 Loopback Interface Redirection ([#400](https://github.com/ory/fosite/issues/400)) ([4104135](https://github.com/ory/fosite/commit/41041350c06853d490e94849b25d0fee87a95a32)), closes [RFC8252#section-7](https://github.com/RFC8252/issues/section-7) [#284](https://github.com/ory/fosite/issues/284) + +### Documentation + +- Add undocumented ExactScopeStrategy ([#395](https://github.com/ory/fosite/issues/395)) ([387cade](https://github.com/ory/fosite/commit/387cade4c6e96e0b83df274da5835691e54d07af)) +- Updates issue and pull request templates ([#393](https://github.com/ory/fosite/issues/393)) ([cdefb3e](https://github.com/ory/fosite/commit/cdefb3e99e73b69e62a449de489b0e806d5158af)) +- Updates issue and pull request templates ([#394](https://github.com/ory/fosite/issues/394)) 
([119e6ab](https://github.com/ory/fosite/commit/119e6ab6d83ab8dee3fd31085153f64ca008582a)) + +### Features + +- Add ExactOne and MatchesExact to Arguments ([#399](https://github.com/ory/fosite/issues/399)) ([cf23400](https://github.com/ory/fosite/commit/cf23400930e63a6d5244262d284ddc79943775e6)): + + Previously, Arguments.Exact had vague semantics: it + couldn't distinguish between a value containing a space and multiple + values. It was split into two functions with clear semantics. + + The old .Exact() remains for compatibility and is marked as deprecated. + +# [0.30.2](https://github.com/ory/fosite/compare/v0.30.1...v0.30.2) (2019-11-21) + +Return state parameter in authorization error conditions (#388) + +Related to ory/hydra#1642 + +### Unclassified + +- Return state parameter in authorization error conditions (#388) ([3ece795](https://github.com/ory/fosite/commit/3ece795f3080db5de3529cea9bfa670e70704686)), closes [#388](https://github.com/ory/fosite/issues/388) [ory/hydra#1642](https://github.com/ory/hydra/issues/1642) +- Revert incorrect license changes ([40a49f7](https://github.com/ory/fosite/commit/40a49f743dff60d07b6314667933a47dbf2635aa)) + +# [0.30.1](https://github.com/ory/fosite/compare/v0.30.0...v0.30.1) (2019-09-23) + +pkce: Enforce verifier formatting (#383) + +### Unclassified + +- Enforce verifier formatting ([#383](https://github.com/ory/fosite/issues/383)) ([024667a](https://github.com/ory/fosite/commit/024667ac1905a4d0274294ab552f3566e2eb3b6a)) + +# [0.30.0](https://github.com/ory/fosite/compare/v0.29.8...v0.30.0) (2019-09-16) + +handler/pkce: Enable PKCE for private clients (#382) + +### Unclassified + +- handler/pkce: Enable PKCE for private clients (#382) ([e21830e](https://github.com/ory/fosite/commit/e21830ec0c0c37ca6ca5544b1362c85abe38b80f)), closes [#382](https://github.com/ory/fosite/issues/382) +- Add RefreshTokenScopes Config (#371) ([bcc7859](https://github.com/ory/fosite/commit/bcc78599eadbff38dc0efc9370e5ef64eadfefa9)), closes [#371](https://github.com/ory/fosite/issues/371): + + When set to true, this will return refresh tokens even if the user did + not ask for the offline or offline_access OAuth scope. + +# [0.29.8](https://github.com/ory/fosite/compare/v0.29.7...v0.29.8) (2019-08-29) + +handler/revoke: respecting ErrInvalidRequest code (#380) + +This commit modifies the case for ErrInvalidRequest in +WriteRevocationResponse to respect the 400 error code +and not fallthrough to ErrInvalidClient.
+ +Author: DefinitelyNotAGoat + +### Documentation + +- Updates issue and pull request templates ([#376](https://github.com/ory/fosite/issues/376)) ([165e93e](https://github.com/ory/fosite/commit/165e93eeff7d187af682f7f958b39e2393d15821)) +- Updates issue and pull request templates ([#377](https://github.com/ory/fosite/issues/377)) ([40590cb](https://github.com/ory/fosite/commit/40590cbaa45167dff2085483ccf5b4bddb37e422)) +- Updates issue and pull request templates ([#378](https://github.com/ory/fosite/issues/378)) ([54426bb](https://github.com/ory/fosite/commit/54426bbf3d3bb125753aaf7fda5a7ded5effdf4c)) + +### Unclassified + +- handler/revoke: respecting ErrInvalidRequest code (#380) ([cc34bfb](https://github.com/ory/fosite/commit/cc34bfb4f970d25f59948dcdcbc0eb587ae78d6d)), closes [#380](https://github.com/ory/fosite/issues/380): + + This commit modifies the case for ErrInvalidRequest in + WriteRevocationResponse to respect the 400 error code + and not fallthrough to ErrInvalidClient. + + Author: DefinitelyNotAGoat + +# [0.29.7](https://github.com/ory/fosite/compare/v0.29.6...v0.29.7) (2019-08-06) + +pkce: Return error when PKCE is used with private clients (#375) + +### Documentation + +- Fix method/struct documents ([#360](https://github.com/ory/fosite/issues/360)) ([ad06f22](https://github.com/ory/fosite/commit/ad06f2266b28b3d1844f36e97c1118822fd2a46c)) +- Updates issue and pull request templates ([#361](https://github.com/ory/fosite/issues/361)) ([35157e2](https://github.com/ory/fosite/commit/35157e2a5174f1a8ee9074452b77953e35c4161c)) +- Updates issue and pull request templates ([#365](https://github.com/ory/fosite/issues/365)) ([90a3c50](https://github.com/ory/fosite/commit/90a3c509e718445b799821fac400aad28d9de928)) +- Updates issue and pull request templates ([#366](https://github.com/ory/fosite/issues/366)) ([27c64ec](https://github.com/ory/fosite/commit/27c64ec1b7d12ee1b1e1e0d35dc6b24f7ade92e0)) +- Updates issue and pull request templates ([#367](https://github.com/ory/fosite/issues/367)) ([01cd955](https://github.com/ory/fosite/commit/01cd955efe9a00c014a5ef7488774c3913e7218d)) +- Updates issue and pull request templates ([#373](https://github.com/ory/fosite/issues/373)) ([5962474](https://github.com/ory/fosite/commit/5962474c904f80517d1a9c2731e703ffda972d6a)) +- Updates issue and pull request templates ([#374](https://github.com/ory/fosite/issues/374)) ([9f7cf40](https://github.com/ory/fosite/commit/9f7cf409a643b72cfa25dd2f1340f1aa1c17c443)) + +### Unclassified + +- Create FUNDING.yml ([1b7b479](https://github.com/ory/fosite/commit/1b7b479ca040f95f3ea4cff642c7f678df5cb0ab)) +- Return error when PKCE is used with private clients ([#375](https://github.com/ory/fosite/issues/375)) ([7219387](https://github.com/ory/fosite/commit/72193870c9914dc97c1117a566c68bede0bf5290)) + +# [0.29.6](https://github.com/ory/fosite/compare/v0.29.5...v0.29.6) (2019-04-26) + +openid: Allow promp=none for https/localhost (#359) + +Signed-off-by: aeneasr + +### Unclassified + +- Allow promp=none for https/localhost ([#359](https://github.com/ory/fosite/issues/359)) 
([27bbe00](https://github.com/ory/fosite/commit/27bbe0033273157ea449310c064675127e2550e6)) + +# [0.29.5](https://github.com/ory/fosite/compare/v0.29.4...v0.29.5) (2019-04-25) + +core: Add debug log to invalid_client error(#358) + +Signed-off-by: nerocrux + +### Unclassified + +- Add debug log to invalid_client error([#358](https://github.com/ory/fosite/issues/358)) ([dce3111](https://github.com/ory/fosite/commit/dce3111ad0dac62911c19d9b6ea4cb776f087c4d)) + +# [0.29.3](https://github.com/ory/fosite/compare/v0.29.2...v0.29.3) (2019-04-17) + +Export IsLocalhost + +Signed-off-by: aeneasr + +### Unclassified + +- Export IsLocalhost ([a95ea09](https://github.com/ory/fosite/commit/a95ea092ef682cd5fe3449c23245d211444f28cb)) +- Improve IsRedirectURISecure check ([d6f8962](https://github.com/ory/fosite/commit/d6f8962de5336ce17128b1fd238cba13862c85a7)) + +# [0.29.2](https://github.com/ory/fosite/compare/v0.29.1...v0.29.2) (2019-04-11) + +Allow providing a custom redirect URI checker (#355) + +Signed-off-by: aeneasr + +### Unclassified + +- Allow providing a custom redirect URI checker (#355) ([3d16e39](https://github.com/ory/fosite/commit/3d16e39a3b25cb5d77b8b10cb568c9bc2a835356)), closes [#355](https://github.com/ory/fosite/issues/355) + +# [0.29.1](https://github.com/ory/fosite/compare/v0.29.0...v0.29.1) (2019-03-27) + +token: Improve rotated secret error reporting in HMAC strategy (#354) + +Signed-off-by: aeneasr + +### Unclassified + +- Improve rotated secret error reporting in HMAC strategy ([#354](https://github.com/ory/fosite/issues/354)) ([f21d930](https://github.com/ory/fosite/commit/f21d930291ada9e609ea5018693d6e4745815f03)) +- Propagate session data properly ([#353](https://github.com/ory/fosite/issues/353)) ([5ba0f04](https://github.com/ory/fosite/commit/5ba0f0465039e7072593205b1252e630d340d6ab)): + + This example is slightly inaccurate; the session data will need to come from the returned AccessRequester, not the pre-created session. The session passed to IntrospectToken isn't mutated. + +- Remove useless details fn receiver ([#349](https://github.com/ory/fosite/issues/349)) ([af403c6](https://github.com/ory/fosite/commit/af403c6fac913736a05ca0c44765b10baaf89295)) +- Update HISTORY.md, README.md, CONTRIBUTING.md ([#347](https://github.com/ory/fosite/issues/347)) ([de5e61e](https://github.com/ory/fosite/commit/de5e61e0eb445af57e692964057ea8e661f98618)): + + - README: Breaks out `0.26.0` as was stuck inside a code block. + - README: Ensures the later versions formats code blocks as Go code. + - Runs doctoc to ensure TOCs are up to date. + +# [0.29.0](https://github.com/ory/fosite/compare/v0.28.1...v0.29.0) (2018-12-23) + +oauth2: add test coverage to exercise the transactional support in the AuthorizeExplicitGrantHandler's PopulateTokenEndpointResponse method. + +Signed-off-by: Amir Aslaminejad + +### Unclassified + +- Add mock for storage.Transactional + update generate-mocks.sh ([03f7bc8](https://github.com/ory/fosite/commit/03f7bc8e59f15d7b9c0df47c8c77c106f3fd4a0c)) +- Add test coverage to exercise the transactional support in the AuthorizeExplicitGrantHandler's PopulateTokenEndpointResponse method. 
([2f58f9e](https://github.com/ory/fosite/commit/2f58f9e0ea1a197c8b7eb62dc545d9467ed2ff10)) +- Add test coverage to exercise the transactional support in the RefreshTokenGrantHandler's PopulateTokenEndpointResponse method. ([b38d7c8](https://github.com/ory/fosite/commit/b38d7c89b9a45b7576af379b2dc479ddb880195c)) +- Adds new interface `Transactional` which is to be implemented by storage providers that can support transactions. ([c364b33](https://github.com/ory/fosite/commit/c364b33eefe813da4da02fc78d9e72e1d5301234)) +- Don't double encode URL fragments ([#346](https://github.com/ory/fosite/issues/346)) ([1f41934](https://github.com/ory/fosite/commit/1f419341886c8e37a10c68d7a5c8d576176e666a)), closes [#345](https://github.com/ory/fosite/issues/345) +- Use transactions in the auth code token flow (if the storage implementation implements the `Transactional` interface) to address [#309](https://github.com/ory/fosite/issues/309) ([e00c567](https://github.com/ory/fosite/commit/e00c5675182eb5d90644160c0f3f1b10f0f287f4)) +- Use transactions in the refresh token flow (if the storage implementation implements the `Transactional` interface) to address [#309](https://github.com/ory/fosite/issues/309) ([07d1a39](https://github.com/ory/fosite/commit/07d1a3974ff6d53c239c4050703b09928f484e01)) + +# [0.28.1](https://github.com/ory/fosite/compare/v0.28.0...v0.28.1) (2018-12-04) + +compose: Expose token entropy setting (#342) + +Signed-off-by: nerocrux + +### Unclassified + +- Remove cryptopasta dependency (#339) ([b156e6b](https://github.com/ory/fosite/commit/b156e6b48383926974a560bb416a9ac7507347ec)), closes [#339](https://github.com/ory/fosite/issues/339) +- Expose token entropy setting ([#342](https://github.com/ory/fosite/issues/342)) ([0761fca](https://github.com/ory/fosite/commit/0761fcae7e6ecd0f7d16c51a3c7fa3891d85d85b)) + +# [0.28.0](https://github.com/ory/fosite/compare/v0.27.4...v0.28.0) (2018-11-16) + +oauth2: Add ability to specify refresh token lifespan (#337) + +Set it to `-1` to disable this feature. Defaults to 30 days. + +Closes #319 + +Signed-off-by: arekkas + +### Unclassified + +- Add ability to specify refresh token lifespan ([#337](https://github.com/ory/fosite/issues/337)) ([fa65408](https://github.com/ory/fosite/commit/fa654089e09900d842117827ec2f6258323ec436)), closes [#319](https://github.com/ory/fosite/issues/319): + + Set it to `-1` to disable this feature. Defaults to 30 days. 
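+
+A minimal sketch of wiring this up through the compose configuration follows; the field names are assumptions for illustration and should be checked against the compose.Config of the release you actually use:
+
+```go
+package main
+
+import (
+	"time"
+
+	"github.com/ory/fosite/compose"
+)
+
+func main() {
+	// Field names assumed for illustration only.
+	cfg := &compose.Config{
+		AccessTokenLifespan:  time.Hour,
+		RefreshTokenLifespan: 30 * 24 * time.Hour, // the documented 30-day default
+	}
+	// Per the note above, a value of -1 would disable the refresh token lifespan entirely.
+	_ = cfg
+}
+```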
+ +# [0.27.4](https://github.com/ory/fosite/compare/v0.27.3...v0.27.4) (2018-11-12) + +docs: Fix quickstart (#335) + +- replace NewMemoryStore with NewExampleStore +- fix length of signing key +- fix config type + +Signed-off-by: Peter Schultz + +### Documentation + +- Fix quickstart ([#335](https://github.com/ory/fosite/issues/335)) ([25cc6c4](https://github.com/ory/fosite/commit/25cc6c42e2befe3b200d79c9d8edac47cc6d3f86)): + + - replace NewMemoryStore with NewExampleStore + - fix length of signing key + - fix config type + +### Unclassified + +- Omit exp if ExpiresAt is zero value ([#334](https://github.com/ory/fosite/issues/334)) ([6d50176](https://github.com/ory/fosite/commit/6d501761a17bc3a720e2a0b72ff5f218fa72660c)) + +# [0.27.3](https://github.com/ory/fosite/compare/v0.27.2...v0.27.3) (2018-11-08) + +oauth2: Set exp for authorize code issued by hybrid flow (#333) + +Signed-off-by: nerocrux + +### Unclassified + +- Set exp for authorize code issued by hybrid flow ([#333](https://github.com/ory/fosite/issues/333)) ([d275e84](https://github.com/ory/fosite/commit/d275e84dc6f4bf4e71393672e0e16d54b401bc3c)) + +# [0.27.2](https://github.com/ory/fosite/compare/v0.27.1...v0.27.2) (2018-11-07) + +pkce: Allow hybrid flows (#328) + +Signed-off-by: Adam Shannon +Signed-off-by: Wenhao Ni + +### Unclassified + +- Allow hybrid flows ([#328](https://github.com/ory/fosite/issues/328)) ([cdfddc8](https://github.com/ory/fosite/commit/cdfddc8b06d861708ebe3494a35d65da2d2fcef8)): + + Signed-off-by: Wenhao Ni + +# [0.27.1](https://github.com/ory/fosite/compare/v0.27.0...v0.27.1) (2018-11-03) + +oauth2: Improve refresh security and reliability (#332) + +This patch resolves several issues regarding the refresh flow. First, +an issue has been resolved which caused the audience to not be +set in the refreshed access tokens. + +Second, scope and audience are validated against the client's +whitelisted values and if the values are no longer allowed, +the grant is canceled. + +Closes #331 +Closes #325 +Closes #324 + +### Unclassified + +- Improve refresh security and reliability ([#332](https://github.com/ory/fosite/issues/332)) ([4e4121b](https://github.com/ory/fosite/commit/4e4121bac5cda8efa7d3eb6aaf7720f3ff59c329)), closes [#331](https://github.com/ory/fosite/issues/331) [#325](https://github.com/ory/fosite/issues/325) [#324](https://github.com/ory/fosite/issues/324): + + This patch resolves several issues regarding the refresh flow. First, + an issue has been resolved which caused the audience to not be + set in the refreshed access tokens. + + Second, scope and audience are validated against the client's + whitelisted values and if the values are no longer allowed, + the grant is canceled. + +# [0.27.0](https://github.com/ory/fosite/compare/v0.26.1...v0.27.0) (2018-10-31) + +oauth2: Update jwt access token interface (#330) + +The interface needed to change in order to natively handle the audience claim. 
+ +Signed-off-by: arekkas + +### Unclassified + +- Introduce audience capabilities ([#327](https://github.com/ory/fosite/issues/327)) ([e2441d2](https://github.com/ory/fosite/commit/e2441d231a19cd1133b3316d3477b84d7b649522)), closes [#326](https://github.com/ory/fosite/issues/326): + + This patch allows clients to whitelist audiences and request that audiences are set for oauth2 access and refresh tokens + +- Update jwt access token interface ([#330](https://github.com/ory/fosite/issues/330)) ([2da9764](https://github.com/ory/fosite/commit/2da976477fcd41493103ea478541d68ca04083ae)): + + The interface needed to change in order to natively handle the audience claim. + +# [0.26.1](https://github.com/ory/fosite/compare/v0.26.0...v0.26.1) (2018-10-25) + +hash: Raise bcrypt cost factor lower bound (#321) + +Users of this library can easily create the following: + +hasher := fosite.BCrypt{} +hasher.Hash(..) + +This is a problem because WorkFactor will default to 0 and x/crypto/bcrypt will default that to 4 (See https://godoc.org/golang.org/x/crypto/bcrypt). + +Instead this should be some higher cost factor. Callers who need a lower WorkFactor can still lower the cost, if needed. + +Signed-off-by: Adam Shannon + +### Unclassified + +- Fix Config.GetHashCost godoc comment ([#320](https://github.com/ory/fosite/issues/320)) ([4d2b119](https://github.com/ory/fosite/commit/4d2b119b7a302bf7e6a4d9b600697e08cf089b02)) +- Fix doc typo ([#322](https://github.com/ory/fosite/issues/322)) ([239b1ed](https://github.com/ory/fosite/commit/239b1ed4b9b406287fa49e01f8316e5fc4eb7923)) +- Raise bcrypt cost factor lower bound ([#321](https://github.com/ory/fosite/issues/321)) ([799fc70](https://github.com/ory/fosite/commit/799fc70a48b68b3403eb150084c28d4e78c035e4)): + + Users of this library can easily create the following: + + hasher := fosite.BCrypt{} + hasher.Hash(..) + + This is a problem because WorkFactor will default to 0 and x/crypto/bcrypt will default that to 4 (See https://godoc.org/golang.org/x/crypto/bcrypt). + + Instead this should be some higher cost factor. Callers who need a lower WorkFactor can still lower the cost, if needed. 
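+
+A short sketch of the safer pattern the commit message implies: set the cost explicitly instead of relying on the zero value. `WorkFactor` is the field named in the quoted snippet, the cost of 12 is only an example, and the context-taking `Hash` signature follows the Hasher change noted in the 0.23.0 entry below:
+
+```go
+package main
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/ory/fosite"
+)
+
+func main() {
+	// An explicit cost avoids the zero value, which x/crypto/bcrypt would
+	// otherwise bump to its minimum cost of 4.
+	hasher := &fosite.BCrypt{WorkFactor: 12}
+
+	hash, err := hasher.Hash(context.Background(), []byte("some-client-secret"))
+	if err != nil {
+		panic(err)
+	}
+	fmt.Printf("hash length: %d\n", len(hash))
+}
+```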
+ +# [0.26.0](https://github.com/ory/fosite/compare/v0.25.1...v0.26.0) (2018-10-24) + +all: Rearrange commits with goreturns + +Signed-off-by: aeneasr + +### Unclassified + +- Allow customization of JWT claims ([f97e451](https://github.com/ory/fosite/commit/f97e45118fbf7a87129ee40c8a56e97efc30c8b9)) +- Rearrange commits with goreturns ([211b43b](https://github.com/ory/fosite/commit/211b43b4c04c732adc5fbfa7cab339f44fbea7d7)) + +# [0.25.1](https://github.com/ory/fosite/compare/v0.25.0...v0.25.1) (2018-10-23) + +handler/openid: Populate at_hash in explicit/refresh flows (#315) + +Signed-off-by: Wenhao Ni + +### Documentation + +- Updates issue and pull request templates ([#313](https://github.com/ory/fosite/issues/313)) ([53c7b55](https://github.com/ory/fosite/commit/53c7b55dba903cdb8071417f39ebc01e00921cd4)) +- Updates issue and pull request templates ([#314](https://github.com/ory/fosite/issues/314)) ([73ae623](https://github.com/ory/fosite/commit/73ae6238fc6db4737d5b529ceeb08b26dbab88ea)) +- Updates issue and pull request templates ([#316](https://github.com/ory/fosite/issues/316)) ([64299bb](https://github.com/ory/fosite/commit/64299bb72fe0f9f7886bdd061519cc7e9c9081da)) + +### Unclassified + +- handler/openid: Populate at_hash in explicit/refresh flows (#315) ([189589c](https://github.com/ory/fosite/commit/189589c400467460029424226398da709eb9ec48)), closes [#315](https://github.com/ory/fosite/issues/315) +- Fix typo in README.md (#312) ([dcb83ae](https://github.com/ory/fosite/commit/dcb83ae59f984edeb1dfda19d0c0851e2e1574ae)), closes [#312](https://github.com/ory/fosite/issues/312) + +# [0.25.0](https://github.com/ory/fosite/compare/v0.24.0...v0.25.0) (2018-10-08) + +Fix broken go modules tests (#311) + +Signed-off-by: arekkas + +### Unclassified + +- Fix broken go modules tests (#311) ([02ea4b1](https://github.com/ory/fosite/commit/02ea4b186a6384bfe2a36741842f49f7370e0991)), closes [#311](https://github.com/ory/fosite/issues/311) +- Switch from dep to go modules (#310) ([ac46a67](https://github.com/ory/fosite/commit/ac46a67863cb0842d48c83413789a9d6bf595f8a)), closes [#310](https://github.com/ory/fosite/issues/310) + +# [0.24.0](https://github.com/ory/fosite/compare/v0.23.0...v0.24.0) (2018-09-27) + +Propagate context in jwt strategies (#308) + +Closes #307 + +Signed-off-by: Prateek Malhotra + +### Unclassified + +- Propagate context in jwt strategies (#308) ([e1e18d6](https://github.com/ory/fosite/commit/e1e18d6b22697abeceff6e22d4741c3bf04174f8)), closes [#308](https://github.com/ory/fosite/issues/308) [#307](https://github.com/ory/fosite/issues/307) +- Use test tables for Hasher unit tests (#306) ([499af11](https://github.com/ory/fosite/commit/499af11c14eb4f09f630ce84e971389ab668e94a)), closes [#306](https://github.com/ory/fosite/issues/306) + +# [0.23.0](https://github.com/ory/fosite/compare/v0.22.0...v0.23.0) (2018-09-22) + +Add breaking change to the Hasher interface to the change log + +Signed-off-by: Amir Aslaminejad + +### Unclassified + +- Add breaking change to the Hasher interface to the change log ([805e0e9](https://github.com/ory/fosite/commit/805e0e9a36aa254b18e853b8a9c7881738deb010)) +- Update BCrypt to 
adhere to new Hasher interface ([938e50a](https://github.com/ory/fosite/commit/938e50a32024693670d1a8180b33c5c4a0df470b)) +- Update Hasher to take in context ([02f19fa](https://github.com/ory/fosite/commit/02f19fa3a9db72c54c2be6a904f8a2d35792974e)) + +# [0.22.0](https://github.com/ory/fosite/compare/v0.21.5...v0.22.0) (2018-09-19) + +jwt: update JWTStrategy to take in context (#302) + +Signed-off-by: Amir Aslaminejad + +### Unclassified + +- Update PR template ([3920be2](https://github.com/ory/fosite/commit/3920be20e78ed304ee3752ffcb997ade12862734)) +- Add github issue and PR templates ([b630f54](https://github.com/ory/fosite/commit/b630f54bbd5f01891b2f3cce462819e13136d94c)) +- Update JWTStrategy to take in context ([#302](https://github.com/ory/fosite/issues/302)) ([514fdbd](https://github.com/ory/fosite/commit/514fdbd20393c2175c66f3a69eb7bb849b3d5dfa)) + +# [0.21.5](https://github.com/ory/fosite/compare/v0.21.4...v0.21.5) (2018-08-31) + +openid: Allow JWT from id_token_hint to be expired (#299) + +Signed-off-by: arekkas + +### Unclassified + +- Allow JWT from id_token_hint to be expired ([#299](https://github.com/ory/fosite/issues/299)) ([1ad9cd3](https://github.com/ory/fosite/commit/1ad9cd36069f61b2ace0fec097fe4bdc92e9f6c6)) + +# [0.21.4](https://github.com/ory/fosite/compare/v0.21.3...v0.21.4) (2018-08-26) + +token/hmac: Add ability to rotate HMAC keys (#298) + +Signed-off-by: arekkas + +### Unclassified + +- token/hmac: Add ability to rotate HMAC keys (#298) ([2134650](https://github.com/ory/fosite/commit/213465099b72b6e5afd0e69a7916a95f65e17481)), closes [#298](https://github.com/ory/fosite/issues/298) + +# [0.21.3](https://github.com/ory/fosite/compare/v0.21.2...v0.21.3) (2018-08-22) + +compose: Pass ID Token configuration to strategy (#297) + +Resolves an issue where expiry and issuer were not properly configurable in the strategy. + +See https://github.com/ory/hydra/issues/985 + +Signed-off-by: arekkas + +### Unclassified + +- Pass ID Token configuration to strategy ([#297](https://github.com/ory/fosite/issues/297)) ([a07ce27](https://github.com/ory/fosite/commit/a07ce27c814538c7d0e6228ae814482be2e96e7e)): + + Resolves an issue where expiry and issuer were not properly configurable in the strategy.
+ + See https://github.com/ory/hydra/issues/985 + +# [0.21.2](https://github.com/ory/fosite/compare/v0.21.1...v0.21.2) (2018-08-07) + +openid: Validate id_token_hint only via ID claims (#296) + +Signed-off-by: arekkas + +### Unclassified + +- Validate id_token_hint only via ID claims ([#296](https://github.com/ory/fosite/issues/296)) ([0fcbfea](https://github.com/ory/fosite/commit/0fcbfea741d0f0bb2a96d5fa08a2797a109a4a33)) + +# [0.21.1](https://github.com/ory/fosite/compare/v0.21.0...v0.21.1) (2018-07-22) + +Improve token_endpoint_auth_method error message (#294) + +Signed-off-by: arekkas + +### Unclassified + +- Improve token_endpoint_auth_method error message (#294) ([7820fb2](https://github.com/ory/fosite/commit/7820fb2e380ca737277095876c7f91b5ebee1467)), closes [#294](https://github.com/ory/fosite/issues/294) +- Gofmt ([#290](https://github.com/ory/fosite/issues/290)) ([f02884b](https://github.com/ory/fosite/commit/f02884ba0b236d81e338fd3bcd3e8ebc6d65538f)): + + Run standard gofmt command on project root. + + - go version go1.10.3 darwin/amd64 + +# [0.21.0](https://github.com/ory/fosite/compare/v0.20.3...v0.21.0) (2018-06-23) + +Makes error messages easier to debug for end-users + +### Documentation + +- Fixes header image in README ([4907d60](https://github.com/ory/fosite/commit/4907d60537202e3aa04e81d87efe2c5e17c2e492)) + +### Unclassified + +- Makes error messages easier to debug for end-users ([5688a1c](https://github.com/ory/fosite/commit/5688a1c5acbafad5eabe649ce56e06e922c36a60)) +- Adds errors for request and registration parameters ([920ed71](https://github.com/ory/fosite/commit/920ed71a538f7fa5e7531660d76e076b655bf48b)) +- Adds OIDC request/request_uri support ([c7abcca](https://github.com/ory/fosite/commit/c7abcca923175f85833473508684c209b1151f5a)) +- Adds private_key_jwt authentication method ([baa4cf1](https://github.com/ory/fosite/commit/baa4cf15e1f30da0a52c9314730279302a15a7a4)) +- Adds proper error responses to request object ([f483262](https://github.com/ory/fosite/commit/f4832621071290773fca25e8992fc283d76f390b)) +- Disallow empty response_type in request ([cf2eb85](https://github.com/ory/fosite/commit/cf2eb85ed17c8d51d1c2e90c3349d4f51662a8f0)) +- Do not require id_token response type for auth_code ([#288](https://github.com/ory/fosite/issues/288)) ([edc4910](https://github.com/ory/fosite/commit/edc491045155abbdbc54409889d7ccc7c3999019)): + + Before this patch, the `id_token` response type was required whenever an ID Token was requested. This patch changes that. 
+ +- Implements oidc compliant response_type validation ([f950b9e](https://github.com/ory/fosite/commit/f950b9ea63f10b7ecfe0fa47ec3716b543450dc5)) +- Return unsupported_response_type in validator ([a24708e](https://github.com/ory/fosite/commit/a24708e8044268b324b1aec443a09940ae998c2f)) +- Uses JWTStrategy in oauth2.DefaultStrategy ([e2d2e75](https://github.com/ory/fosite/commit/e2d2e7511931d17fd92e627c65eaabd9598b185d)) +- Uses JWTStrategy interface in openid.DefaultStrategy ([517fdc5](https://github.com/ory/fosite/commit/517fdc5002ccef00a5a105b1a19bcba4c5e6839f)), closes [#252](https://github.com/ory/fosite/issues/252) + +# [0.20.3](https://github.com/ory/fosite/compare/v0.20.2...v0.20.3) (2018-06-07) + +Allows multipart content type as alternative to x-www-form-urlencoded (#285) + +### Unclassified + +- Allows multipart content type as alternative to x-www-form-urlencoded (#285) ([2edf8f8](https://github.com/ory/fosite/commit/2edf8f828b99cbabefa7f00066b49e081fab4920)), closes [#285](https://github.com/ory/fosite/issues/285) + +# [0.20.2](https://github.com/ory/fosite/compare/v0.20.1...v0.20.2) (2018-05-29) + +openid: Merge duplicate aud claim values (#283) + +### Unclassified + +- Merge duplicate aud claim values ([#283](https://github.com/ory/fosite/issues/283)) ([93618d6](https://github.com/ory/fosite/commit/93618d66a99d2756e0a4c638727b728afc62520f)) + +# [0.20.1](https://github.com/ory/fosite/compare/v0.20.0...v0.20.1) (2018-05-29) + +Uses query instead of fragment when handling unsupported response type (#282) + +### Unclassified + +- Uses query instead of fragment when handling unsupported response type (#282) ([57b1471](https://github.com/ory/fosite/commit/57b14710c9aa845f2fa87322e0a3f3fa1e3e09c9)), closes [#282](https://github.com/ory/fosite/issues/282) +- Updates upgrade guide ([a958ab8](https://github.com/ory/fosite/commit/a958ab8218d13c4b0533eb38d07203f2da7ac114)) + +# [0.20.0](https://github.com/ory/fosite/compare/v0.19.8...v0.20.0) (2018-05-28) + +oauth2: Resolves several issues related to revokation (#281) + +This patch resolves several issues related to token revokation as well as duplicate authorize code usage: + +- oauth2: Revoking access or refresh tokens should revoke past and future tokens too +- oauth2: Revoke access and refresh tokens when authorize code is used twice + +Additionally, this patch resolves an issue where refreshing a token would not revoke previous tokens. + +Closes #278 +Closes #280 + +### Unclassified + +- Resolves several issues related to revokation ([#281](https://github.com/ory/fosite/issues/281)) ([72bff7f](https://github.com/ory/fosite/commit/72bff7f33ee8c3a4a8806cc266ca7299ff1785d4)), closes [#278](https://github.com/ory/fosite/issues/278) [#280](https://github.com/ory/fosite/issues/280): + + This patch resolves several issues related to token revokation as well as duplicate authorize code usage: + + - oauth2: Revoking access or refresh tokens should revoke past and future tokens too + - oauth2: Revoke access and refresh tokens when authorize code is used twice + + Additionally, this patch resolves an issue where refreshing a token would not revoke previous tokens. 
+ +- Sets audience to a string array ([#279](https://github.com/ory/fosite/issues/279)) ([2d58a58](https://github.com/ory/fosite/commit/2d58a585d6b53831b17bcd3ed31e67d5b2637d4a)), closes [#215](https://github.com/ory/fosite/issues/215) + +# [0.19.8](https://github.com/ory/fosite/compare/v0.19.7...v0.19.8) (2018-05-24) + +authorize: Fixes implicit detection in error writer (#277) + +### Unclassified + +- Fixes implicit detection in error writer ([#277](https://github.com/ory/fosite/issues/277)) ([608bf5f](https://github.com/ory/fosite/commit/608bf5fff7f5f7fc0dde0b3aecd03534974ba982)) + +# [0.19.7](https://github.com/ory/fosite/compare/v0.19.6...v0.19.7) (2018-05-24) + +openid: Use claims.RequestedAt for a reference of "now" (#276) + +Previously, time.Now() was used to get a reference of "now". However, this caused short max_age values to fail if, for example, the consent screen took a long time. This patch now uses the "requested_at" claim value to determine a sense of "now" which should resolve the mentioned issue. + +### Unclassified + +- Use claims.RequestedAt for a reference of "now" ([#276](https://github.com/ory/fosite/issues/276)) ([91e7a4c](https://github.com/ory/fosite/commit/91e7a4c236caccbea211c7790ad8194b7bd5f8a2)): + + Previously, time.Now() was used to get a reference of "now". However, this caused short max_age values to fail if, for example, the consent screen took a long time. This patch now uses the "requested_at" claim value to determine a sense of "now" which should resolve the mentioned issue. + +# [0.19.6](https://github.com/ory/fosite/compare/v0.19.5...v0.19.6) (2018-05-24) + +openid: Issue ID Token on implicit code flow as well + +### Unclassified + +- Issue ID Token on implicit code flow as well ([180c749](https://github.com/ory/fosite/commit/180c74965cb128059d63e894ba2dd04184458a33)) + +# [0.19.5](https://github.com/ory/fosite/compare/v0.19.4...v0.19.5) (2018-05-23) + +jwt: Add JTI to counter missing nonce + +### Unclassified + +- Add JTI to counter missing nonce ([28822d7](https://github.com/ory/fosite/commit/28822d7b686c3a48ca9afec5291699b758c5f6cf)) +- Enforce nonce on implicit/hybrid flows ([3b44eb3](https://github.com/ory/fosite/commit/3b44eb3538d4faff5fc05a74c8b9fa88ddb48202)) + +# [0.19.4](https://github.com/ory/fosite/compare/v0.19.3...v0.19.4) (2018-05-20) + +core: Checks scopes before dispatching handlers (#272) + +### Unclassified + +- Checks scopes before dispatching handlers ([#272](https://github.com/ory/fosite/issues/272)) ([0f18305](https://github.com/ory/fosite/commit/0f18305e742c17db1eee6784ce3451837b5fd09a)) + +# [0.19.3](https://github.com/ory/fosite/compare/v0.19.2...v0.19.3) (2018-05-20) + +openid: Resolves timing issues in JWT strategy (#271) + +### Unclassified + +- Resolves timing issues in JWT strategy ([#271](https://github.com/ory/fosite/issues/271)) ([aaec994](https://github.com/ory/fosite/commit/aaec9940e2c3fc5a696b3d174d517a6ff1490a6f)) + +# [0.19.2](https://github.com/ory/fosite/compare/v0.19.1...v0.19.2) (2018-05-19) + +openid: Resolves timing issues by setting now to the future (#270) + +### Unclassified + +- Resolves timing issues by setting now to the future ([#270](https://github.com/ory/fosite/issues/270)) 
([e9339d7](https://github.com/ory/fosite/commit/e9339d73eb39b15ffdb4b9a62ddc1ff1ba512530)) + +# [0.19.1](https://github.com/ory/fosite/compare/v0.19.0...v0.19.1) (2018-05-19) + +openid: Improves validation errors and uses UTC everywhere (#269) + +### Unclassified + +- Improves validation errors and uses UTC everywhere ([#269](https://github.com/ory/fosite/issues/269)) ([eee3dad](https://github.com/ory/fosite/commit/eee3dad91e571a5b09217cc00caf485165f5a7d7)) + +# [0.19.0](https://github.com/ory/fosite/compare/v0.18.1...v0.19.0) (2018-05-17) + +openid: Improves prompt, max_age and id_token_hint validation (#268) + +This patch improves the OIDC prompt, max_age, and id_token_hint +validation. + +### Unclassified + +- Improves prompt, max_age and id_token_hint validation ([#268](https://github.com/ory/fosite/issues/268)) ([7ccad77](https://github.com/ory/fosite/commit/7ccad77095dbf8d094b2f3151634f074b0903dbc)): + + This patch improves the OIDC prompt, max_age, and id_token_hint + validation. + +# [0.18.1](https://github.com/ory/fosite/compare/v0.18.0...v0.18.1) (2018-05-01) + +openid: Adds a validator used to validate OIDC parameters (#266) + +The validator, for now, validates the prompt parameter of OIDC requests. + +### Unclassified + +- Adds a validator used to validate OIDC parameters ([#266](https://github.com/ory/fosite/issues/266)) ([91c9d19](https://github.com/ory/fosite/commit/91c9d194a88e6b395668211df60cb512eab08541)): + + The validator, for now, validates the prompt parameter of OIDC requests. + +# [0.18.0](https://github.com/ory/fosite/compare/v0.17.2...v0.18.0) (2018-04-30) + +oauth2: Introspection should return token type (#265) + +Closes #264 + +This patch allows the introspection handler to return the token type (e.g. `access_token`, `refresh_token`) of the +introspected token. To achieve that, some breaking API changes have been introduced: + +- `OAuth2.IntrospectToken(ctx context.Context, token string, tokenType TokenType, session Session, scope ...string) (AccessRequester, error)` is now `OAuth2.IntrospectToken(ctx context.Context, token string, tokenType TokenType, session Session, scope ...string) (TokenType, AccessRequester, error)`. +- `TokenIntrospector.IntrospectToken(ctx context.Context, token string, tokenType TokenType, accessRequest AccessRequester, scopes []string) (error)` is now `TokenIntrospector.IntrospectToken(ctx context.Context, token string, tokenType TokenType, accessRequest AccessRequester, scopes []string) (TokenType, error)`. + +This patch also resolves a misconfigured json key in the `IntrospectionResponse` struct. `AccessRequester AccessRequester json:",extra"` is now properly declared as `AccessRequester AccessRequester json:"extra"`. 
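+
+For illustration, a minimal sketch of a call site updated for the new three-value return, assuming an `oauth2Provider` of type `fosite.OAuth2Provider` and a `session` implementing `fosite.Session`; the function name `introspect` is illustrative and not part of this release:
+
+```go
+package example
+
+import (
+    "context"
+
+    "github.com/ory/fosite"
+)
+
+// introspect shows the updated call shape only; error handling is kept minimal.
+func introspect(ctx context.Context, oauth2Provider fosite.OAuth2Provider, session fosite.Session, token string) error {
+    tokenType, requester, err := oauth2Provider.IntrospectToken(ctx, token, fosite.AccessToken, session)
+    if err != nil {
+        return err // token is invalid, expired, or revoked
+    }
+    _ = tokenType // e.g. fosite.AccessToken or fosite.RefreshToken
+    _ = requester // the AccessRequester associated with the introspected token
+    return nil
+}
+```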
+ +### Unclassified + +- Introspection should return token type ([#265](https://github.com/ory/fosite/issues/265)) ([2bf9b6c](https://github.com/ory/fosite/commit/2bf9b6c4177be3050ff9ba3b82c6474e4c324c39)), closes [#264](https://github.com/ory/fosite/issues/264) + +# [0.17.2](https://github.com/ory/fosite/compare/v0.17.1...v0.17.2) (2018-04-26) + +core: Regression fix for request ID in refresh token flow (#262) + +Signed-off-by: Beorn Facchini + +### Unclassified + +- handler/oauth2: Returns request unauthorized error on invalid password credentials (#261) ([cca6af4](https://github.com/ory/fosite/commit/cca6af4161818682edb98936cae9249db814db27)), closes [#261](https://github.com/ory/fosite/issues/261) +- Regression fix for request ID in refresh token flow ([#262](https://github.com/ory/fosite/issues/262)) ([99029e0](https://github.com/ory/fosite/commit/99029e0e1bc4b1d6dfa1ca8b85a46d79cffad6e8)) + +# [0.17.1](https://github.com/ory/fosite/compare/v0.17.0...v0.17.1) (2018-04-22) + +core: Adds ExactScopeStrategy (#260) + +The ExactScopeStrategy performs a simple string match (case sensitive) +of scopes. + +### Unclassified + +- Adds ExactScopeStrategy ([#260](https://github.com/ory/fosite/issues/260)) ([0fcdf33](https://github.com/ory/fosite/commit/0fcdf33fb52551e02798b4e6733110024b7d24d9)): + + The ExactScopeStrategy performs a simple string match (case sensitive) + of scopes. + +# [0.17.0](https://github.com/ory/fosite/compare/v0.16.5...v0.17.0) (2018-04-08) + +core: Sanitizes request body before sending it to the storage adapter (#258) + +This release resolves a security issue (reported by [platform.sh](https://www.platform.sh)) related to potential storage implementations. This library used to pass +all of the request body from both authorize and token endpoints to the storage adapters. As some of these values +are needed in consecutive requests, some storage adapters chose to drop the full body to the database. This in turn caused, +with the addition of enabling POST-body based client authentication, the client secret to be leaked. + +The issue has been resolved by sanitizing the request body and only including those values truly required by their +respective handlers. This lead to two breaking changes in the API: + +1. The `fosite.Requester` interface has a new method `Sanitize(allowedParameters []string) Requester` which returns + a sanitized clone of the method receiver. If you do not use your own `fosite.Requester` implementation, this won't affect you. +2. If you use the PKCE handler, you will have to add three new methods to your storage implementation. The methods + to be added work exactly like, for example `CreateAuthorizeCodeSession`. The method signatures are as follows: + +```go +type PKCERequestStorage interface { + GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) + CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) error + DeletePKCERequestSession(ctx context.Context, signature string) error +} +``` + +We encourage you to upgrade to this release and check your storage implementations and potentially remove old data. + +We would like to thank [platform.sh](https://www.platform.sh) for sponsoring the development of a patch that resolves this +issue. 
+ +### Documentation + +- Fixes eaxmple errors in README ([#257](https://github.com/ory/fosite/issues/257)) ([b138f59](https://github.com/ory/fosite/commit/b138f5997d535151b3541a15b8c4f7a304cea4eb)) +- Updates banner in readme ([#253](https://github.com/ory/fosite/issues/253)) ([07ac5b8](https://github.com/ory/fosite/commit/07ac5b89878e07fd54edf267f23ebc7059c8bb48)) + +### Unclassified + +- Sanitizes request body before sending it to the storage adapter ([#258](https://github.com/ory/fosite/issues/258)) ([018b5c1](https://github.com/ory/fosite/commit/018b5c12b71b0da443255f4a5cf0ac9543bbf9f7)): + + This release resolves a security issue (reported by [platform.sh](https://www.platform.sh)) related to potential storage implementations. This library used to pass + all of the request body from both authorize and token endpoints to the storage adapters. As some of these values + are needed in consecutive requests, some storage adapters chose to drop the full body to the database. This in turn caused, + with the addition of enabling POST-body based client authentication, the client secret to be leaked. + + The issue has been resolved by sanitizing the request body and only including those values truly required by their + respective handlers. This lead to two breaking changes in the API: + + 1. The `fosite.Requester` interface has a new method `Sanitize(allowedParameters []string) Requester` which returns + a sanitized clone of the method receiver. If you do not use your own `fosite.Requester` implementation, this won't affect you. + 2. If you use the PKCE handler, you will have to add three new methods to your storage implementation. The methods + to be added work exactly like, for example `CreateAuthorizeCodeSession`. The method signatures are as follows: + + ```go + type PKCERequestStorage interface { + GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) + CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) error + DeletePKCERequestSession(ctx context.Context, signature string) error + } + ``` + + We encourage you to upgrade to this release and check your storage implementations and potentially remove old data. + + We would like to thank [platform.sh](https://www.platform.sh) for sponsoring the development of a patch that resolves this + issue. 
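+
+For illustration, a minimal sketch covering both breaking changes, under the assumption of a simple in-memory store; the names `persistSanitized` and `memoryPKCEStore` are hypothetical, the parameter whitelist is illustrative, and `fosite.ErrNotFound` is used here for the missing-session case:
+
+```go
+package example
+
+import (
+    "context"
+    "sync"
+
+    "github.com/ory/fosite"
+)
+
+// persistSanitized illustrates breaking change 1: persist a sanitized clone of
+// the request rather than the full request body.
+func persistSanitized(r fosite.Requester) fosite.Requester {
+    return r.Sanitize([]string{"grant_type", "scope"})
+}
+
+// memoryPKCEStore illustrates breaking change 2: a non-production, in-memory
+// implementation of the PKCERequestStorage interface shown above.
+type memoryPKCEStore struct {
+    mu       sync.Mutex
+    sessions map[string]fosite.Requester
+}
+
+func (s *memoryPKCEStore) CreatePKCERequestSession(_ context.Context, signature string, requester fosite.Requester) error {
+    s.mu.Lock()
+    defer s.mu.Unlock()
+    if s.sessions == nil {
+        s.sessions = map[string]fosite.Requester{}
+    }
+    s.sessions[signature] = requester
+    return nil
+}
+
+func (s *memoryPKCEStore) GetPKCERequestSession(_ context.Context, signature string, _ fosite.Session) (fosite.Requester, error) {
+    s.mu.Lock()
+    defer s.mu.Unlock()
+    r, ok := s.sessions[signature]
+    if !ok {
+        return nil, fosite.ErrNotFound
+    }
+    return r, nil
+}
+
+func (s *memoryPKCEStore) DeletePKCERequestSession(_ context.Context, signature string) error {
+    s.mu.Lock()
+    defer s.mu.Unlock()
+    delete(s.sessions, signature)
+    return nil
+}
+```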
+ +# [0.16.5](https://github.com/ory/fosite/compare/v0.16.4...v0.16.5) (2018-03-17) + +introspection: Improves debug messages (#254) + +### Documentation + +- Resolves minor code documentation misspellings ([#248](https://github.com/ory/fosite/issues/248)) ([c580d79](https://github.com/ory/fosite/commit/c580d79aaa54f2aec179df400a3365ca711ead66)) +- Resolves minor spelling mistakes ([#250](https://github.com/ory/fosite/issues/250)) ([7fbd246](https://github.com/ory/fosite/commit/7fbd2468dfb83cf7288643958db9890af5ffd3d1)) +- Updates chat badge to discord ([b6380be](https://github.com/ory/fosite/commit/b6380be3365fc9703135f6ef3ee747d60d835915)) + +### Unclassified + +- docs : Fixes typo in README (#249) ([d05fadf](https://github.com/ory/fosite/commit/d05fadfa7c4fa88ec58175fef146c7cc9c6c120c)), closes [#249](https://github.com/ory/fosite/issues/249) +- Adds email to license notice ([77fa262](https://github.com/ory/fosite/commit/77fa262093d783bc3f0e302ebddd1a2da3f2581d)) +- Improves debug messages ([#254](https://github.com/ory/fosite/issues/254)) ([338399b](https://github.com/ory/fosite/commit/338399becb5114f84e6dc7166a95f6d036a6b748)) +- Updates license header ([85bdbcb](https://github.com/ory/fosite/commit/85bdbcb4c34c646c7eae56c0a1dc41dc1f75b470)) +- Updates license notice ([917401c](https://github.com/ory/fosite/commit/917401cdf0b891afa9a3aa65edb2539ff0f0a5ba)) +- Updates years in license headers ([77df218](https://github.com/ory/fosite/commit/77df218b30566ab7cd513b723a7e44f9f6afbe7e)) +- Updates years in license headers ([d8458ab](https://github.com/ory/fosite/commit/d8458abe997f70c743a7e2fa3cc27c2cb1d38c9e)) + +# [0.16.4](https://github.com/ory/fosite/compare/v0.16.3...v0.16.4) (2018-02-07) + +handler: Adds PKCE implementation for none and S256 (#246) + +This patch adds support for PKCE (https://tools.ietf.org/html/rfc7636) which is used by native apps (mobile) and prevents eavesdropping attacks against authorization codes. + +PKCE is enabled by default but not enforced. Challenge method plain is disabled by default. Both settings can be changed using `compose.Config.EnforcePKCE` and `compose.config.EnablePKCEPlainChallengeMethod`. + +Closes #213 + +### Unclassified + +- Adds PKCE implementation for none and S256 ([#246](https://github.com/ory/fosite/issues/246)) ([4512853](https://github.com/ory/fosite/commit/45128532dc4bbb40a56bf6250a58f9c5d57a9c7a)), closes [#213](https://github.com/ory/fosite/issues/213): + + This patch adds support for PKCE (https://tools.ietf.org/html/rfc7636) which is used by native apps (mobile) and prevents eavesdropping attacks against authorization codes. + + PKCE is enabled by default but not enforced. Challenge method plain is disabled by default. Both settings can be changed using `compose.Config.EnforcePKCE` and `compose.config.EnablePKCEPlainChallengeMethod`. + +# [0.16.3](https://github.com/ory/fosite/compare/v0.16.2...v0.16.3) (2018-02-07) + +introspection: Adds missing http header to response writer (#247) + +The introspection response writer was missing `application/json` +in header `Content-Type`. This patch fixes that. 
+ +Closes #209 + +### Unclassified + +- Adds missing http header to response writer ([#247](https://github.com/ory/fosite/issues/247)) ([f345ec1](https://github.com/ory/fosite/commit/f345ec1413aa0fc2ba4588a482e469fa19cc08aa)), closes [#209](https://github.com/ory/fosite/issues/209): + + The introspection response writer was missing `application/json` + in header `Content-Type`. This patch fixes that. + +# [0.16.2](https://github.com/ory/fosite/compare/v0.16.1...v0.16.2) (2018-01-25) + +introspection: Decodes of Basic Authorization username/password (#245) + +Signed-off-by: Dmitry Dolbik + +### Unclassified + +- Decodes of Basic Authorization username/password ([#245](https://github.com/ory/fosite/issues/245)) ([b94312e](https://github.com/ory/fosite/commit/b94312e25f011b54894da69256416271c23b5d14)) + +# [0.16.1](https://github.com/ory/fosite/compare/v0.16.0...v0.16.1) (2017-12-23) + +compose: Makes SendDebugMessages first class citizen (#243) + +### Unclassified + +- Makes SendDebugMessages first class citizen ([#243](https://github.com/ory/fosite/issues/243)) ([1ef3041](https://github.com/ory/fosite/commit/1ef3041c4da40d27ea25d56710e59d5f9352df5f)) + +# [0.16.0](https://github.com/ory/fosite/compare/v0.15.6...v0.16.0) (2017-12-23) + +Adds ability to forward hints and debug messages to clients (#242) + +### Unclassified + +- Adds ability to forward hints and debug messages to clients (#242) ([7216c4f](https://github.com/ory/fosite/commit/7216c4f2711c79cf3d8a2c75ad7da4f54103988f)), closes [#242](https://github.com/ory/fosite/issues/242) + +# [0.15.6](https://github.com/ory/fosite/compare/v0.15.5...v0.15.6) (2017-12-21) + +handler/oauth2: Adds offline_access alias for refresh flow + +### Unclassified + +- handler/oauth2: Adds offline_access alias for refresh flow ([2aa8e70](https://github.com/ory/fosite/commit/2aa8e70bb88aa6bafde8d4ea949c5d514c6f568e)) + +# [0.15.5](https://github.com/ory/fosite/compare/v0.15.4...v0.15.5) (2017-12-17) + +Returns the correct error on duplicate auth code use + +### Unclassified + +- Returns the correct error on duplicate auth code use ([95d5f58](https://github.com/ory/fosite/commit/95d5f580c939eea0e6e93cdb4bae4cdbf5082869)) + +# [0.15.4](https://github.com/ory/fosite/compare/v0.15.3...v0.15.4) (2017-12-17) + +Improves http error codes + +### Unclassified + +- Improves http error codes ([6831f75](https://github.com/ory/fosite/commit/6831f7543000b3704879e52d8c9a4555653b4bd5)) + +# [0.15.3](https://github.com/ory/fosite/compare/v0.15.2...v0.15.3) (2017-12-17) + +Resolves overriding auth_time with wrong value + +### Unclassified + +- Resolves overriding auth_time with wrong value ([c85b32d](https://github.com/ory/fosite/commit/c85b32d355a183dac3e46e50aac8b2c344cbd2d7)) + +# [0.15.2](https://github.com/ory/fosite/compare/v0.15.1...v0.15.2) (2017-12-10) + +Adds ability to catch non-conform OIDC authorizations + +Fosite is now capable of detecting authorization flows that +are not conformant with the OpenID Connect spec. 
+ +### Unclassified + +- Adds ability to catch non-conform OIDC authorizations ([97fbeb3](https://github.com/ory/fosite/commit/97fbeb333e353d5d7d7d2368f51899262338ce62)): + + Fosite is now capable of detecting authorization flows that + are not conformant with the OpenID Connect spec. + +- Forces use of UTC time zone everywhere ([4c7e4e5](https://github.com/ory/fosite/commit/4c7e4e5512061e9add22cc246882c78d2b06599c)) + +# [0.15.1](https://github.com/ory/fosite/compare/v0.15.0...v0.15.1) (2017-12-10) + +token/jwt: Adds ability to specify acr value natively in id token payload + +### Unclassified + +- token/jwt: Adds ability to specify acr value natively in id token payload ([b87ca49](https://github.com/ory/fosite/commit/b87ca49b9418b99f492077f8ba78bf00e6c29180)) + +# [0.15.0](https://github.com/ory/fosite/compare/v0.14.2...v0.15.0) (2017-12-09) + +Upgrades history.md + +### Documentation + +- Updates history.md ([9fc25a8](https://github.com/ory/fosite/commit/9fc25a86c4d8609aafa382e5eab32d3d087ec9d8)) + +### Unclassified + +- Upgrades history.md ([87c37c3](https://github.com/ory/fosite/commit/87c37c3d6929b1edd2ab52a28d51ed1890628f51)) +- Improves test coverage report by removing internal package from it ([831f56a](https://github.com/ory/fosite/commit/831f56a9e6774b1e80c13cd301583edea6378245)) +- Resolves test issues and reverts auth code revokation patch ([59fc47b](https://github.com/ory/fosite/commit/59fc47bbeb8093ab3652149ef6789a4e1564e1d8)) +- Improves error debug messages across the project ([7ec8d19](https://github.com/ory/fosite/commit/7ec8d19815d10913ef8cfd8ced9b9794f578dbf4)) +- handler/oauth2: Adds token revokation on authorize code reuse ([2341dec](https://github.com/ory/fosite/commit/2341dec8febeda9da535dc898c7d19aa3ecc8c00)) +- handler/oauth2: Improves authorization code error handling ([d6e0fbd](https://github.com/ory/fosite/commit/d6e0fbd9bdde624fa2e9feada3dec5b4266c4b9e)) +- Allows client credentials in POST body and solves public client auth ([392c191](https://github.com/ory/fosite/commit/392c191bc1859ca57e3d0cf4d2b996d5ab382530)), closes [#231](https://github.com/ory/fosite/issues/231) [#217](https://github.com/ory/fosite/issues/217) +- Updates mocks and mock generation ([1f9d07d](https://github.com/ory/fosite/commit/1f9d07d15e8f70986ed12cfb3ac9fac4a6e7e278)) + +# [0.14.2](https://github.com/ory/fosite/compare/v0.14.1...v0.14.2) (2017-12-06) + +Makes use of rfcerr in access error endpoint writer explicit + +### Unclassified + +- Makes use of rfcerr in access error endpoint writer explicit ([701d850](https://github.com/ory/fosite/commit/701d85072d1ea5c35c7d05acf19bccdef626ba3c)) + +# [0.14.1](https://github.com/ory/fosite/compare/v0.14.0...v0.14.1) (2017-12-06) + +Exports ErrorToRFC6749Error again (#228) + +### Unclassified + +- Exports ErrorToRFC6749Error again (#228) ([8d35b66](https://github.com/ory/fosite/commit/8d35b668079db8642ede3b1d345d74692926515f)), closes [#228](https://github.com/ory/fosite/issues/228) + +# [0.14.0](https://github.com/ory/fosite/compare/v0.13.1...v0.14.0) (2017-12-06) + +Simplifies error contexts (#227) + +Simplifies how errors are instantiated. 
Errors now contain all necessary information without relying on `fosite.ErrorToRFC6749Error` any more. `fosite.ErrorToRFC6749Error` is now an internal method and was renamed to `fosite.errorToRFC6749Error`. + +### Unclassified + +- Simplifies error contexts (#227) ([8961d86](https://github.com/ory/fosite/commit/8961d861814862f9432f0608bcd14dfbcd4ec979)), closes [#227](https://github.com/ory/fosite/issues/227): + + Simplifies how errors are instantiated. Errors now contain all necessary information without relying on `fosite.ErrorToRFC6749Error` any more. `fosite.ErrorToRFC6749Error` is now an internal method and was renamed to `fosite.errorToRFC6749Error`. + +# [0.13.1](https://github.com/ory/fosite/compare/v0.13.0...v0.13.1) (2017-12-04) + +handler/oauth2: Client IDs in revokation requests must match now (#226) + +Closes #225 + +### Unclassified + +- handler/oauth2: Client IDs in revokation requests must match now (#226) ([83136a3](https://github.com/ory/fosite/commit/83136a3ed5ed99b3a525f0ad87d693eadf273e8a)), closes [#226](https://github.com/ory/fosite/issues/226) [#225](https://github.com/ory/fosite/issues/225) +- Add license header to all source files (#222) ([dd9398e](https://github.com/ory/fosite/commit/dd9398ea0553b07d63022af50ee2090d1616c5a9)), closes [#222](https://github.com/ory/fosite/issues/222) [#221](https://github.com/ory/fosite/issues/221) +- Update go version ([#220](https://github.com/ory/fosite/issues/220)) ([ff751ee](https://github.com/ory/fosite/commit/ff751ee3691f79886ccfc6afa3936c2c3b506a9e)) + +# [0.13.0](https://github.com/ory/fosite/compare/v0.12.0...v0.13.0) (2017-10-25) + +vendor: replace glide with dep + +### Unclassified + +- Replace glide with dep ([ec43e3a](https://github.com/ory/fosite/commit/ec43e3a05da49d45ebe8a98b28b14f8817c507f4)) + +# [0.12.0](https://github.com/ory/fosite/compare/v0.11.4...v0.12.0) (2017-10-25) + +scripts: fix goimports import path + +### Unclassified + +- token/hmac: replace custom logic with copypasta ([b4b9be5](https://github.com/ory/fosite/commit/b4b9be5640c9d814b35f54b2c8621137364209ca)) +- Add 0.12.0 to TOC ([a2e3a47](https://github.com/ory/fosite/commit/a2e3a474b2439e4ad68a641152639f7921e610a6)) +- Add format helper scripts ([92c73ae](https://github.com/ory/fosite/commit/92c73aee93b5d1fe2acf3395b495caf912453368)) +- Add goimports to install section ([4f5df70](https://github.com/ory/fosite/commit/4f5df700e3c220f3aa5f7eb79a4b4f19d2f4576e)) +- Fix goimports import path ([65743b4](https://github.com/ory/fosite/commit/65743b40c69ccc76f07fd3eb4c45837d3b4a1505)) +- Format files with goimports ([c87defe](https://github.com/ory/fosite/commit/c87defe18676b36d880fa834c10e2cbd5464e061)) +- Replace nil checks with Error/NoError ([7fe1f94](https://github.com/ory/fosite/commit/7fe1f946af7b4921da008f245da84b85ea3f26d0)) +- Update to go 1.9 ([c17222c](https://github.com/ory/fosite/commit/c17222c854198a7a388a2656a710bf13a5c3c3b9)) +- Use go-acc and test format ([47fd477](https://github.com/ory/fosite/commit/47fd477814c7826a9e9e89a02c248cfbad6b5a7a)) + +# [0.11.4](https://github.com/ory/fosite/compare/v0.11.3...v0.11.4) (2017-10-10) + +handler/oauth2: set expiration time before the access token is generated (#216) + 
+Signed-off-by: Nikita Vorobey + +### Documentation + +- Update banner ([d6cf027](https://github.com/ory/fosite/commit/d6cf027401e828c8e608b042615f982acdf6d915)) + +### Unclassified + +- handler/oauth2: set expiration time before the access token is generated (#216) ([0911eb0](https://github.com/ory/fosite/commit/0911eb0d643d77105e0126bf2303bdfd7190ccd3)), closes [#216](https://github.com/ory/fosite/issues/216) + +# [0.11.3](https://github.com/ory/fosite/compare/v0.11.2...v0.11.3) (2017-08-21) + +oauth2/ropc: Set expires at for password credentials flow (#210) + +Signed-off-by: Beorn Facchini + +### Documentation + +- Fixes documentation oauth2 variable and updates old method ([#205](https://github.com/ory/fosite/issues/205)) ([fa50c80](https://github.com/ory/fosite/commit/fa50c80d36bbc8dda2633b59617689d8ef21042c)): + + It seems that the documentation was declaring as OAuth2Provider the variable `oauth2Provider` whereas it used a non-declared variable `oauth2`. I renamed `oauth2` into the variable declared `oauth2Provider`. + + Furthermore, on line 333, the IntrospectToken method was called without the TokenType argument. I added the fosite.AccessToken type. + +- Update docs on scope strategy ([68119ca](https://github.com/ory/fosite/commit/68119ca5e282c356284a6dc7a2edb2b632d57a47)) + +### Unclassified + +- oauth2/ropc: Set expires at for password credentials flow (#210) ([461b38f](https://github.com/ory/fosite/commit/461b38fd07e47dad709667f024e98a71bfd3792b)), closes [#210](https://github.com/ory/fosite/issues/210) +- oauth2/introspection: configure core validator with access only option (#208) ([80cae74](https://github.com/ory/fosite/commit/80cae74590bfdf7d3f9439073a4a5aac21d7fd45)), closes [#208](https://github.com/ory/fosite/issues/208) +- Add more test cases ([c45a37d](https://github.com/ory/fosite/commit/c45a37d3bb9e3e79d16323f42d76ef96b624dbd0)) + +# [0.11.2](https://github.com/ory/fosite/compare/v0.11.1...v0.11.2) (2017-07-09) + +scope: resolve haystack needle mixup - closes #201 + +### Unclassified + +- Resolve haystack needle mixup - closes [#201](https://github.com/ory/fosite/issues/201) ([2c7cdff](https://github.com/ory/fosite/commit/2c7cdff9d2e677f5f892d6107a3c0b8b9ce61632)) + +# [0.11.1](https://github.com/ory/fosite/compare/v0.11.0...v0.11.1) (2017-07-09) + +token/jwt: add claims tests + +### Unclassified + +- token/jwt: add claims tests ([c55d679](https://github.com/ory/fosite/commit/c55d67903fdc5b2f4b200b663d4f1a0cb1d21dca)) +- handler/openid: only refresh id token with id_token response type ([dd2463a](https://github.com/ory/fosite/commit/dd2463a1a262600096f040867dcabe2a28e1a56c)), closes [#199](https://github.com/ory/fosite/issues/199) +- Add tests for nil sessions ([d67d52d](https://github.com/ory/fosite/commit/d67d52df200dfc72c9eb79e38ae6e91a1fb701f4)) + +# [0.11.0](https://github.com/ory/fosite/compare/v0.10.0...v0.11.0) (2017-07-09) + +handler/oauth2: update docs + +### Unclassified + +- handler/oauth2: update docs ([63f329b](https://github.com/ory/fosite/commit/63f329b104c36dcbe2ee2f2a5562c6422f36224b)) +- handler/oauth2: remove code validity check from test ([664d1a6](https://github.com/ory/fosite/commit/664d1a6c0177abfb4d8f780f28ecd69cb2d44d87)) +- 
handler/oauth2: first retrieve, then validate ([ab72cba](https://github.com/ory/fosite/commit/ab72cba1799accc7b50990908139fa762eb2efc1)) +- handler/oauth2: set requested at date in auth code test ([edd4084](https://github.com/ory/fosite/commit/edd4084b43ed88135fb60a4581283d8abaf92384)) +- handler/oauth2: resolve travis time mismatch ([ec6534c](https://github.com/ory/fosite/commit/ec6534cfebf24d716aba28dee43e6ec268c0918b)) +- handler/oauth2: simplify storage interface ([361b368](https://github.com/ory/fosite/commit/361b3683552bcadf62d1d1c42baf6d5cc1ca1409)), closes [#194](https://github.com/ory/fosite/issues/194) +- handler/oauth2: use hmac strategy for jwt refresh tokens (#190) ([56c88c0](https://github.com/ory/fosite/commit/56c88c04d4819aec08cb068a5fb7697dbaeb3288)), closes [#190](https://github.com/ory/fosite/issues/190) [#180](https://github.com/ory/fosite/issues/180) +- handler/openid: refresh token handler for oidc (#193) ([04888c5](https://github.com/ory/fosite/commit/04888c5448382612a55fb0c57ccf2c0e3d841c2c)), closes [#193](https://github.com/ory/fosite/issues/193) [#181](https://github.com/ory/fosite/issues/181) +- Gofmt ([7a998fe](https://github.com/ory/fosite/commit/7a998fece7ea2fd63ad7943266e67954ab81aaf6)) +- Implement new wildcard strategy - closes [#188](https://github.com/ory/fosite/issues/188) ([e03e99e](https://github.com/ory/fosite/commit/e03e99e653454ab7cc997aacd162374bdbf38c75)) +- Revoke access tokens when refreshing ([bb74955](https://github.com/ory/fosite/commit/bb74955ead77dbadf2f7b99ec3bff9b27f2a4388)), closes [#167](https://github.com/ory/fosite/issues/167) +- Run goimports ([35941c2](https://github.com/ory/fosite/commit/35941c2f3ed0436019429d9657d9dab59cae93e1)) +- Use deepcopy not gob encoding - closes [#191](https://github.com/ory/fosite/issues/191) ([823db5b](https://github.com/ory/fosite/commit/823db5b65cd7c0c356b211c920ca06ec10cfa8b6)) + +# [0.10.0](https://github.com/ory/fosite/compare/v0.9.7...v0.10.0) (2017-07-06) + +oauth2/introspector: remove auth code, refresh scopes (#187) + +Removes authorize code introspection in the HMAC-based strategy and now checks scopes of refresh tokens as well. + +### Unclassified + +- oauth2/introspector: remove auth code, refresh scopes (#187) ([ef8f175](https://github.com/ory/fosite/commit/ef8f1757f0c26317fd7dbb46f66fde7516a3b4bb)), closes [#187](https://github.com/ory/fosite/issues/187): + + Removes authorize code introspection in the HMAC-based strategy and now checks scopes of refresh tokens as well. 
+ +- Separate test dependencies ([#186](https://github.com/ory/fosite/issues/186)) ([71451f0](https://github.com/ory/fosite/commit/71451f05fa2b572c4467a9bca26ec3d018a74cd3)): + + - vendor: Move testify to testImport + - test: Move Assert/Require helpers to \_test pkg + +# [0.9.7](https://github.com/ory/fosite/compare/v0.9.6...v0.9.7) (2017-06-28) + +handler/openid: remove forced nonce (#185) + +Signed-off-by: Wyatt Anderson + +### Unclassified + +- handler/openid: remove forced nonce (#185) ([6c91a21](https://github.com/ory/fosite/commit/6c91a21b540c534c9a2330922e357e24c7d5fda9)), closes [#185](https://github.com/ory/fosite/issues/185) + +# [0.9.6](https://github.com/ory/fosite/compare/v0.9.5...v0.9.6) (2017-06-21) + +oauth2: basic auth should decode client id and secret + +closes #182 + +### Documentation + +- Update test command in README and CONTRIBUTING ([#183](https://github.com/ory/fosite/issues/183)) ([c1ab029](https://github.com/ory/fosite/commit/c1ab029745520914fae525f150e91dfe7ae76142)) + +### Unclassified + +- Basic auth should decode client id and secret ([92b75d9](https://github.com/ory/fosite/commit/92b75d93070fdb96f0ec9975dc24b69243d8f894)), closes [#182](https://github.com/ory/fosite/issues/182) + +# [0.9.5](https://github.com/ory/fosite/compare/v0.9.4...v0.9.5) (2017-06-08) + +handler/oauth2: grant scopes before the access token is generated (#177) + +Signed-off-by: Nikita Vorobey + +### Unclassified + +- handler/oauth2: grant scopes before the access token is generated (#177) ([3497260](https://github.com/ory/fosite/commit/349726028d42f3c60aeefc67aef06f9f907ccf94)), closes [#177](https://github.com/ory/fosite/issues/177) + +# [0.9.4](https://github.com/ory/fosite/compare/v0.9.3...v0.9.4) (2017-06-05) + +introspection: return with active set false on token error (#176) + +### Unclassified + +- Return with active set false on token error ([#176](https://github.com/ory/fosite/issues/176)) ([82944aa](https://github.com/ory/fosite/commit/82944aaa42ddc9c718ee072d5a11635ec982394d)) + +# [0.9.3](https://github.com/ory/fosite/compare/v0.9.2...v0.9.3) (2017-06-05) + +vendor: remove unnecessary go-jose import (#175) + +### Unclassified + +- Remove unnecessary go-jose import ([#175](https://github.com/ory/fosite/issues/175)) ([d26aa4a](https://github.com/ory/fosite/commit/d26aa4a76fda898677f333c38242a9049e448e1a)) + +# [0.9.2](https://github.com/ory/fosite/compare/v0.9.1...v0.9.2) (2017-06-05) + +Resolve issues with error handling (#174) + +- errors: do not convert errors compliant with rfcerrors + +- handler/oauth2: improve redirect message for insecure http + +### Unclassified + +- Resolve issues with error handling (#174) ([9abdfd0](https://github.com/ory/fosite/commit/9abdfd04261f472f34c9d6a545ccaa2d491c4f06)), closes [#174](https://github.com/ory/fosite/issues/174): + + - errors: do not convert errors compliant with rfcerrors + + - handler/oauth2: improve redirect message for insecure http + +# [0.9.1](https://github.com/ory/fosite/compare/v0.9.0...v0.9.1) (2017-06-04) + +vendor: clean up dependencies (#173) + +- vendor: remove stray github.com/Sirupsen/logrus +- vendor: remove common lib + +### Unclassified + +- Clean up dependencies 
([#173](https://github.com/ory/fosite/issues/173)) ([524d3b6](https://github.com/ory/fosite/commit/524d3b6fb51e81330608f727c63dbf41980de7ae)): + + - vendor: remove stray github.com/Sirupsen/logrus + - vendor: remove common lib + +# [0.9.0](https://github.com/ory/fosite/compare/v0.8.0...v0.9.0) (2017-06-03) + +docs: add 0.9.0 release note + +### Documentation + +- Add 0.9.0 release note ([852cf82](https://github.com/ory/fosite/commit/852cf82344c2d78863508eaa0fca32f468cd7fab)) + +### Unclassified + +- Enable fosite composing with custom hashers. ([#170](https://github.com/ory/fosite/issues/170)) ([d70d882](https://github.com/ory/fosite/commit/d70d882d0b125e386e52cd1aee3712d48538fd66)) +- Removed implicit storage as its never used - closes [#165](https://github.com/ory/fosite/issues/165) ([#171](https://github.com/ory/fosite/issues/171)) ([fe74027](https://github.com/ory/fosite/commit/fe74027ee70292a72fe453095603cca060ff6290)) + +# [0.8.0](https://github.com/ory/fosite/compare/v0.7.0...v0.8.0) (2017-05-18) + +docs: add notes for breaking changes that come with 0.8.0 + +### Documentation + +- Add notes for breaking changes that come with 0.8.0 ([d5fafb8](https://github.com/ory/fosite/commit/d5fafb87b04ddf2ced6b58a063eac71892bcd5c9)) + +### Unclassified + +- Added context to GetClient storage interface ([#162](https://github.com/ory/fosite/issues/162)) ([974585d](https://github.com/ory/fosite/commit/974585d4f809f96c8bf9ee3f0f1540bf9478b8a9)), closes [#161](https://github.com/ory/fosite/issues/161) +- Removed \*http.Request from interfaces that access request objects ([786b971](https://github.com/ory/fosite/commit/786b971ca1d36a8f0bd0a5c0bfa798802d5c0c26)): + + - removed the requirement to \*http.Request for endpoints and response object, they are resolvable trough the request.GetRequestForm + + - updated readme to reflect changes to implementation + + - run goimports on internal dir + added goimports command to generate-mocks.sh to force first run after generating the mock files + +- Set authorize code expire time before persist ([#166](https://github.com/ory/fosite/issues/166)) ([305a74f](https://github.com/ory/fosite/commit/305a74fe20649bde7150509ec072a43b958e0ee9)) +- Set expiry date on implicit access tokens ([#164](https://github.com/ory/fosite/issues/164)) ([0785b07](https://github.com/ory/fosite/commit/0785b072dba9a9cf65bc8b7304af4e7691f96a96)) + +# [0.7.0](https://github.com/ory/fosite/compare/v0.6.19...v0.7.0) (2017-05-03) + +vendor: glide update + +### Documentation + +- Add breaking changes note ([7d726e1](https://github.com/ory/fosite/commit/7d726e13800667a32372bb7f97a7f652c7eb9f3e)) + +### Unclassified + +- Glide update ([575dd79](https://github.com/ory/fosite/commit/575dd791f9f11cd8e5471178b1ec3a7638653cae)) +- Goimports ([1cb7e26](https://github.com/ory/fosite/commit/1cb7e26e164c1f11b7cb6ab64191d680d19e7ca0)) +- Move to new org ([bd13085](https://github.com/ory/fosite/commit/bd1308540c519a09d4228048d3d9a028d363a7bd)) +- Replace golang.org/x/net/context with context ([6b1d931](https://github.com/ory/fosite/commit/6b1d93124be24d4b2949060a4c3428c220667738)) + +# 
[0.6.19](https://github.com/ory/fosite/compare/v0.6.18...v0.6.19) (2017-05-03) + +access: revert regression issue introduced by #150 + +### Unclassified + +- Revert regression issue introduced by [#150](https://github.com/ory/fosite/issues/150) ([6f13d58](https://github.com/ory/fosite/commit/6f13d58533573ec847dca6e5cfa1d4338aef95b1)) +- Revert regression issue introduced by [#150](https://github.com/ory/fosite/issues/150) ([6bb4135](https://github.com/ory/fosite/commit/6bb4135523c4e2fcf7b3a0630e233ccb7a806fc8)) + +# [0.6.18](https://github.com/ory/fosite/compare/v0.6.17...v0.6.18) (2017-04-14) + +oauth2: basic auth should www-url-decode client id and secret - closes #150 + +### Unclassified + +- handler/oauth2: removes RevokeHandler from JWT introspector (#155) ([344dbef](https://github.com/ory/fosite/commit/344dbeff15cfce9990c0ccfd687a0c44f6a81569)), closes [#155](https://github.com/ory/fosite/issues/155): + + - Removes RevokeHandler from JWT Introspector + + RevokeHandler has been removed because it conflicts with Stateless JWT + accesstokens and revocable hmac refresh tokens. The readme has been + updated to warn users about possible misconfiguration. + + - Moves text back to correct section + +- Allow localhost subdomains such as blog.localhost:1234 ([5e1c890](https://github.com/ory/fosite/commit/5e1c890fd144ce1ec12ee26d7ebfe02862af067e)) +- Basic auth should www-url-decode client id and secret - closes [#150](https://github.com/ory/fosite/issues/150) ([ad395bf](https://github.com/ory/fosite/commit/ad395bf323137e30ce12d40646a9229a42695863)) +- Get the token from the access_token query parameter ([#156](https://github.com/ory/fosite/issues/156)) ([9edac04](https://github.com/ory/fosite/commit/9edac0441f4f9c8400e0cbd9cd637e9d2bfcae05)) + +# [0.6.17](https://github.com/ory/fosite/compare/v0.6.15...v0.6.17) (2017-02-24) + +readme: update badges to ory + +### Unclassified + +- revert unintentional change ([14a18a7](https://github.com/ory/fosite/commit/14a18a714c419b31d4bf1341e1017159bc17540f)) +- make stateless validator return an error on revocation ([f8f7978](https://github.com/ory/fosite/commit/f8f797869eaa1895791ed1bba3b0f3c3a06a03ca)) +- dont client id for aud ([a39200b](https://github.com/ory/fosite/commit/a39200b3eb08b77d0181586454e5d7348d519aa5)) +- handler/oauth2: allow stateless introspection of jwt access tokens ([c2d2ac2](https://github.com/ory/fosite/commit/c2d2ac258ecb1378493c0d60add2967e510fbc6b)) +- Redirect uris should ignore cases during matching - closes [#144](https://github.com/ory/fosite/issues/144) ([4b88774](https://github.com/ory/fosite/commit/4b887746fde977a0f5cf8fbbe06c90577f416fca)) +- Update badges to ory ([9b33931](https://github.com/ory/fosite/commit/9b33931ee14ae0768ea46a423d569330a85b482e)) + +# [0.6.15](https://github.com/ory/fosite/compare/v0.6.14...v0.6.15) (2017-02-11) + +errors: fixed typo in acccess_error + +### Unclassified + +- Fixed typo in acccess_error ([08b2242](https://github.com/ory/fosite/commit/08b2242b66a8d430084c6aada57018f8c2dabea6)) + +# [0.6.14](https://github.com/ory/fosite/compare/v0.6.13...v0.6.14) (2017-01-08) + +allow public clients to revoke tokens with just an ID + +This functionality is described in 
the OAuth2 spec here: https://tools.ietf.org/html/rfc7009#section-5 + +### Unclassified + +- allow public clients to revoke tokens with just an ID ([7b94f47](https://github.com/ory/fosite/commit/7b94f470bede7cf5e94d11e05aa3364d0db75fe2)), closes [/tools.ietf.org/html/rfc7009#section-5](https://github.com//tools.ietf.org/html/rfc7009/issues/section-5) +- Conform to RFC 6749 ([c404554](https://github.com/ory/fosite/commit/c4045541ae19c88634d79818a0060d71c9ef07ec)), closes [/tools.ietf.org/html/rfc6749#section-5](https://github.com//tools.ietf.org/html/rfc6749/issues/section-5): + + Section 5.2 specifies the parameters for access error responses; + the "error" and "error_description" parameters are misnamed. + +# [0.6.13](https://github.com/ory/fosite/compare/v0.6.12...v0.6.13) (2017-01-08) + +request: fix SetRequestedScopes (#139) + +Signed-off-by: Peter Schultz + +### Unclassified + +- Fix SetRequestedScopes ([#139](https://github.com/ory/fosite/issues/139)) ([d02c427](https://github.com/ory/fosite/commit/d02c427a76d5d8ef2f099bae79b7af69be3f643a)) + +# [0.6.12](https://github.com/ory/fosite/compare/v0.6.11...v0.6.12) (2017-01-02) + +authorize: allow custom redirect url schemas + +### Unclassified + +- Allow custom redirect url schemas ([c740b70](https://github.com/ory/fosite/commit/c740b703399e7a1479dac9f261baec4b341f6cff)) +- Properly wrap errors ([e054b6e](https://github.com/ory/fosite/commit/e054b6e04a9253e3d1d333064998045b3ab649fe)) + +# [0.6.11](https://github.com/ory/fosite/compare/v0.6.10...v0.6.11) (2017-01-02) + +openid: c_hash / at_hash should use url-safe base64 encoding + +### Unclassified + +- C_hash / at_hash should use url-safe base64 encoding ([33d4414](https://github.com/ory/fosite/commit/33d44146ef17f9c176a2a74e7ee77eaae98ee5c1)) + +# [0.6.10](https://github.com/ory/fosite/compare/v0.6.9...v0.6.10) (2016-12-29) + +openid: c_hash / at_hash should be string not byte slice + +### Unclassified + +- C_hash / at_hash should be string not byte slice ([b489cc9](https://github.com/ory/fosite/commit/b489cc95b87d74785c5e9b8ea5eb48e975559f63)) + +# [0.6.9](https://github.com/ory/fosite/compare/v0.6.8...v0.6.9) (2016-12-29) + +oauth2/implicit: fix redirect url on error +Signed-off-by: Nikita Vorobey + +### Documentation + +- Fix missing protocol in link in readme ([#132](https://github.com/ory/fosite/issues/132)) ([37ef374](https://github.com/ory/fosite/commit/37ef374aec940d6b9fdcc33800c09ba08b830f39)) + +### Unclassified + +- oauth2/implicit: fix redirect url on error ([435288c](https://github.com/ory/fosite/commit/435288ccdee2aed2447a5a0babf885dbfeae6b55)) + +# [0.6.8](https://github.com/ory/fosite/compare/v0.6.7...v0.6.8) (2016-12-20) + +lint: gofmt -w -s . + +### Unclassified + +- Add id_token + code flow ([3f347e3](https://github.com/ory/fosite/commit/3f347e35b603fdde805a8b7a4fdaeff6bcddaa02)) +- Fix typos ([#130](https://github.com/ory/fosite/issues/130)) ([e6b410d](https://github.com/ory/fosite/commit/e6b410d519a0944cd52ffde656f7b21c4682b5a6)) +- Gofmt -w -s . 
([95caa96](https://github.com/ory/fosite/commit/95caa96835a1254ba3f8f4a21e635fe6da34f0fe)) + +# [0.6.7](https://github.com/ory/fosite/compare/v0.6.6...v0.6.7) (2016-12-06) + +access: response expires in should be int, not string + +### Unclassified + +- Response expires in should be int, not string ([a2080a3](https://github.com/ory/fosite/commit/a2080a30c04abf6a9b3f7dee63026cb5816f8bbd)) + +# [0.6.6](https://github.com/ory/fosite/compare/v0.6.5...v0.6.6) (2016-12-06) + +errors: add inactive token error + +### Unclassified + +- Add content type to error response ([75aad53](https://github.com/ory/fosite/commit/75aad53be3dfda8a02a47bd8f574dc23914b4b65)) +- Add inactive token error ([0151f1e](https://github.com/ory/fosite/commit/0151f1e17dda1c81185d00b388c83b25b7c5f72c)) +- Resolve broken test ([51ab7bb](https://github.com/ory/fosite/commit/51ab7bb960640bcd8722e2731af72c6c26e3bacd)) + +# [0.6.5](https://github.com/ory/fosite/compare/v0.6.4...v0.6.5) (2016-12-04) + +introspection: always return the error + +### Unclassified + +- Always return the error ([366b4c1](https://github.com/ory/fosite/commit/366b4c1a06369b2cecaf6f71b720273e686d520d)) + +# [0.6.4](https://github.com/ory/fosite/compare/v0.6.3...v0.6.4) (2016-11-29) + +token/jwt: Allow single element string arrays to be treated as strings + +This commit allows `aud` to be passed in as a single element array +during consent validation on Hydra. This fixes +https://github.com/ory-am/hydra/issues/314. + +Signed-off-by: Son Dinh + +### Unclassified + +- token/jwt: Allow single element string arrays to be treated as strings ([5388e10](https://github.com/ory/fosite/commit/5388e107ac994650eb1623efb6c88d14d045e325)): + + This commit allows `aud` to be passed in as a single element array + during consent validation on Hydra. This fixes + https://github.com/ory-am/hydra/issues/314. 
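+
+For illustration, a hypothetical helper (not part of fosite's API) showing the described behavior, where a claim supplied as a single-element string array is accepted interchangeably with a plain string:
+
+```go
+package example
+
+// toSingleString returns the claim as a string when it is either a plain
+// string or a one-element array; the second return value reports success.
+func toSingleString(claim interface{}) (string, bool) {
+    switch v := claim.(type) {
+    case string:
+        return v, true
+    case []string:
+        if len(v) == 1 {
+            return v[0], true
+        }
+    case []interface{}:
+        if len(v) == 1 {
+            if s, ok := v[0].(string); ok {
+                return s, true
+            }
+        }
+    }
+    return "", false
+}
+```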
+ +# [0.6.2](https://github.com/ory/fosite/compare/v0.6.1...v0.6.2) (2016-11-25) + +oauth2/introspection: endpoint responds to invalid requests appropriately (#126) + +### Unclassified + +- oauth2/introspection: endpoint responds to invalid requests appropriately (#126) ([9360f64](https://github.com/ory/fosite/commit/9360f6473249324e2c2c2f6e94b3f123bdb929fa)), closes [#126](https://github.com/ory/fosite/issues/126) + +# [0.6.1](https://github.com/ory/fosite/compare/v0.6.0...v0.6.1) (2016-11-17) + +core: resolve issues with token introspection and sessions + +### Unclassified + +- Resolve issues with token introspection and sessions ([895d169](https://github.com/ory/fosite/commit/895d16935bd97831eecff66b1d775af9b91a2506)) + +# [0.6.0](https://github.com/ory/fosite/compare/v0.5.1...v0.6.0) (2016-11-17) + +core: resolve session referencing issue (#125) + +### Unclassified + +- Comply with Go license terms - closes [#123](https://github.com/ory/fosite/issues/123) ([4c4507f](https://github.com/ory/fosite/commit/4c4507f865e0968e0a06c961aef9176bd8e7b7e3)) +- Resolve session referencing issue ([#125](https://github.com/ory/fosite/issues/125)) ([81a3229](https://github.com/ory/fosite/commit/81a3229706c38e29c7745acf930272f4711547f4)) + +# [0.5.1](https://github.com/ory/fosite/compare/v0.5.0...v0.5.1) (2016-10-22) + +handler/oauth2: set JWT ExpiresAt claim per TokenType from the session (#121) + +Signed-off-by: Cristian Graziano + +### Unclassified + +- handler/oauth2: set JWT ExpiresAt claim per TokenType from the session (#121) ([66170ae](https://github.com/ory/fosite/commit/66170ae25a3ac26abcd2ab27d687434d4e2a60a7)), closes [#121](https://github.com/ory/fosite/issues/121) +- oauth2/introspection: do not include the session in the response ([daad271](https://github.com/ory/fosite/commit/daad27179358c71aeb89dc8d7d6fdd2c04a15871)) + +# [0.5.0](https://github.com/ory/fosite/compare/v0.4.0...v0.5.0) (2016-10-17) + +0.5.0 (#119) + +- all: resolve regression issues introduced by 0.4.0 - closes #118 +- oauth2: introspection handler excess calls - closes #117 +- oauth2: inaccurate expires_in time - closes #72 + +### Unclassified + +- 0.5.0 (#119) ([eb9077f](https://github.com/ory/fosite/commit/eb9077f6608d776ae50eb2ad4205705bad6ee0eb)), closes [#119](https://github.com/ory/fosite/issues/119) [#118](https://github.com/ory/fosite/issues/118) [#117](https://github.com/ory/fosite/issues/117) [#72](https://github.com/ory/fosite/issues/72) + +# [0.4.0](https://github.com/ory/fosite/compare/v0.3.6...v0.4.0) (2016-10-16) + +all: clean up, resolve broken tests + +### Documentation + +- Add danilobuerger and jrossiter to hall of fame ([f864e26](https://github.com/ory/fosite/commit/f864e26f6b22726ad592742e8654b099729a4b46)) +- Add offline note to readme ([60a7672](https://github.com/ory/fosite/commit/60a767221625d0f6541f203e41a7ef20a1782eb0)) +- Document reasoning for interface{} in compose package - closes [#94](https://github.com/ory/fosite/issues/94) ([f193012](https://github.com/ory/fosite/commit/f1930124e072153f9d5ec8dc4f14733f9bdc20a1)) + +### Unclassified + +- Allow public clients to access token endpoint - closes 
[#78](https://github.com/ory/fosite/issues/78) ([cbe433e](https://github.com/ory/fosite/commit/cbe433e1985d782217cb973261a3b1677af1f664)) +- Clean up, resolve broken tests ([1041e67](https://github.com/ory/fosite/commit/1041e67f395480fd334446bd8b13f09dfbeeb658)) +- Flatten package hierarchy and merge files - closes [#93](https://github.com/ory/fosite/issues/93) ([9b7ba80](https://github.com/ory/fosite/commit/9b7ba808064d33a5251cb6cd3d30d2d4b8f3ff25)) +- Reduce third party dependencies - closes [#116](https://github.com/ory/fosite/issues/116) ([5ec5cff](https://github.com/ory/fosite/commit/5ec5cff534008820671e56f6b062dc2aa1e364e6)) +- Split library and example - closes [#92](https://github.com/ory/fosite/issues/92) ([6d76d35](https://github.com/ory/fosite/commit/6d76d35018159d830a9b050f99c15b099a6975e2)) + +# [0.3.6](https://github.com/ory/fosite/compare/v0.3.5...v0.3.6) (2016-10-07) + +oauth2: added refresh token generation for password grant type (#107) + +- oauth2: added refresh token generation for password grant type when offline scope is requested + +Signed-off-by: Jason Rossiter + +### Unclassified + +- Added refresh token generation for password grant type ([#107](https://github.com/ory/fosite/issues/107)) ([81c3cbd](https://github.com/ory/fosite/commit/81c3cbdb6b00399219b57c9e1aa1b4cbebf888d8)): + + - oauth2: added refresh token generation for password grant type when offline scope is requested + +# [0.3.5](https://github.com/ory/fosite/compare/v0.3.4...v0.3.5) (2016-10-06) + +handler/oauth2: resolve issues with refresh token flow (#110) + +- handler/oauth2/refresh: requestedAt time is not reset - closes #109 +- handler/oauth2/refresh: session is not transported to new access token - closes #108 + +### Unclassified + +- handler/oauth2: resolve issues with refresh token flow (#110) ([bef6197](https://github.com/ory/fosite/commit/bef61973fdee1a18aedba4e42a1d8977c3f8cc1c)), closes [#110](https://github.com/ory/fosite/issues/110) [#109](https://github.com/ory/fosite/issues/109) [#108](https://github.com/ory/fosite/issues/108) +- Add tests to request state ([8c7c77e](https://github.com/ory/fosite/commit/8c7c77e1f2116c38ed1765cc846c4b7c0bdc94b8)) + +# [0.3.4](https://github.com/ory/fosite/compare/v0.3.3...v0.3.4) (2016-10-04) + +handler/oauth2: refresh token does not migrate original access data - closes #103 (#104) + +### Unclassified + +- handler/oauth2: refresh token does not migrate original access data - closes #103 (#104) ([8ffa0bc](https://github.com/ory/fosite/commit/8ffa0bc825179bbffbd3a548219062846f9b0250)), closes [#103](https://github.com/ory/fosite/issues/103) [#104](https://github.com/ory/fosite/issues/104) + +# [0.3.3](https://github.com/ory/fosite/compare/v0.3.2...v0.3.3) (2016-10-03) + +authorize: scopes should be separated by %20 and not +, to ensure javascript compatibility - closes #101 (#102) + +### Documentation + +- Fix reference to store example in readme ([#87](https://github.com/ory/fosite/issues/87)) ([b1e2cda](https://github.com/ory/fosite/commit/b1e2cda5bb64ffdcce40aed52af5c9be0852c8ef)) + +### Unclassified + +- Scopes should be separated by %20 and not +, to ensure javascript compatibility - closes 
[#101](https://github.com/ory/fosite/issues/101) ([#102](https://github.com/ory/fosite/issues/102)) ([e61a25f](https://github.com/ory/fosite/commit/e61a25f3e3d3f067141c3f6464ab4213f4e14d45)) + +# [0.3.2](https://github.com/ory/fosite/compare/v0.3.1...v0.3.2) (2016-09-22) + +openid: resolves an issue with the explicit token flow + +### Unclassified + +- Resolves an issue with the explicit token flow ([aa1b854](https://github.com/ory/fosite/commit/aa1b8548678e5807399d35b5bcad4f62a83cf6e4)) + +# [0.3.1](https://github.com/ory/fosite/compare/v0.3.0...v0.3.1) (2016-09-22) + +0.3.1 (#98) + +- all: better error handling - closes #100 +- oauth2/implicit: bad HTML encoding of the scope parameter - closes #95 +- oauth2: state parameter is missing when response_type=id_token - closes #96 +- oauth2: id token hashes are not base64 url encoded - closes #97 +- openid: hybrid flow using `token+code+id_token` returns multiple tokens of the same type - closes #99 + +### Unclassified + +- 0.3.1 (#98) ([b16e3fc](https://github.com/ory/fosite/commit/b16e3fcfdf8f3f47802cd87b2388235186b9f108)), closes [#98](https://github.com/ory/fosite/issues/98) [#100](https://github.com/ory/fosite/issues/100) [#95](https://github.com/ory/fosite/issues/95) [#96](https://github.com/ory/fosite/issues/96) [#97](https://github.com/ory/fosite/issues/97) [#99](https://github.com/ory/fosite/issues/99) +- Add additional tests to HierarchicScopeStrategy ([#81](https://github.com/ory/fosite/issues/81)) ([64e869c](https://github.com/ory/fosite/commit/64e869cb9b69a4b027bfc0284bfeb33b2836ea41)) +- Corrected grant type in comment ([#82](https://github.com/ory/fosite/issues/82)) ([27ddd19](https://github.com/ory/fosite/commit/27ddd19e9b07101b712b4b7d82443b3c9d53fa69)) +- Removed unnecessary logging ([#86](https://github.com/ory/fosite/issues/86)) ([cb328ca](https://github.com/ory/fosite/commit/cb328caca6287c7995ee5285c6446bffd4ef496b)) +- Simplify scope comparison logic ([7fb850e](https://github.com/ory/fosite/commit/7fb850ef530b3445adb07406f8bc773e6ad38884)) + +# [0.3.0](https://github.com/ory/fosite/compare/v0.2.4...v0.3.0) (2016-08-22) + +vendor: jwt-go is now v3.0.0 (#77) + +Signed-off-by: Alexander Widerberg + +### Unclassified + +- HierarchicScopeStrategy worngly accepts missing scopes ([7faee6b](https://github.com/ory/fosite/commit/7faee6bbd53ee762ddfe194fb2ea5e7d0205e46d)) +- Jwt-go is now v3.0.0 ([#77](https://github.com/ory/fosite/issues/77)) ([76ef7ea](https://github.com/ory/fosite/commit/76ef7ea8f51735d63476cd91e1f9a9f367d544cb)) + +# [0.2.4](https://github.com/ory/fosite/compare/v0.2.3...v0.2.4) (2016-08-09) + +all: resolve race condition and package fosite with glide + +### Unclassified + +- Resolve race condition and package fosite with glide ([66b53a9](https://github.com/ory/fosite/commit/66b53a903c03950ac5180dc30c3f69e477344205)) + +# [0.2.3](https://github.com/ory/fosite/compare/v0.2.2...v0.2.3) (2016-08-08) + +vendor: commit missing lock file + +### Unclassified + +- Commit missing lock file ([be30574](https://github.com/ory/fosite/commit/be30574ee5f5f51cb22faf0a187231141f1c2f63)) + +# 
[0.2.2](https://github.com/ory/fosite/compare/v0.2.1...v0.2.2) (2016-08-08) + +vendor: updated go-jwt to use semver instead of gopkg + +### Unclassified + +- Updated go-jwt to use semver instead of gopkg ([3b66309](https://github.com/ory/fosite/commit/3b663092771e796816c1f9ac2169139f27b70c4b)) + +# [0.2.1](https://github.com/ory/fosite/compare/v0.2.0...v0.2.1) (2016-08-08) + +core: remove unused fields and methods from client + +### Unclassified + +- Remove unused fields and methods from client ([5f1851b](https://github.com/ory/fosite/commit/5f1851b088e9f087a7bd3e7beca4c3112418fcfc)) +- Resolved package naming issue ([4d1caeb](https://github.com/ory/fosite/commit/4d1caeb18275f2a4a5f40a7cdd06a74cfc1c3e73)) + +# [0.2.0](https://github.com/ory/fosite/compare/v0.1.0...v0.2.0) (2016-08-06) + +all: composable factories, better token validation, better scope handling and simplify structure + +- readme: add gitter chat badge closes #67 +- handler: flatten packages closes #70 +- openid: don't autogrant openid scope - closes #68 +- all: clean up scopes / arguments - closes #66 +- all: composable factories - closes #64 +- all: refactor token validation - closes #63 +- all: remove mandatory scope - closes #62 + +### Unclassified + +- Composable factories, better token validation, better scope handling and simplify structure ([a92c755](https://github.com/ory/fosite/commit/a92c75531cf5bb89524cd719c9bc2c98fe709c62)), closes [#67](https://github.com/ory/fosite/issues/67) [#70](https://github.com/ory/fosite/issues/70) [#68](https://github.com/ory/fosite/issues/68) [#66](https://github.com/ory/fosite/issues/66) [#64](https://github.com/ory/fosite/issues/64) [#63](https://github.com/ory/fosite/issues/63) [#62](https://github.com/ory/fosite/issues/62) + +# [0.1.0](https://github.com/ory/fosite/compare/7adad58c327cf52530d8c1e08059564ca0b51538...v0.1.0) (2016-08-01) + +oauth2: implicit handlers do not require tls over https (#61) + +closes #60 + +### Code Refactoring + +- New api signatures ([8a830d3](https://github.com/ory/fosite/commit/8a830d34405f3b3d50734f5258151426dc61a94b)) + +### Documentation + +- Add -d option to go get ([0e63038](https://github.com/ory/fosite/commit/0e630382425e6d1a7e9177828eeb59f6748e856f)) +- Define implicitHandler ([745a4df](https://github.com/ory/fosite/commit/745a4df7758caa8c3338d006a60f4948120f00bf)): + + Someone forgot to rename the variable name when copy-pasting in the example. 
+ +- Document new token generation and validation ([ddef55b](https://github.com/ory/fosite/commit/ddef55ba96b6c533681b7a1953da5c33ed30587a)) +- Drafted workflows ([4ad1d14](https://github.com/ory/fosite/commit/4ad1d146d67c0e17c545d1c3959dc697777b9828)) +- Explain what handlers are ([48ca03b](https://github.com/ory/fosite/commit/48ca03b9026843f1047e510c3b66ccb6a54def2c)) +- Fix typos in readme ([b9ed7ac](https://github.com/ory/fosite/commit/b9ed7acf8b00f05fcc99578f7a49d55275041515)) +- Readme ([a5aa697](https://github.com/ory/fosite/commit/a5aa69736505502303bc99ee180539033d5ba886)) +- Readme ([f77fd41](https://github.com/ory/fosite/commit/f77fd412ea7f2be15b0f0c5ac801ac177e7d3dc4)) +- Readme ([e143d8c](https://github.com/ory/fosite/commit/e143d8ca506f7cf2f70c92710b2fc123e003a12d)) +- Readme ([d483568](https://github.com/ory/fosite/commit/d483568c06d9542bbf383771dee3ea44b60dff0e)) +- Updated authorize section ([9c21afb](https://github.com/ory/fosite/commit/9c21afbc38fbd35f951c127beb2623ae4d2590e7)) +- Updated readme docs ([336a2cd](https://github.com/ory/fosite/commit/336a2cd10ac08ca6867952555802c225c475c17a)) + +### Unclassified + +- updated gif ([39c239f](https://github.com/ory/fosite/commit/39c239faca97882da9d5293306dfdcbabf8ee0cc)) +- gofmt ([f813288](https://github.com/ory/fosite/commit/f813288911ba653b197589edc4206b52d6c11545)) +- updated example gif ([29b39ea](https://github.com/ory/fosite/commit/29b39ea32fee62b1013ee383ce56c653a7ef33d9)) +- added open id connect to example ([6f0ce68](https://github.com/ory/fosite/commit/6f0ce681147428b51c3673a4c46ab018cf46cf81)) +- added integration tests ([8d47f80](https://github.com/ory/fosite/commit/8d47f80420c288a25ba846927c532e156d27a23b)) +- added doc to fix travis ([a0db129](https://github.com/ory/fosite/commit/a0db129b0a063fe9438560b1a339f973736327f7)) +- Add go report card ([204c5d6](https://github.com/ory/fosite/commit/204c5d60b6f42b0e8f918bdd96214698ad3717da)) +- Clean-up fosite-example/main.go link in README.md ([497ff80](https://github.com/ory/fosite/commit/497ff807a10a9fb41b697c5f91ed9eeb26375b24)): + + The README url to the suggested example was broken. + +- Added jti as parameter to claims helper to privide better interface to developers ([bde3822](https://github.com/ory/fosite/commit/bde38221ed4d32c2f175a60540ac529b306a2ced)) +- Added missing jti claim ([26f41a0](https://github.com/ory/fosite/commit/26f41a06689bd12f7165044a2de7d9332fea3759)) +- Added NOTE ([64516f8](https://github.com/ory/fosite/commit/64516f8e2e0154f46358723d710447380f6d5dc2)) +- Removed unnecessary print. Added bugfix from Arekkas. ([96458b6](https://github.com/ory/fosite/commit/96458b6cf8ee46edbef35598b6d3d877fb63ff87)) +- Example updated ([5022339](https://github.com/ory/fosite/commit/50223396d01d742b1a0a3f0be1252e339cf22985)) +- Added working example of jwt token ([9410fca](https://github.com/ory/fosite/commit/9410fca73dfb00f1dc1e3aa6ec580554ec3daaba)) +- Added tests. 
Still need to verify implemtation with test ([1ebdd88](https://github.com/ory/fosite/commit/1ebdd88746c875bff1a6d074437c5742c812a200)) +- WIP ([caaa43a](https://github.com/ory/fosite/commit/caaa43a184a66b78972fa3725d3636837da1cd68)) +- readme ([c97d844](https://github.com/ory/fosite/commit/c97d84471bc3941e479a79ef2eed4b1ddc07f21c)) +- readme ([fe24f26](https://github.com/ory/fosite/commit/fe24f261de60711d91c016c435ce83938d367609)) +- readme ([be8cd23](https://github.com/ory/fosite/commit/be8cd2333d3eaaf266b56c30951741d7f88edc5e)) +- refactor done (unstaged) ([625f168](https://github.com/ory/fosite/commit/625f1683a0449384877823c2dae1464718c0b264)) +- unstaged ([6c616b1](https://github.com/ory/fosite/commit/6c616b12198419ed33035dabd9e33d1e2afffff2)) +- unstaged ([17ad70b](https://github.com/ory/fosite/commit/17ad70b88ff6ba2add1136762428340d21b86126)) +- Include user session data in all calls to storage handlers. ([2be3fc1](https://github.com/ory/fosite/commit/2be3fc18f5a35646f7cd001eb6b4b92cbb07ef16)) +- unstaged ([fde7c80](https://github.com/ory/fosite/commit/fde7c803798b1f7fa2056bb434dd74d9a4ebeea7)) +- unstaged ([e775aad](https://github.com/ory/fosite/commit/e775aadbc33ec8f15adc7f3b78de5eca53b349f5)) +- unstaged ([ae2fc16](https://github.com/ory/fosite/commit/ae2fc169e663486248f6518a3497b0245754892e)) +- handler/core: fixed tests ([7f5938a](https://github.com/ory/fosite/commit/7f5938adc4f79380239292cd3b6f6e0064df39ef)) +- core handlers: added tests ([e9affb7](https://github.com/ory/fosite/commit/e9affb77442c46fb4647c9a22c1a5eb60945d21d)) +- authorize/explicit ✓ ([d61635b](https://github.com/ory/fosite/commit/d61635b26e3cd34822d4f3ffc0fe25bd4774bd45)) +- authorize/explicit: minor name refactoring and tests for authorize endpoint ([4736e28](https://github.com/ory/fosite/commit/4736e284b327f0941e58073bf860caca4117c545)) +- plugin/token: fix import path ([fdba2f7](https://github.com/ory/fosite/commit/fdba2f7b5bdec0e77faa804066abe1b8895b909e)) +- unstaged ([f939597](https://github.com/ory/fosite/commit/f939597f3f3e6ad4eb582a56b643589271cbf646)) +- Initial commit ([7adad58](https://github.com/ory/fosite/commit/7adad58c327cf52530d8c1e08059564ca0b51538)) +- Access code request workflow finalized ([0232918](https://github.com/ory/fosite/commit/0232918e250eeee93bdab98502a5a30273510c49)) +- Access request api draft ([9f482ef](https://github.com/ory/fosite/commit/9f482ef50711b608dbfb72022ef998f947f0487a)) +- Add api stability section ([3ca6ec9](https://github.com/ory/fosite/commit/3ca6ec936d6b3a8dab0add136b3a2fbfefa4b4df)) +- Add go-rethink tags ([49c82bc](https://github.com/ory/fosite/commit/49c82bc9fe0c4edbb90579e1746e0dad1ae01c5c)) +- Add ValidateToken to CoreValidator ([4c2b9d8](https://github.com/ory/fosite/commit/4c2b9d8f0c84f19ae11f59cb07927ceb59598adc)) +- Added authorize code grant example ([269c5fa](https://github.com/ory/fosite/commit/269c5fab1109bb4cd2e624940dac1b9467663507)) +- Added client grant and did some renaming ([75c8179](https://github.com/ory/fosite/commit/75c8179ef537e6ea87b16cdd87016fca6d389490)) +- Added cristiangraz to the hall of fame 
([1b6e2b4](https://github.com/ory/fosite/commit/1b6e2b470f8f477fdfb2ec1f914e64293bdc7b1b)) +- Added danielchatfield to the hall of fame ([2b988a8](https://github.com/ory/fosite/commit/2b988a8b2abd3dea619e31e174b306e45a62fcc1)) +- Added go 1.6 ([ae41a0a](https://github.com/ory/fosite/commit/ae41a0ace8f74480fec08c83fb1c7bda35830f35)) +- Added go1.4 to allowed failures ([49aa920](https://github.com/ory/fosite/commit/49aa920401a3cf62f16541d8fa4f9fb488270cf3)) +- Added grant and response type validation ([f524fc2](https://github.com/ory/fosite/commit/f524fc2b026621192407ce22e71f2b062635b134)) +- Added json and gorethink tags ([99c836c](https://github.com/ory/fosite/commit/99c836cd526c276419e31db25b695dd0097f0656)) +- Added JWT generator and validator. ([58acd68](https://github.com/ory/fosite/commit/58acd688530666f4720eeacb598da72a475282d5)), closes [#16](https://github.com/ory/fosite/issues/16) +- Added missing file ([8fc1615](https://github.com/ory/fosite/commit/8fc1615bf40777c2c456e1ec4515a269e348e3b4)) +- Added owner method ([78012ed](https://github.com/ory/fosite/commit/78012ed85819caaf154fe9dc4afd212f068fc0a1)) +- Added tests fragment capabilities to writeresponse ([6df0eca](https://github.com/ory/fosite/commit/6df0eca1d74d79e807a77910776ff2249340f103)) +- Api cleanup, gofmt ([3d6e8b6](https://github.com/ory/fosite/commit/3d6e8b6281c6d170a77103b89cfabdd3086a03f0)) +- Api refactor ([d936c91](https://github.com/ory/fosite/commit/d936c914253c58297dcc462a14fb6ddb87bfcac4)) +- Basic draft ([480af91](https://github.com/ory/fosite/commit/480af9165fef8a5e8bcc4896ed680cbf5afbe23c)) +- Defined OAuth2.HandleResponseTypes ([30b6e74](https://github.com/ory/fosite/commit/30b6e74b13f567237ea770bf6a4e99dd95085dcc)): + + Incorporated feedback from GitHub, did refactoring and renaming, added tests + +- Enforce https for all redirect endpoints except localhost ([d65b45a](https://github.com/ory/fosite/commit/d65b45a192cd3a2073f8e6118c005ac93f0bb974)) +- Enforce use of scopes ([12d76dd](https://github.com/ory/fosite/commit/12d76dd7c86408e52f85a3099f6063c462e0b97b)), closes [#14](https://github.com/ory/fosite/issues/14) +- Finalized auth endpoint, added tests, added integration tests ([c6dcb90](https://github.com/ory/fosite/commit/c6dcb90ccbd1d7a179a601e0e6d46cc1004cde92)) +- Finalized token endpoint api ([8de3f10](https://github.com/ory/fosite/commit/8de3f10d89b47ad0d23cf13b425442393f51e104)) +- Finished up integration tests ([a6d027e](https://github.com/ory/fosite/commit/a6d027e3a4f817bb72706fbf0d7e3245f8823b27)) +- Fix broken test ([653e324](https://github.com/ory/fosite/commit/653e3248c0a1aae3bb2c33f64f21854155304e1a)) +- Fix config ([82e9332](https://github.com/ory/fosite/commit/82e9332815579e538089dff61281a7a446f0f6cd)) +- Fix deps ([bcc6a07](https://github.com/ory/fosite/commit/bcc6a07fef6f4036643e79eaf3cdd1f485a682fb)) +- Fix jwt strategy interface mismatch ([#58](https://github.com/ory/fosite/issues/58)) ([4d0a545](https://github.com/ory/fosite/commit/4d0a5450dd3b44e44f5169f90b3591566a6eef1d)) +- Fix unique scope tests ([3ac3a79](https://github.com/ory/fosite/commit/3ac3a798cd1ad5fcd0a53abb45fbb93c7321d154)) +- Fixed granted scope 
match ([13b7efa](https://github.com/ory/fosite/commit/13b7efae68b4f68171422b876e8df197b3453e42)) +- Fixed racy tests ([f0b691d](https://github.com/ory/fosite/commit/f0b691dac03f455ae429116cf121a1ae9054c3e3)) +- Fixed tests ([8bf73e3](https://github.com/ory/fosite/commit/8bf73e3bb4b12e098f63b1007d4ce9a25e0221b7)) +- Fixed tests refactor broke ([5da857b](https://github.com/ory/fosite/commit/5da857b4bcf76b3cc87aa5c9c1f8ee2c0c814992)) +- Fixed typos ([a5391de](https://github.com/ory/fosite/commit/a5391deaa543441f1e3838b0c5093692be247015)), closes [#10](https://github.com/ory/fosite/issues/10) +- Fixed urls ([58908b8](https://github.com/ory/fosite/commit/58908b8cd323434dce944119c5a300f1196634f2)) +- Fixed wrongfully set constant ErrTemporaryUnvailableName ([71a9105](https://github.com/ory/fosite/commit/71a9105a1e4afde3eed0a3ef80239140f6674d15)), closes [#9](https://github.com/ory/fosite/issues/9) +- Generic claims and headers ([1f2e97f](https://github.com/ory/fosite/commit/1f2e97ff847921939fe1f93f6dfdfcbb7bfb0792)) +- Glide ([#43](https://github.com/ory/fosite/issues/43)) ([de85e2a](https://github.com/ory/fosite/commit/de85e2a7ebce57a804ae0beef42b1f1b9017914c)) +- Godep save ([c457104](https://github.com/ory/fosite/commit/c45710465f990e74e8cddf5190f2e309da592297)) +- Goimports ([8b9816c](https://github.com/ory/fosite/commit/8b9816cb1ecbc7befef924b6a923bd52530141f3)) +- Goimports ([96be194](https://github.com/ory/fosite/commit/96be194cae6562fe35696c6ee6c7c547ce20388d)) +- Implemented all core grant types ([ce0a849](https://github.com/ory/fosite/commit/ce0a8496942259d6fe518104bab0dfd3dfea9856)) +- Implemented and documented examples ([8c625c9](https://github.com/ory/fosite/commit/8c625c9cd1e9854eddecafc36e4502577c113ef0)) +- Implemented new token generator based on hmac-sha256 ([01f9ede](https://github.com/ory/fosite/commit/01f9ede7e69588caf12940979a1fc0586d5aac3c)), closes [#11](https://github.com/ory/fosite/issues/11) +- Implemented validator for access tokens ([4140422](https://github.com/ory/fosite/commit/414042259d6f7b1aefe4244bc3f8eb80a83a2d2c)) +- Implicit handlers do not require tls over https ([#61](https://github.com/ory/fosite/issues/61)) ([6c40c08](https://github.com/ory/fosite/commit/6c40c086a1f082d466bac21721571558c32de97c)), closes [#60](https://github.com/ory/fosite/issues/60) +- Improve handling of expiry and include a protected api example ([dfb047d](https://github.com/ory/fosite/commit/dfb047d52b75b5d8a28bcd8d70a3e139da289da1)) +- Improve strategy API ([21f5e8c](https://github.com/ory/fosite/commit/21f5e8ce68097959ef97b1b8dca268f2a9a5d276)) +- Increased coverage ([83194b6](https://github.com/ory/fosite/commit/83194b6b2849292da041385e2274d42a06b36120)) +- Issue refresh token only when 'offline' scope is set ([34068b9](https://github.com/ory/fosite/commit/34068b951d8deea523c40f792608b75d2b4c656f)), closes [#47](https://github.com/ory/fosite/issues/47) +- Jwt signing and client changes ([#44](https://github.com/ory/fosite/issues/44)) ([fae3c96](https://github.com/ory/fosite/commit/fae3c96e89cd364f21bee00f8d5384cd053ab9c1)) +- Made hybrid flow optional 
([08ddbae](https://github.com/ory/fosite/commit/08ddbae46bca5ef18e4a8c7560a46d6238d6a3e9)) +- Major refactor, use enigma, finalized authorize skeleton ([38bacd3](https://github.com/ory/fosite/commit/38bacd340eed991d69dc95f8a7bf6c0f328d8f47)), closes [#8](https://github.com/ory/fosite/issues/8) [#11](https://github.com/ory/fosite/issues/11) +- More test cases ([1188750](https://github.com/ory/fosite/commit/1188750e06c6ba30ebc783a8297aab75a0f95247)) +- More tests ([164506a](https://github.com/ory/fosite/commit/164506a23a3105a37b60b1154052589d1be6c7b1)) +- Moved to root package, updated docs ([1871702](https://github.com/ory/fosite/commit/18717023c4d6b5c02691f94fe80714f2e5e9862d)) +- Moved to root package, updated docs ([5b9b20c](https://github.com/ory/fosite/commit/5b9b20cd6b91a5cf72d054dc9afa2afc9d6dfd15)) +- No "session" secret required ([d1f45ad](https://github.com/ory/fosite/commit/d1f45ad9dcbb0b2866f7c8fa0fe99bc77fb93506)) +- Preview ([ba84987](https://github.com/ory/fosite/commit/ba849870e24070ea44fec9cbcf99cc04a281ffef)) +- Refactor ([eb9153c](https://github.com/ory/fosite/commit/eb9153c389b1c7ca14af78b091705d84e5bba68c)) +- Refactor, fixed tests, incorporated feedback ([9e59df2](https://github.com/ory/fosite/commit/9e59df23353964644bfcc0d148745f8dca691b39)) +- Refactoring, more tests ([df79a81](https://github.com/ory/fosite/commit/df79a81577ec8a9b7517af794ea6f04da71abf91)) +- Refactoring, renaming, docs ([e5476d1](https://github.com/ory/fosite/commit/e5476d15413c7bf96b5a1c282f9d079f538dcc83)) +- Refactoring, renaming, more tests ([9467ca8](https://github.com/ory/fosite/commit/9467ca8ac7b7b7785c96f049a422ed1d16e639b4)) +- Remove duplicate field ([e134351](https://github.com/ory/fosite/commit/e13435109928d11ae9eeb13f1e347043e8be0d53)) +- Remove store mock ([80c14f7](https://github.com/ory/fosite/commit/80c14f786b4a1ed4f1379a5fd6deaf036ece4b47)) +- Rename fields name to client_name and secret to client_secret ([99ce066](https://github.com/ory/fosite/commit/99ce0662f10c82ce034c9c21c8041aa29c460883)) +- Renaming and refactoring ([d3697bd](https://github.com/ory/fosite/commit/d3697bd15cc05bbc8bf3a6833911c3cc5dd1b2f8)) +- Replace internal import ([#52](https://github.com/ory/fosite/issues/52)) ([1290282](https://github.com/ory/fosite/commit/1290282d421ee999ff8e5c2d5d6d0f762dba599c)) +- Replace pkg.ErrNotFound with fosite.ErrNotFound ([4390c49](https://github.com/ory/fosite/commit/4390c495a1794fc7cf26cbeb47969f92d19f0ecc)) +- Request should return unique scopes ([af66918](https://github.com/ory/fosite/commit/af66918f0c91a451659fa2bf01d2c804e14799eb)) +- Resolve an issue where query params could be used instead of post body ([7eb85c6](https://github.com/ory/fosite/commit/7eb85c6e4ae2bb4a67c2e6f6166824351cc17f1d)) +- Resolve danger of not reading enough bytes ([c68a3e9](https://github.com/ory/fosite/commit/c68a3e9bea4bb5a6550e55b2ce2beb59eb48782a)) +- Resolve id token issues with empty claims ([89c60c9](https://github.com/ory/fosite/commit/89c60c9f2898345fd3d75044c8e41eacbf0d4fd5)) +- Resolve scope issues ([#55](https://github.com/ory/fosite/issues/55)) 
([9d54b98](https://github.com/ory/fosite/commit/9d54b989c8d04c4d586e7810cce2e6d4f03d7c48)): + + handler: resolve scope issues + +- Sanitized tests and apis ([12c70bb](https://github.com/ory/fosite/commit/12c70bb4f167afe8d39e85d3ef0e0f13b5761070)) +- Tests for client credentials flow ([c13298c](https://github.com/ory/fosite/commit/c13298cbf165c873f9463a6bbad91b962762f3b0)) +- Tests for resource owner password credentials grant ([f503615](https://github.com/ory/fosite/commit/f5036150f90d7d73e85088400cda9f7de2722a20)) +- Update ([88e84de](https://github.com/ory/fosite/commit/88e84de2676281bb5a7a1e6b5051faa1feb14c2e)) +- Update installation instructions ([201c6aa](https://github.com/ory/fosite/commit/201c6aa6c15d35da14022f7ec43d0e9b87b2bc68)), closes [#33](https://github.com/ory/fosite/issues/33) +- Updated example and added implicit grant ([d12fa5c](https://github.com/ory/fosite/commit/d12fa5ca89cfebb351e023d53b0c57420725195b)) +- Use jwt-go.v2 and fix bc break ([f731d88](https://github.com/ory/fosite/commit/f731d8892ca50501fdc054023f0b7b77d9ecb6ef)) diff --git a/fosite/CODE_OF_CONDUCT.md b/fosite/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..9cebaf358e3 --- /dev/null +++ b/fosite/CODE_OF_CONDUCT.md @@ -0,0 +1,145 @@ + + + +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or advances of + any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, + without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Open Source Community Support + +Ory Open source software is collaborative and based on contributions by +developers in the Ory community. There is no obligation from Ory to help with +individual problems. If Ory open source software is used in production in a +for-profit company or enterprise environment, we mandate a paid support contract +where Ory is obligated under their service level agreements (SLAs) to offer a +defined level of availability and responsibility. For more information about +paid support please contact us at sales@ory.sh. 
+ +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +[office@ory.sh](mailto:office@ory.sh). All complaints will be reviewed and +investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder][mozilla coc]. 
+ +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][faq]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html +[mozilla coc]: https://github.com/mozilla/diversity +[faq]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/fosite/CONTRIBUTING.md b/fosite/CONTRIBUTING.md new file mode 100644 index 00000000000..232c6a89e02 --- /dev/null +++ b/fosite/CONTRIBUTING.md @@ -0,0 +1,251 @@ + + + +# Contribute to Ory Fosite + + + + +- [Introduction](#introduction) +- [FAQ](#faq) +- [How can I contribute?](#how-can-i-contribute) +- [Communication](#communication) +- [Contribute examples or community projects](#contribute-examples-or-community-projects) +- [Contribute code](#contribute-code) +- [Contribute documentation](#contribute-documentation) +- [Disclosing vulnerabilities](#disclosing-vulnerabilities) +- [Code style](#code-style) + - [Working with forks](#working-with-forks) +- [Conduct](#conduct) + + + +## Introduction + +_Please note_: We take Ory Fosite's security and our users' trust very +seriously. If you believe you have found a security issue in Ory Fosite, please +disclose it by contacting us at security@ory.sh. + +There are many ways in which you can contribute. The goal of this document is to +provide a high-level overview of how you can get involved in Ory. + +As a potential contributor, your changes and ideas are welcome at any hour of +the day or night, on weekdays, weekends, and holidays. Please do not ever +hesitate to ask a question or send a pull request. + +If you are unsure, just ask or submit the issue or pull request anyways. You +won't be yelled at for giving it your best effort. The worst that can happen is +that you'll be politely asked to change something. We appreciate any sort of +contributions and don't want a wall of rules to get in the way of that. + +That said, if you want to ensure that a pull request is likely to be merged, +talk to us! You can find out our thoughts and ensure that your contribution +won't clash with Ory Fosite's direction. A great way to do this is via +[Ory Fosite Discussions](https://github.com/orgs/ory/discussions) or the +[Ory Chat](https://www.ory.sh/chat). + +## FAQ + +- I am new to the community. Where can I find the + [Ory Community Code of Conduct?](https://github.com/ory/fosite/blob/master/CODE_OF_CONDUCT.md) + +- I have a question. Where can I get + [answers to questions regarding Ory Fosite?](#communication) + +- I would like to contribute but I am not sure how. Are there + [easy ways to contribute?](#how-can-i-contribute) + [Or good first issues?](https://github.com/search?l=&o=desc&q=label%3A%22help+wanted%22+label%3A%22good+first+issue%22+is%3Aopen+user%3Aory+user%3Aory-corp&s=updated&type=Issues) + +- I want to talk to other Ory Fosite users. + [How can I become a part of the community?](#communication) + +- I would like to know what I am agreeing to when I contribute to Ory Fosite. + Does Ory have + [a Contributors License Agreement?](https://cla-assistant.io/ory/fosite) + +- I would like updates about new versions of Ory Fosite. + [How are new releases announced?](https://www.ory.sh/l/sign-up-newsletter) + +## How can I contribute? 
+ +If you want to start to contribute code right away, take a look at the +[list of good first issues](https://github.com/ory/fosite/labels/good%20first%20issue). + +There are many other ways you can contribute. Here are a few things you can do +to help out: + +- **Give us a star.** It may not seem like much, but it really makes a + difference. This is something that everyone can do to help out Ory Fosite. + Github stars help the project gain visibility and stand out. + +- **Join the community.** Sometimes helping people can be as easy as listening + to their problems and offering a different perspective. Join our Slack, have a + look at discussions in the forum and take part in community events. More info + on this in [Communication](#communication). + +- **Answer discussions.** At all times, there are several unanswered discussions + on GitHub. You can see an + [overview here](https://github.com/discussions?discussions_q=is%3Aunanswered+org%3Aory+sort%3Aupdated-desc). + If you think you know an answer or can provide some information that might + help, please share it! Bonus: You get GitHub achievements for answered + discussions. + +- **Help with open issues.** We have a lot of open issues for Ory Fosite and + some of them may lack necessary information, some are duplicates of older + issues. You can help out by guiding people through the process of filling out + the issue template, asking for clarifying information or pointing them to + existing issues that match their description of the problem. + +- **Review documentation changes.** Most documentation just needs a review for + proper spelling and grammar. If you think a document can be improved in any + way, feel free to hit the `edit` button at the top of the page. More info on + contributing to the documentation [here](#contribute-documentation). + +- **Help with tests.** Pull requests may lack proper tests or test plans. These + are needed for the change to be implemented safely. + +## Communication + +We use [Slack](https://www.ory.sh/chat). You are welcome to drop in and ask +questions, discuss bugs and feature requests, talk to other users of Ory, etc. + +Check out [Ory Fosite Discussions](https://github.com/orgs/ory/discussions). +This is a great place for in-depth discussions and lots of code examples, logs +and similar data. + +You can also join our community calls if you want to speak to the Ory team +directly or ask some questions. You can find more info and participate in +[Slack](https://www.ory.sh/chat) in the #community-call channel. + +If you want to receive regular notifications about updates to Ory Fosite, +consider joining the mailing list. We will _only_ send you vital information on +the projects that you are interested in. + +Also, [follow us on Twitter](https://twitter.com/orycorp). + +## Contribute examples or community projects + +One of the most impactful ways to contribute is by adding code examples or other +Ory-related code. You can find an overview of community code in the +[awesome-ory](https://github.com/ory/awesome-ory) repository. + +_If you would like to contribute a new example, we would love to hear from you!_ + +Please [open a pull request at awesome-ory](https://github.com/ory/awesome-ory/) +to add your example or Ory-related project to the awesome-ory README. 
+ +## Contribute code + +Unless you are fixing a known bug, we **strongly** recommend discussing it with +the core team via a GitHub issue or [in our chat](https://www.ory.sh/chat) +before getting started to ensure your work is consistent with Ory Fosite's +roadmap and architecture. + +All contributions are made via pull requests. To make a pull request, you will +need a GitHub account; if you are unclear on this process, see GitHub's +documentation on [forking](https://help.github.com/articles/fork-a-repo) and +[pull requests](https://help.github.com/articles/using-pull-requests). Pull +requests should be targeted at the `master` branch. Before creating a pull +request, go through this checklist: + +1. Create a feature branch off of `master` so that changes do not get mixed up. +1. [Rebase](http://git-scm.com/book/en/Git-Branching-Rebasing) your local + changes against the `master` branch. +1. Run the full project test suite with the `go test -tags sqlite ./...` (or + equivalent) command and confirm that it passes. +1. Run `make format` +1. Add a descriptive prefix to commits. This ensures a uniform commit history + and helps structure the changelog. Please refer to this + [Convential Commits configuration](https://github.com/ory/fosite/blob/master/.github/workflows/conventional_commits.yml) + for the list of accepted prefixes. You can read more about the Conventional + Commit specification + [at their site](https://www.conventionalcommits.org/en/v1.0.0/). + +If a pull request is not ready to be reviewed yet +[it should be marked as a "Draft"](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request). + +Before your contributions can be reviewed you need to sign our +[Contributor License Agreement](https://cla-assistant.io/ory/fosite). + +This agreement defines the terms under which your code is contributed to Ory. +More specifically it declares that you have the right to, and actually do, grant +us the rights to use your contribution. You can see the Apache 2.0 license under +which our projects are published +[here](https://github.com/ory/meta/blob/master/LICENSE). + +When pull requests fail the automated testing stages (for example unit or E2E +tests), authors are expected to update their pull requests to address the +failures until the tests pass. + +Pull requests eligible for review + +1. follow the repository's code formatting conventions; +2. include tests that prove that the change works as intended and does not add + regressions; +3. document the changes in the code and/or the project's documentation; +4. pass the CI pipeline; +5. have signed our + [Contributor License Agreement](https://cla-assistant.io/ory/fosite); +6. include a proper git commit message following the + [Conventional Commit Specification](https://www.conventionalcommits.org/en/v1.0.0/). + +If all of these items are checked, the pull request is ready to be reviewed and +you should change the status to "Ready for review" and +[request review from a maintainer](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/requesting-a-pull-request-review). + +Reviewers will approve the pull request once they are satisfied with the patch. + +## Contribute documentation + +Please provide documentation when changing, removing, or adding features. 
All
Ory Documentation resides in the
[Ory documentation repository](https://github.com/ory/docs/). For further
instructions please head over to the Ory Documentation
[README.md](https://github.com/ory/docs/blob/master/README.md).

## Disclosing vulnerabilities

Please disclose vulnerabilities exclusively to
[security@ory.sh](mailto:security@ory.sh). Do not use GitHub issues.

## Code style

Please run `make format` to format all source code following the Ory standard.

### Working with forks

```bash
# First you clone the original repository
git clone git@github.com:ory/fosite.git

# Next you add a git remote that is your fork:
git remote add fork git@github.com:<your-username>/fosite.git

# Next you fetch the latest changes from origin for master:
git fetch origin
git checkout master
git pull --rebase

# Next you create a new feature branch off of master:
git checkout -b my-feature-branch

# Now you do your work and commit your changes:
git add -A
git commit -a -m "fix: this is the subject line" -m "This is the body line. Closes #123"

# And the last step is pushing this to your fork
git push -u fork my-feature-branch
```

Now go to the project's GitHub Pull Request page and click "New pull request".

## Conduct

Whether you are a regular contributor or a newcomer, we care about making this
community a safe place for you and we've got your back.

[Ory Community Code of Conduct](https://github.com/ory/fosite/blob/master/CODE_OF_CONDUCT.md)

We welcome discussion about creating a welcoming, safe, and productive
environment for the community. If you have any questions, feedback, or concerns
[please let us know](https://www.ory.sh/chat).

diff --git a/fosite/HISTORY.md b/fosite/HISTORY.md new file mode 100644 index 00000000000..4ed22ced92b --- /dev/null +++ b/fosite/HISTORY.md @@ -0,0 +1,742 @@

**THIS DOCUMENT HAS MOVED**

This file is no longer being updated and kept for historical reasons. Please
check the [CHANGELOG](CHANGELOG.md) instead!
+ + + + +- [0.28.0](#0280) +- [0.27.0](#0270) + - [Conceptual Changes](#conceptual-changes) + - [API Changes](#api-changes) +- [0.26.0](#0260) +- [0.24.0](#0240) + - [Breaking change(s)](#breaking-changes) + - [`fosite/handler/oauth2.JWTStrategy`](#fositehandleroauth2jwtstrategy) + - [`OpenIDConnectRequestValidator.ValidatePrompt`](#openidconnectrequestvalidatorvalidateprompt) +- [0.23.0](#0230) + - [Breaking change(s)](#breaking-changes-1) + - [`Hasher`](#hasher) +- [0.22.0](#0220) + - [Breaking change(s)](#breaking-changes-2) + - [`JWTStrategy`](#jwtstrategy) +- [0.21.0](#0210) + - [Changes to parsing of OAuth 2.0 Client `response_types`](#changes-to-parsing-of-oauth-20-client-response_types) + - [`openid.DefaultStrategy` field name changed](#openiddefaultstrategy-field-name-changed) + - [`oauth2.RS256JWTStrategy` was renamed and field name changed](#oauth2rs256jwtstrategy-was-renamed-and-field-name-changed) + - [Adds `private_key_jwt` client authentication method](#adds-private_key_jwt-client-authentication-method) + - [Response Type `id_token` no longer required for authorize_code flow](#response-type-id_token-no-longer-required-for-authorize_code-flow) +- [0.20.0](#0200) +- [Breaking Changes](#breaking-changes) + - [JWT Claims](#jwt-claims) + - [`AuthorizeCodeStorage`](#authorizecodestorage) +- [0.19.0](#0190) +- [0.18.0](#0180) +- [0.17.0](#0170) +- [0.16.0](#0160) +- [0.15.0](#0150) +- [0.14.0](#0140) +- [0.13.0](#0130) + - [Breaking changes](#breaking-changes) +- [0.12.0](#0120) + - [Breaking changes](#breaking-changes-1) + - [Improved cryptographic methods](#improved-cryptographic-methods) +- [0.11.0](#0110) + - [Non-breaking changes](#non-breaking-changes) + - [Storage adapter](#storage-adapter) + - [Reducing use of gomock](#reducing-use-of-gomock) + - [Breaking Changes](#breaking-changes-1) + - [`fosite/handler/oauth2.AuthorizeCodeGrantStorage` was removed](#fositehandleroauth2authorizecodegrantstorage-was-removed) + - [`fosite/handler/oauth2.RefreshTokenGrantStorage` was removed](#fositehandleroauth2refreshtokengrantstorage-was-removed) + - [`fosite/handler/oauth2.AuthorizeCodeGrantStorage` was removed](#fositehandleroauth2authorizecodegrantstorage-was-removed-1) + - [WildcardScopeStrategy](#wildcardscopestrategy) + - [Refresh tokens and authorize codes are no longer JWTs](#refresh-tokens-and-authorize-codes-are-no-longer-jwts) + - [Delete access tokens when persisting refresh session](#delete-access-tokens-when-persisting-refresh-session) +- [0.10.0](#0100) +- [0.9.0](#090) +- [0.8.0](#080) + - [Breaking changes](#breaking-changes-2) + - [`ClientManager`](#clientmanager) + - [`OAuth2Provider`](#oauth2provider) +- [0.7.0](#070) +- [0.6.0](#060) +- [0.5.0](#050) +- [0.4.0](#040) +- [0.3.0](#030) +- [0.2.0](#020) +- [0.1.0](#010) + + + +## 0.28.0 + +This version (re-)introduces refresh token lifespans. Per default, this feature +is enabled and set to 30 days. If a refresh token has not been used within 30 +days, it will expire. + +To disable refresh token lifespans (previous behaviour), set +`compose.Config.RefreshTokenLifespan = -1`. + +## 0.27.0 + +This PR adds the ability to specify a target audience for OAuth 2.0 Access +Tokens. + +### Conceptual Changes + +From now on, `scope` and `audience` will be checked against the client's +whitelisted scope and audience on every refresh token exchange. This prevents +clients, which no longer are allowed to request a certain audience or scope, to +keep using those values with existing refresh tokens. 
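As a rough sketch of this behaviour (illustrative only, not fosite's actual
implementation; the function name and the exact-match audience comparison are
made up for this example), the re-validation performed on a refresh token
exchange amounts to something like the following, using the `GetAudience`
method introduced below:

```go
package example

import "github.com/ory/fosite"

// refreshStillAllowed sketches the new check: the scope and audience granted in
// the past must still be covered by what the client may request today.
func refreshStillAllowed(client fosite.Client, grantedScope, grantedAudience fosite.Arguments) bool {
	for _, scope := range grantedScope {
		// Any of fosite's scope strategies works here; HierarchicScopeStrategy is one of them.
		if !fosite.HierarchicScopeStrategy(client.GetScopes(), scope) {
			return false
		}
	}
	for _, audience := range grantedAudience {
		allowed := false
		// GetAudience is the method added to fosite.Client in this release.
		for _, whitelisted := range client.GetAudience() {
			if whitelisted == audience {
				allowed = true
				break
			}
		}
		if !allowed {
			return false
		}
	}
	return true
}
```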
+ +### API Changes + +```go +type fosite.Client interface { ++ // GetAudience returns the allowed audience(s) for this client. ++ GetAudience() Arguments +} +``` + +```go +type fosite.Request struct { +- Scopes Argument ++ RequestedScope Argument + +- GrantedScopes Argument ++ GrantedScope Argument +} +``` + +```go +type fosite.Requester interface { ++ // GetRequestedAudience returns the requested audiences for this request. ++ GetRequestedAudience() (audience Arguments) + ++ // SetRequestedAudience sets the requested audienc. ++ SetRequestedAudience(audience Arguments) + ++ // GetGrantedAudience returns all granted scopes. ++ GetGrantedAudience() (grantedAudience Arguments) + ++ // GrantAudience marks a request's audience as granted. ++ GrantAudience(audience string) +} +``` + +```go +type fosite/token/jwt.JWTClaimsContainer interface { +- // With returns a copy of itself with expiresAt and scope set to the given values. +- With(expiry time.Time, scope, audience []string) JWTClaimsContainer + ++ // With returns a copy of itself with expiresAt, scope, audience set to the given values. ++ With(expiry time.Time, scope, audience []string) JWTClaimsContainer +} +``` + +## 0.26.0 + +This release makes it easier to define custom JWT Containers for access tokens +when using the JWT strategy. To do that, the following signatures have changed: + +```go +// github.com/ory/fosite/handler/oauth2 +type JWTSessionContainer interface { + // GetJWTClaims returns the claims. +- GetJWTClaims() *jwt.JWTClaims ++ GetJWTClaims() jwt.JWTClaimsContainer + + // GetJWTHeader returns the header. + GetJWTHeader() *jwt.Headers + + fosite.Session +} +``` + +```go ++ type JWTClaimsContainer interface { ++ // With returns a copy of itself with expiresAt and scope set to the given values. ++ With(expiry time.Time, scope []string) JWTClaimsContainer ++ ++ // WithDefaults returns a copy of itself with issuedAt and issuer set to the given default values. If those ++ // values are already set in the claims, they will not be updated. ++ WithDefaults(iat time.Time, issuer string) JWTClaimsContainer ++ ++ // ToMapClaims returns the claims as a github.com/dgrijalva/jwt-go.MapClaims type. ++ ToMapClaims() jwt.MapClaims ++ } +``` + +All default session implementations have been updated to reflect this change. If +you define custom session, this patch will affect you. + +## 0.24.0 + +This release addresses areas where the go context was missing or not propagated +down the call path properly. 
+ +### Breaking change(s) + +#### `fosite/handler/oauth2.JWTStrategy` + +The +[`fosite/handler/oauth2.JWTStrategy`](https://github.com/ory/fosite/blob/master/handler/oauth2/strategy.go) +interface changed as a context parameter was added to its method signature: + +```go +type JWTStrategy interface { +- Validate(tokenType fosite.TokenType, token string) (requester fosite.Requester, err error) ++ Validate(ctx context.Context, tokenType fosite.TokenType, token string) (requester fosite.Requester, err error) +} +``` + +#### `OpenIDConnectRequestValidator.ValidatePrompt` + +The +[`OpenIDConnectRequestValidator.ValidatePrompt`](https://github.com/ory/fosite/blob/master/handler/openid/validator.go) +method signature was updated to take a go context as its first parameter: + +```go +- func (v *OpenIDConnectRequestValidator) ValidatePrompt(req fosite.AuthorizeRequester) error { ++ func (v *OpenIDConnectRequestValidator) ValidatePrompt(ctx context.Context, req fosite.AuthorizeRequester) error { +``` + +## 0.23.0 + +This releases addresses inconsistencies in some of the public interfaces by +passing in the go context to their signatures. + +### Breaking change(s) + +#### `Hasher` + +The [`Hasher`](https://github.com/ory/fosite/blob/master/hash.go) interface +changed as a context parameter was added to its method signatures: + +```go +type Hasher interface { +- Compare(hash, data []byte) error ++ Compare(ctx context.Context, hash, data []byte) error +- Hash(data []byte) ([]byte, error) ++ Hash(ctx context.Context, data []byte) ([]byte, error) +} +``` + +## 0.22.0 + +This releases addresses inconsistencies in some of the public interfaces by +passing in the go context to their signatures. + +### Breaking change(s) + +#### `JWTStrategy` + +The [`JWTStrategy`](https://github.com/ory/fosite/blob/master/token/jwt/jwt.go) +interface changed as a context parameter was added to its method signatures: + +```go +type JWTStrategy interface { +- Generate(claims jwt.Claims, header Mapper) (string, string, error) ++ Generate(ctx context.Context, claims jwt.Claims, header Mapper) (string, string, error) +- Validate(token string) (string, error) ++ Validate(ctx context.Context, token string) (string, error) +- GetSignature(token string) (string, error) ++ GetSignature(ctx context.Context, token string) (string, error) +- Hash(in []byte) ([]byte, error) ++ Hash(ctx context.Context, in []byte) ([]byte, error) +- Decode(token string) (*jwt.Token, error) ++ Decode(ctx context.Context, token string) (*jwt.Token, error) + GetSigningMethodLength() int +} +``` + +## 0.21.0 + +This release improves compatibility with the OpenID Connect Dynamic Client +Registration 1.0 specification. + +### Changes to parsing of OAuth 2.0 Client `response_types` + +Previously, when response types such as `code token id_token` were requested +(OpenID Connect Hybrid Flow) it was enough for the client to have +`response_types=["code", "token", "id_token"]`. This is however incompatible +with the OpenID Connect Dynamic Client Registration 1.0 spec which dictates that +the `response_types` have to match exactly. + +Assuming you are requesting `&response_types=code+token+id_token`, your client +should have `response_types=["code token id_token"]`, if other response types +are required (e.g. `&response_types=code`, `&response_types=token`) they too +must be included: `response_types=["code", "token", "code token id_token"]`. 
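For illustration, a client that should be allowed to run both the plain
Authorization Code Flow and the full Hybrid Flow could be registered along
these lines (a hedged sketch using `fosite.DefaultClient`; the ID, redirect
URI, and scopes are placeholders):

```go
import "github.com/ory/fosite"

var hybridClient = &fosite.DefaultClient{
	ID:           "my-client",
	RedirectURIs: []string{"https://example.com/callback"},
	GrantTypes:   []string{"authorization_code", "implicit", "refresh_token"},
	// Exact matching: list every response_type combination the client will
	// actually send, including the hybrid value as one single string.
	ResponseTypes: []string{"code", "code token id_token"},
	Scopes:        []string{"openid", "offline"},
}
```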
+ +### `openid.DefaultStrategy` field name changed + +Field `RS256JWTStrategy` was renamed to `JWTStrategy` and now relies on an +interface instead of a concrete struct. + +### `oauth2.RS256JWTStrategy` was renamed and field name changed + +The strategy `oauth2.RS256JWTStrategy` was renamed to +`oauth2.DefaultJWTStrategy` and now accepts an interface that implements +`jwt.JWTStrategy` instead of directly relying on `jwt.RS256JWTStrategy`. For +this reason, the field `RS256JWTStrategy` was renamed to `JWTStrategy` + +### Adds `private_key_jwt` client authentication method + +This patch adds the ability to perform the +[`private_key_jwt` client authentication method](http://openid.net/specs/openid-connect-core-1_0.html#ClientAuthentication) +defined in the OpenID Connect specification. Please note that method +`client_secret_jwt` is not supported because of the BCrypt hashing strategy. + +For this strategy to work, you must set the `TokenURL` field of the +`compose.Config` object to the authorization server's Token URL. + +If you would like to support this authentication method, your `Client` +implementation must also implement `fosite.DefaultOpenIDConnectClient` and then, +for example, `GetTokenEndpointAuthMethod()` should return `private_key_jwt`. + +### Response Type `id_token` no longer required for authorize_code flow + +The `authorize_code` +[does not require](https://openid.net/specs/openid-connect-registration-1_0.html#ClientMetadata) +the `id_token` response type to be available when performing the OpenID Connect +flow: + +> grant_types OPTIONAL. JSON array containing a list of the OAuth 2.0 Grant +> Types that the Client is declaring that it will restrict itself to using. The +> Grant Type values used by OpenID Connect are: +> +> authorization_code: The Authorization Code Grant Type described in OAuth 2.0 Section 4.1. +> implicit: The Implicit Grant Type described in OAuth 2.0 Section 4.2. +> refresh_token: The Refresh Token Grant Type described in OAuth 2.0 Section 6. +> +> The following table lists the correspondence between response_type values that the Client will use and grant_type values that MUST be included in the registered grant_types list: +> +> code: authorization_code +> id_token: implicit +> token id_token: implicit +> code id_token: authorization_code, implicit +> code token: authorization_code, implicit +> code token id_token: authorization_code, implicit +> +> If omitted, the default is that the Client will use only the authorization_code Grant Type. + +Before this patch, the `id_token` response type was required whenever an ID +Token was requested. This patch changes that. + +## 0.20.0 + +This release implements an OAuth 2.0 Best Practice with regards to revoking +already issued access and refresh tokens if an authorization code is used more +than one time. + +## Breaking Changes + +### JWT Claims + +- `github.com/ory/fosite/token/jwt.JWTClaims.Audience` is no longer a `string`, + but a string slice `[]string`. +- `github.com/ory/fosite/handler/openid.IDTokenClaims` is no longer a `string`, + but a string slice `[]string`. + +### `AuthorizeCodeStorage` + +This improves security as, in the event of an authorization code being leaked, +all associated tokens are revoked. To implement this feature, a breaking change +had to be introduced. 
The +`github.com/ory/fosite/handler/oauth2.AuthorizeCodeStorage` interface changed as +follows: + +- `DeleteAuthorizeCodeSession(ctx context.Context, code string) (err error)` has + been removed from the interface and is no longer used by this library. +- `InvalidateAuthorizeCodeSession(ctx context.Context, code string) (err error)` + has been introduced. +- The error `github.com/ory/fosite/handler/oauth2.ErrInvalidatedAuthorizeCode` + has been added. + +The following documentation sheds light on how you should update your storage +adapter: + +``` +// ErrInvalidatedAuthorizeCode is an error indicating that an authorization code has been +// used previously. +var ErrInvalidatedAuthorizeCode = errors.New("Authorization code has ben invalidated") + +// AuthorizeCodeStorage handles storage requests related to authorization codes. +type AuthorizeCodeStorage interface { + // GetAuthorizeCodeSession stores the authorization request for a given authorization code. + CreateAuthorizeCodeSession(ctx context.Context, code string, request fosite.Requester) (err error) + + // GetAuthorizeCodeSession hydrates the session based on the given code and returns the authorization request. + // If the authorization code has been invalidated with `InvalidateAuthorizeCodeSession`, this + // method should return the ErrInvalidatedAuthorizeCode error. + // + // Make sure to also return the fosite.Requester value when returning the ErrInvalidatedAuthorizeCode error! + GetAuthorizeCodeSession(ctx context.Context, code string, session fosite.Session) (request fosite.Requester, err error) + + // InvalidateAuthorizeCodeSession is called when an authorize code is being used. The state of the authorization + // code should be set to invalid and consecutive requests to GetAuthorizeCodeSession should return the + // ErrInvalidatedAuthorizeCode error. + InvalidateAuthorizeCodeSession(ctx context.Context, code string) (err error) +} +``` + +## 0.19.0 + +This release improves the OpenID Connect vaildation strategy which now properly +handles `prompt`, `max_age`, and `id_token_hint` at the `/oauth2/auth` endpoint +instead of the `/oauth2/token` endpoint. + +To achieve this, the `OpenIDConnectRequestValidator` has been modified and now +requires a `jwt.JWTStrategy` (implemented by, for example +`jwt.RS256JWTStrategy`). + +The compose package has been updated accordingly. You should not expect any +major breaking changes from this release. + +## 0.18.0 + +This release allows the introspection handler to return the token type (e.g. +`access_token`, `refresh_token`) of the introspected token. To achieve that, +some breaking API changes have been introduced: + +- `OAuth2.IntrospectToken(ctx context.Context, token string, tokenType TokenType, session Session, scope ...string) (AccessRequester, error)` + is now + `OAuth2.IntrospectToken(ctx context.Context, token string, tokenType TokenType, session Session, scope ...string) (TokenType, AccessRequester, error)`. +- `TokenIntrospector.IntrospectToken(ctx context.Context, token string, tokenType TokenType, accessRequest AccessRequester, scopes []string) (error)` + is now + `TokenIntrospector.IntrospectToken(ctx context.Context, token string, tokenType TokenType, accessRequest AccessRequester, scopes []string) (TokenType, error)`. + +This patch also resolves a misconfigured json key in the `IntrospectionResponse` +struct. `AccessRequester AccessRequester json:",extra"` is now properly declared +as `AccessRequester AccessRequester json:"extra"`. 
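For illustration, consuming the new two-value introspection result might look
roughly like this (a hedged sketch; the `introspect` helper and its error
handling are made up, and `provider`/`session` stand for your own
`fosite.OAuth2Provider` and `fosite.Session` implementations):

```go
package example

import (
	"context"
	"errors"

	"github.com/ory/fosite"
)

// introspect shows the shape of the new call; real code would do more with the result.
func introspect(ctx context.Context, provider fosite.OAuth2Provider, token string, session fosite.Session) error {
	// The token type of the introspected credential is now returned as well.
	tokenType, ar, err := provider.IntrospectToken(ctx, token, fosite.AccessToken, session)
	if err != nil {
		return err // token is inactive, expired, or unknown
	}
	if tokenType != fosite.AccessToken {
		return errors.New("expected an access token, got a " + string(tokenType))
	}
	_ = ar.GetGrantedScopes() // the requester associated with the token is available, too
	return nil
}
```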
+ +## 0.17.0 + +This release resolves a security issue (reported by +[platform.sh](https://www.platform.sh)) related to potential storage +implementations. This library used to pass all of the request body from both +authorize and token endpoints to the storage adapters. As some of these values +are needed in consecutive requests, some storage adapters chose to drop the full +body to the database. + +This implied that confidential parameters, such as the `client_secret` which can +be passed in the request body since version 0.15.0, were stored as key/value +pairs in plaintext in the database. While most client secrets are generated +programmatically (as opposed to set by the user), it's a considerable security +issue nonetheless. + +The issue has been resolved by sanitizing the request body and only including +those values truly required by their respective handlers. This lead to two +breaking changes in the API: + +1. The `fosite.Requester` interface has a new method + `Sanitize(allowedParameters []string) Requester` which returns a sanitized + clone of the method receiver. If you do not use your own `fosite.Requester` + implementation, this won't affect you. +2. If you use the PKCE handler, you will have to add three new methods to your + storage implementation. The methods to be added work exactly like, for + example `CreateAuthorizeCodeSession`. A reference implementation can be found + in [./storage/memory.go](./storage/memory.go). The method signatures are as + follows: + +```go +type PKCERequestStorage interface { + GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) + CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) error + DeletePKCERequestSession(ctx context.Context, signature string) error +} +``` + +We encourage you to upgrade to this release and check your storage +implementations and potentially remove old data. + +We would like to thank [platform.sh](https://www.platform.sh) for sponsoring the +development of a patch that resolves this issue. + +## 0.16.0 + +This patch introduces `SendDebugMessagesToClients` to the Fosite struct which +enables/disables sending debug information to clients. Debug information may +contain sensitive information as it forwards error messages from, for example, +storage implementations. For this reason, `RevealDebugPayloads` defaults to +false. Keep in mind that the information may be very helpful when specific OAuth +2.0 requests fail and we generally recommend displaying debug information. + +Additionally, error keys for JSON changed which caused a new minor version, +speicifically +[`statusCode` was changed to `status_code`](https://github.com/ory/fosite/pull/242/files#diff-dd25e0e0a594c3f3592c1c717039b85eR221). + +## 0.15.0 + +This release focuses on improving compatibility with OpenID Connect +Certification and better error context. + +- Error handling is improved by explicitly adding debug information (e.g. "Token + invalid because it was not found in the database") to the error object. + Previously, the original error was prepended which caused weird formatting + issues. +- Allows client credentials in POST body at the `/oauth2/token` endpoint. Please + note that this method is not recommended to be used, unless the client making + the request is unable to use HTTP Basic Authorization. 
+- Allows public clients (without secret) to access the `/oauth2/token` endpoint + which was previously only possible by adding an arbitrary secret. + +This release has no breaking changes to the external API but due to the nature +of the changes, it is released as a new major version. + +## 0.14.0 + +Improves error contexts. A breaking code changes to the public API was reverted +with 0.14.1. + +## 0.13.0 + +### Breaking changes + +`glide` was replaced with `dep`. + +## 0.12.0 + +### Breaking changes + +#### Improved cryptographic methods + +- The minimum required secret length used to generate signatures of access + tokens has increased from 16 to 32 byte. +- The algorithm used to generate access tokens using the HMAC-SHA strategy has + changed from HMAC-SHA256 to HMAC-SHA512. + +## 0.11.0 + +### Non-breaking changes + +#### Storage adapter + +To simplify the storage adapter logic, and also reduce the likelihoods of bugs +within the storage adapter, the interface was greatly simplified. Specifically, +these two methods have been removed: + +- `PersistRefreshTokenGrantSession(ctx context.Context, requestRefreshSignature, accessSignature, refreshSignature string, request fosite.Requester) error` +- `PersistAuthorizeCodeGrantSession(ctx context.Context, authorizeCode, accessSignature, refreshSignature string, request fosite.Requester) error` + +For this change, you don't need to do anything. You can however simply delete +those two methods from your store. + +#### Reducing use of gomock + +In the long term, fosite should remove all gomocks and instead test against the +internal implementations. This will increase iterations per line during tests +and reduce annoying mock updates. + +### Breaking Changes + +#### `fosite/handler/oauth2.AuthorizeCodeGrantStorage` was removed + +`AuthorizeCodeGrantStorage` was used specifically in the composer. Refactor +references to `AuthorizeCodeGrantStorage` with `CoreStorage`. + +#### `fosite/handler/oauth2.RefreshTokenGrantStorage` was removed + +`RefreshTokenGrantStorage` was used specifically in the composer. Refactor +references to `RefreshTokenGrantStorage` with `CoreStorage`. + +#### `fosite/handler/oauth2.AuthorizeCodeGrantStorage` was removed + +`AuthorizeCodeGrantStorage` was used specifically in the composer. Refactor +references to `AuthorizeCodeGrantStorage` with `CoreStorage`. + +#### WildcardScopeStrategy + +A new [scope strategy](https://github.com/ory/fosite/pull/187) was introduced +called `WildcardScopeStrategy`. This strategy is now the default when using the +composer. To set the HierarchicScopeStrategy strategy, do: + +``` +import "github.com/ory/fosite/compose" + +var config = &compose.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, +} +``` + +#### Refresh tokens and authorize codes are no longer JWTs + +Using JWTs for refresh tokens and authorize codes did not make sense: + +1. Refresh tokens are long-living credentials, JWTs require an expiry date. +2. Refresh tokens are never validated client-side, only server-side. Thus access + to the store is available. +3. Authorize codes are never validated client-side, only server-side. + +Also, one compose method changed due to this: + +```go +package compose + +// .. 
+
+- func NewOAuth2JWTStrategy(key *rsa.PrivateKey) *oauth2.RS256JWTStrategy
++ func NewOAuth2JWTStrategy(key *rsa.PrivateKey, strategy *oauth2.HMACSHAStrategy) *oauth2.RS256JWTStrategy
+```
+
+#### Delete access tokens when persisting refresh session
+
+Please delete access tokens in your store when you persist a refresh session.
+This increases security. Here is an example of how to do that using only
+existing methods:
+
+```go
+func (s *MemoryStore) PersistRefreshTokenGrantSession(ctx context.Context, originalRefreshSignature, accessSignature, refreshSignature string, request fosite.Requester) error {
+  if ts, err := s.GetRefreshTokenSession(ctx, originalRefreshSignature, nil); err != nil {
+    return err
+  } else if err := s.RevokeAccessToken(ctx, ts.GetID()); err != nil {
+    return err
+  } else if err := s.RevokeRefreshToken(ctx, ts.GetID()); err != nil {
+    return err
+  } else if err := s.CreateAccessTokenSession(ctx, accessSignature, request); err != nil {
+    return err
+  } else if err := s.CreateRefreshTokenSession(ctx, refreshSignature, request); err != nil {
+    return err
+  }
+
+  return nil
+}
+```
+
+## 0.10.0
+
+It is no longer possible to introspect authorize codes, and passing scopes to
+the introspector now also checks refresh token scopes.
+
+## 0.9.0
+
+This patch adds the ability to pass a custom hasher to `compose.Compose`, which
+is a breaking change. You can pass nil for the fosite default hasher:
+
+```
+package compose
+
+-func Compose(config *Config, storage interface{}, strategy interface{}, factories ...Factory) fosite.OAuth2Provider {
++func Compose(config *Config, storage interface{}, strategy interface{}, hasher fosite.Hasher, factories ...Factory) fosite.OAuth2Provider {
+```
+
+## 0.8.0
+
+This patch addresses some inconsistencies in the public interfaces. Also,
+remaining references to the old repository location at `ory-am/fosite` were
+updated to `ory/fosite`.
+
+### Breaking changes
+
+#### `ClientManager`
+
+The
+[`ClientManager`](https://github.com/ory/fosite/blob/master/client_manager.go)
+interface changed, as a context parameter was added:
+
+```go
+type ClientManager interface {
+  // GetClient loads the client by its ID or returns an error
+  // if the client does not exist or another error occurred.
+- GetClient(id string) (Client, error)
++ GetClient(ctx context.Context, id string) (Client, error)
+}
+```
+
+#### `OAuth2Provider`
+
+The [OAuth2Provider](https://github.com/ory/fosite/blob/master/oauth2.go)
+interface changed, as the need for passing down `*http.Request` was removed.
+This is justifiable because `NewAuthorizeRequest` and `NewAccessRequest` already
+contain `*http.Request`.
+
+The public API of those two methods changed:
+
+```go
+- NewAuthorizeResponse(ctx context.Context, req *http.Request, requester AuthorizeRequester, session Session) (AuthorizeResponder, error)
++ NewAuthorizeResponse(ctx context.Context, requester AuthorizeRequester, session Session) (AuthorizeResponder, error)
+
+- NewAccessResponse(ctx context.Context, req *http.Request, requester AccessRequester) (AccessResponder, error)
++ NewAccessResponse(ctx context.Context, requester AccessRequester) (AccessResponder, error)
+```
+
+## 0.7.0
+
+Breaking changes:
+
+- Replaced `"golang.org/x/net/context"` with `"context"`.
+- Moved the repo from `github.com/ory-am/fosite` to `github.com/ory/fosite`.
+
+## 0.6.0
+
+A bug related to refresh tokens was found.
To mitigate it, a `Clone()` method
+has been introduced to the `fosite.Session` interface. If you use a custom
+session object, this will be a breaking change. Fosite's default sessions have
+been upgraded and no additional work should be required. If you use your own
+session struct, we encourage using package `encoding/gob` to deep-copy it in
+`Clone()`.
+
+## 0.5.0
+
+Breaking changes:
+
+- `compose.OpenIDConnectExplicit` is now `compose.OpenIDConnectExplicitFactory`
+- `compose.OpenIDConnectImplicit` is now `compose.OpenIDConnectImplicitFactory`
+- `compose.OpenIDConnectHybrid` is now `compose.OpenIDConnectHybridFactory`
+- The token introspection handler is no longer added automatically by
+  `compose.OAuth2*`. Add `compose.OAuth2TokenIntrospectionFactory` to your
+  composer if you need token introspection.
+- Session refactor:
+  - The HMACSessionContainer was removed and replaced by `fosite.Session` /
+    `fosite.DefaultSession`. All sessions must now implement this signature. The
+    new session interface allows for better expiration time handling.
+  - The OpenID `DefaultSession` signature changed as well; it now implements
+    the `fosite.Session` interface.
+
+## 0.4.0
+
+Breaking changes:
+
+- `./fosite-example` is now a separate repository:
+  https://github.com/ory-am/fosite-example
+- `github.com/ory-am/fosite/fosite-example/pkg.Store` is now
+  `github.com/ory-am/fosite/storage.MemoryStore`
+- `fosite.Client` now has a new method called `IsPublic()` which can be used to
+  identify public clients who do not own a client secret.
+- All grant types except the client_credentials grant now allow public clients.
+  Public clients are usually mobile apps and single page apps.
+- `TokenValidator` is now `TokenIntrospector`, `TokenValidationHandlers` is now
+  `TokenIntrospectionHandlers`.
+- `TokenValidator.ValidateToken` is now `TokenIntrospector.IntrospectToken`
+- `fosite.OAuth2Provider.NewIntrospectionRequest()` has been added
+- `fosite.OAuth2Provider.WriteIntrospectionError()` has been added
+- `fosite.OAuth2Provider.WriteIntrospectionResponse()` has been added
+
+## 0.3.0
+
+- Updated jwt-go from 2.7.0 to 3.0.0
+
+## 0.2.0
+
+Breaking changes:
+
+- Token validation refactored: `ValidateRequestAuthorization` is now `Validate`
+  and does not require an HTTP request but instead a token and a token hint. A
+  token can be anything, including authorization codes, refresh tokens, id
+  tokens, ...
+- Remove mandatory scope: The mandatory scope (`fosite`) has been removed as it
+  has proven impractical.
+- Allowed OAuth2 Client scopes are now being set with `scope` instead of
+  `granted_scopes` when using the DefaultClient.
+- There is now a scope matching strategy that can be replaced.
+- OAuth2 Client scopes are now checked on every grant type.
+- Handler subpackages such as `core/client` or `oidc/explicit` have been merged
+  and moved one level up.
+- `handler/oidc` is now `handler/openid`
+- `handler/core` is now `handler/oauth2`
+
+## 0.1.0
+
+Initial release
diff --git a/fosite/LICENSE b/fosite/LICENSE
new file mode 100644
index 00000000000..261eeb9e9f8
--- /dev/null
+++ b/fosite/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/fosite/MAINTAINERS b/fosite/MAINTAINERS new file mode 100644 index 00000000000..75713ca9ae3 --- /dev/null +++ b/fosite/MAINTAINERS @@ -0,0 +1,2 @@ +Aeneas Rekkas (github: arekkas) +Thomas Aidan Curran (github: tacurran) \ No newline at end of file diff --git a/fosite/Makefile b/fosite/Makefile new file mode 100644 index 00000000000..21fd2b99cb1 --- /dev/null +++ b/fosite/Makefile @@ -0,0 +1,37 @@ +export PATH := .bin:${PATH} + +format: .bin/goimports .bin/ory node_modules # formats the source code + .bin/ory dev headers copyright --type=open-source + .bin/goimports -w . + npm exec -- prettier --write . + +help: + @cat Makefile | grep '^[^ ]*:' | grep -v '^\.bin/' | grep -v '.SILENT:' | grep -v '^node_modules:' | grep -v help | sed 's/:.*#/#/' | column -s "#" -t + +licenses: .bin/licenses node_modules # checks open-source licenses + .bin/licenses + +test: # runs all tests + go test ./... + +.bin/goimports: Makefile + GOBIN=$(shell pwd)/.bin go install golang.org/x/tools/cmd/goimports@latest + +.bin/licenses: Makefile + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/ci/master/licenses/install | sh + +.bin/mockgen: + go build -o .bin/mockgen go.uber.org/mock/mockgen + +.bin/ory: Makefile + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/meta/master/install.sh | bash -s -- -b .bin ory v0.1.48 + touch .bin/ory + +node_modules: package-lock.json + npm ci + touch node_modules + +gen: .bin/goimports .bin/mockgen # generates mocks + ./generate-mocks.sh + +.DEFAULT_GOAL := help diff --git a/fosite/README.md b/fosite/README.md new file mode 100644 index 00000000000..785a930adcd --- /dev/null +++ b/fosite/README.md @@ -0,0 +1,519 @@ +

+# ORY Fosite - Security-first OAuth2 framework
+ +[![Build Status](https://travis-ci.org/ory/fosite.svg?branch=master)](https://travis-ci.org/ory/fosite?branch=master) +[![Coverage Status](https://coveralls.io/repos/ory/fosite/badge.svg?branch=master&service=github&foo)](https://coveralls.io/github/ory/fosite?branch=master) +[![Go Report Card](https://goreportcard.com/badge/ory/fosite)](https://goreportcard.com/report/ory/fosite) + +[![Join the chat at https://www.ory.sh/chat](https://img.shields.io/badge/join-chat-00cc99.svg)](https://www.ory.sh/chat) + +**The security first OAuth2 & OpenID Connect framework for +[Go](https://golang.org).** Built simple, powerful and extensible. This library +implements peer-reviewed [IETF RFC6749](https://tools.ietf.org/html/rfc6749), +counterfeits weaknesses covered in peer-reviewed +[IETF RFC6819](https://tools.ietf.org/html/rfc6819) and countermeasures various +database attack scenarios, keeping your application safe when that hacker +penetrates or leaks your database. OpenID Connect is implemented according to +[OpenID Connect Core 1.0 incorporating errata set 1](https://openid.net/specs/openid-connect-core-1_0.html) +and includes all flows: code, implicit, hybrid. + +This library considered and implemented: + +- [The OAuth 2.0 Authorization Framework](https://tools.ietf.org/html/rfc6749) +- [OAuth 2.0 Multiple Response Type Encoding Practices](https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html) +- [OAuth 2.0 Threat Model and Security Considerations](https://tools.ietf.org/html/rfc6819) +- [Proof Key for Code Exchange by OAuth Public Clients](https://tools.ietf.org/html/rfc7636) +- [OAuth 2.0 for Native Apps](https://tools.ietf.org/html/rfc8252) +- [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html) +- [OAuth 2.0 Pushed Authorization Request](https://datatracker.ietf.org/doc/html/rfc9126) + +OAuth2 and OpenID Connect are difficult protocols. If you want quick wins, we +strongly encourage you to look at [Hydra](https://github.com/ory-am/hydra). +Hydra is a secure, high performance, cloud native OAuth2 and OpenID Connect +service that integrates with every authentication method imaginable and is built +on top of Fosite. + + + + +**Table of Contents** + +- [Motivation](#motivation) +- [API Stability](#api-stability) +- [Example](#example) +- [A word on quality](#a-word-on-quality) +- [A word on security](#a-word-on-security) +- [A word on extensibility](#a-word-on-extensibility) +- [Installation](#installation) +- [Documentation](#documentation) + - [Scopes](#scopes) + - [`fosite.WildcardScopeStrategy`](#fositewildcardscopestrategy) + - [`fosite.HierarchicScopeStrategy`](#fositehierarchicscopestrategy) + - [Quickstart](#quickstart) + - [Code Examples](#code-examples) + - [Example Storage Implementation](#example-storage-implementation) + - [Extensible handlers](#extensible-handlers) + - [JWT Introspection](#jwt-introspection) +- [Contribute](#contribute) + - [Refresh mock objects](#refresh-mock-objects) +- [Hall of Fame](#hall-of-fame) + + + +## Motivation + +Fosite was written because our OAuth2 and OpenID Connect service +[**Hydra**](https://github.com/ory-am/hydra) required a secure and extensible +OAuth2 library. We had to realize that nothing matching our requirements was out +there, so we decided to build it ourselves. + +## API Stability + +The core public API is almost stable as most changes will only touch the inner +workings. 
+ +We strongly encourage vendoring fosite using +[dep](https://github.com/golang/dep) or comparable tools. + +## Example + +The example does not have nice visuals but it should give you an idea of what +you can do with Fosite and a few lines of code. + +![Authorize Code Grant](docs/authorize-code-grant.gif) + +You can run this minimalistic example by doing + +``` +go get github.com/ory/fosite-example +cd $GOPATH/src/github.com/ory/fosite-example +dep ensure +go install github.com/ory/fosite-example +fosite-example +``` + +There should be a server listening on [localhost:3846](https://localhost:3846/). +You can check out the example's source code +[here](https://github.com/ory/fosite-example/). + +## A word on quality + +We tried to set up as many tests as possible and test for as many cases covered +in the RFCs as possible. But we are only human. Please, feel free to add tests +for the various cases defined in the OAuth2 RFCs 6749 and 6819 or any other +cases that improve the tests. + +**Everyone** writing an RFC conform test that breaks with the current +implementation, will receive a place in the [Hall of Fame](#hall-of-fame)! + +## A word on security + +Please be aware that Fosite only secures parts of your server side security. You +still need to secure your apps and clients, keep your tokens safe, prevent CSRF +attacks, ensure database security, use valid and strong TLS certificates and +much more. If you need any help or advice feel free to contact our security +staff through [our website](https://ory.am/)! + +We have given the various specifications, especially +[OAuth 2.0 Threat Model and Security Considerations](https://tools.ietf.org/html/rfc6819#section-5.1.5.3), +a very close look and included everything we thought was in the scope of this +framework. Here is a complete list of things we implemented in Fosite: + +- [No Cleartext Storage of Credentials](https://tools.ietf.org/html/rfc6819#section-5.1.4.1.3) +- [Encryption of Credentials](https://tools.ietf.org/html/rfc6819#section-5.1.4.1.4) +- [Use Short Expiration Time](https://tools.ietf.org/html/rfc6819#section-5.1.5.3) +- [Limit Number of Usages or One-Time Usage](https://tools.ietf.org/html/rfc6819#section-5.1.5.4) +- [Bind Token to Client id](https://tools.ietf.org/html/rfc6819#section-5.1.5.8) +- [Automatic Revocation of Derived Tokens If Abuse Is Detected](https://tools.ietf.org/html/rfc6819#section-5.2.1.1) +- [Binding of Refresh Token to "client_id"](https://tools.ietf.org/html/rfc6819#section-5.2.2.2) +- [Refresh Token Rotation](https://tools.ietf.org/html/rfc6819#section-5.2.2.3) +- [Revocation of Refresh Tokens](https://tools.ietf.org/html/rfc6819#section-5.2.2.4) +- [Validate Pre-Registered "redirect_uri"](https://tools.ietf.org/html/rfc6819#section-5.2.3.5) +- [Binding of Authorization "code" to "client_id"](https://tools.ietf.org/html/rfc6819#section-5.2.4.4) +- [Binding of Authorization "code" to "redirect_uri"](https://tools.ietf.org/html/rfc6819#section-5.2.4.6) +- [Opaque access tokens](https://tools.ietf.org/html/rfc6749#section-1.4) +- [Opaque refresh tokens](https://tools.ietf.org/html/rfc6749#section-1.5) +- [Ensure Confidentiality of Requests](https://tools.ietf.org/html/rfc6819#section-5.1.1) +- [Use of Asymmetric Cryptography](https://tools.ietf.org/html/rfc6819#section-5.1.4.1.5) + Fosite ensures that redirect URIs use https **except localhost** but you need + to implement TLS for the token and auth endpoints yourself. 
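+
+The last item deserves a concrete illustration: Fosite does not terminate TLS
+for you. A minimal sketch (port, certificate paths, and handler names here are
+assumptions for illustration, not part of Fosite's API) could look like this:
+
+```go
+package main
+
+import "net/http"
+
+func main() {
+  mux := http.NewServeMux()
+  // Wire up your authorize and token handlers here, for example the
+  // authorizeHandlerFunc and tokenHandlerFunc shown in the quickstart below.
+  mux.HandleFunc("/oauth2/auth", func(rw http.ResponseWriter, r *http.Request) { /* ... */ })
+  mux.HandleFunc("/oauth2/token", func(rw http.ResponseWriter, r *http.Request) { /* ... */ })
+
+  // Serve both endpoints over TLS; cert.pem and key.pem are assumed to exist.
+  if err := http.ListenAndServeTLS(":3846", "cert.pem", "key.pem", mux); err != nil {
+    panic(err)
+  }
+}
+```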
+
+Additionally, we added these safeguards:
+
+- **Enforcing random states:** Without a random-looking state or OpenID Connect
+  nonce the request will fail.
+- **Advanced Token Validation:** Tokens are laid out as `<token>.<signature>`,
+  where the `<signature>` part is created with HMAC-SHA256 using a global
+  secret. This is what a token can look like:
+  `/tgBeUhWlAT8tM8Bhmnx+Amf8rOYOUhrDi3pGzmjP7c=.BiV/Yhma+5moTP46anxMT6cWW8gz5R5vpC9RbpwSDdM=`
+
+Sections below [Section 5](https://tools.ietf.org/html/rfc6819#section-5) that
+are not covered in the list above should be reviewed by you. If you think that a
+specific section should be something that is covered in Fosite, feel free to
+create an [issue](https://github.com/ory/fosite/issues). Please be aware that
+OpenID Connect requires specific knowledge of the identity provider, which is
+why Fosite only implements core requirements and most things must be implemented
+by you (for example prompt, max_age, ui_locales, id_token_hint, user
+authentication, session management, ...).
+
+**It is strongly encouraged to use the handlers shipped with Fosite as they
+follow the specs and are well tested.**
+
+## A word on extensibility
+
+Fosite is extensible ... because OAuth2 is an extensible and flexible
+**framework**. Fosite lets you register custom token and authorize endpoint
+handlers with the assurance that the requests have been validated against the
+OAuth2 specs beforehand. You can easily extend Fosite's capabilities. For
+example, if you want to provide OpenID Connect on top of your OAuth2 stack,
+that's no problem. Or custom assertions, whatever you like, as long as it is
+secure. ;)
+
+## Installation
+
+[Go 1.11+](https://golang.org) must be installed on your system, and your
+GOPATH environment variable must be set up.
+
+```
+go get -u github.com/ory/fosite/...
+```
+
+We recommend using [dep](https://github.com/golang/dep) to mitigate
+compatibility breaks that come with new API versions.
+
+## Documentation
+
+API documentation is available at
+[godoc.org/ory/fosite](https://godoc.org/github.com/ory/fosite).
+
+### Scopes
+
+Fosite has three strategies for matching scopes. You can replace the default
+scope strategy if you need a custom one by implementing `fosite.ScopeStrategy`.
+
+Using the composer, setting a strategy is easy:
+
+```go
+import "github.com/ory/fosite"
+
+var config = &fosite.Config{
+  ScopeStrategy: fosite.HierarchicScopeStrategy,
+}
+```
+
+**Note:** To issue refresh tokens with any of the grants, you need to include
+the `offline` scope in the OAuth2 request. This can be modified by the
+`RefreshTokenScopes` compose configuration. When set to an empty array, _all_
+grants will issue refresh tokens.
+
+#### `fosite.WildcardScopeStrategy`
+
+This is the default strategy, and the safest one.
It is best explained by
+looking at some examples:
+
+- `users.*` matches `users.read`
+- `users.*` matches `users.read.foo`
+- `users.read` matches `users.read`
+- `users` does not match `users.read`
+- `users.read.*` does not match `users.read`
+- `users.*.*` does not match `users.read`
+- `users.*.*` matches `users.read.own`
+- `users.*.*` matches `users.read.own.other`
+- `users.read.*` matches `users.read.own`
+- `users.read.*` matches `users.read.own.other`
+- `users.write.*` does not match `users.read.own`
+- `users.*.bar` matches `users.baz.bar`
+- `users.*.bar` does not match `users.baz.baz.bar`
+
+To request `users.*`, a client must have exactly `users.*` as granted scope.
+
+#### `fosite.ExactScopeStrategy`
+
+This strategy only looks for exact matches. It returns true if and only if the
+scope matches a granted scope verbatim.
+
+#### `fosite.HierarchicScopeStrategy`
+
+This strategy is deprecated, use it with care. Again, it is best explained by
+looking at some examples:
+
+- `users` matches `users`
+- `users` matches `users.read`
+- `users` matches `users.read.own`
+- `users.read` matches `users.read`
+- `users.read` matches `users.read.own`
+- `users.read` does not match `users.write`
+- `users.read` does not match `users.write.own`
+
+### Globalization
+
+Fosite does not natively carry translations for error messages and hints, but
+offers an interface that allows the consumer to define catalog bundles and an
+implementation to translate. This is available through the
+[MessageCatalog](i18n/i18n.go) interface. The functions defined are
+self-explanatory. The `DefaultMessageCatalog` illustrates this. The compose
+config has been extended to take in an instance of the `MessageCatalog`.
+
+#### Building translated files
+
+There are three possible "message key" types:
+
+1. Value of `RFC6749Error.ErrorField`: This is a string like `invalid_request`
+   and correlates to most errors produced by Fosite.
+2. Hint identifier passed into `RFC6749Error.WithHintIDOrDefaultf`: This func is
+   not used extensively in Fosite but, in time, most `WithHint` and `WithHintf`
+   calls will be replaced with this function.
+3. Free text string format passed into `RFC6749Error.WithHint` and
+   `RFC6749Error.WithHintf`: This function is used in Fosite and Hydra
+   extensively and any message catalog implementation can use the format string
+   parameter as the message key.
+
+An example of a message catalog can be seen in
+[i18n_test.go](i18n/i18n_test.go).
+
+#### Generating the `en` messages file
+
+This is a WIP at the moment, but effectively any scripting language can be used
+to generate this. It would need to traverse all files in the source code and
+extract the possible message identifiers based on the different message key
+types.
+
+### Quickstart
+
+Instantiating fosite by hand can be painful. Therefore we created a few
+convenience helpers available through the [compose package](/compose). It is
+strongly encouraged to use these well-tested composers.
+
+In this very basic example, we will instantiate fosite with all OpenID Connect
+and OAuth2 handlers enabled. Please refer to the
+[example app](https://github.com/ory/fosite-example/) for more details.
+
+This little code snippet sets up a full-blown OAuth2 and OpenID Connect example.
+ +```go +package main + +import "github.com/ory/fosite" +import "github.com/ory/fosite/compose" +import "github.com/ory/fosite/storage" + +// This is the example storage that contains: +// * an OAuth2 Client with id "my-client" and secrets "foobar" and "foobaz" capable of all oauth2 and open id connect grant and response types. +// * a User for the resource owner password credentials grant type with username "peter" and password "secret". +// +// You will most likely replace this with your own logic once you set up a real world application. +var storage = storage.NewExampleStore() + +// This secret is being used to sign access and refresh tokens as well as +// authorization codes. It must be exactly 32 bytes long. +var secret = []byte("my super secret signing password") + +privateKey, err := rsa.GenerateKey(rand.Reader, 2048) +if err != nil { +panic("unable to create private key") +} + +// check the api docs of fosite.Config for further configuration options +var config = &fosite.Config{ + AccessTokenLifespan: time.Minute * 30, + GlobalSecret: secret, + // ... +} + +var oauth2Provider = compose.ComposeAllEnabled(config, storage, privateKey) + +// The authorize endpoint is usually at "https://mydomain.com/oauth2/auth". +func authorizeHandlerFunc(rw http.ResponseWriter, req *http.Request) { + // This context will be passed to all methods. It doesn't fulfill a real purpose in the standard library but could be used + // to abort database lookups or similar things. + ctx := req.Context() + + // Let's create an AuthorizeRequest object! + // It will analyze the request and extract important information like scopes, response type and others. + ar, err := oauth2Provider.NewAuthorizeRequest(ctx, req) + if err != nil { + oauth2Provider.WriteAuthorizeError(ctx, rw, ar, err) + return + } + + // Normally, this would be the place where you would check if the user is logged in and gives his consent. + // We're simplifying things and just checking if the request includes a valid username and password + if req.Form.Get("username") != "peter" { + rw.Header().Set("Content-Type", "text/html;charset=UTF-8") + rw.Write([]byte(`

<h1>Login page</h1>`))
+    rw.Write([]byte(`
+      <p>Howdy! This is the log in page. For this example, it is enough to supply the username.</p>
+      <form method="post">
+        <input type="text" name="username" /> <small>try peter</small><br>
+        <input type="submit">
+      </form>
+ `)) + return + } + + // Now that the user is authorized, we set up a session. When validating / looking up tokens, we additionally get + // the session. You can store anything you want in it. + + // The session will be persisted by the store and made available when e.g. validating tokens or handling token endpoint requests. + // The default OAuth2 and OpenID Connect handlers require the session to implement a few methods. Apart from that, the + // session struct can be anything you want it to be. + mySessionData := &fosite.DefaultSession{ + Username: req.Form.Get("username"), + } + + // It's also wise to check the requested scopes, e.g.: + // if authorizeRequest.GetScopes().Has("admin") { + // http.Error(rw, "you're not allowed to do that", http.StatusForbidden) + // return + // } + + // Now we need to get a response. This is the place where the AuthorizeEndpointHandlers kick in and start processing the request. + // NewAuthorizeResponse is capable of running multiple response type handlers which in turn enables this library + // to support open id connect. + response, err := oauth2Provider.NewAuthorizeResponse(ctx, ar, mySessionData) + if err != nil { + oauth2Provider.WriteAuthorizeError(ctx, rw, ar, err) + return + } + + // Awesome, now we redirect back to the client redirect uri and pass along an authorize code + oauth2Provider.WriteAuthorizeResponse(ctx, rw, ar, response) +} + +// The token endpoint is usually at "https://mydomain.com/oauth2/token" +func tokenHandlerFunc(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + + // Create an empty session object that will be passed to storage implementation to populate (unmarshal) the session into. + // By passing an empty session object as a "prototype" to the store, the store can use the underlying type to unmarshal the value into it. + // For an example of storage implementation that takes advantage of that, see SQL Store (fosite_store_sql.go) from ory/Hydra project. + mySessionData := new(fosite.DefaultSession) + + // This will create an access request object and iterate through the registered TokenEndpointHandlers to validate the request. + accessRequest, err := oauth2Provider.NewAccessRequest(ctx, req, mySessionData) + if err != nil { + oauth2Provider.WriteAccessError(ctx, rw, accessRequest, err) + return + } + + if mySessionData.Username == "super-admin-guy" { + // do something... + } + + // Next we create a response for the access request. Again, we iterate through the TokenEndpointHandlers + // and aggregate the result in response. + response, err := oauth2Provider.NewAccessResponse(ctx, accessRequest) + if err != nil { + oauth2Provider.WriteAccessError(ctx, rw, accessRequest, err) + return + } + + // All done, send the response. + oauth2Provider.WriteAccessResponse(ctx, rw, accessRequest, response) + + // The client has a valid access token now +} + +func someResourceProviderHandlerFunc(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + requiredScope := "blogposts.create" + + _, ar, err := oauth2Provider.IntrospectToken(ctx, fosite.AccessTokenFromRequest(req), fosite.AccessToken, new(fosite.DefaultSession), requiredScope) + if err != nil { + // ... + } + + // If no error occurred the token + scope is valid and you have access to: + // ar.GetClient().GetID(), ar.GetGrantedScopes(), ar.GetScopes(), ar.GetSession().UserID, ar.GetRequestedAt(), ... 
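+  //
+  // For example (a hypothetical addition, not required by Fosite), you could
+  // now respond with data taken from the session:
+  //
+  //   fmt.Fprintf(rw, "hello, %s!", ar.GetSession().(*fosite.DefaultSession).Username)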
+} +``` + +### Code Examples + +Fosite provides integration tests as well as a http server example: + +- Fosite ships with an example app that runs in your browser: + [Example app](https://github.com/ory/fosite-example/). +- If you want to check out how to enable specific handlers, check out the + [integration tests](integration/). + +If you have working examples yourself, please share them with us! + +### Example Storage Implementation + +Fosite does not ship a storage implementation. This is intended, because +requirements vary with every environment. You can find a reference +implementation at [storage/memory.go](storage/memory.go). This storage fulfills +requirements from all OAuth2 and OpenID Connect handlers. + +### Extensible handlers + +OAuth2 is a framework. Fosite mimics this behaviour by enabling you to replace +existing or create new OAuth2 handlers. Of course, fosite ships handlers for all +OAuth2 and OpenID Connect flows. + +- **[Fosite OAuth2 Core Handlers](handler/oauth2)** implement the + [Client Credentials Grant](https://tools.ietf.org/html/rfc6749#section-4.4), + [Resource Owner Password Credentials Grant](https://tools.ietf.org/html/rfc6749#section-4.3), + [Implicit Grant](https://tools.ietf.org/html/rfc6749#section-4.2), + [Authorization Code Grant](https://tools.ietf.org/html/rfc6749#section-4.1), + [Refresh Token Grant](https://tools.ietf.org/html/rfc6749#section-6) +- **[Fosite OpenID Connect Handlers](handler/openid)** implement the + [Authentication using the Authorization Code Flow](http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth), + [Authentication using the Implicit Flow](http://openid.net/specs/openid-connect-core-1_0.html#ImplicitFlowAuth), + [Authentication using the Hybrid Flow](http://openid.net/specs/openid-connect-core-1_0.html#HybridFlowAuth) + +This section is missing documentation and we welcome any contributions in that +direction. + +### JWT Introspection + +Please note that when using the OAuth2StatelessJWTIntrospectionFactory access +token revocation is not possible. + +## Contribute + +You need git and golang installed on your system. + +``` +go get -d github.com/ory/fosite +cd $GOPATH/src/github.com/ory/fosite +git status +git remote add myfork +go test ./... +``` + +Simple, right? Now you are ready to go! Make sure to run `go test ./...` often, +detecting problems with your code rather sooner than later. Please read +[CONTRIBUTE.md] before creating pull requests and issues. + +### Refresh mock objects + +Run `./generate-mocks.sh` in fosite's root directory or run the contents of +[generate-mocks.sh] in a shell. + +## Hall of Fame + +This place is reserved for the fearless bug hunters, reviewers and contributors +(alphabetical order). + +- [agtorre](https://github.com/agtorre): + [contributions](https://github.com/ory/fosite/issues?q=author%3Aagtorre), + [participations](https://github.com/ory/fosite/issues?q=commenter%3Aagtorre). +- [danielchatfield](https://github.com/danielchatfield): + [contributions](https://github.com/ory/fosite/issues?q=author%3Adanielchatfield), + [participations](https://github.com/ory/fosite/issues?q=commenter%3Adanielchatfield). +- [leetal](https://github.com/leetal): + [contributions](https://github.com/ory/fosite/issues?q=author%3Aleetal), + [participations](https://github.com/ory/fosite/issues?q=commenter%3Aleetal). 
+- [jrossiter](https://github.com/jrossiter): + [contributions](https://github.com/ory/fosite/issues?q=author%3Ajrossiter), + [participations](https://github.com/ory/fosite/issues?q=commenter%3Ajrossiter). +- [jrossiter](https://github.com/jrossiter): + [contributions](https://github.com/ory/fosite/issues?q=author%3Ajrossiter), + [participations](https://github.com/ory/fosite/issues?q=commenter%3Ajrossiter). +- [danilobuerger](https://github.com/danilobuerger): + [contributions](https://github.com/ory/fosite/issues?q=author%3Adanilobuerger), + [participations](https://github.com/ory/fosite/issues?q=commenter%3Adanilobuerger). + +Find out more about the [author](https://aeneas.io/) of Fosite and Hydra, and +the [Ory Company](https://ory.am/). diff --git a/fosite/SECURITY.md b/fosite/SECURITY.md new file mode 100644 index 00000000000..6104514805c --- /dev/null +++ b/fosite/SECURITY.md @@ -0,0 +1,56 @@ + + + +# Ory Security Policy + +This policy outlines Ory's security commitments and practices for users across +different licensing and deployment models. + +To learn more about Ory's security service level agreements (SLAs) and +processes, please [contact us](https://www.ory.sh/contact/). + +## Ory Network Users + +- **Security SLA:** Ory addresses vulnerabilities in the Ory Network according + to the following guidelines: + - Critical: Typically addressed within 14 days. + - High: Typically addressed within 30 days. + - Medium: Typically addressed within 90 days. + - Low: Typically addressed within 180 days. + - Informational: Addressed as necessary. + These timelines are targets and may vary based on specific circumstances. +- **Release Schedule:** Updates are deployed to the Ory Network as + vulnerabilities are resolved. +- **Version Support:** The Ory Network always runs the latest version, ensuring + up-to-date security fixes. + +## Ory Enterprise License Customers + +- **Security SLA:** Ory addresses vulnerabilities based on their severity: + - Critical: Typically addressed within 14 days. + - High: Typically addressed within 30 days. + - Medium: Typically addressed within 90 days. + - Low: Typically addressed within 180 days. + - Informational: Addressed as necessary. + These timelines are targets and may vary based on specific circumstances. +- **Release Schedule:** Updates are made available as vulnerabilities are + resolved. Ory works closely with enterprise customers to ensure timely updates + that align with their operational needs. +- **Version Support:** Ory may provide security support for multiple versions, + depending on the terms of the enterprise agreement. + +## Apache 2.0 License Users + +- **Security SLA:** Ory does not provide a formal SLA for security issues under + the Apache 2.0 License. +- **Release Schedule:** Releases prioritize new functionality and include fixes + for known security vulnerabilities at the time of release. While major + releases typically occur one to two times per year, Ory does not guarantee a + fixed release schedule. +- **Version Support:** Security patches are only provided for the latest release + version. + +## Reporting a Vulnerability + +For details on how to report security vulnerabilities, visit our +[security policy documentation](https://www.ory.sh/docs/ecosystem/security). 
diff --git a/fosite/access_error.go b/fosite/access_error.go new file mode 100644 index 00000000000..85236715bdc --- /dev/null +++ b/fosite/access_error.go @@ -0,0 +1,43 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "fmt" + "net/http" +) + +// Convert an error to an http response as per RFC6749 +func (f *Fosite) WriteAccessError(ctx context.Context, rw http.ResponseWriter, req Requester, err error) { + f.writeJsonError(ctx, rw, req, err) +} + +func (f *Fosite) writeJsonError(ctx context.Context, rw http.ResponseWriter, requester Requester, err error) { + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + + rfcerr := ErrorToRFC6749Error(err).WithLegacyFormat(f.Config.GetUseLegacyErrorFormat(ctx)).WithExposeDebug(f.Config.GetSendDebugMessagesToClients(ctx)) + + if requester != nil { + rfcerr = rfcerr.WithLocalizer(f.Config.GetMessageCatalog(ctx), getLangFromRequester(requester)) + } + + js, err := json.Marshal(rfcerr) + if err != nil { + if f.Config.GetSendDebugMessagesToClients(ctx) { + errorMessage := EscapeJSONString(err.Error()) + http.Error(rw, fmt.Sprintf(`{"error":"server_error","error_description":"%s"}`, errorMessage), http.StatusInternalServerError) + } else { + http.Error(rw, `{"error":"server_error"}`, http.StatusInternalServerError) + } + return + } + + rw.WriteHeader(rfcerr.CodeField) + // ignoring the error because the connection is broken when it happens + _, _ = rw.Write(js) +} diff --git a/fosite/access_error_test.go b/fosite/access_error_test.go new file mode 100644 index 00000000000..a17f5975da1 --- /dev/null +++ b/fosite/access_error_test.go @@ -0,0 +1,102 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + . 
"github.com/ory/hydra/v2/fosite/internal" +) + +func TestWriteAccessError(t *testing.T) { + f := &Fosite{Config: new(Config)} + header := http.Header{} + ctrl := gomock.NewController(t) + rw := NewMockResponseWriter(ctrl) + t.Cleanup(ctrl.Finish) + + rw.EXPECT().Header().AnyTimes().Return(header) + rw.EXPECT().WriteHeader(http.StatusBadRequest) + rw.EXPECT().Write(gomock.Any()) + + f.WriteAccessError(context.Background(), rw, nil, ErrInvalidRequest) +} + +func TestWriteAccessError_RFC6749(t *testing.T) { + // https://tools.ietf.org/html/rfc6749#section-5.2 + + config := new(Config) + f := &Fosite{Config: config} + + for k, c := range []struct { + err *RFC6749Error + code string + debug bool + expectDebugMessage string + includeExtraFields bool + }{ + {ErrInvalidRequest.WithDebug("some-debug"), "invalid_request", true, "some-debug", true}, + {ErrInvalidRequest.WithDebugf("some-debug-%d", 1234), "invalid_request", true, "some-debug-1234", true}, + {ErrInvalidRequest.WithDebug("some-debug"), "invalid_request", false, "some-debug", true}, + {ErrInvalidClient.WithDebug("some-debug"), "invalid_client", false, "some-debug", true}, + {ErrInvalidGrant.WithDebug("some-debug"), "invalid_grant", false, "some-debug", true}, + {ErrInvalidScope.WithDebug("some-debug"), "invalid_scope", false, "some-debug", true}, + {ErrUnauthorizedClient.WithDebug("some-debug"), "unauthorized_client", false, "some-debug", true}, + {ErrUnsupportedGrantType.WithDebug("some-debug"), "unsupported_grant_type", false, "some-debug", true}, + {ErrUnsupportedGrantType.WithDebug("some-debug"), "unsupported_grant_type", false, "some-debug", false}, + {ErrUnsupportedGrantType.WithDebug("some-debug"), "unsupported_grant_type", true, "some-debug", false}, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + config.SendDebugMessagesToClients = c.debug + config.UseLegacyErrorFormat = c.includeExtraFields + + rw := httptest.NewRecorder() + f.WriteAccessError(context.Background(), rw, nil, c.err) + + var params struct { + Error string `json:"error"` // specified by RFC, required + Description string `json:"error_description"` // specified by RFC, optional + Debug string `json:"error_debug"` + Hint string `json:"error_hint"` + } + + require.NotNil(t, rw.Body) + err := json.NewDecoder(rw.Body).Decode(¶ms) + require.NoError(t, err) + + assert.Equal(t, c.code, params.Error) + if !c.includeExtraFields { + assert.Empty(t, params.Debug) + assert.Empty(t, params.Hint) + assert.Contains(t, params.Description, c.err.DescriptionField) + assert.Contains(t, params.Description, c.err.HintField) + + if c.debug { + assert.Contains(t, params.Description, c.err.DebugField) + } else { + assert.NotContains(t, params.Description, c.err.DebugField) + } + } else { + assert.EqualValues(t, c.err.DescriptionField, params.Description) + assert.EqualValues(t, c.err.HintField, params.Hint) + + if !c.debug { + assert.Empty(t, params.Debug) + } else { + assert.EqualValues(t, c.err.DebugField, params.Debug) + } + } + }) + } +} diff --git a/fosite/access_request.go b/fosite/access_request.go new file mode 100644 index 00000000000..de546005e8c --- /dev/null +++ b/fosite/access_request.go @@ -0,0 +1,25 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +type AccessRequest struct { + GrantTypes Arguments `json:"grantTypes" gorethink:"grantTypes"` + HandledGrantType Arguments `json:"handledGrantType" gorethink:"handledGrantType"` + + Request +} + +func NewAccessRequest(session Session) *AccessRequest { + r := 
&AccessRequest{ + GrantTypes: Arguments{}, + HandledGrantType: Arguments{}, + Request: *NewRequest(), + } + r.Session = session + return r +} + +func (a *AccessRequest) GetGrantTypes() Arguments { + return a.GrantTypes +} diff --git a/fosite/access_request_handler.go b/fosite/access_request_handler.go new file mode 100644 index 00000000000..5a0ef95adde --- /dev/null +++ b/fosite/access_request_handler.go @@ -0,0 +1,112 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "strings" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" + + "github.com/pkg/errors" +) + +// Implements +// - https://tools.ietf.org/html/rfc6749#section-2.3.1 +// Clients in possession of a client password MAY use the HTTP Basic +// authentication scheme as defined in [RFC2617] to authenticate with +// the authorization server. The client identifier is encoded using the +// "application/x-www-form-urlencoded" encoding algorithm per +// Appendix B, and the encoded value is used as the username; the client +// password is encoded using the same algorithm and used as the +// password. The authorization server MUST support the HTTP Basic +// authentication scheme for authenticating clients that were issued a +// client password. +// Including the client credentials in the request-body using the two +// parameters is NOT RECOMMENDED and SHOULD be limited to clients unable +// to directly utilize the HTTP Basic authentication scheme (or other +// password-based HTTP authentication schemes). The parameters can only +// be transmitted in the request-body and MUST NOT be included in the +// request URI. +// - https://tools.ietf.org/html/rfc6749#section-3.2.1 +// - Confidential clients or other clients issued client credentials MUST +// authenticate with the authorization server as described in +// Section 2.3 when making requests to the token endpoint. +// - If the client type is confidential or the client was issued client +// credentials (or assigned other authentication requirements), the +// client MUST authenticate with the authorization server as described +// in Section 3.2.1. 
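+//
+// In short: NewAccessRequest only accepts POST requests with a non-empty,
+// parseable form body and a grant_type parameter. Client authentication is
+// attempted for every request; a failure is only tolerated when all
+// responsible token endpoint handlers report that they can skip client auth.
+// Each registered handler that can handle the request then processes it.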
+func (f *Fosite) NewAccessRequest(ctx context.Context, r *http.Request, session Session) (_ AccessRequester, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewAccessRequest") + defer otelx.End(span, &err) + + accessRequest := NewAccessRequest(session) + accessRequest.Request.Lang = i18n.GetLangFromRequest(f.Config.GetMessageCatalog(ctx), r) + + ctx = context.WithValue(ctx, RequestContextKey, r) + ctx = context.WithValue(ctx, AccessRequestContextKey, accessRequest) + + if r.Method != "POST" { + return accessRequest, errorsx.WithStack(ErrInvalidRequest.WithHintf("HTTP method is '%s', expected 'POST'.", r.Method)) + } else if err := r.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { + return accessRequest, errorsx.WithStack(ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithWrap(err).WithDebug(err.Error())) + } else if len(r.PostForm) == 0 { + return accessRequest, errorsx.WithStack(ErrInvalidRequest.WithHint("The POST body can not be empty.")) + } + + accessRequest.Form = r.PostForm + if session == nil { + return accessRequest, errors.New("Session must not be nil") + } + + accessRequest.SetRequestedScopes(RemoveEmpty(strings.Split(r.PostForm.Get("scope"), " "))) + accessRequest.SetRequestedAudience(GetAudiences(r.PostForm)) + accessRequest.GrantTypes = RemoveEmpty(strings.Split(r.PostForm.Get("grant_type"), " ")) + if len(accessRequest.GrantTypes) < 1 { + return accessRequest, errorsx.WithStack(ErrInvalidRequest.WithHint("Request parameter 'grant_type' is missing")) + } + + client, clientErr := f.AuthenticateClient(ctx, r, r.PostForm) + if clientErr == nil { + accessRequest.Client = client + } + + var found = false + for _, loader := range f.Config.GetTokenEndpointHandlers(ctx) { + // Is the loader responsible for handling the request? + if !loader.CanHandleTokenEndpointRequest(ctx, accessRequest) { + continue + } + + // The handler **is** responsible! + + // Is the client supplied in the request? If not can this handler skip client auth? + if !loader.CanSkipClientAuth(ctx, accessRequest) && clientErr != nil { + // No client and handler can not skip client auth -> error. + return accessRequest, clientErr + } + + // All good. + if err := loader.HandleTokenEndpointRequest(ctx, accessRequest); err == nil { + found = true + } else if errors.Is(err, ErrUnknownRequest) { + // This is a duplicate because it should already have been handled by + // `loader.CanHandleTokenEndpointRequest(accessRequest)` but let's keep it for sanity. + // + continue + } else if err != nil { + return accessRequest, err + } + } + + if !found { + return nil, errorsx.WithStack(ErrInvalidRequest) + } + return accessRequest, nil +} diff --git a/fosite/access_request_handler_test.go b/fosite/access_request_handler_test.go new file mode 100644 index 00000000000..ac6c910daa5 --- /dev/null +++ b/fosite/access_request_handler_test.go @@ -0,0 +1,461 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "encoding/base64" + "fmt" + "net/http" + "net/url" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestNewAccessRequest(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + handler := internal.NewMockTokenEndpointHandler(ctrl) + handler.EXPECT().CanHandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + handler.EXPECT().CanSkipClientAuth(gomock.Any(), gomock.Any()).Return(false).AnyTimes() + hasher := internal.NewMockHasher(ctrl) + t.Cleanup(ctrl.Finish) + + client := &DefaultClient{} + config := &Config{ClientSecretsHasher: hasher, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy} + fosite := &Fosite{Store: store, Config: config} + for k, c := range []struct { + header http.Header + form url.Values + mock func() + method string + expectErr error + expect *AccessRequest + handlers TokenEndpointHandlers + }{ + { + header: http.Header{}, + expectErr: ErrInvalidRequest, + form: url.Values{}, + method: "POST", + mock: func() {}, + }, + { + header: http.Header{}, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() {}, + expectErr: ErrInvalidRequest, + }, + { + header: http.Header{}, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + "client_id": {""}, + }, + expectErr: ErrInvalidRequest, + mock: func() {}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + expectErr: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(nil, errors.New("")) + }, + handlers: TokenEndpointHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "GET", + form: url.Values{ + "grant_type": {"foo"}, + }, + expectErr: ErrInvalidRequest, + mock: func() {}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + expectErr: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(nil, errors.New("")) + }, + handlers: TokenEndpointHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + expectErr: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = false + client.Secret = []byte("foo") + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(errors.New("")) + }, + handlers: TokenEndpointHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + expectErr: ErrServerError, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = false + client.Secret = []byte("foo") + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + 
handler.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(ErrServerError) + }, + handlers: TokenEndpointHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = false + client.Secret = []byte("foo") + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + handler.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: TokenEndpointHandlers{handler}, + expect: &AccessRequest{ + GrantTypes: Arguments{"foo"}, + Request: Request{ + Client: client, + }, + }, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = true + handler.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: TokenEndpointHandlers{handler}, + expect: &AccessRequest{ + GrantTypes: Arguments{"foo"}, + Request: Request{ + Client: client, + }, + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + r := &http.Request{ + Header: c.header, + PostForm: c.form, + Form: c.form, + Method: c.method, + } + c.mock() + ctx := NewContext() + config.TokenEndpointHandlers = c.handlers + ar, err := fosite.NewAccessRequest(ctx, r, new(DefaultSession)) + + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + AssertObjectKeysEqual(t, c.expect, ar, "GrantTypes", "Client") + assert.NotNil(t, ar.GetRequestedAt()) + } + }) + } +} + +func TestNewAccessRequestWithoutClientAuth(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + handler := internal.NewMockTokenEndpointHandler(ctrl) + handler.EXPECT().CanHandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + handler.EXPECT().CanSkipClientAuth(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + hasher := internal.NewMockHasher(ctrl) + t.Cleanup(ctrl.Finish) + + client := &DefaultClient{} + anotherClient := &DefaultClient{ID: "another"} + config := &Config{ClientSecretsHasher: hasher, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy} + fosite := &Fosite{Store: store, Config: config} + for k, c := range []struct { + header http.Header + form url.Values + mock func() + method string + expectErr error + expect *AccessRequest + handlers TokenEndpointHandlers + }{ + // No grant type -> error + { + form: url.Values{}, + mock: func() { + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Any()).Times(0) + }, + method: "POST", + expectErr: ErrInvalidRequest, + }, + // No registered handlers -> error + { + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Any()).Times(0) + }, + method: "POST", + expectErr: ErrInvalidRequest, + handlers: TokenEndpointHandlers{}, + }, + // Handler can skip client auth and ignores missing client. 
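+		// Because the handler reports CanSkipClientAuth == true, the error returned by
+		// AuthenticateClient below is discarded and the handler still runs (see NewAccessRequest).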
+ { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + // despite error from storage, we should success, because client auth is not required + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "foo").Return(nil, errors.New("no client")).Times(1) + handler.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + method: "POST", + expect: &AccessRequest{ + GrantTypes: Arguments{"foo"}, + Request: Request{ + Client: client, + }, + }, + handlers: TokenEndpointHandlers{handler}, + }, + // Should pass if no auth is set in the header and can skip! + { + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + handler.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + method: "POST", + expect: &AccessRequest{ + GrantTypes: Arguments{"foo"}, + Request: Request{ + Client: client, + }, + }, + handlers: TokenEndpointHandlers{handler}, + }, + // Should also pass if client auth is set! + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "foo").Return(anotherClient, nil).Times(1) + hasher.EXPECT().Compare(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).Times(1) + handler.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + method: "POST", + expect: &AccessRequest{ + GrantTypes: Arguments{"foo"}, + Request: Request{ + Client: anotherClient, + }, + }, + handlers: TokenEndpointHandlers{handler}, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + r := &http.Request{ + Header: c.header, + PostForm: c.form, + Form: c.form, + Method: c.method, + } + c.mock() + ctx := NewContext() + config.TokenEndpointHandlers = c.handlers + ar, err := fosite.NewAccessRequest(ctx, r, new(DefaultSession)) + + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + AssertObjectKeysEqual(t, c.expect, ar, "GrantTypes", "Client") + assert.NotNil(t, ar.GetRequestedAt()) + } + }) + } +} + +// In this test case one handler requires client auth and another handler not. 
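+// A failed client authentication is only fatal if at least one handler that is responsible for
+// the request reports CanSkipClientAuth == false; otherwise the client error is ignored
+// (see NewAccessRequest).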
+func TestNewAccessRequestWithMixedClientAuth(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + + handlerWithClientAuth := internal.NewMockTokenEndpointHandler(ctrl) + handlerWithClientAuth.EXPECT().CanHandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + handlerWithClientAuth.EXPECT().CanSkipClientAuth(gomock.Any(), gomock.Any()).Return(false).AnyTimes() + + handlerWithoutClientAuth := internal.NewMockTokenEndpointHandler(ctrl) + handlerWithoutClientAuth.EXPECT().CanHandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + handlerWithoutClientAuth.EXPECT().CanSkipClientAuth(gomock.Any(), gomock.Any()).Return(true).AnyTimes() + + hasher := internal.NewMockHasher(ctrl) + t.Cleanup(ctrl.Finish) + + client := &DefaultClient{} + config := &Config{ClientSecretsHasher: hasher, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy} + fosite := &Fosite{Store: store, Config: config} + for k, c := range []struct { + header http.Header + form url.Values + mock func() + method string + expectErr error + expect *AccessRequest + handlers TokenEndpointHandlers + }{ + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = false + client.Secret = []byte("foo") + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(errors.New("hash err")) + handlerWithoutClientAuth.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + method: "POST", + expectErr: ErrInvalidClient, + handlers: TokenEndpointHandlers{handlerWithoutClientAuth, handlerWithClientAuth}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = false + client.Secret = []byte("foo") + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + handlerWithoutClientAuth.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + handlerWithClientAuth.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + method: "POST", + expect: &AccessRequest{ + GrantTypes: Arguments{"foo"}, + Request: Request{ + Client: client, + }, + }, + handlers: TokenEndpointHandlers{handlerWithoutClientAuth, handlerWithClientAuth}, + }, + { + header: http.Header{}, + form: url.Values{ + "grant_type": {"foo"}, + }, + mock: func() { + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Any()).Times(0) + handlerWithoutClientAuth.EXPECT().HandleTokenEndpointRequest(gomock.Any(), gomock.Any()).Return(nil) + }, + method: "POST", + expectErr: ErrInvalidRequest, + handlers: TokenEndpointHandlers{handlerWithoutClientAuth, handlerWithClientAuth}, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + r := &http.Request{ + Header: c.header, + PostForm: c.form, + Form: c.form, + Method: c.method, + } + c.mock() + ctx := NewContext() + config.TokenEndpointHandlers = c.handlers + ar, err := fosite.NewAccessRequest(ctx, r, new(DefaultSession)) + + 
if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + AssertObjectKeysEqual(t, c.expect, ar, "GrantTypes", "Client") + assert.NotNil(t, ar.GetRequestedAt()) + } + }) + } +} + +func basicAuth(username, password string) string { + return "Basic " + base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", username, password))) +} diff --git a/fosite/access_request_test.go b/fosite/access_request_test.go new file mode 100644 index 00000000000..839cb86f3b4 --- /dev/null +++ b/fosite/access_request_test.go @@ -0,0 +1,25 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAccessRequest(t *testing.T) { + ar := NewAccessRequest(nil) + ar.GrantTypes = Arguments{"foobar"} + ar.Client = &DefaultClient{} + ar.GrantScope("foo") + ar.SetRequestedAudience(Arguments{"foo", "foo", "bar"}) + ar.SetRequestedScopes(Arguments{"foo", "foo", "bar"}) + assert.True(t, ar.GetGrantedScopes().Has("foo")) + assert.NotNil(t, ar.GetRequestedAt()) + assert.Equal(t, ar.GrantTypes, ar.GetGrantTypes()) + assert.Equal(t, Arguments{"foo", "bar"}, ar.RequestedAudience) + assert.Equal(t, Arguments{"foo", "bar"}, ar.RequestedScope) + assert.Equal(t, ar.Client, ar.GetClient()) +} diff --git a/fosite/access_response.go b/fosite/access_response.go new file mode 100644 index 00000000000..be827644eb6 --- /dev/null +++ b/fosite/access_response.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "strings" + "time" +) + +func NewAccessResponse() *AccessResponse { + return &AccessResponse{ + Extra: map[string]interface{}{}, + } +} + +type AccessResponse struct { + Extra map[string]interface{} + AccessToken string + TokenType string +} + +func (a *AccessResponse) SetScopes(scopes Arguments) { + a.SetExtra("scope", strings.Join(scopes, " ")) +} + +func (a *AccessResponse) SetExpiresIn(expiresIn time.Duration) { + a.SetExtra("expires_in", int64(expiresIn/time.Second)) +} + +func (a *AccessResponse) SetExtra(key string, value interface{}) { + a.Extra[key] = value +} + +func (a *AccessResponse) GetExtra(key string) interface{} { + return a.Extra[key] +} + +func (a *AccessResponse) SetAccessToken(token string) { + a.AccessToken = token +} + +func (a *AccessResponse) SetTokenType(name string) { + a.TokenType = name +} + +func (a *AccessResponse) GetAccessToken() string { + return a.AccessToken +} + +func (a *AccessResponse) GetTokenType() string { + return a.TokenType +} + +func (a *AccessResponse) ToMap() map[string]interface{} { + a.Extra["access_token"] = a.GetAccessToken() + a.Extra["token_type"] = a.GetTokenType() + return a.Extra +} diff --git a/fosite/access_response_test.go b/fosite/access_response_test.go new file mode 100644 index 00000000000..01a4605f5b8 --- /dev/null +++ b/fosite/access_response_test.go @@ -0,0 +1,28 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + . 
"github.com/ory/hydra/v2/fosite" +) + +func TestAccessResponse(t *testing.T) { + ar := NewAccessResponse() + ar.SetAccessToken("access") + ar.SetTokenType("bearer") + ar.SetExtra("access_token", "invalid") + ar.SetExtra("foo", "bar") + assert.Equal(t, "access", ar.GetAccessToken()) + assert.Equal(t, "bearer", ar.GetTokenType()) + assert.Equal(t, "bar", ar.GetExtra("foo")) + assert.Equal(t, map[string]interface{}{ + "access_token": "access", + "token_type": "bearer", + "foo": "bar", + }, ar.ToMap()) +} diff --git a/fosite/access_response_writer.go b/fosite/access_response_writer.go new file mode 100644 index 00000000000..5bd7e6566e1 --- /dev/null +++ b/fosite/access_response_writer.go @@ -0,0 +1,46 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" + + "github.com/pkg/errors" +) + +func (f *Fosite) NewAccessResponse(ctx context.Context, requester AccessRequester) (_ AccessResponder, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewAccessResponse") + defer otelx.End(span, &err) + + var tk TokenEndpointHandler + + response := NewAccessResponse() + + ctx = context.WithValue(ctx, AccessRequestContextKey, requester) + ctx = context.WithValue(ctx, AccessResponseContextKey, response) + + for _, tk = range f.Config.GetTokenEndpointHandlers(ctx) { + if err = tk.PopulateTokenEndpointResponse(ctx, requester, response); err == nil { + // do nothing + } else if errors.Is(err, ErrUnknownRequest) { + // do nothing + } else if err != nil { + return nil, err + } + } + + if response.GetAccessToken() == "" || response.GetTokenType() == "" { + return nil, errorsx.WithStack(ErrServerError. + WithHint("An internal server occurred while trying to complete the request."). + WithDebug("Access token or token type not set by TokenEndpointHandlers."). + WithLocalizer(f.Config.GetMessageCatalog(ctx), getLangFromRequester(requester))) + } + + return response, nil +} diff --git a/fosite/access_response_writer_test.go b/fosite/access_response_writer_test.go new file mode 100644 index 00000000000..448b178bbeb --- /dev/null +++ b/fosite/access_response_writer_test.go @@ -0,0 +1,88 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestNewAccessResponse(t *testing.T) { + ctrl := gomock.NewController(t) + handler := internal.NewMockTokenEndpointHandler(ctrl) + t.Cleanup(ctrl.Finish) + + config := &Config{} + f := &Fosite{Config: config} + for k, c := range []struct { + handlers TokenEndpointHandlers + mock func() + expectErr error + expect AccessResponder + }{ + { + mock: func() {}, + handlers: TokenEndpointHandlers{}, + expectErr: ErrServerError, + }, + { + mock: func() { + handler.EXPECT().PopulateTokenEndpointResponse(gomock.Any(), gomock.Any(), gomock.Any()).Return(ErrServerError) + }, + handlers: TokenEndpointHandlers{handler}, + expectErr: ErrServerError, + }, + { + mock: func() { + handler.EXPECT().PopulateTokenEndpointResponse(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: TokenEndpointHandlers{handler}, + expectErr: ErrServerError, + }, + { + mock: func() { + handler.EXPECT().PopulateTokenEndpointResponse(gomock.Any(), gomock.Any(), gomock.Any()).Do(func(_ context.Context, _ AccessRequester, resp AccessResponder) { + resp.SetAccessToken("foo") + }).Return(nil) + }, + handlers: TokenEndpointHandlers{handler}, + expectErr: ErrServerError, + }, + { + mock: func() { + handler.EXPECT().PopulateTokenEndpointResponse(gomock.Any(), gomock.Any(), gomock.Any()).Do(func(_ context.Context, _ AccessRequester, resp AccessResponder) { + resp.SetAccessToken("foo") + resp.SetTokenType("bar") + }).Return(nil) + }, + handlers: TokenEndpointHandlers{handler}, + expect: &AccessResponse{ + Extra: map[string]interface{}{}, + AccessToken: "foo", + TokenType: "bar", + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + config.TokenEndpointHandlers = c.handlers + c.mock() + ar, err := f.NewAccessResponse(context.TODO(), nil) + + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + assert.Equal(t, ar, c.expect) + } + }) + } +} diff --git a/fosite/access_write.go b/fosite/access_write.go new file mode 100644 index 00000000000..a8f1fd6a2c5 --- /dev/null +++ b/fosite/access_write.go @@ -0,0 +1,26 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "net/http" +) + +func (f *Fosite) WriteAccessResponse(ctx context.Context, rw http.ResponseWriter, requester AccessRequester, responder AccessResponder) { + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + + js, err := json.Marshal(responder.ToMap()) + if err != nil { + http.Error(rw, err.Error(), http.StatusInternalServerError) + return + } + + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write(js) +} diff --git a/fosite/access_write_test.go b/fosite/access_write_test.go new file mode 100644 index 00000000000..c397572770e --- /dev/null +++ b/fosite/access_write_test.go @@ -0,0 +1,36 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + . 
"github.com/ory/hydra/v2/fosite/internal" +) + +func TestWriteAccessResponse(t *testing.T) { + f := &Fosite{Config: new(Config)} + header := http.Header{} + ctrl := gomock.NewController(t) + rw := NewMockResponseWriter(ctrl) + ar := NewMockAccessRequester(ctrl) + resp := NewMockAccessResponder(ctrl) + t.Cleanup(ctrl.Finish) + + rw.EXPECT().Header().AnyTimes().Return(header) + rw.EXPECT().WriteHeader(http.StatusOK) + rw.EXPECT().Write(gomock.Any()) + resp.EXPECT().ToMap().Return(map[string]interface{}{}) + + f.WriteAccessResponse(context.Background(), rw, ar, resp) + assert.Equal(t, "application/json;charset=UTF-8", header.Get("Content-Type")) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) +} diff --git a/fosite/arguments.go b/fosite/arguments.go new file mode 100644 index 00000000000..f037919f62d --- /dev/null +++ b/fosite/arguments.go @@ -0,0 +1,80 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "strings" + +type Arguments []string + +// Matches performs an case-insensitive, out-of-order check that the items +// provided exist and equal all of the args in arguments. +// Note: +// - Providing a list that includes duplicate string-case items will return not +// matched. +func (r Arguments) Matches(items ...string) bool { + if len(r) != len(items) { + return false + } + + found := make(map[string]bool) + for _, item := range items { + if !StringInSlice(item, r) { + return false + } + found[item] = true + } + + return len(found) == len(r) +} + +// Has checks, in a case-insensitive manner, that all of the items +// provided exists in arguments. +func (r Arguments) Has(items ...string) bool { + for _, item := range items { + if !StringInSlice(item, r) { + return false + } + } + + return true +} + +// HasOneOf checks, in a case-insensitive manner, that one of the items +// provided exists in arguments. +func (r Arguments) HasOneOf(items ...string) bool { + for _, item := range items { + if StringInSlice(item, r) { + return true + } + } + + return false +} + +// Deprecated: Use ExactOne, Matches or MatchesExact +func (r Arguments) Exact(name string) bool { + return name == strings.Join(r, " ") +} + +// ExactOne checks, by string case, that a single argument equals the provided +// string. +func (r Arguments) ExactOne(name string) bool { + return len(r) == 1 && r[0] == name +} + +// MatchesExact checks, by order and string case, that the items provided equal +// those in arguments. 
+func (r Arguments) MatchesExact(items ...string) bool { + if len(r) != len(items) { + return false + } + + for i, item := range items { + if item != r[i] { + return false + } + } + + return true +} diff --git a/fosite/arguments_test.go b/fosite/arguments_test.go new file mode 100644 index 00000000000..6e5bef8539c --- /dev/null +++ b/fosite/arguments_test.go @@ -0,0 +1,271 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type exactTestCase struct { + args Arguments + exact string + expect bool +} + +var exactTests = []exactTestCase{ + { + args: Arguments{"foo"}, + exact: "foo", + expect: true, + }, + { + args: Arguments{"foo", "bar"}, + exact: "foo", + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + exact: "bar", + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + exact: "baz", + expect: false, + }, + { + args: Arguments{}, + exact: "baz", + expect: false, + }, +} + +func TestArgumentsExact(t *testing.T) { + testCases := append(exactTests, []exactTestCase{ + { + args: Arguments{"foo", "bar"}, + exact: "foo bar", + expect: true, + }, + }...) + + for k, c := range testCases { + assert.Equal(t, c.expect, c.args.Exact(c.exact), "%d", k) + t.Logf("Passed test case %d", k) + } +} + +func TestArgumentsExactOne(t *testing.T) { + testCases := append(exactTests, []exactTestCase{ + { + args: Arguments{"foo", "bar"}, + exact: "foo bar", + expect: false, + }, + }...) + + for k, c := range testCases { + assert.Equal(t, c.expect, c.args.ExactOne(c.exact), "%d", k) + t.Logf("Passed test case %d", k) + } +} + +func TestArgumentsHas(t *testing.T) { + for k, c := range []struct { + args Arguments + has []string + expect bool + }{ + { + args: Arguments{"foo", "bar"}, + has: []string{"foo", "bar"}, + expect: true, + }, + { + args: Arguments{"foo", "bar"}, + has: []string{"bar", "foo"}, + expect: true, + }, + { + args: Arguments{"bar", "foo"}, + has: []string{"foo"}, + expect: true, + }, + { + args: Arguments{"foo", "bar"}, + has: []string{"bar", "foo", "baz"}, + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + has: []string{"foo"}, + expect: true, + }, + { + args: Arguments{"foo", "bar"}, + has: []string{"bar"}, + expect: true, + }, + { + args: Arguments{"foo", "bar"}, + has: []string{"baz"}, + expect: false, + }, + { + args: Arguments{}, + has: []string{"baz"}, + expect: false, + }, + } { + assert.Equal(t, c.expect, c.args.Has(c.has...), "%d", k) + t.Logf("Passed test case %d", k) + } +} + +type matchesTestCase struct { + args Arguments + is []string + expect bool +} + +var matchesTests = []matchesTestCase{ + { + args: Arguments{}, + is: []string{}, + expect: true, + }, + { + args: Arguments{"foo", "bar"}, + is: []string{"foo", "bar"}, + expect: true, + }, + { + args: Arguments{"Foo", "Bar"}, + is: []string{"Foo", "Bar"}, + expect: true, + }, + { + args: Arguments{"foo", "foo"}, + is: []string{"foo"}, + expect: false, + }, + { + args: Arguments{"foo", "foo"}, + is: []string{"bar", "foo"}, + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + is: []string{"bar", "foo", "baz"}, + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + is: []string{"foo"}, + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + is: []string{"bar", "bar"}, + expect: false, + }, + { + args: Arguments{"foo", "bar"}, + is: []string{"baz"}, + expect: false, + }, + { + args: Arguments{}, + is: []string{"baz"}, + expect: false, + }, +} + +func 
TestArgumentsMatchesExact(t *testing.T) { + testCases := append(matchesTests, []matchesTestCase{ + // should fail if items are out of order + { + args: Arguments{"foo", "bar"}, + is: []string{"bar", "foo"}, + expect: false, + }, + // should fail due to case-sensitivity. + { + args: Arguments{"fOo", "bar"}, + is: []string{"foo", "BaR"}, + expect: false, + }, + // duplicate items should return allowed. + { + args: Arguments{"foo", "foo"}, + is: []string{"foo", "foo"}, + expect: true, + }, + }...) + for k, c := range testCases { + assert.Equal(t, c.expect, c.args.MatchesExact(c.is...), "%d", k) + t.Logf("Passed test case %d", k) + } +} + +func TestArgumentsMatches(t *testing.T) { + testCases := append(matchesTests, []matchesTestCase{ + // should match if items are out of order. + { + args: Arguments{"foo", "bar"}, + is: []string{"bar", "foo"}, + expect: true, + }, + // should allow case-insensitive matching. + { + args: Arguments{"fOo", "bar"}, + is: []string{"foo", "BaR"}, + expect: true, + }, + // should return non-matching if duplicate items exist. + { + args: Arguments{"foo", "bar"}, + is: []string{"FOO", "FOO", "bar"}, + expect: false, + }, + { + args: Arguments{"foo", "foo"}, + is: []string{"foo", "foo"}, + expect: false, + }, + }...) + for k, c := range testCases { + assert.Equal(t, c.expect, c.args.Matches(c.is...), "%d", k) + t.Logf("Passed test case %d", k) + } +} + +func TestArgumentsOneOf(t *testing.T) { + for k, c := range []struct { + args Arguments + oneOf []string + expect bool + }{ + { + args: Arguments{"baz", "bar"}, + oneOf: []string{"foo", "bar"}, + expect: true, + }, + { + args: Arguments{"foo", "baz"}, + oneOf: []string{"foo", "bar"}, + expect: true, + }, + { + args: Arguments{"baz"}, + oneOf: []string{"foo", "bar"}, + expect: false, + }, + } { + assert.Equal(t, c.expect, c.args.HasOneOf(c.oneOf...), "%d", k) + t.Logf("Passed test case %d", k) + } +} diff --git a/fosite/audience_strategy.go b/fosite/audience_strategy.go new file mode 100644 index 00000000000..61d836da128 --- /dev/null +++ b/fosite/audience_strategy.go @@ -0,0 +1,104 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "net/url" + "strings" + + "github.com/ory/x/errorsx" +) + +type AudienceMatchingStrategy func(haystack []string, needle []string) error + +func DefaultAudienceMatchingStrategy(haystack []string, needle []string) error { + if len(needle) == 0 { + return nil + } + + for _, n := range needle { + nu, err := url.Parse(n) + if err != nil { + return errorsx.WithStack(ErrInvalidRequest.WithHintf("Unable to parse requested audience '%s'.", n).WithWrap(err).WithDebug(err.Error())) + } + + var found bool + for _, h := range haystack { + hu, err := url.Parse(h) + if err != nil { + return errorsx.WithStack(ErrInvalidRequest.WithHintf("Unable to parse whitelisted audience '%s'.", h).WithWrap(err).WithDebug(err.Error())) + } + + allowedPath := strings.TrimRight(hu.Path, "/") + if nu.Scheme == hu.Scheme && + nu.Host == hu.Host && + (nu.Path == hu.Path || + nu.Path == allowedPath || + len(nu.Path) > len(allowedPath) && strings.TrimRight(nu.Path[:len(allowedPath)+1], "/")+"/" == allowedPath+"/") { + found = true + } + } + + if !found { + return errorsx.WithStack(ErrInvalidRequest.WithHintf("Requested audience '%s' has not been whitelisted by the OAuth 2.0 Client.", n)) + } + } + + return nil +} + +// ExactAudienceMatchingStrategy does not assume that audiences are URIs, but compares strings as-is and +// does matching 
with exact string comparison. It requires that all strings in "needle" are present in +// "haystack". Use this strategy when your audience values are not URIs (e.g., you use client IDs for +// audience and they are UUIDs or random strings). +func ExactAudienceMatchingStrategy(haystack []string, needle []string) error { + if len(needle) == 0 { + return nil + } + + for _, n := range needle { + var found bool + for _, h := range haystack { + if n == h { + found = true + } + } + + if !found { + return errorsx.WithStack(ErrInvalidRequest.WithHintf(`Requested audience "%s" has not been whitelisted by the OAuth 2.0 Client.`, n)) + } + } + + return nil +} + +// GetAudiences allows audiences to be provided as repeated "audience" form parameter, +// or as a space-delimited "audience" form parameter if it is not repeated. +// RFC 8693 in section 2.1 specifies that multiple audience values should be multiple +// query parameters, while RFC 6749 says that that request parameter must not be included +// more than once (and thus why we use space-delimited value). This function tries to satisfy both. +// If "audience" form parameter is repeated, we do not split the value by space. +func GetAudiences(form url.Values) []string { + audiences := form["audience"] + if len(audiences) > 1 { + return RemoveEmpty(audiences) + } else if len(audiences) == 1 { + return RemoveEmpty(strings.Split(audiences[0], " ")) + } else { + return []string{} + } +} + +func (f *Fosite) validateAudience(ctx context.Context, r *http.Request, request Requester) error { + audience := GetAudiences(request.GetRequestForm()) + + if err := f.Config.GetAudienceStrategy(ctx)(request.GetClient().GetAudience(), audience); err != nil { + return err + } + + request.SetRequestedAudience(audience) + return nil +} diff --git a/fosite/audience_strategy_test.go b/fosite/audience_strategy_test.go new file mode 100644 index 00000000000..dccf5b359fd --- /dev/null +++ b/fosite/audience_strategy_test.go @@ -0,0 +1,257 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestDefaultAudienceMatchingStrategy(t *testing.T) { + for k, tc := range []struct { + h []string + n []string + err bool + }{ + { + h: []string{}, + n: []string{}, + err: false, + }, + { + h: []string{"http://foo/bar"}, + n: []string{}, + err: false, + }, + { + h: []string{}, + n: []string{"http://foo/bar"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users/"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users/"}, + n: []string{"https://cloud.ory.sh/api/users/"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users/"}, + n: []string{"https://cloud.ory.sh/api/users"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users/1234"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users", "https://cloud.ory.sh/api/users/", "https://cloud.ory.sh/api/users/1234"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users", "https://cloud.ory.sh/api/tenants"}, + n: []string{"https://cloud.ory.sh/api/users", "https://cloud.ory.sh/api/users/", "https://cloud.ory.sh/api/users/1234", 
"https://cloud.ory.sh/api/tenants"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users1234"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"http://cloud.ory.sh/api/users"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh:8000/api/users"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.xyz/api/users"}, + err: true, + }, + { + h: []string{"foobar"}, + n: []string{"foobar"}, + err: false, + }, + { + h: []string{"foo bar"}, + n: []string{"foo bar"}, + err: false, + }, + { + h: []string{"foobar"}, + n: []string{"foobar"}, + err: false, + }, + { + h: []string{"zoo", "bar"}, + n: []string{"zoo"}, + err: false, + }, + { + h: []string{"zoo"}, + n: []string{"zoo", "bar"}, + err: true, + }, + { + h: []string{"foobar"}, + n: []string{"foobar/"}, + err: false, + }, + { + h: []string{"foobar/"}, + n: []string{"foobar"}, + err: false, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + err := DefaultAudienceMatchingStrategy(tc.h, tc.n) + if tc.err { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestExactAudienceMatchingStrategy(t *testing.T) { + for k, tc := range []struct { + h []string + n []string + err bool + }{ + { + h: []string{}, + n: []string{}, + err: false, + }, + { + h: []string{"http://foo/bar"}, + n: []string{}, + err: false, + }, + { + h: []string{}, + n: []string{"http://foo/bar"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users/"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users/"}, + n: []string{"https://cloud.ory.sh/api/users/"}, + err: false, + }, + { + h: []string{"https://cloud.ory.sh/api/users/"}, + n: []string{"https://cloud.ory.sh/api/users"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users/1234"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users", "https://cloud.ory.sh/api/users/", "https://cloud.ory.sh/api/users/1234"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users", "https://cloud.ory.sh/api/tenants"}, + n: []string{"https://cloud.ory.sh/api/users", "https://cloud.ory.sh/api/users/", "https://cloud.ory.sh/api/users/1234", "https://cloud.ory.sh/api/tenants"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh/api/users1234"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"http://cloud.ory.sh/api/users"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.sh:8000/api/users"}, + err: true, + }, + { + h: []string{"https://cloud.ory.sh/api/users"}, + n: []string{"https://cloud.ory.xyz/api/users"}, + err: true, + }, + { + h: []string{"foobar"}, + n: []string{"foobar"}, + err: false, + }, + { + h: []string{"foo bar"}, + n: []string{"foo bar"}, + err: false, + }, + { + h: []string{"foobar"}, + n: []string{"foobar"}, + err: false, + }, + { + h: []string{"zoo", "bar"}, + n: []string{"zoo"}, + err: false, + }, + { + h: []string{"zoo"}, + n: []string{"zoo", "bar"}, + err: true, + }, + { + h: 
[]string{"foobar"}, + n: []string{"foobar/"}, + err: true, + }, + { + h: []string{"foobar/"}, + n: []string{"foobar"}, + err: true, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + err := ExactAudienceMatchingStrategy(tc.h, tc.n) + if tc.err { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/fosite/authorize_error.go b/fosite/authorize_error.go new file mode 100644 index 00000000000..7247cb743c1 --- /dev/null +++ b/fosite/authorize_error.go @@ -0,0 +1,69 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "fmt" + "net/http" +) + +func (f *Fosite) WriteAuthorizeError(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, err error) { + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + + if f.ResponseModeHandler(ctx).ResponseModes().Has(ar.GetResponseMode()) { + f.ResponseModeHandler(ctx).WriteAuthorizeError(ctx, rw, ar, err) + return + } + + rfcerr := ErrorToRFC6749Error(err).WithLegacyFormat(f.Config.GetUseLegacyErrorFormat(ctx)).WithExposeDebug(f.Config.GetSendDebugMessagesToClients(ctx)).WithLocalizer(f.Config.GetMessageCatalog(ctx), getLangFromRequester(ar)) + if !ar.IsRedirectURIValid() { + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + + js, err := json.Marshal(rfcerr) + if err != nil { + if f.Config.GetSendDebugMessagesToClients(ctx) { + errorMessage := EscapeJSONString(err.Error()) + http.Error(rw, fmt.Sprintf(`{"error":"server_error","error_description":"%s"}`, errorMessage), http.StatusInternalServerError) + } else { + http.Error(rw, `{"error":"server_error"}`, http.StatusInternalServerError) + } + return + } + + rw.WriteHeader(rfcerr.CodeField) + _, _ = rw.Write(js) + return + } + + redirectURI := ar.GetRedirectURI() + + // The endpoint URI MUST NOT include a fragment component. + redirectURI.Fragment = "" + + errors := rfcerr.ToValues() + errors.Set("state", ar.GetState()) + + var redirectURIString string + if ar.GetResponseMode() == ResponseModeFormPost { + rw.Header().Set("Content-Type", "text/html;charset=UTF-8") + WriteAuthorizeFormPostResponse(redirectURI.String(), errors, GetPostFormHTMLTemplate(ctx, f), rw) + return + } else if ar.GetResponseMode() == ResponseModeFragment { + redirectURIString = redirectURI.String() + "#" + errors.Encode() + } else { + for key, values := range redirectURI.Query() { + for _, value := range values { + errors.Add(key, value) + } + } + redirectURI.RawQuery = errors.Encode() + redirectURIString = redirectURI.String() + } + + rw.Header().Set("Location", redirectURIString) + rw.WriteHeader(http.StatusSeeOther) +} diff --git a/fosite/authorize_error_test.go b/fosite/authorize_error_test.go new file mode 100644 index 00000000000..ea48ca1e29e --- /dev/null +++ b/fosite/authorize_error_test.go @@ -0,0 +1,449 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "net/http" + "net/url" + "testing" + + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + . 
"github.com/ory/hydra/v2/fosite/internal" +) + +// Test for +// - https://tools.ietf.org/html/rfc6749#section-4.1.2.1 +// If the request fails due to a missing, invalid, or mismatching +// redirection URI, or if the client identifier is missing or invalid, +// the authorization server SHOULD inform the resource owner of the +// error and MUST NOT automatically redirect the user-agent to the +// invalid redirection URI. +// - https://tools.ietf.org/html/rfc6749#section-3.1.2 +// The redirection endpoint URI MUST be an absolute URI as defined by +// [RFC3986] Section 4.3. The endpoint URI MAY include an +// "application/x-www-form-urlencoded" formatted (per Appendix B) query +// component ([RFC3986] Section 3.4), which MUST be retained when adding +// additional query parameters. The endpoint URI MUST NOT include a +// fragment component. +func TestWriteAuthorizeError(t *testing.T) { + urls := []string{ + "https://foobar.com/", + "https://foobar.com/?foo=bar", + } + purls := []*url.URL{} + for _, u := range urls { + purl, _ := url.Parse(u) + purls = append(purls, purl) + } + + header := http.Header{} + for k, c := range []struct { + err *RFC6749Error + debug bool + doNotUseLegacyFormat bool + mock func(*MockResponseWriter, *MockAuthorizeRequester) + checkHeader func(*testing.T, int) + }{ + // 0 + { + err: ErrInvalidGrant, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(false) + req.EXPECT().GetResponseMode().Return(ResponseModeDefault) + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusBadRequest) + rw.EXPECT().Write(gomock.Any()) + }, + checkHeader: func(t *testing.T, k int) { + assert.Equal(t, "application/json;charset=UTF-8", header.Get("Content-Type")) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 1 + { + debug: true, + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code"})) + req.EXPECT().GetResponseMode().Return(ResponseModeQuery).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?error=invalid_request&error_debug=with-debug&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 2 + { + debug: true, + doNotUseLegacyFormat: true, + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + 
req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code"})) + req.EXPECT().GetResponseMode().Return(ResponseModeQuery).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.+Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.+with-debug&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 3 + { + doNotUseLegacyFormat: true, + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code"})) + req.EXPECT().GetResponseMode().Return(ResponseModeQuery).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.+Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 4 + { + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code"})) + req.EXPECT().GetResponseMode().Return(ResponseModeDefault).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 5 + { + err: ErrInvalidRequest, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + 
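+				// The registered redirect URI already carries a query component (?foo=bar); per
+				// RFC 6749 Section 3.1.2 it must be retained, so the error values are merged into
+				// the existing query.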
req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code"})) + req.EXPECT().GetResponseMode().Return(ResponseModeQuery).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&foo=bar&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 6 + { + err: ErrUnsupportedGrantType, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"foobar"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?foo=bar#error=unsupported_grant_type&error_description=The+authorization+grant+type+is+not+supported+by+the+authorization+server.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 7 + { + err: ErrInvalidRequest, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/#error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 8 + { + err: ErrInvalidRequest, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + 
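+				// Fragment response mode with a pre-existing query: the error values go into the URI
+				// fragment while the original ?foo=bar query component stays untouched.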
req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?foo=bar#error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 9 + { + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code", "token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/#error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 10 + { + err: ErrInvalidRequest.WithDebug("with-debug"), + debug: true, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code", "token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/#error=invalid_request&error_debug=with-debug&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), 
a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 11 + { + err: ErrInvalidRequest.WithDebug("with-debug"), + debug: true, + doNotUseLegacyFormat: true, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code", "token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/#error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.+Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.+with-debug&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.NotContains(t, header.Get("Location"), "error_hint") + assert.NotContains(t, header.Get("Location"), "error_debug") + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 12 + { + err: ErrInvalidRequest.WithDebug("with-debug"), + doNotUseLegacyFormat: true, + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[0])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code", "token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/#error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.+Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.NotContains(t, header.Get("Location"), "error_hint") + assert.NotContains(t, header.Get("Location"), "error_debug") + assert.NotContains(t, header.Get("Location"), "with-debug") + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 13 + { + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code", "token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + 
rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?foo=bar#error=invalid_request&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 14 + { + debug: true, + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"code", "token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?foo=bar#error=invalid_request&error_debug=with-debug&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 15 + { + debug: true, + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"id_token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?foo=bar#error=invalid_request&error_debug=with-debug&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 16 + { + debug: true, 
+ err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFragment).AnyTimes() + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + checkHeader: func(t *testing.T, k int) { + a, _ := url.Parse("https://foobar.com/?foo=bar#error=invalid_request&error_debug=with-debug&error_description=The+request+is+missing+a+required+parameter%2C+includes+an+invalid+parameter+value%2C+includes+a+parameter+more+than+once%2C+or+is+otherwise+malformed.&error_hint=Make+sure+that+the+various+parameters+are+correct%2C+be+aware+of+case+sensitivity+and+trim+your+parameters.+Make+sure+that+the+client+you+are+using+has+exactly+whitelisted+the+redirect_uri+you+specified.&state=foostate") + b, _ := url.Parse(header.Get("Location")) + assert.Equal(t, a, b, "\n\t%s\n\t%s", header.Get("Location"), a.String()) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + }, + }, + // 17 + { + debug: true, + err: ErrInvalidRequest.WithDebug("with-debug"), + mock: func(rw *MockResponseWriter, req *MockAuthorizeRequester) { + req.EXPECT().IsRedirectURIValid().Return(true) + req.EXPECT().GetRedirectURI().Return(copyUrl(purls[1])) + req.EXPECT().GetState().Return("foostate") + req.EXPECT().GetResponseTypes().AnyTimes().Return(Arguments([]string{"token"})) + req.EXPECT().GetResponseMode().Return(ResponseModeFormPost).Times(2) + rw.EXPECT().Header().Times(3).Return(header) + rw.EXPECT().Write(gomock.Any()).AnyTimes() + }, + checkHeader: func(t *testing.T, k int) { + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "no-cache", header.Get("Pragma")) + assert.Equal(t, "text/html;charset=UTF-8", header.Get("Content-Type")) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + oauth2 := &Fosite{ + Config: &Config{ + SendDebugMessagesToClients: c.debug, + UseLegacyErrorFormat: !c.doNotUseLegacyFormat, + }, + } + + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + rw := NewMockResponseWriter(ctrl) + req := NewMockAuthorizeRequester(ctrl) + + c.mock(rw, req) + oauth2.WriteAuthorizeError(context.Background(), rw, req, c.err) + c.checkHeader(t, k) + header = http.Header{} + }) + } +} + +func copyUrl(u *url.URL) *url.URL { + u2, _ := url.Parse(u.String()) + return u2 +} diff --git a/fosite/authorize_helper.go b/fosite/authorize_helper.go new file mode 100644 index 00000000000..fc5b56b95b3 --- /dev/null +++ b/fosite/authorize_helper.go @@ -0,0 +1,217 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "fmt" + "html/template" + "io" + "net" + "net/url" + "strings" + + "github.com/ory/x/errorsx" + + "github.com/asaskevich/govalidator" +) + +var DefaultFormPostTemplate = template.Must(template.New("form_post").Parse(` + + Submit This Form + + +
+      <form method="post" action="{{ .RedirURL }}">
+         {{ range $key,$value := .Parameters }}
+            {{ range $parameter:= $value}}
+               <input type="hidden" name="{{$key}}" value="{{$parameter}}"/>
+            {{end}}
+         {{ end }}
+      </form>
+ +`)) + +// MatchRedirectURIWithClientRedirectURIs if the given uri is a registered redirect uri. Does not perform +// uri validation. +// +// Considered specifications +// +// - https://tools.ietf.org/html/rfc6749#section-3.1.2.3 +// If multiple redirection URIs have been registered, if only part of +// the redirection URI has been registered, or if no redirection URI has +// been registered, the client MUST include a redirection URI with the +// authorization request using the "redirect_uri" request parameter. +// +// When a redirection URI is included in an authorization request, the +// authorization server MUST compare and match the value received +// against at least one of the registered redirection URIs (or URI +// components) as defined in [RFC3986] Section 6, if any redirection +// URIs were registered. If the client registration included the full +// redirection URI, the authorization server MUST compare the two URIs +// using simple string comparison as defined in [RFC3986] Section 6.2.1. +// +// * https://tools.ietf.org/html/rfc6819#section-4.4.1.7 +// - The authorization server may also enforce the usage and validation +// of pre-registered redirect URIs (see Section 5.2.3.5). This will +// allow for early recognition of authorization "code" disclosure to +// counterfeit clients. +// - The attacker will need to use another redirect URI for its +// authorization process rather than the target web site because it +// needs to intercept the flow. So, if the authorization server +// associates the authorization "code" with the redirect URI of a +// particular end-user authorization and validates this redirect URI +// with the redirect URI passed to the token's endpoint, such an +// attack is detected (see Section 5.2.4.5). +func MatchRedirectURIWithClientRedirectURIs(rawurl string, client Client) (*url.URL, error) { + if rawurl == "" && len(client.GetRedirectURIs()) == 1 { + if redirectURIFromClient, err := url.Parse(client.GetRedirectURIs()[0]); err == nil && IsValidRedirectURI(redirectURIFromClient) { + // If no redirect_uri was given and the client has exactly one valid redirect_uri registered, use that instead + return redirectURIFromClient, nil + } + } else if redirectTo, ok := isMatchingRedirectURI(rawurl, client.GetRedirectURIs()); rawurl != "" && ok { + // If a redirect_uri was given and the clients knows it (simple string comparison!) + // return it. + if parsed, err := url.Parse(redirectTo); err == nil && IsValidRedirectURI(parsed) { + // If no redirect_uri was given and the client has exactly one valid redirect_uri registered, use that instead + return parsed, nil + } + } + + return nil, errorsx.WithStack(ErrInvalidRequest.WithHint("The 'redirect_uri' parameter does not match any of the OAuth 2.0 Client's pre-registered redirect urls.")) +} + +// Match a requested redirect URI against a pool of registered client URIs +// +// Test a given redirect URI against a pool of URIs provided by a registered client. +// If the OAuth 2.0 Client has loopback URIs registered either an IPv4 URI http://127.0.0.1 or +// an IPv6 URI http://[::1] a client is allowed to request a dynamic port and the server MUST accept +// it as a valid redirection uri. +// +// https://tools.ietf.org/html/rfc8252#section-7.3 +// Native apps that are able to open a port on the loopback network +// interface without needing special permissions (typically, those on +// desktop operating systems) can use the loopback interface to receive +// the OAuth redirect. 
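+//
+// For example (illustrative values, not taken from the RFC): a client that has
+// registered "http://127.0.0.1/cb" may be redirected to "http://127.0.0.1:51234/cb"
+// at runtime, because the port on the loopback interface is chosen dynamically by
+// the native app.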
+// +// Loopback redirect URIs use the "http" scheme and are constructed with +// the loopback IP literal and whatever port the client is listening on. +func isMatchingRedirectURI(uri string, haystack []string) (string, bool) { + requested, err := url.Parse(uri) + if err != nil { + return "", false + } + + for _, b := range haystack { + if b == uri { + return b, true + } else if isMatchingAsLoopback(requested, b) { + // We have to return the requested URL here because otherwise the port might get lost (see isMatchingAsLoopback) + // description. + return uri, true + } + } + return "", false +} + +func isMatchingAsLoopback(requested *url.URL, registeredURI string) bool { + registered, err := url.Parse(registeredURI) + if err != nil { + return false + } + + // Native apps that are able to open a port on the loopback network + // interface without needing special permissions (typically, those on + // desktop operating systems) can use the loopback interface to receive + // the OAuth redirect. + // + // Loopback redirect URIs use the "http" scheme and are constructed with + // the loopback IP literal and whatever port the client is listening on. + // + // Source: https://tools.ietf.org/html/rfc8252#section-7.3 + if requested.Scheme == "http" && + isLoopbackAddress(requested.Hostname()) && + registered.Hostname() == requested.Hostname() && + // The port is skipped here - see codedoc above! + registered.Path == requested.Path && + registered.RawQuery == requested.RawQuery { + return true + } + + return false +} + +// Check if address is either an IPv4 loopback or an IPv6 loopback. +func isLoopbackAddress(hostname string) bool { + return net.ParseIP(hostname).IsLoopback() +} + +// IsValidRedirectURI validates a redirect_uri as specified in: +// +// * https://tools.ietf.org/html/rfc6749#section-3.1.2 +// - The redirection endpoint URI MUST be an absolute URI as defined by [RFC3986] Section 4.3. +// - The endpoint URI MUST NOT include a fragment component. +// - https://tools.ietf.org/html/rfc3986#section-4.3 +// absolute-URI = scheme ":" hier-part [ "?" query ] +// - https://tools.ietf.org/html/rfc6819#section-5.1.1 +func IsValidRedirectURI(redirectURI *url.URL) bool { + // We need to explicitly check for a scheme + if !govalidator.IsRequestURL(redirectURI.String()) { + return false + } + + if redirectURI.Fragment != "" { + // "The endpoint URI MUST NOT include a fragment component." + return false + } + + return true +} + +func IsRedirectURISecure(ctx context.Context, redirectURI *url.URL) bool { + return !(redirectURI.Scheme == "http" && !IsLocalhost(redirectURI)) +} + +// IsRedirectURISecureStrict is stricter than IsRedirectURISecure and it does not allow custom-scheme +// URLs because they can be hijacked for native apps. Use claimed HTTPS redirects instead. +// See discussion in https://github.com/ory/hydra/v2/fosite/pull/489. +func IsRedirectURISecureStrict(ctx context.Context, redirectURI *url.URL) bool { + return redirectURI.Scheme == "https" || (redirectURI.Scheme == "http" && IsLocalhost(redirectURI)) +} + +func IsLocalhost(redirectURI *url.URL) bool { + hn := redirectURI.Hostname() + return strings.HasSuffix(hn, ".localhost") || isLoopbackAddress(hn) || hn == "localhost" +} + +func WriteAuthorizeFormPostResponse(redirectURL string, parameters url.Values, template *template.Template, rw io.Writer) { + _ = template.Execute(rw, struct { + RedirURL string + Parameters url.Values + }{ + RedirURL: redirectURL, + Parameters: parameters, + }) +} + +// Deprecated: Do not use. 
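+// URLSetFragment joins keys and values with '&' and '=' without URL-escaping them,
+// so values that themselves contain '&' or '=' become ambiguous in the resulting fragment.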
+func URLSetFragment(source *url.URL, fragment url.Values) { + var f string + for k, v := range fragment { + for _, vv := range v { + if len(f) != 0 { + f += fmt.Sprintf("&%s=%s", k, vv) + } else { + f += fmt.Sprintf("%s=%s", k, vv) + } + } + } + source.Fragment = f +} + +func GetPostFormHTMLTemplate(ctx context.Context, f *Fosite) *template.Template { + if t := f.Config.GetFormPostHTMLTemplate(ctx); t != nil { + return t + } + return DefaultFormPostTemplate +} diff --git a/fosite/authorize_helper_test.go b/fosite/authorize_helper_test.go new file mode 100644 index 00000000000..5fb9e782c47 --- /dev/null +++ b/fosite/authorize_helper_test.go @@ -0,0 +1,346 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "bytes" + "context" + "net/url" + "strings" + "testing" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIsLocalhost(t *testing.T) { + for k, c := range []struct { + expect bool + rawurl string + }{ + {expect: false, rawurl: "https://foo.bar"}, + {expect: true, rawurl: "https://localhost"}, + {expect: true, rawurl: "https://localhost:1234"}, + {expect: true, rawurl: "https://127.0.0.1:1234"}, + {expect: true, rawurl: "https://127.0.0.1"}, + {expect: true, rawurl: "https://test.localhost:1234"}, + {expect: true, rawurl: "https://test.localhost"}, + } { + u, _ := url.Parse(c.rawurl) + assert.Equal(t, c.expect, fosite.IsLocalhost(u), "case %d", k) + } +} + +// rfc6749 10.6. +// Authorization Code Redirection URI Manipulation +// The authorization server MUST require public clients and SHOULD require confidential clients +// to register their redirection URIs. If a redirection URI is provided +// in the request, the authorization server MUST validate it against the +// registered value. +// +// rfc6819 4.4.1.7. +// Threat: Authorization "code" Leakage through Counterfeit Client +// The authorization server may also enforce the usage and validation +// of pre-registered redirect URIs (see Section 5.2.3.5). 
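+//
+// The table below covers exact string comparison of registered redirect URIs as
+// well as the loopback exception of RFC 8252 Section 7.3, where only the port of an
+// http://127.0.0.1 or http://[::1] redirect may differ from the registered value.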
+func TestDoesClientWhiteListRedirect(t *testing.T) { + for k, c := range []struct { + client fosite.Client + url string + isError bool + expected string + }{ + { + client: &fosite.DefaultClient{RedirectURIs: []string{""}}, + url: "https://foo.com/cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"wta://auth"}}, + url: "wta://auth", + expected: "wta://auth", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"wta:///auth"}}, + url: "wta:///auth", + expected: "wta:///auth", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"wta://foo/auth"}}, + url: "wta://foo/auth", + expected: "wta://foo/auth", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"https://bar.com/cb"}}, + url: "https://foo.com/cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"https://bar.com/cb"}}, + url: "", + isError: false, + expected: "https://bar.com/cb", + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{""}}, + url: "", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"https://bar.com/cb"}}, + url: "https://bar.com/cb", + isError: false, + expected: "https://bar.com/cb", + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"https://bar.com/cb"}}, + url: "https://bar.com/cb123", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://[::1]"}}, + url: "http://[::1]:1024", + expected: "http://[::1]:1024", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://[::1]"}}, + url: "http://[::1]:1024/cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://[::1]/cb"}}, + url: "http://[::1]:1024/cb", + expected: "http://[::1]:1024/cb", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://[::1]"}}, + url: "http://foo.bar/bar", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1"}}, + url: "http://127.0.0.1:1024", + expected: "http://127.0.0.1:1024", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1/cb"}}, + url: "http://127.0.0.1:64000/cb", + expected: "http://127.0.0.1:64000/cb", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1"}}, + url: "http://127.0.0.1:64000/cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1"}}, + url: "http://127.0.0.1", + expected: "http://127.0.0.1", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1/Cb"}}, + url: "http://127.0.0.1:8080/Cb", + expected: "http://127.0.0.1:8080/Cb", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1"}}, + url: "http://foo.bar/bar", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1"}}, + url: ":/invalid.uri)bar", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1:8080/cb"}}, + url: "http://127.0.0.1:8080/Cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1:8080/cb"}}, + url: "http://127.0.0.1:8080/cb?foo=bar", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1:8080/cb?foo=bar"}}, + url: "http://127.0.0.1:8080/cb?foo=bar", + 
expected: "http://127.0.0.1:8080/cb?foo=bar", + isError: false, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1:8080/cb?foo=bar"}}, + url: "http://127.0.0.1:8080/cb?baz=bar&foo=bar", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1:8080/cb?foo=bar&baz=bar"}}, + url: "http://127.0.0.1:8080/cb?baz=bar&foo=bar", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"https://www.ory.sh/cb"}}, + url: "http://127.0.0.1:8080/cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"http://127.0.0.1:8080/cb"}}, + url: "https://www.ory.sh/cb", + isError: true, + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"web+application://callback"}}, + url: "web+application://callback", + isError: false, + expected: "web+application://callback", + }, + { + client: &fosite.DefaultClient{RedirectURIs: []string{"https://google.com/?foo=bar%20foo+baz"}}, + url: "https://google.com/?foo=bar%20foo+baz", + isError: false, + expected: "https://google.com/?foo=bar%20foo+baz", + }, + } { + redir, err := fosite.MatchRedirectURIWithClientRedirectURIs(c.url, c.client) + assert.Equal(t, c.isError, err != nil, "%d: %+v", k, c) + if err == nil { + require.NotNil(t, redir, "%d", k) + assert.Equal(t, c.expected, redir.String(), "%d", k) + } + t.Logf("Passed test case %d", k) + } +} + +func TestIsRedirectURISecure(t *testing.T) { + for d, c := range []struct { + u string + err bool + }{ + {u: "http://google.com", err: true}, + {u: "https://google.com", err: false}, + {u: "http://localhost", err: false}, + {u: "http://test.localhost", err: false}, + {u: "http://127.0.0.1/", err: false}, + {u: "http://[::1]/", err: false}, + {u: "http://127.0.0.1:8080/", err: false}, + {u: "http://[::1]:8080/", err: false}, + {u: "http://testlocalhost", err: true}, + {u: "wta://auth", err: false}, + } { + uu, err := url.Parse(c.u) + require.NoError(t, err) + assert.Equal(t, !c.err, fosite.IsRedirectURISecure(context.Background(), uu), "case %d", d) + } +} + +func TestWriteAuthorizeFormPostResponse(t *testing.T) { + for d, c := range []struct { + parameters url.Values + check func(code string, state string, customParams url.Values, d int) + }{ + { + parameters: url.Values{"code": {"lshr755nsg39fgur"}, "state": {"924659540232"}}, + check: func(code string, state string, customParams url.Values, d int) { + assert.Equal(t, "lshr755nsg39fgur", code, "case %d", d) + assert.Equal(t, "924659540232", state, "case %d", d) + }, + }, + { + parameters: url.Values{"code": {"lshr75*ns-39f+ur"}, "state": {"9a:* <&)"}}, + check: func(code string, state string, customParams url.Values, d int) { + assert.Equal(t, "lshr75*ns-39f+ur", code, "case %d", d) + assert.Equal(t, "9a:* <&)", state, "case %d", d) + }, + }, + { + parameters: url.Values{"code": {"1234"}, "custom": {"test2", "test3"}}, + check: func(code string, state string, customParams url.Values, d int) { + assert.Equal(t, "1234", code, "case %d", d) + assert.Equal(t, []string{"test2", "test3"}, customParams["custom"], "case %d", d) + }, + }, + { + parameters: url.Values{"code": {"1234"}, "custom": {"Bold"}}, + check: func(code string, state string, customParams url.Values, d int) { + assert.Equal(t, "1234", code, "case %d", d) + assert.Equal(t, "Bold", customParams.Get("custom"), "case %d", d) + }, + }, + } { + var responseBuffer bytes.Buffer + redirectURL := "https://localhost:8080/cb" + + fosite.WriteAuthorizeFormPostResponse(redirectURL, c.parameters, 
fosite.DefaultFormPostTemplate, &responseBuffer) + + code, state, _, _, customParams, _ := internal.ParseFormPostResponse(t, redirectURL, bytes.NewReader(responseBuffer.Bytes())) + c.check(code, state, customParams, d) + } +} + +func TestIsRedirectURISecureStrict(t *testing.T) { + for d, c := range []struct { + u string + err bool + }{ + {u: "http://google.com", err: true}, + {u: "https://google.com", err: false}, + {u: "http://localhost", err: false}, + {u: "http://test.localhost", err: false}, + {u: "http://127.0.0.1/", err: false}, + {u: "http://[::1]/", err: false}, + {u: "http://127.0.0.1:8080/", err: false}, + {u: "http://[::1]:8080/", err: false}, + {u: "http://testlocalhost", err: true}, + {u: "wta://auth", err: true}, + } { + uu, err := url.Parse(c.u) + require.NoError(t, err) + assert.Equal(t, !c.err, fosite.IsRedirectURISecureStrict(context.Background(), uu), "case %d", d) + } +} + +func TestURLSetFragment(t *testing.T) { + for d, c := range []struct { + u string + a string + f url.Values + }{ + {u: "http://google.com", a: "http://google.com#code=567060896", f: url.Values{"code": []string{"567060896"}}}, + {u: "http://google.com", a: "http://google.com#code=567060896&scope=read", f: url.Values{"code": []string{"567060896"}, "scope": []string{"read"}}}, + {u: "http://google.com", a: "http://google.com#code=567060896&scope=read%20mail", f: url.Values{"code": []string{"567060896j"}, "scope": []string{"read mail"}}}, + {u: "http://google.com", a: "http://google.com#code=567060896&scope=read+write", f: url.Values{"code": []string{"567060896"}, "scope": []string{"read+write"}}}, + {u: "http://google.com", a: "http://google.com#code=567060896&scope=api:*", f: url.Values{"code": []string{"567060896"}, "scope": []string{"api:*"}}}, + {u: "https://google.com?foo=bar", a: "https://google.com?foo=bar#code=567060896", f: url.Values{"code": []string{"567060896"}}}, + {u: "http://localhost?foo=bar&baz=foo", a: "http://localhost?foo=bar&baz=foo#code=567060896", f: url.Values{"code": []string{"567060896"}}}, + } { + uu, err := url.Parse(c.u) + require.NoError(t, err) + fosite.URLSetFragment(uu, c.f) + tURL, err := url.Parse(uu.String()) + require.NoError(t, err) + r := ParseURLFragment(tURL.Fragment) + assert.Equal(t, c.f.Get("code"), r.Get("code"), "case %d", d) + assert.Equal(t, c.f.Get("scope"), r.Get("scope"), "case %d", d) + } +} +func ParseURLFragment(fragment string) url.Values { + r := url.Values{} + kvs := strings.Split(fragment, "&") + for _, kv := range kvs { + kva := strings.Split(kv, "=") + r.Add(kva[0], kva[1]) + } + return r +} diff --git a/fosite/authorize_helper_whitebox_test.go b/fosite/authorize_helper_whitebox_test.go new file mode 100644 index 00000000000..52a4bba5e1a --- /dev/null +++ b/fosite/authorize_helper_whitebox_test.go @@ -0,0 +1,82 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsLookbackAddress(t *testing.T) { + testCases := []struct { + name string + have string + expected bool + }{ + { + "ShouldReturnTrueIPv4Loopback", + "127.0.0.1", + true, + }, + { + "ShouldReturnTrueIPv4LoopbackWithPort", + "127.0.0.1:1230", + true, + }, + { + "ShouldReturnTrueIPv6Loopback", + "[::1]", + true, + }, + { + "ShouldReturnTrueIPv6LoopbackWithPort", + "[::1]:1230", + true, + }, + { + "ShouldReturnTrue12700255", + "127.0.0.255", + true, + }, + { + "ShouldReturnTrue12700255WithPort", + "127.0.0.255:1230", + true, + }, + { + 
"ShouldReturnFalse128001", + "128.0.0.1", + false, + }, + { + "ShouldReturnFalse128001WithPort", + "128.0.0.1:1230", + false, + }, + { + "ShouldReturnFalseInvalidFourthOctet", + "127.0.0.11230", + false, + }, + { + "ShouldReturnFalseInvalidIPv4", + "127x0x0x11230", + false, + }, + { + "ShouldReturnFalseInvalidIPv6", + "[::1]1230", + false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + u := url.URL{Host: tc.have} + assert.Equal(t, tc.expected, isLoopbackAddress(u.Hostname())) + }) + } +} diff --git a/fosite/authorize_request.go b/fosite/authorize_request.go new file mode 100644 index 00000000000..a1874784397 --- /dev/null +++ b/fosite/authorize_request.go @@ -0,0 +1,98 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "net/url" +) + +type ResponseModeType string + +const ( + ResponseModeDefault = ResponseModeType("") + ResponseModeFormPost = ResponseModeType("form_post") + ResponseModeQuery = ResponseModeType("query") + ResponseModeFragment = ResponseModeType("fragment") +) + +// AuthorizeRequest is an implementation of AuthorizeRequester +type AuthorizeRequest struct { + ResponseTypes Arguments `json:"responseTypes" gorethink:"responseTypes"` + RedirectURI *url.URL `json:"redirectUri" gorethink:"redirectUri"` + State string `json:"state" gorethink:"state"` + HandledResponseTypes Arguments `json:"handledResponseTypes" gorethink:"handledResponseTypes"` + ResponseMode ResponseModeType `json:"ResponseModes" gorethink:"ResponseModes"` + DefaultResponseMode ResponseModeType `json:"DefaultResponseMode" gorethink:"DefaultResponseMode"` + + Request +} + +func NewAuthorizeRequest() *AuthorizeRequest { + return &AuthorizeRequest{ + ResponseTypes: Arguments{}, + HandledResponseTypes: Arguments{}, + Request: *NewRequest(), + ResponseMode: ResponseModeDefault, + // The redirect URL must be unset / nil for redirect detection to work properly: + // RedirectURI: &url.URL{}, + } +} + +func (d *AuthorizeRequest) IsRedirectURIValid() bool { + if d.GetRedirectURI() == nil { + return false + } + + raw := d.GetRedirectURI().String() + if d.GetClient() == nil { + return false + } + + redirectURI, err := MatchRedirectURIWithClientRedirectURIs(raw, d.GetClient()) + if err != nil { + return false + } + return IsValidRedirectURI(redirectURI) +} + +func (d *AuthorizeRequest) GetResponseTypes() Arguments { + return d.ResponseTypes +} + +func (d *AuthorizeRequest) GetState() string { + return d.State +} + +func (d *AuthorizeRequest) GetRedirectURI() *url.URL { + return d.RedirectURI +} + +func (d *AuthorizeRequest) SetResponseTypeHandled(name string) { + d.HandledResponseTypes = append(d.HandledResponseTypes, name) +} + +func (d *AuthorizeRequest) DidHandleAllResponseTypes() bool { + for _, rt := range d.ResponseTypes { + if !d.HandledResponseTypes.Has(rt) { + return false + } + } + + return len(d.ResponseTypes) > 0 +} + +func (d *AuthorizeRequest) GetResponseMode() ResponseModeType { + return d.ResponseMode +} + +func (d *AuthorizeRequest) SetDefaultResponseMode(defaultResponseMode ResponseModeType) { + if d.ResponseMode == ResponseModeDefault { + d.ResponseMode = defaultResponseMode + } + d.DefaultResponseMode = defaultResponseMode +} + +func (d *AuthorizeRequest) GetDefaultResponseMode() ResponseModeType { + return d.DefaultResponseMode +} diff --git a/fosite/authorize_request_handler.go b/fosite/authorize_request_handler.go new file mode 100644 index 00000000000..3bef2e5d06e --- /dev/null +++ 
b/fosite/authorize_request_handler.go @@ -0,0 +1,436 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "fmt" + "io" + "net/http" + "strings" + + "github.com/go-jose/go-jose/v3" + "github.com/hashicorp/go-retryablehttp" + "github.com/pkg/errors" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/go-convenience/stringslice" + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" +) + +func wrapSigningKeyFailure(outer *RFC6749Error, inner error) *RFC6749Error { + outer = outer.WithWrap(inner).WithDebug(inner.Error()) + if e := new(RFC6749Error); errors.As(inner, &e) { + return outer.WithHintf("%s %s", outer.Reason(), e.Reason()) + } + return outer +} + +func (f *Fosite) authorizeRequestParametersFromOpenIDConnectRequest(ctx context.Context, request *AuthorizeRequest, isPARRequest bool) error { + var scope Arguments = RemoveEmpty(strings.Split(request.Form.Get("scope"), " ")) + + // Even if a scope parameter is present in the Request Object value, a scope parameter MUST always be passed using + // the OAuth 2.0 request syntax containing the openid scope value to indicate to the underlying OAuth 2.0 logic that this is an OpenID Connect request. + // Source: http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth + if !scope.Has("openid") { + return nil + } + + if len(request.Form.Get("request")+request.Form.Get("request_uri")) == 0 { + return nil + } else if len(request.Form.Get("request")) > 0 && len(request.Form.Get("request_uri")) > 0 { + return errorsx.WithStack(ErrInvalidRequest.WithHint("OpenID Connect parameters 'request' and 'request_uri' were both given, but you can use at most one.")) + } + + oidcClient, ok := request.Client.(OpenIDConnectClient) + if !ok { + if len(request.Form.Get("request_uri")) > 0 { + return errorsx.WithStack(ErrRequestURINotSupported.WithHint("OpenID Connect 'request_uri' context was given, but the OAuth 2.0 Client does not implement advanced OpenID Connect capabilities.")) + } + return errorsx.WithStack(ErrRequestNotSupported.WithHint("OpenID Connect 'request' context was given, but the OAuth 2.0 Client does not implement advanced OpenID Connect capabilities.")) + } + + if oidcClient.GetJSONWebKeys() == nil && len(oidcClient.GetJSONWebKeysURI()) == 0 { + return errorsx.WithStack(ErrInvalidRequest.WithHint("OpenID Connect 'request' or 'request_uri' context was given, but the OAuth 2.0 Client does not have any JSON Web Keys registered.")) + } + + assertion := request.Form.Get("request") + if location := request.Form.Get("request_uri"); len(location) > 0 { + if !stringslice.Has(oidcClient.GetRequestURIs(), location) { + return errorsx.WithStack(ErrInvalidRequestURI.WithHintf("Request URI '%s' is not whitelisted by the OAuth 2.0 Client.", location)) + } + + hc := f.Config.GetHTTPClient(ctx) + req, err := retryablehttp.NewRequestWithContext(ctx, "GET", location, nil) + if err != nil { + return errorsx.WithStack(ErrInvalidRequestURI.WithHintf("Unable to fetch OpenID Connect request parameters from 'request_uri' because: %s.", err.Error()).WithWrap(err).WithDebug(err.Error())) + } + response, err := hc.Do(req) + if err != nil { + return errorsx.WithStack(ErrInvalidRequestURI.WithHintf("Unable to fetch OpenID Connect request parameters from 'request_uri' because: %s.", 
err.Error()).WithWrap(err).WithDebug(err.Error())) + } + defer func(Body io.ReadCloser) { _ = Body.Close() }(response.Body) + response.Body = io.NopCloser(io.LimitReader(response.Body, 10*1024*1024)) // limit to 10MiB + + if response.StatusCode != http.StatusOK { + return errorsx.WithStack(ErrInvalidRequestURI.WithHintf("Unable to fetch OpenID Connect request parameters from 'request_uri' because status code '%d' was expected, but got '%d'.", http.StatusOK, response.StatusCode)) + } + + body, err := io.ReadAll(response.Body) + if err != nil { + return errorsx.WithStack(ErrInvalidRequestURI.WithHintf("Unable to fetch OpenID Connect request parameters from 'request_uri' because body parsing failed with: %s.", err).WithWrap(err).WithDebug(err.Error())) + } + + assertion = string(body) + } + + token, err := jwt.ParseWithClaims(assertion, jwt.MapClaims{}, func(t *jwt.Token) (interface{}, error) { + // request_object_signing_alg - OPTIONAL. + // JWS [JWS] alg algorithm [JWA] that MUST be used for signing Request Objects sent to the OP. All Request Objects from this Client MUST be rejected, + // if not signed with this algorithm. Request Objects are described in Section 6.1 of OpenID Connect Core 1.0 [OpenID.Core]. This algorithm MUST + // be used both when the Request Object is passed by value (using the request parameter) and when it is passed by reference (using the request_uri parameter). + // Servers SHOULD support RS256. The value none MAY be used. The default, if omitted, is that any algorithm supported by the OP and the RP MAY be used. + if oidcClient.GetRequestObjectSigningAlgorithm() != "" && oidcClient.GetRequestObjectSigningAlgorithm() != fmt.Sprintf("%s", t.Header["alg"]) { + return nil, errorsx.WithStack(ErrInvalidRequestObject.WithHintf("The request object uses signing algorithm '%s', but the requested OAuth 2.0 Client enforces signing algorithm '%s'.", t.Header["alg"], oidcClient.GetRequestObjectSigningAlgorithm())) + } + + if t.Method == jwt.SigningMethodNone { + return jwt.UnsafeAllowNoneSignatureType, nil + } + + switch t.Method { + case jose.RS256, jose.RS384, jose.RS512: + key, err := f.findClientPublicJWK(ctx, oidcClient, t, true) + if err != nil { + return nil, wrapSigningKeyFailure( + ErrInvalidRequestObject.WithHint("Unable to retrieve RSA signing key from OAuth 2.0 Client."), err) + } + return key, nil + case jose.ES256, jose.ES384, jose.ES512: + key, err := f.findClientPublicJWK(ctx, oidcClient, t, false) + if err != nil { + return nil, wrapSigningKeyFailure( + ErrInvalidRequestObject.WithHint("Unable to retrieve ECDSA signing key from OAuth 2.0 Client."), err) + } + return key, nil + case jose.PS256, jose.PS384, jose.PS512: + key, err := f.findClientPublicJWK(ctx, oidcClient, t, true) + if err != nil { + return nil, wrapSigningKeyFailure( + ErrInvalidRequestObject.WithHint("Unable to retrieve RSA signing key from OAuth 2.0 Client."), err) + } + return key, nil + default: + return nil, errorsx.WithStack(ErrInvalidRequestObject.WithHintf("This request object uses unsupported signing algorithm '%s'.", t.Header["alg"])) + } + }) + if err != nil { + // Do not re-process already enhanced errors + var e *jwt.ValidationError + if errors.As(err, &e) { + if e.Inner != nil { + return e.Inner + } + return errorsx.WithStack(ErrInvalidRequestObject.WithHint("Unable to verify the request object's signature.").WithWrap(err).WithDebug(err.Error())) + } + return err + } else if err := token.Claims.Valid(); err != nil { + return errorsx.WithStack(ErrInvalidRequestObject.WithHint("Unable 
to verify the request object because its claims could not be validated, check if the expiry time is set correctly.").WithWrap(err).WithDebug(err.Error())) + } + + claims := token.Claims + // Reject the request if the "request_uri" authorization request + // parameter is provided. + if requestURI, _ := claims["request_uri"].(string); isPARRequest && requestURI != "" { + return errorsx.WithStack(ErrInvalidRequestObject.WithHint("Pushed Authorization Requests can not contain the 'request_uri' parameter.")) + } + + for k, v := range claims { + request.Form.Set(k, fmt.Sprintf("%s", v)) + } + + claimScope := RemoveEmpty(strings.Split(request.Form.Get("scope"), " ")) + for _, s := range scope { + if !stringslice.Has(claimScope, s) { + claimScope = append(claimScope, s) + } + } + + request.State = request.Form.Get("state") + request.Form.Set("scope", strings.Join(claimScope, " ")) + return nil +} + +func (f *Fosite) validateAuthorizeRedirectURI(_ *http.Request, request *AuthorizeRequest) error { + // Fetch redirect URI from request + rawRedirURI := request.Form.Get("redirect_uri") + + // This ensures that the 'redirect_uri' parameter is present for OpenID Connect 1.0 authorization requests as per: + // + // Authorization Code Flow - https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest + // Implicit Flow - https://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthRequest + // Hybrid Flow - https://openid.net/specs/openid-connect-core-1_0.html#HybridAuthRequest + // + // Note: as per the Hybrid Flow documentation the Hybrid Flow has the same requirements as the Authorization Code Flow. + if len(rawRedirURI) == 0 && request.GetRequestedScopes().Has("openid") { + return errorsx.WithStack(ErrInvalidRequest.WithHint("The 'redirect_uri' parameter is required when using OpenID Connect 1.0.")) + } + + // Validate redirect uri + redirectURI, err := MatchRedirectURIWithClientRedirectURIs(rawRedirURI, request.Client) + if err != nil { + return err + } else if !IsValidRedirectURI(redirectURI) { + return errorsx.WithStack(ErrInvalidRequest.WithHintf("The redirect URI '%s' contains an illegal character (for example #) or is otherwise invalid.", redirectURI)) + } + request.RedirectURI = redirectURI + return nil +} + +func (f *Fosite) parseAuthorizeScope(_ *http.Request, request *AuthorizeRequest) error { + request.SetRequestedScopes(RemoveEmpty(strings.Split(request.Form.Get("scope"), " "))) + + return nil +} + +func (f *Fosite) validateAuthorizeScope(ctx context.Context, _ *http.Request, request *AuthorizeRequest) error { + for _, permission := range request.GetRequestedScopes() { + if !f.Config.GetScopeStrategy(ctx)(request.Client.GetScopes(), permission) { + return errorsx.WithStack(ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", permission)) + } + } + + return nil +} + +func (f *Fosite) validateResponseTypes(r *http.Request, request *AuthorizeRequest) error { + // https://tools.ietf.org/html/rfc6749#section-3.1.1 + // Extension response types MAY contain a space-delimited (%x20) list of + // values, where the order of values does not matter (e.g., response + // type "a b" is the same as "b a"). The meaning of such composite + // response types is defined by their respective specifications. 
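+	//
+	// Illustration (hypothetical values): a client registered with the response type
+	// "code id_token" also matches a request for "id_token code", because both sides
+	// are split on spaces and compared out of order below.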
+ responseTypes := RemoveEmpty(strings.Split(r.Form.Get("response_type"), " ")) + if len(responseTypes) == 0 { + return errorsx.WithStack(ErrUnsupportedResponseType.WithHint("`The request is missing the 'response_type' parameter.")) + } + + var found bool + for _, t := range request.GetClient().GetResponseTypes() { + if Arguments(responseTypes).Matches(RemoveEmpty(strings.Split(t, " "))...) { + found = true + break + } + } + + if !found { + return errorsx.WithStack(ErrUnsupportedResponseType.WithHintf("The client is not allowed to request response_type '%s'.", r.Form.Get("response_type"))) + } + + request.ResponseTypes = responseTypes + return nil +} + +func (f *Fosite) ParseResponseMode(ctx context.Context, r *http.Request, request *AuthorizeRequest) error { + switch responseMode := r.Form.Get("response_mode"); responseMode { + case string(ResponseModeDefault): + request.ResponseMode = ResponseModeDefault + case string(ResponseModeFragment): + request.ResponseMode = ResponseModeFragment + case string(ResponseModeQuery): + request.ResponseMode = ResponseModeQuery + case string(ResponseModeFormPost): + request.ResponseMode = ResponseModeFormPost + default: + rm := ResponseModeType(responseMode) + if f.ResponseModeHandler(ctx).ResponseModes().Has(rm) { + request.ResponseMode = rm + break + } + return errorsx.WithStack(ErrUnsupportedResponseMode.WithHintf("Request with unsupported response_mode \"%s\".", responseMode)) + } + + return nil +} + +func (f *Fosite) validateResponseMode(r *http.Request, request *AuthorizeRequest) error { + if request.ResponseMode == ResponseModeDefault { + return nil + } + + responseModeClient, ok := request.GetClient().(ResponseModeClient) + if !ok { + return errorsx.WithStack(ErrUnsupportedResponseMode.WithHintf("The request has response_mode \"%s\". set but registered OAuth 2.0 client doesn't support response_mode", r.Form.Get("response_mode"))) + } + + var found bool + for _, t := range responseModeClient.GetResponseModes() { + if request.ResponseMode == t { + found = true + break + } + } + + if !found { + return errorsx.WithStack(ErrUnsupportedResponseMode.WithHintf("The client is not allowed to request response_mode '%s'.", r.Form.Get("response_mode"))) + } + + return nil +} + +func (f *Fosite) authorizeRequestFromPAR(ctx context.Context, r *http.Request, request *AuthorizeRequest) (bool, error) { + configProvider, ok := f.Config.(PushedAuthorizeRequestConfigProvider) + if !ok { + // If the config provider is not implemented, PAR cannot be used. 
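+		// The request is then processed as a regular (non-pushed) authorization request.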
+ return false, nil + } + + requestURI := r.Form.Get("request_uri") + if requestURI == "" || !strings.HasPrefix(requestURI, configProvider.GetPushedAuthorizeRequestURIPrefix(ctx)) { + // nothing to do here + return false, nil + } + + clientID := r.Form.Get("client_id") + + storage, ok := f.Store.(PARStorageProvider) + if !ok { + return false, errorsx.WithStack(ErrServerError.WithHint(ErrorPARNotSupported).WithDebug(DebugPARStorageInvalid)) + } + + // hydrate the requester + var parRequest AuthorizeRequester + var err error + if parRequest, err = storage.PARStorage().GetPARSession(ctx, requestURI); err != nil { + return false, errorsx.WithStack(ErrInvalidRequestURI.WithHint("Invalid PAR session").WithWrap(err).WithDebug(err.Error())) + } + + // hydrate the request object + request.Merge(parRequest) + request.RedirectURI = parRequest.GetRedirectURI() + request.ResponseTypes = parRequest.GetResponseTypes() + request.State = parRequest.GetState() + request.ResponseMode = parRequest.GetResponseMode() + + if err := storage.PARStorage().DeletePARSession(ctx, requestURI); err != nil { + return false, errorsx.WithStack(ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + // validate the clients match + if clientID != request.GetClient().GetID() { + return false, errorsx.WithStack(ErrInvalidRequest.WithHint("The 'client_id' must match the one sent in the pushed authorization request.")) + } + + return true, nil +} + +func (f *Fosite) NewAuthorizeRequest(ctx context.Context, r *http.Request) (_ AuthorizeRequester, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewAuthorizeRequest") + defer otelx.End(span, &err) + + return f.newAuthorizeRequest(ctx, r, false) +} + +func (f *Fosite) newAuthorizeRequest(ctx context.Context, r *http.Request, isPARRequest bool) (AuthorizeRequester, error) { + request := NewAuthorizeRequest() + request.Request.Lang = i18n.GetLangFromRequest(f.Config.GetMessageCatalog(ctx), r) + + ctx = context.WithValue(ctx, RequestContextKey, r) + ctx = context.WithValue(ctx, AuthorizeRequestContextKey, request) + + if err := r.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithWrap(err).WithDebug(err.Error())) + } + request.Form = r.Form + + // Save state to the request to be returned in error conditions (https://github.com/ory/hydra/issues/1642) + request.State = request.Form.Get("state") + + // Check if this is a continuation from a pushed authorization request + if !isPARRequest { + if isPAR, err := f.authorizeRequestFromPAR(ctx, r, request); err != nil { + return request, err + } else if isPAR { + // No need to continue + return request, nil + } else if configProvider, ok := f.Config.(PushedAuthorizeRequestConfigProvider); ok && configProvider.EnforcePushedAuthorize(ctx) { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("Pushed Authorization Requests are enforced but no such request was sent.")) + } + } + + client, err := f.Store.FositeClientManager().GetClient(ctx, request.GetRequestForm().Get("client_id")) + if err != nil { + return request, errorsx.WithStack(ErrInvalidClient.WithHint("The requested OAuth 2.0 Client does not exist.").WithWrap(err).WithDebug(err.Error())) + } + request.Client = client + + // Now that the base fields (state and client) are populated, we 
extract all the information + // from the request object or request object uri, if one is set. + // + // All other parse methods should come afterwards so that we ensure that the data is taken + // from the request_object if set. + if err := f.authorizeRequestParametersFromOpenIDConnectRequest(ctx, request, isPARRequest); err != nil { + return request, err + } + + // The request context is now fully available and we can start processing the individual + // fields. + if err := f.ParseResponseMode(ctx, r, request); err != nil { + return request, err + } + + if err = f.parseAuthorizeScope(r, request); err != nil { + return request, err + } + + if err = f.validateAuthorizeRedirectURI(r, request); err != nil { + return request, err + } + + if err = f.validateAuthorizeScope(ctx, r, request); err != nil { + return request, err + } + + if err = f.validateAudience(ctx, r, request); err != nil { + return request, err + } + + if len(request.Form.Get("registration")) > 0 { + return request, errorsx.WithStack(ErrRegistrationNotSupported) + } + + if err = f.validateResponseTypes(r, request); err != nil { + return request, err + } + + if err = f.validateResponseMode(r, request); err != nil { + return request, err + } + + // A fallback handler to set the default response mode in cases where we can not reach the Authorize Handlers + // but still need the e.g. correct error response mode. + if request.GetResponseMode() == ResponseModeDefault { + if request.ResponseTypes.ExactOne("code") { + request.SetDefaultResponseMode(ResponseModeQuery) + } else { + // If the response type is not `code` it is an implicit/hybrid (fragment) response mode. + request.SetDefaultResponseMode(ResponseModeFragment) + } + } + + // rfc6819 4.4.1.8. Threat: CSRF Attack against redirect-uri + // The "state" parameter should be used to link the authorization + // request with the redirect URI used to deliver the access token (Section 5.3.5). 
+ // + // https://tools.ietf.org/html/rfc6819#section-4.4.1.8 + // The "state" parameter should not be guessable + if len(request.State) < f.GetMinParameterEntropy(ctx) { + // We're assuming that using less then, by default, 8 characters for the state can not be considered "unguessable" + return request, errorsx.WithStack(ErrInvalidState.WithHintf("Request parameter 'state' must be at least be %d characters long to ensure sufficient entropy.", f.GetMinParameterEntropy(ctx))) + } + + return request, nil +} diff --git a/fosite/authorize_request_handler_oidc_request_test.go b/fosite/authorize_request_handler_oidc_request_test.go new file mode 100644 index 00000000000..da85b9175a1 --- /dev/null +++ b/fosite/authorize_request_handler_oidc_request_test.go @@ -0,0 +1,221 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/pkg/errors" + + "github.com/go-jose/go-jose/v3" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func mustGenerateAssertion(t *testing.T, claims jwt.MapClaims, key *rsa.PrivateKey, kid string) string { + token := jwt.NewWithClaims(jose.RS256, claims) + if kid != "" { + token.Header["kid"] = kid + } + tokenString, err := token.SignedString(key) + require.NoError(t, err) + return tokenString +} + +func mustGenerateHSAssertion(t *testing.T, claims jwt.MapClaims) string { + token := jwt.NewWithClaims(jose.HS256, claims) + tokenString, err := token.SignedString([]byte("aaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbcccccccccccccccccccccddddddddddddddddddddddd")) + require.NoError(t, err) + return tokenString +} + +func mustGenerateNoneAssertion(t *testing.T, claims jwt.MapClaims) string { + token := jwt.NewWithClaims(jwt.SigningMethodNone, claims) + tokenString, err := token.SignedString(jwt.UnsafeAllowNoneSignatureType) + require.NoError(t, err) + return tokenString +} + +func TestAuthorizeRequestParametersFromOpenIDConnectRequest(t *testing.T) { + key, err := rsa.GenerateKey(rand.Reader, 1024) + if err != nil { + panic(err) + } + jwks := &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "kid-foo", + Use: "sig", + Key: &key.PublicKey, + }, + }, + } + + validRequestObject := mustGenerateAssertion(t, jwt.MapClaims{"scope": "foo", "foo": "bar", "baz": "baz", "response_type": "token", "response_mode": "post_form"}, key, "kid-foo") + validRequestObjectWithoutKid := mustGenerateAssertion(t, jwt.MapClaims{"scope": "foo", "foo": "bar", "baz": "baz"}, key, "") + validNoneRequestObject := mustGenerateNoneAssertion(t, jwt.MapClaims{"scope": "foo", "foo": "bar", "baz": "baz", "state": "some-state"}) + + var reqH http.HandlerFunc = func(rw http.ResponseWriter, r *http.Request) { + _, err := rw.Write([]byte(validRequestObject)) + require.NoError(t, err) + } + reqTS := httptest.NewServer(reqH) + defer reqTS.Close() + + var hJWK http.HandlerFunc = func(rw http.ResponseWriter, r *http.Request) { + require.NoError(t, json.NewEncoder(rw).Encode(jwks)) + } + reqJWK := httptest.NewServer(hJWK) + defer reqJWK.Close() + + f := &Fosite{Config: &Config{JWKSFetcherStrategy: NewDefaultJWKSFetcherStrategy()}} + for k, tc := range []struct { + client Client + form url.Values + d string + + expectErr error + expectErrReason string + expectForm url.Values + }{ + { + d: 
"should pass because no request context given and not openid", + form: url.Values{}, + expectErr: nil, + expectForm: url.Values{}, + }, + { + d: "should pass because no request context given", + form: url.Values{"scope": {"openid"}}, + expectErr: nil, + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should pass because request context given but not openid", + form: url.Values{"request": {"foo"}}, + expectErr: nil, + expectForm: url.Values{"request": {"foo"}}, + }, + { + d: "should fail because not an OpenIDConnect compliant client", + form: url.Values{"scope": {"openid"}, "request": {"foo"}}, + expectErr: ErrRequestNotSupported, + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should fail because not an OpenIDConnect compliant client", + form: url.Values{"scope": {"openid"}, "request_uri": {"foo"}}, + expectErr: ErrRequestURINotSupported, + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should fail because token invalid an no key set", + form: url.Values{"scope": {"openid"}, "request_uri": {"foo"}}, + client: &DefaultOpenIDConnectClient{RequestObjectSigningAlgorithm: "RS256"}, + expectErr: ErrInvalidRequest, + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should fail because token invalid", + form: url.Values{"scope": {"openid"}, "request": {"foo"}}, + client: &DefaultOpenIDConnectClient{JSONWebKeys: jwks, RequestObjectSigningAlgorithm: "RS256"}, + expectErr: ErrInvalidRequestObject, + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should fail because kid does not exist", + form: url.Values{"scope": {"openid"}, "request": {mustGenerateAssertion(t, jwt.MapClaims{}, key, "does-not-exists")}}, + client: &DefaultOpenIDConnectClient{JSONWebKeys: jwks, RequestObjectSigningAlgorithm: "RS256"}, + expectErr: ErrInvalidRequestObject, + expectErrReason: "Unable to retrieve RSA signing key from OAuth 2.0 Client. The JSON Web Token uses signing key with kid 'does-not-exists', which could not be found.", + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should fail because not RS256 token", + form: url.Values{"scope": {"openid"}, "request": {mustGenerateHSAssertion(t, jwt.MapClaims{})}}, + client: &DefaultOpenIDConnectClient{JSONWebKeys: jwks, RequestObjectSigningAlgorithm: "RS256"}, + expectErr: ErrInvalidRequestObject, + expectErrReason: "The request object uses signing algorithm 'HS256', but the requested OAuth 2.0 Client enforces signing algorithm 'RS256'.", + expectForm: url.Values{"scope": {"openid"}}, + }, + { + d: "should pass and set request parameters properly", + form: url.Values{"scope": {"openid"}, "response_type": {"code"}, "response_mode": {"none"}, "request": {validRequestObject}}, + client: &DefaultOpenIDConnectClient{JSONWebKeys: jwks, RequestObjectSigningAlgorithm: "RS256"}, + // The values from form are overwritten by the request object. 
+ expectForm: url.Values{"response_type": {"token"}, "response_mode": {"post_form"}, "scope": {"foo openid"}, "request": {validRequestObject}, "foo": {"bar"}, "baz": {"baz"}}, + }, + { + d: "should pass even if kid is unset", + form: url.Values{"scope": {"openid"}, "request": {validRequestObjectWithoutKid}}, + client: &DefaultOpenIDConnectClient{JSONWebKeys: jwks, RequestObjectSigningAlgorithm: "RS256"}, + expectForm: url.Values{"scope": {"foo openid"}, "request": {validRequestObjectWithoutKid}, "foo": {"bar"}, "baz": {"baz"}}, + }, + { + d: "should fail because request uri is not whitelisted", + form: url.Values{"scope": {"openid"}, "request_uri": {reqTS.URL}}, + client: &DefaultOpenIDConnectClient{JSONWebKeysURI: reqJWK.URL, RequestObjectSigningAlgorithm: "RS256"}, + expectForm: url.Values{"scope": {"foo openid"}, "request_uri": {reqTS.URL}, "foo": {"bar"}, "baz": {"baz"}}, + expectErr: ErrInvalidRequestURI, + }, + { + d: "should pass and set request_uri parameters properly and also fetch jwk from remote", + form: url.Values{"scope": {"openid"}, "request_uri": {reqTS.URL}}, + client: &DefaultOpenIDConnectClient{JSONWebKeysURI: reqJWK.URL, RequestObjectSigningAlgorithm: "RS256", RequestURIs: []string{reqTS.URL}}, + expectForm: url.Values{"response_type": {"token"}, "response_mode": {"post_form"}, "scope": {"foo openid"}, "request_uri": {reqTS.URL}, "foo": {"bar"}, "baz": {"baz"}}, + }, + { + d: "should pass when request object uses algorithm none", + form: url.Values{"scope": {"openid"}, "request": {validNoneRequestObject}}, + client: &DefaultOpenIDConnectClient{JSONWebKeysURI: reqJWK.URL, RequestObjectSigningAlgorithm: "none"}, + expectForm: url.Values{"state": {"some-state"}, "scope": {"foo openid"}, "request": {validNoneRequestObject}, "foo": {"bar"}, "baz": {"baz"}}, + }, + { + d: "should pass when request object uses algorithm none and the client did not explicitly allow any algorithm", + form: url.Values{"scope": {"openid"}, "request": {validNoneRequestObject}}, + client: &DefaultOpenIDConnectClient{JSONWebKeysURI: reqJWK.URL}, + expectForm: url.Values{"state": {"some-state"}, "scope": {"foo openid"}, "request": {validNoneRequestObject}, "foo": {"bar"}, "baz": {"baz"}}, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { + req := &AuthorizeRequest{ + Request: Request{ + Client: tc.client, + Form: tc.form, + }, + } + + err := f.authorizeRequestParametersFromOpenIDConnectRequest(context.Background(), req, false) + if tc.expectErr != nil { + require.EqualError(t, err, tc.expectErr.Error(), "%+v", err) + if tc.expectErrReason != "" { + real := new(RFC6749Error) + require.True(t, errors.As(err, &real)) + assert.EqualValues(t, tc.expectErrReason, real.Reason()) + } + } else { + if err != nil { + real := new(RFC6749Error) + errors.As(err, &real) + require.NoErrorf(t, err, "Hint: %v\nDebug:%v", real.HintField, real.DebugField) + } + require.NoErrorf(t, err, "%+v", err) + require.Equal(t, len(tc.expectForm), len(req.Form)) + for k, v := range tc.expectForm { + assert.EqualValues(t, v, req.Form[k]) + } + } + }) + } +} diff --git a/fosite/authorize_request_handler_test.go b/fosite/authorize_request_handler_test.go new file mode 100644 index 00000000000..952fba00ae6 --- /dev/null +++ b/fosite/authorize_request_handler_test.go @@ -0,0 +1,585 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "net/http" + "net/url" + "testing" + + "github.com/pkg/errors" + 
"github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + . "github.com/ory/hydra/v2/fosite/internal" +) + +// Should pass +// +// - https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#Terminology +// The OAuth 2.0 specification allows for registration of space-separated response_type parameter values. +// If a Response Type contains one of more space characters (%20), it is compared as a space-delimited list of +// values in which the order of values does not matter. +func TestNewAuthorizeRequest(t *testing.T) { + var store *MockStorage + var clientManager *MockClientManager + + redir, _ := url.Parse("https://foo.bar/cb") + specialCharRedir, _ := url.Parse("web+application://callback") + for k, c := range []struct { + desc string + conf *Fosite + r *http.Request + query url.Values + expectedError error + mock func() + expect *AuthorizeRequest + }{ + /* empty request */ + { + desc: "empty request fails", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + r: &http.Request{}, + expectedError: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Any()).Return(nil, errors.New("foo")) + }, + }, + /* invalid redirect uri */ + { + desc: "invalid redirect uri fails", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{"redirect_uri": []string{"invalid"}}, + expectedError: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Any()).Return(nil, errors.New("foo")) + }, + }, + /* invalid client */ + { + desc: "invalid client fails", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{"redirect_uri": []string{"https://foo.bar/cb"}}, + expectedError: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Any()).Return(nil, errors.New("foo")) + }, + }, + /* redirect client mismatch */ + { + desc: "client and request redirects mismatch", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "client_id": []string{"1234"}, + }, + expectedError: ErrInvalidRequest, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"invalid"}, Scopes: []string{}}, nil) + }, + }, + /* redirect client mismatch */ + { + desc: "client and request redirects mismatch", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": []string{""}, + "client_id": []string{"1234"}, + }, + expectedError: ErrInvalidRequest, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), 
"1234").Return(&DefaultClient{RedirectURIs: []string{"invalid"}, Scopes: []string{}}, nil) + }, + }, + /* redirect client mismatch */ + { + desc: "client and request redirects mismatch", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": []string{"https://foo.bar/cb"}, + "client_id": []string{"1234"}, + }, + expectedError: ErrInvalidRequest, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"invalid"}, Scopes: []string{}}, nil) + }, + }, + /* no state */ + { + desc: "no state", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": []string{"https://foo.bar/cb"}, + "client_id": []string{"1234"}, + "response_type": []string{"code"}, + }, + expectedError: ErrInvalidState, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{}}, nil) + }, + }, + /* short state */ + { + desc: "short state", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code"}, + "state": {"short"}, + }, + expectedError: ErrInvalidState, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{}}, nil) + }, + }, + /* fails because scope not given */ + { + desc: "should fail because client does not have scope baz", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar baz"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}}, nil) + }, + expectedError: ErrInvalidScope, + }, + /* fails because scope not given */ + { + desc: "should fail because client does not have scope baz", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api"}, + }, nil) + }, + expectedError: ErrInvalidRequest, + }, + /* success case */ + { + desc: "should 
pass", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* repeated audience parameter */ + { + desc: "repeated audience parameter", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* repeated audience parameter with tricky values */ + { + desc: "repeated audience parameter with tricky values", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: ExactAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"test value", ""}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"test value"}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", 
"token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"test value"}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"test value"}, + }, + }, + }, + /* redirect_uri with special character in protocol*/ + { + desc: "redirect_uri with special character", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"web+application://callback"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"web+application://callback"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: specialCharRedir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"web+application://callback"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* audience with double spaces between values */ + { + desc: "audience with double spaces between values", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* fails because unknown response_mode*/ + { + desc: "should fail because unknown response_mode", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + 
"scope": {"foo bar"}, + "response_mode": {"unknown"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, ResponseTypes: []string{"code token"}}, nil) + }, + expectedError: ErrUnsupportedResponseMode, + }, + /* fails because response_mode is requested but the OAuth 2.0 client doesn't support response mode */ + { + desc: "should fail because response_mode is requested but the OAuth 2.0 client doesn't support response mode", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, ResponseTypes: []string{"code token"}}, nil) + }, + expectedError: ErrUnsupportedResponseMode, + }, + /* fails because requested response mode is not allowed */ + { + desc: "should fail because requested response mode is not allowed", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + }, + ResponseModes: []ResponseModeType{ResponseModeQuery}, + }, nil) + }, + expectedError: ErrUnsupportedResponseMode, + }, + /* success with response mode */ + { + desc: "success with response mode", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + ResponseModes: []ResponseModeType{ResponseModeFormPost}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", 
"bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + ResponseModes: []ResponseModeType{ResponseModeFormPost}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* determine correct response mode if default */ + { + desc: "success with response mode", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + ResponseModes: []ResponseModeType{ResponseModeQuery}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code"}, + State: "strong-state", + Request: Request{ + Client: &DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + ResponseModes: []ResponseModeType{ResponseModeQuery}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* determine correct response mode if default */ + { + desc: "success with response mode", + conf: &Fosite{Store: store, Config: &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy}}, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + ResponseModes: []ResponseModeType{ResponseModeFragment}, + }, nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + ResponseModes: []ResponseModeType{ResponseModeFragment}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + 
ctrl := gomock.NewController(t) + store = NewMockStorage(ctrl) + clientManager = NewMockClientManager(ctrl) + t.Cleanup(ctrl.Finish) + + c.mock() + if c.r == nil { + c.r = &http.Request{Header: http.Header{}} + if c.query != nil { + c.r.URL = &url.URL{RawQuery: c.query.Encode()} + } + } + + c.conf.Store = store + ar, err := c.conf.NewAuthorizeRequest(context.Background(), c.r) + if c.expectedError != nil { + assert.EqualError(t, err, c.expectedError.Error()) + // https://github.com/ory/hydra/issues/1642 + AssertObjectKeysEqual(t, &AuthorizeRequest{State: c.query.Get("state")}, ar, "State") + } else { + require.NoError(t, err) + AssertObjectKeysEqual(t, c.expect, ar, "ResponseTypes", "RequestedAudience", "RequestedScope", "Client", "RedirectURI", "State") + assert.NotNil(t, ar.GetRequestedAt()) + } + }) + } +} diff --git a/fosite/authorize_request_test.go b/fosite/authorize_request_test.go new file mode 100644 index 00000000000..5886435ccba --- /dev/null +++ b/fosite/authorize_request_test.go @@ -0,0 +1,114 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "net/url" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/stretchr/testify/assert" +) + +func TestAuthorizeRequestURLRegression(t *testing.T) { + require.Nil(t, NewAuthorizeRequest().RedirectURI) +} + +func TestAuthorizeRequest(t *testing.T) { + var urlparse = func(rawurl string) *url.URL { + u, _ := url.Parse(rawurl) + return u + } + + for k, c := range []struct { + ar *AuthorizeRequest + isRedirValid bool + }{ + { + ar: NewAuthorizeRequest(), + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + RedirectURI: urlparse("https://foobar"), + }, + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + RedirectURI: urlparse("https://foobar"), + Request: Request{ + Client: &DefaultClient{RedirectURIs: []string{""}}, + }, + }, + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + Request: Request{ + Client: &DefaultClient{RedirectURIs: []string{""}}, + }, + RedirectURI: urlparse(""), + }, + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + Request: Request{ + Client: &DefaultClient{RedirectURIs: []string{""}}, + }, + RedirectURI: urlparse(""), + }, + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + RedirectURI: urlparse("https://foobar.com#123"), + Request: Request{ + Client: &DefaultClient{RedirectURIs: []string{"https://foobar.com#123"}}, + }, + }, + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + Request: Request{ + Client: &DefaultClient{RedirectURIs: []string{"https://foobar.com"}}, + }, + RedirectURI: urlparse("https://foobar.com#123"), + }, + isRedirValid: false, + }, + { + ar: &AuthorizeRequest{ + Request: Request{ + Client: &DefaultClient{RedirectURIs: []string{"https://foobar.com/cb"}}, + RequestedAt: time.Now().UTC(), + RequestedScope: []string{"foo", "bar"}, + }, + RedirectURI: urlparse("https://foobar.com/cb"), + ResponseTypes: []string{"foo", "bar"}, + State: "foobar", + }, + isRedirValid: true, + }, + } { + assert.Equal(t, c.ar.Client, c.ar.GetClient(), "%d", k) + assert.Equal(t, c.ar.RedirectURI, c.ar.GetRedirectURI(), "%d", k) + assert.Equal(t, c.ar.RequestedAt, c.ar.GetRequestedAt(), "%d", k) + assert.Equal(t, c.ar.ResponseTypes, c.ar.GetResponseTypes(), "%d", k) + assert.Equal(t, c.ar.RequestedScope, c.ar.GetRequestedScopes(), "%d", k) + assert.Equal(t, c.ar.State, c.ar.GetState(), "%d", k) + assert.Equal(t, c.isRedirValid, c.ar.IsRedirectURIValid(), "%d", k) + + 
c.ar.GrantScope("foo") + c.ar.SetSession(&DefaultSession{}) + c.ar.SetRequestedScopes([]string{"foo"}) + assert.True(t, c.ar.GetGrantedScopes().Has("foo")) + assert.True(t, c.ar.GetRequestedScopes().Has("foo")) + assert.Equal(t, &DefaultSession{}, c.ar.GetSession()) + } +} diff --git a/fosite/authorize_response.go b/fosite/authorize_response.go new file mode 100644 index 00000000000..fc84ffa411d --- /dev/null +++ b/fosite/authorize_response.go @@ -0,0 +1,46 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "net/http" + "net/url" +) + +// AuthorizeResponse is an implementation of AuthorizeResponder +type AuthorizeResponse struct { + Header http.Header + Parameters url.Values + code string +} + +func NewAuthorizeResponse() *AuthorizeResponse { + return &AuthorizeResponse{ + Header: http.Header{}, + Parameters: url.Values{}, + } +} + +func (a *AuthorizeResponse) GetCode() string { + return a.code +} + +func (a *AuthorizeResponse) GetHeader() http.Header { + return a.Header +} + +func (a *AuthorizeResponse) AddHeader(key, value string) { + a.Header.Add(key, value) +} + +func (a *AuthorizeResponse) GetParameters() url.Values { + return a.Parameters +} + +func (a *AuthorizeResponse) AddParameter(key, value string) { + if key == "code" { + a.code = value + } + a.Parameters.Add(key, value) +} diff --git a/fosite/authorize_response_test.go b/fosite/authorize_response_test.go new file mode 100644 index 00000000000..fc47dba4377 --- /dev/null +++ b/fosite/authorize_response_test.go @@ -0,0 +1,25 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAuthorizeResponse(t *testing.T) { + ar := NewAuthorizeResponse() + ar.AddParameter("foo", "bar") + ar.AddParameter("bar", "bar") + + ar.AddHeader("foo", "foo") + + ar.AddParameter("code", "bar") + assert.Equal(t, "bar", ar.GetCode()) + + assert.Equal(t, "bar", ar.GetParameters().Get("foo")) + assert.Equal(t, "foo", ar.GetHeader().Get("foo")) + assert.Equal(t, "bar", ar.GetParameters().Get("bar")) +} diff --git a/fosite/authorize_response_writer.go b/fosite/authorize_response_writer.go new file mode 100644 index 00000000000..3ffd0a4e7a2 --- /dev/null +++ b/fosite/authorize_response_writer.go @@ -0,0 +1,45 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "net/url" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" +) + +func (f *Fosite) NewAuthorizeResponse(ctx context.Context, ar AuthorizeRequester, session Session) (_ AuthorizeResponder, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewAuthorizeResponse") + defer otelx.End(span, &err) + + var resp = &AuthorizeResponse{ + Header: http.Header{}, + Parameters: url.Values{}, + } + + ctx = context.WithValue(ctx, AuthorizeRequestContextKey, ar) + ctx = context.WithValue(ctx, AuthorizeResponseContextKey, resp) + + ar.SetSession(session) + for _, h := range f.Config.GetAuthorizeEndpointHandlers(ctx) { + if err := h.HandleAuthorizeEndpointRequest(ctx, ar, resp); err != nil { + return nil, err + } + } + + if !ar.DidHandleAllResponseTypes() { + return nil, errorsx.WithStack(ErrUnsupportedResponseType) + } + + if ar.GetDefaultResponseMode() == ResponseModeFragment && ar.GetResponseMode() 
== ResponseModeQuery { + return nil, ErrUnsupportedResponseMode.WithHintf("Insecure response_mode '%s' for the response_type '%s'.", ar.GetResponseMode(), ar.GetResponseTypes()) + } + + return resp, nil +} diff --git a/fosite/authorize_response_writer_test.go b/fosite/authorize_response_writer_test.go new file mode 100644 index 00000000000..e74c33a8118 --- /dev/null +++ b/fosite/authorize_response_writer_test.go @@ -0,0 +1,96 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + . "github.com/ory/hydra/v2/fosite" + . "github.com/ory/hydra/v2/fosite/internal" +) + +func TestNewAuthorizeResponse(t *testing.T) { + ctrl := gomock.NewController(t) + handlers := []*MockAuthorizeEndpointHandler{NewMockAuthorizeEndpointHandler(ctrl)} + ar := NewMockAuthorizeRequester(ctrl) + t.Cleanup(ctrl.Finish) + + ctx := context.Background() + oauth2 := &Fosite{Config: &Config{AuthorizeEndpointHandlers: AuthorizeEndpointHandlers{handlers[0]}}} + duo := &Fosite{Config: &Config{AuthorizeEndpointHandlers: AuthorizeEndpointHandlers{handlers[0], handlers[0]}}} + ar.EXPECT().SetSession(gomock.Eq(new(DefaultSession))).AnyTimes() + fooErr := errors.New("foo") + for k, c := range []struct { + isErr bool + mock func() + expectErr error + }{ + { + mock: func() { + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(fooErr) + }, + isErr: true, + expectErr: fooErr, + }, + { + mock: func() { + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + ar.EXPECT().DidHandleAllResponseTypes().Return(true) + ar.EXPECT().GetDefaultResponseMode().Return(ResponseModeFragment) + ar.EXPECT().GetResponseMode().Return(ResponseModeDefault) + }, + isErr: false, + }, + { + mock: func() { + oauth2 = duo + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + ar.EXPECT().DidHandleAllResponseTypes().Return(true) + ar.EXPECT().GetDefaultResponseMode().Return(ResponseModeFragment) + ar.EXPECT().GetResponseMode().Return(ResponseModeDefault) + }, + isErr: false, + }, + { + mock: func() { + oauth2 = duo + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(fooErr) + }, + isErr: true, + expectErr: fooErr, + }, + { + mock: func() { + oauth2 = duo + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + handlers[0].EXPECT().HandleAuthorizeEndpointRequest(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + ar.EXPECT().DidHandleAllResponseTypes().Return(true) + ar.EXPECT().GetDefaultResponseMode().Return(ResponseModeFragment) + ar.EXPECT().GetResponseMode().Return(ResponseModeQuery).Times(2) + ar.EXPECT().GetResponseTypes().Return([]string{"token", "code"}) + }, + isErr: true, + expectErr: ErrUnsupportedResponseMode.WithHintf("Insecure response_mode '%s' for the response_type '%s'.", ResponseModeQuery, fosite.Arguments{"token", "code"}), + }, + } { + c.mock() + responder, err := 
oauth2.NewAuthorizeResponse(ctx, ar, new(DefaultSession)) + assert.Equal(t, c.isErr, err != nil, "%d: %s", k, err) + if err != nil { + assert.Equal(t, c.expectErr, err, "%d: %s", k, err) + assert.Nil(t, responder, "%d", k) + } else { + assert.NotNil(t, responder, "%d", k) + } + t.Logf("Passed test case %d", k) + } +} diff --git a/fosite/authorize_validators_test.go b/fosite/authorize_validators_test.go new file mode 100644 index 00000000000..36705fb62f8 --- /dev/null +++ b/fosite/authorize_validators_test.go @@ -0,0 +1,84 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "fmt" + "net/http" + "net/url" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateResponseTypes(t *testing.T) { + f := &Fosite{Config: new(Config)} + for k, tc := range []struct { + rt string + art []string + expectErr bool + }{ + { + rt: "code", + art: []string{"token"}, + expectErr: true, + }, + { + rt: "token", + art: []string{"token"}, + }, + { + rt: "", + art: []string{"token"}, + expectErr: true, + }, + { + rt: " ", + art: []string{"token"}, + expectErr: true, + }, + { + rt: "disable", + art: []string{"token"}, + expectErr: true, + }, + { + rt: "code token", + art: []string{"token", "code"}, + expectErr: true, + }, + { + rt: "code token", + art: []string{"token", "token code"}, + }, + { + rt: "code token", + art: []string{"token", "code token"}, + }, + { + rt: "code token", + art: []string{"token", "code token id_token"}, + expectErr: true, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + r := &http.Request{Form: url.Values{"response_type": {tc.rt}}} + if tc.rt == "disable" { + r = &http.Request{Form: url.Values{}} + } + ar := NewAuthorizeRequest() + ar.Request.Client = &DefaultClient{ResponseTypes: tc.art} + + err := f.validateResponseTypes(r, ar) + if tc.expectErr { + require.Error(t, err) + } else { + require.NoError(t, err) + assert.EqualValues(t, RemoveEmpty(strings.Split(tc.rt, " ")), ar.GetResponseTypes()) + } + }) + } +} diff --git a/fosite/authorize_write.go b/fosite/authorize_write.go new file mode 100644 index 00000000000..f254a835822 --- /dev/null +++ b/fosite/authorize_write.go @@ -0,0 +1,67 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" +) + +func (f *Fosite) WriteAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, resp AuthorizeResponder) { + // Set custom headers, e.g. "X-MySuperCoolCustomHeader" or "X-DONT-CACHE-ME"... + wh := rw.Header() + rh := resp.GetHeader() + for k := range rh { + wh.Set(k, rh.Get(k)) + } + + wh.Set("Cache-Control", "no-store") + wh.Set("Pragma", "no-cache") + + redir := ar.GetRedirectURI() + switch rm := ar.GetResponseMode(); rm { + case ResponseModeFormPost: + //form_post + rw.Header().Add("Content-Type", "text/html;charset=UTF-8") + WriteAuthorizeFormPostResponse(redir.String(), resp.GetParameters(), GetPostFormHTMLTemplate(ctx, f), rw) + return + case ResponseModeQuery, ResponseModeDefault: + // Explicit grants + q := redir.Query() + rq := resp.GetParameters() + for k := range rq { + q.Set(k, rq.Get(k)) + } + redir.RawQuery = q.Encode() + sendRedirect(redir.String(), rw) + return + case ResponseModeFragment: + // Implicit grants + // The endpoint URI MUST NOT include a fragment component. 
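+		// Clearing any fragment registered on the redirect URI ensures the final
+		// fragment carries only the URL-encoded authorization response parameters.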
+ redir.Fragment = "" + + u := redir.String() + fr := resp.GetParameters() + if len(fr) > 0 { + u = u + "#" + fr.Encode() + } + sendRedirect(u, rw) + return + default: + if f.ResponseModeHandler(ctx).ResponseModes().Has(rm) { + f.ResponseModeHandler(ctx).WriteAuthorizeResponse(ctx, rw, ar, resp) + return + } + } +} + +// https://tools.ietf.org/html/rfc6749#section-4.1.1 +// When a decision is established, the authorization server directs the +// user-agent to the provided client redirection URI using an HTTP +// redirection response, or by other means available to it via the +// user-agent. +func sendRedirect(url string, rw http.ResponseWriter) { + rw.Header().Set("Location", url) + rw.WriteHeader(http.StatusSeeOther) +} diff --git a/fosite/authorize_write_test.go b/fosite/authorize_write_test.go new file mode 100644 index 00000000000..8ac3437c7d7 --- /dev/null +++ b/fosite/authorize_write_test.go @@ -0,0 +1,213 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "net/http" + "net/url" + "testing" + + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + . "github.com/ory/hydra/v2/fosite/internal" +) + +func TestWriteAuthorizeResponse(t *testing.T) { + oauth2 := &Fosite{Config: new(Config)} + header := http.Header{} + ctrl := gomock.NewController(t) + rw := NewMockResponseWriter(ctrl) + ar := NewMockAuthorizeRequester(ctrl) + resp := NewMockAuthorizeResponder(ctrl) + t.Cleanup(ctrl.Finish) + + for k, c := range []struct { + setup func() + expect func() + }{ + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeDefault) + resp.EXPECT().GetParameters().Return(url.Values{}) + resp.EXPECT().GetHeader().Return(http.Header{}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + assert.Equal(t, http.Header{ + "Location": []string{"https://foobar.com/?foo=bar"}, + "Cache-Control": []string{"no-store"}, + "Pragma": []string{"no-cache"}, + }, header) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeFragment) + resp.EXPECT().GetParameters().Return(url.Values{"bar": {"baz"}}) + resp.EXPECT().GetHeader().Return(http.Header{}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + assert.Equal(t, http.Header{ + "Location": []string{"https://foobar.com/?foo=bar#bar=baz"}, + "Cache-Control": []string{"no-store"}, + "Pragma": []string{"no-cache"}, + }, header) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeQuery) + resp.EXPECT().GetParameters().Return(url.Values{"bar": {"baz"}}) + resp.EXPECT().GetHeader().Return(http.Header{}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + expectedUrl, _ := url.Parse("https://foobar.com/?foo=bar&bar=baz") + actualUrl, err := url.Parse(header.Get("Location")) + assert.Nil(t, err) + assert.Equal(t, expectedUrl.Query(), actualUrl.Query()) + assert.Equal(t, "no-cache", header.Get("Pragma")) + assert.Equal(t, "no-store", 
header.Get("Cache-Control")) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeFragment) + resp.EXPECT().GetParameters().Return(url.Values{"bar": {"b+az ab"}}) + resp.EXPECT().GetHeader().Return(http.Header{"X-Bar": {"baz"}}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + assert.Equal(t, http.Header{ + "X-Bar": {"baz"}, + "Location": {"https://foobar.com/?foo=bar#bar=b%2Baz+ab"}, + "Cache-Control": []string{"no-store"}, + "Pragma": []string{"no-cache"}, + }, header) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeQuery) + resp.EXPECT().GetParameters().Return(url.Values{"bar": {"b+az"}, "scope": {"a b"}}) + resp.EXPECT().GetHeader().Return(http.Header{"X-Bar": {"baz"}}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + expectedUrl, err := url.Parse("https://foobar.com/?foo=bar&bar=b%2Baz&scope=a+b") + assert.Nil(t, err) + actualUrl, err := url.Parse(header.Get("Location")) + assert.Nil(t, err) + assert.Equal(t, expectedUrl.Query(), actualUrl.Query()) + assert.Equal(t, "no-cache", header.Get("Pragma")) + assert.Equal(t, "no-store", header.Get("Cache-Control")) + assert.Equal(t, "baz", header.Get("X-Bar")) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeFragment) + resp.EXPECT().GetParameters().Return(url.Values{"scope": {"api:*"}}) + resp.EXPECT().GetHeader().Return(http.Header{"X-Bar": {"baz"}}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + assert.Equal(t, http.Header{ + "X-Bar": {"baz"}, + "Location": {"https://foobar.com/?foo=bar#scope=api%3A%2A"}, + "Cache-Control": []string{"no-store"}, + "Pragma": []string{"no-cache"}, + }, header) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar#bar=baz") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeFragment) + resp.EXPECT().GetParameters().Return(url.Values{"qux": {"quux"}}) + resp.EXPECT().GetHeader().Return(http.Header{}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + assert.Equal(t, http.Header{ + "Location": {"https://foobar.com/?foo=bar#qux=quux"}, + "Cache-Control": []string{"no-store"}, + "Pragma": []string{"no-cache"}, + }, header) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeFragment) + resp.EXPECT().GetParameters().Return(url.Values{"state": {"{\"a\":\"b=c&d=e\"}"}}) + resp.EXPECT().GetHeader().Return(http.Header{}) + + rw.EXPECT().Header().Return(header).Times(2) + rw.EXPECT().WriteHeader(http.StatusSeeOther) + }, + expect: func() { + assert.Equal(t, http.Header{ + "Location": {"https://foobar.com/?foo=bar#state=%7B%22a%22%3A%22b%3Dc%26d%3De%22%7D"}, + "Cache-Control": []string{"no-store"}, + "Pragma": []string{"no-cache"}, + }, header) + }, + }, + { + setup: func() { + redir, _ := url.Parse("https://foobar.com/?foo=bar") + 
ar.EXPECT().GetRedirectURI().Return(redir) + ar.EXPECT().GetResponseMode().Return(ResponseModeFormPost) + resp.EXPECT().GetHeader().Return(http.Header{"X-Bar": {"baz"}}) + resp.EXPECT().GetParameters().Return(url.Values{"code": {"poz65kqoneu"}, "state": {"qm6dnsrn"}}) + + rw.EXPECT().Header().Return(header).AnyTimes() + rw.EXPECT().Write(gomock.Any()).AnyTimes() + }, + expect: func() { + assert.Equal(t, "text/html;charset=UTF-8", header.Get("Content-Type")) + }, + }, + } { + t.Logf("Starting test case %d", k) + c.setup() + oauth2.WriteAuthorizeResponse(context.Background(), rw, ar, resp) + c.expect() + header = http.Header{} + t.Logf("Passed test case %d", k) + } +} diff --git a/fosite/client.go b/fosite/client.go new file mode 100644 index 00000000000..b4014967141 --- /dev/null +++ b/fosite/client.go @@ -0,0 +1,190 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "github.com/go-jose/go-jose/v3" +) + +// Client represents a client or an app. +type Client interface { + // GetID returns the client ID. + GetID() string + + // GetHashedSecret returns the hashed secret as it is stored in the store. + GetHashedSecret() []byte + + // GetRedirectURIs returns the client's allowed redirect URIs. + GetRedirectURIs() []string + + // GetGrantTypes returns the client's allowed grant types. + GetGrantTypes() Arguments + + // GetResponseTypes returns the client's allowed response types. + // All allowed combinations of response types have to be listed, each combination having + // response types of the combination separated by a space. + GetResponseTypes() Arguments + + // GetScopes returns the scopes this client is allowed to request. + GetScopes() Arguments + + // IsPublic returns true, if this client is marked as public. + IsPublic() bool + + // GetAudience returns the allowed audience(s) for this client. + GetAudience() Arguments +} + +// ClientWithSecretRotation extends Client interface by a method providing a slice of rotated secrets. +type ClientWithSecretRotation interface { + Client + // GetRotatedHashes returns a slice of hashed secrets used for secrets rotation. + GetRotatedHashes() [][]byte +} + +// OpenIDConnectClient represents a client capable of performing OpenID Connect requests. +type OpenIDConnectClient interface { + // GetRequestURIs is an array of request_uri values that are pre-registered by the RP for use at the OP. Servers MAY + // cache the contents of the files referenced by these URIs and not retrieve them at the time they are used in a request. + // OPs can require that request_uri values used be pre-registered with the require_request_uri_registration + // discovery parameter. + GetRequestURIs() []string + + // GetJSONWebKeys returns the JSON Web Key Set containing the public key used by the client to authenticate. + GetJSONWebKeys() *jose.JSONWebKeySet + + // GetJSONWebKeys returns the URL for lookup of JSON Web Key Set containing the + // public key used by the client to authenticate. + GetJSONWebKeysURI() string + + // JWS [JWS] alg algorithm [JWA] that MUST be used for signing Request Objects sent to the OP. + // All Request Objects from this Client MUST be rejected, if not signed with this algorithm. + GetRequestObjectSigningAlgorithm() string + + // Requested Client Authentication method for the Token Endpoint. The options are client_secret_post, + // client_secret_basic, private_key_jwt, and none. 
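	// Note: the default client authentication strategy in this package supports private_key_jwt
	// assertions but rejects 'client_secret_jwt' as unsupported.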
+ GetTokenEndpointAuthMethod() string + + // JWS [JWS] alg algorithm [JWA] that MUST be used for signing the JWT [JWT] used to authenticate the + // Client at the Token Endpoint for the private_key_jwt authentication method. + GetTokenEndpointAuthSigningAlgorithm() string +} + +// ResponseModeClient represents a client capable of handling response_mode +type ResponseModeClient interface { + // GetResponseMode returns the response modes that client is allowed to send + GetResponseModes() []ResponseModeType +} + +// DefaultClient is a simple default implementation of the Client interface. +type DefaultClient struct { + ID string `json:"id"` + Secret []byte `json:"client_secret,omitempty"` + RotatedSecrets [][]byte `json:"rotated_secrets,omitempty"` + RedirectURIs []string `json:"redirect_uris"` + GrantTypes []string `json:"grant_types"` + ResponseTypes []string `json:"response_types"` + Scopes []string `json:"scopes"` + Audience []string `json:"audience"` + Public bool `json:"public"` +} + +type DefaultOpenIDConnectClient struct { + *DefaultClient + JSONWebKeysURI string `json:"jwks_uri"` + JSONWebKeys *jose.JSONWebKeySet `json:"jwks"` + TokenEndpointAuthMethod string `json:"token_endpoint_auth_method"` + RequestURIs []string `json:"request_uris"` + RequestObjectSigningAlgorithm string `json:"request_object_signing_alg"` + TokenEndpointAuthSigningAlgorithm string `json:"token_endpoint_auth_signing_alg"` +} + +type DefaultResponseModeClient struct { + *DefaultClient + ResponseModes []ResponseModeType `json:"response_modes"` +} + +func (c *DefaultClient) GetID() string { + return c.ID +} + +func (c *DefaultClient) IsPublic() bool { + return c.Public +} + +func (c *DefaultClient) GetAudience() Arguments { + return c.Audience +} + +func (c *DefaultClient) GetRedirectURIs() []string { + return c.RedirectURIs +} + +func (c *DefaultClient) GetHashedSecret() []byte { + return c.Secret +} + +func (c *DefaultClient) GetRotatedHashes() [][]byte { + return c.RotatedSecrets +} + +func (c *DefaultClient) GetScopes() Arguments { + return c.Scopes +} + +func (c *DefaultClient) GetGrantTypes() Arguments { + // https://openid.net/specs/openid-connect-registration-1_0.html#ClientMetadata + // + // JSON array containing a list of the OAuth 2.0 Grant Types that the Client is declaring + // that it will restrict itself to using. + // If omitted, the default is that the Client will use only the authorization_code Grant Type. + if len(c.GrantTypes) == 0 { + return Arguments{"authorization_code"} + } + return Arguments(c.GrantTypes) +} + +func (c *DefaultClient) GetResponseTypes() Arguments { + // https://openid.net/specs/openid-connect-registration-1_0.html#ClientMetadata + // + // JSON array containing a list of the OAuth 2.0 response_type values that the Client is declaring + // that it will restrict itself to using. If omitted, the default is that the Client will use + // only the code Response Type. 
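+	// Defaulting here guarantees callers always receive at least the "code" response type,
+	// even for clients registered without any.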
+ if len(c.ResponseTypes) == 0 { + return Arguments{"code"} + } + return Arguments(c.ResponseTypes) +} + +func (c *DefaultOpenIDConnectClient) GetJSONWebKeysURI() string { + return c.JSONWebKeysURI +} + +func (c *DefaultOpenIDConnectClient) GetJSONWebKeys() *jose.JSONWebKeySet { + return c.JSONWebKeys +} + +func (c *DefaultOpenIDConnectClient) GetTokenEndpointAuthSigningAlgorithm() string { + if c.TokenEndpointAuthSigningAlgorithm == "" { + return "RS256" + } else { + return c.TokenEndpointAuthSigningAlgorithm + } +} + +func (c *DefaultOpenIDConnectClient) GetRequestObjectSigningAlgorithm() string { + return c.RequestObjectSigningAlgorithm +} + +func (c *DefaultOpenIDConnectClient) GetTokenEndpointAuthMethod() string { + return c.TokenEndpointAuthMethod +} + +func (c *DefaultOpenIDConnectClient) GetRequestURIs() []string { + return c.RequestURIs +} + +func (c *DefaultResponseModeClient) GetResponseModes() []ResponseModeType { + return c.ResponseModes +} diff --git a/fosite/client_authentication.go b/fosite/client_authentication.go new file mode 100644 index 00000000000..a98cd659db2 --- /dev/null +++ b/fosite/client_authentication.go @@ -0,0 +1,326 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "crypto/ecdsa" + "crypto/rsa" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" + "time" + + "github.com/ory/x/errorsx" + + "github.com/go-jose/go-jose/v3" + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +// ClientAuthenticationStrategy provides a method signature for authenticating a client request +type ClientAuthenticationStrategy func(context.Context, *http.Request, url.Values) (Client, error) + +// #nosec:gosec G101 - False Positive +const clientAssertionJWTBearerType = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer" + +func (f *Fosite) findClientPublicJWK(ctx context.Context, oidcClient OpenIDConnectClient, t *jwt.Token, expectsRSAKey bool) (interface{}, error) { + if set := oidcClient.GetJSONWebKeys(); set != nil { + return findPublicKey(t, set, expectsRSAKey) + } + + if location := oidcClient.GetJSONWebKeysURI(); len(location) > 0 { + keys, err := f.Config.GetJWKSFetcherStrategy(ctx).Resolve(ctx, location, false) + if err != nil { + return nil, err + } + + if key, err := findPublicKey(t, keys, expectsRSAKey); err == nil { + return key, nil + } + + keys, err = f.Config.GetJWKSFetcherStrategy(ctx).Resolve(ctx, location, true) + if err != nil { + return nil, err + } + + return findPublicKey(t, keys, expectsRSAKey) + } + + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("The OAuth 2.0 Client has no JSON Web Keys set registered, but they are needed to complete the request.")) +} + +// AuthenticateClient authenticates client requests using the configured strategy +// `Fosite.ClientAuthenticationStrategy`, if nil it uses `Fosite.DefaultClientAuthenticationStrategy` +func (f *Fosite) AuthenticateClient(ctx context.Context, r *http.Request, form url.Values) (Client, error) { + if s := f.Config.GetClientAuthenticationStrategy(ctx); s != nil { + return s(ctx, r, form) + } + return f.DefaultClientAuthenticationStrategy(ctx, r, form) +} + +// DefaultClientAuthenticationStrategy provides the fosite's default client authentication strategy, +// HTTP Basic Authentication and JWT Bearer +func (f *Fosite) DefaultClientAuthenticationStrategy(ctx context.Context, r *http.Request, form url.Values) (Client, error) { + if 
assertionType := form.Get("client_assertion_type"); assertionType == clientAssertionJWTBearerType { + assertion := form.Get("client_assertion") + if len(assertion) == 0 { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHintf("The client_assertion request parameter must be set when using client_assertion_type of '%s'.", clientAssertionJWTBearerType)) + } + + var clientID string + var client Client + + token, err := jwt.ParseWithClaims(assertion, jwt.MapClaims{}, func(t *jwt.Token) (interface{}, error) { + var err error + clientID, _, err = clientCredentialsFromRequestBody(form, false) + if err != nil { + return nil, err + } + + if clientID == "" { + claims := t.Claims + if sub, ok := claims["sub"].(string); !ok { + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("The claim 'sub' from the client_assertion JSON Web Token is undefined.")) + } else { + clientID = sub + } + } + + client, err = f.Store.FositeClientManager().GetClient(ctx, clientID) + if err != nil { + return nil, errorsx.WithStack(ErrInvalidClient.WithWrap(err).WithDebug(err.Error())) + } + + oidcClient, ok := client.(OpenIDConnectClient) + if !ok { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHint("The server configuration does not support OpenID Connect specific authentication methods.")) + } + + switch oidcClient.GetTokenEndpointAuthMethod() { + case "private_key_jwt": + break + case "none": + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("This requested OAuth 2.0 client does not support client authentication, however 'client_assertion' was provided in the request.")) + case "client_secret_post": + fallthrough + case "client_secret_basic": + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("This requested OAuth 2.0 client only supports client authentication method '%s', however 'client_assertion' was provided in the request.", oidcClient.GetTokenEndpointAuthMethod())) + case "client_secret_jwt": + fallthrough + default: + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("This requested OAuth 2.0 client only supports client authentication method '%s', however that method is not supported by this server.", oidcClient.GetTokenEndpointAuthMethod())) + } + + if oidcClient.GetTokenEndpointAuthSigningAlgorithm() != fmt.Sprintf("%s", t.Header["alg"]) { + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("The 'client_assertion' uses signing algorithm '%s' but the requested OAuth 2.0 Client enforces signing algorithm '%s'.", t.Header["alg"], oidcClient.GetTokenEndpointAuthSigningAlgorithm())) + } + switch t.Method { + case jose.RS256, jose.RS384, jose.RS512: + return f.findClientPublicJWK(ctx, oidcClient, t, true) + case jose.ES256, jose.ES384, jose.ES512: + return f.findClientPublicJWK(ctx, oidcClient, t, false) + case jose.PS256, jose.PS384, jose.PS512: + return f.findClientPublicJWK(ctx, oidcClient, t, true) + case jose.HS256, jose.HS384, jose.HS512: + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("This authorization server does not support client authentication method 'client_secret_jwt'.")) + default: + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("The 'client_assertion' request parameter uses unsupported signing algorithm '%s'.", t.Header["alg"])) + } + }) + if err != nil { + // Do not re-process already enhanced errors + var e *jwt.ValidationError + if errors.As(err, &e) { + if e.Inner != nil { + return nil, e.Inner + } + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("Unable to verify the integrity of the 'client_assertion' 
value.").WithWrap(err).WithDebug(err.Error())) + } + return nil, err + } else if err := token.Claims.Valid(); err != nil { + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("Unable to verify the request object because its claims could not be validated, check if the expiry time is set correctly.").WithWrap(err).WithDebug(err.Error())) + } + + claims := token.Claims + var jti string + if !claims.VerifyIssuer(clientID, true) { + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("Claim 'iss' from 'client_assertion' must match the 'client_id' of the OAuth 2.0 Client.")) + } else if len(f.Config.GetTokenURLs(ctx)) == 0 { + return nil, errorsx.WithStack(ErrMisconfiguration.WithHint("The authorization server's token endpoint URL has not been set.")) + } else if sub, ok := claims["sub"].(string); !ok || sub != clientID { + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("Claim 'sub' from 'client_assertion' must match the 'client_id' of the OAuth 2.0 Client.")) + } else if jti, ok = claims["jti"].(string); !ok || len(jti) == 0 { + return nil, errorsx.WithStack(ErrInvalidClient.WithHint("Claim 'jti' from 'client_assertion' must be set but is not.")) + } else if f.Store.FositeClientManager().ClientAssertionJWTValid(ctx, jti) != nil { + return nil, errorsx.WithStack(ErrJTIKnown.WithHint("Claim 'jti' from 'client_assertion' MUST only be used once.")) + } + + // type conversion according to jwt.MapClaims.VerifyExpiresAt + var expiry int64 + err = nil + switch exp := claims["exp"].(type) { + case float64: + expiry = int64(exp) + case int64: + expiry = exp + case json.Number: + expiry, err = exp.Int64() + default: + err = ErrInvalidClient.WithHint("Unable to type assert the expiry time from claims. This should not happen as we validate the expiry time already earlier with token.Claims.Valid()") + } + + if err != nil { + return nil, errorsx.WithStack(err) + } + if err := f.Store.FositeClientManager().SetClientAssertionJWT(ctx, jti, time.Unix(expiry, 0)); err != nil { + return nil, err + } + + if !audienceMatchesTokenURLs(claims, f.Config.GetTokenURLs(ctx)) { + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf( + "Claim 'audience' from 'client_assertion' must match the authorization server's token endpoint '%s'.", + strings.Join(f.Config.GetTokenURLs(ctx), "' or '"))) + } + + return client, nil + } else if len(assertionType) > 0 { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHintf("Unknown client_assertion_type '%s'.", assertionType)) + } + + clientID, clientSecret, err := clientCredentialsFromRequest(r, form) + if err != nil { + return nil, err + } + + client, err := f.Store.FositeClientManager().GetClient(ctx, clientID) + if err != nil { + return nil, errorsx.WithStack(ErrInvalidClient.WithWrap(err).WithDebug(err.Error())) + } + + if oidcClient, ok := client.(OpenIDConnectClient); !ok { + // If this isn't an OpenID Connect client then we actually don't care about any of this, just continue! + } else if ok && form.Get("client_id") != "" && form.Get("client_secret") != "" && oidcClient.GetTokenEndpointAuthMethod() != "client_secret_post" { + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("The OAuth 2.0 Client supports client authentication method '%s', but method 'client_secret_post' was requested. 
You must configure the OAuth 2.0 client's 'token_endpoint_auth_method' value to accept 'client_secret_post'.", oidcClient.GetTokenEndpointAuthMethod())) + } else if _, secret, basicOk := r.BasicAuth(); basicOk && ok && secret != "" && oidcClient.GetTokenEndpointAuthMethod() != "client_secret_basic" { + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("The OAuth 2.0 Client supports client authentication method '%s', but method 'client_secret_basic' was requested. You must configure the OAuth 2.0 client's 'token_endpoint_auth_method' value to accept 'client_secret_basic'.", oidcClient.GetTokenEndpointAuthMethod())) + } else if ok && oidcClient.GetTokenEndpointAuthMethod() != "none" && client.IsPublic() { + return nil, errorsx.WithStack(ErrInvalidClient.WithHintf("The OAuth 2.0 Client supports client authentication method '%s', but method 'none' was requested. You must configure the OAuth 2.0 client's 'token_endpoint_auth_method' value to accept 'none'.", oidcClient.GetTokenEndpointAuthMethod())) + } + + if client.IsPublic() { + return client, nil + } + + // Enforce client authentication + if err := f.checkClientSecret(ctx, client, []byte(clientSecret)); err != nil { + return nil, errorsx.WithStack(ErrInvalidClient.WithWrap(err).WithDebug(err.Error())) + } + + return client, nil +} + +func audienceMatchesTokenURLs(claims jwt.MapClaims, tokenURLs []string) bool { + for _, tokenURL := range tokenURLs { + if audienceMatchesTokenURL(claims, tokenURL) { + return true + } + } + return false +} + +func audienceMatchesTokenURL(claims jwt.MapClaims, tokenURL string) bool { + if audiences, ok := claims["aud"].([]interface{}); ok { + for _, aud := range audiences { + if a, ok := aud.(string); ok && a == tokenURL { + return true + } + } + return false + } + return claims.VerifyAudience(tokenURL, true) +} + +func (f *Fosite) checkClientSecret(ctx context.Context, client Client, clientSecret []byte) error { + var err error + err = f.Config.GetSecretsHasher(ctx).Compare(ctx, client.GetHashedSecret(), clientSecret) + if err == nil { + return nil + } + cc, ok := client.(ClientWithSecretRotation) + if !ok { + return err + } + for _, hash := range cc.GetRotatedHashes() { + err = f.Config.GetSecretsHasher(ctx).Compare(ctx, hash, clientSecret) + if err == nil { + return nil + } + } + + return err +} + +func findPublicKey(t *jwt.Token, set *jose.JSONWebKeySet, expectsRSAKey bool) (interface{}, error) { + keys := set.Keys + if len(keys) == 0 { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHintf("The retrieved JSON Web Key Set does not contain any key.")) + } + + kid, ok := t.Header["kid"].(string) + if ok { + keys = set.Key(kid) + } + + if len(keys) == 0 { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHintf("The JSON Web Token uses signing key with kid '%s', which could not be found.", kid)) + } + + for _, key := range keys { + if key.Use != "sig" { + continue + } + if expectsRSAKey { + if k, ok := key.Key.(*rsa.PublicKey); ok { + return k, nil + } + } else { + if k, ok := key.Key.(*ecdsa.PublicKey); ok { + return k, nil + } + } + } + + if expectsRSAKey { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHintf("Unable to find RSA public key with use='sig' for kid '%s' in JSON Web Key Set.", kid)) + } else { + return nil, errorsx.WithStack(ErrInvalidRequest.WithHintf("Unable to find ECDSA public key with use='sig' for kid '%s' in JSON Web Key Set.", kid)) + } +} + +func clientCredentialsFromRequest(r *http.Request, form url.Values) (clientID, clientSecret string, err error) { + if id, 
secret, ok := r.BasicAuth(); !ok { + return clientCredentialsFromRequestBody(form, true) + } else if clientID, err = url.QueryUnescape(id); err != nil { + return "", "", errorsx.WithStack(ErrInvalidRequest.WithHint("The client id in the HTTP authorization header could not be decoded from 'application/x-www-form-urlencoded'.").WithWrap(err).WithDebug(err.Error())) + } else if clientSecret, err = url.QueryUnescape(secret); err != nil { + return "", "", errorsx.WithStack(ErrInvalidRequest.WithHint("The client secret in the HTTP authorization header could not be decoded from 'application/x-www-form-urlencoded'.").WithWrap(err).WithDebug(err.Error())) + } + + return clientID, clientSecret, nil +} + +func clientCredentialsFromRequestBody(form url.Values, forceID bool) (clientID, clientSecret string, err error) { + clientID = form.Get("client_id") + clientSecret = form.Get("client_secret") + + if clientID == "" && forceID { + return "", "", errorsx.WithStack(ErrInvalidRequest.WithHint("Client credentials missing or malformed in both HTTP Authorization header and HTTP POST body.")) + } + + return clientID, clientSecret, nil +} diff --git a/fosite/client_authentication_jwks_strategy.go b/fosite/client_authentication_jwks_strategy.go new file mode 100644 index 00000000000..5d1bd77c00a --- /dev/null +++ b/fosite/client_authentication_jwks_strategy.go @@ -0,0 +1,137 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "time" + + "github.com/dgraph-io/ristretto" + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/x/errorsx" + + "github.com/go-jose/go-jose/v3" +) + +const defaultJWKSFetcherStrategyCachePrefix = "github.com/ory/hydra/v2/fosite.DefaultJWKSFetcherStrategy:" + +// JWKSFetcherStrategy is a strategy which pulls (optionally caches) JSON Web Key Sets from a location, +// typically a client's jwks_uri. +type JWKSFetcherStrategy interface { + // Resolve returns the JSON Web Key Set, or an error if something went wrong. The forceRefresh, if true, forces + // the strategy to fetch the key from the remote. If forceRefresh is false, the strategy may use a caching strategy + // to fetch the key. + Resolve(ctx context.Context, location string, ignoreCache bool) (*jose.JSONWebKeySet, error) +} + +// DefaultJWKSFetcherStrategy is a default implementation of the JWKSFetcherStrategy interface. +type DefaultJWKSFetcherStrategy struct { + client *retryablehttp.Client + cache *ristretto.Cache[string, *jose.JSONWebKeySet] + ttl time.Duration + clientSourceFunc func(ctx context.Context) *retryablehttp.Client +} + +// NewDefaultJWKSFetcherStrategy returns a new instance of the DefaultJWKSFetcherStrategy. +func NewDefaultJWKSFetcherStrategy(opts ...func(*DefaultJWKSFetcherStrategy)) JWKSFetcherStrategy { + dc, err := ristretto.NewCache(&ristretto.Config[string, *jose.JSONWebKeySet]{ + NumCounters: 10000 * 10, + MaxCost: 10000, + BufferItems: 64, + Metrics: false, + Cost: func(value *jose.JSONWebKeySet) int64 { + return 1 + }, + }) + if err != nil { + panic(err) + } + + s := &DefaultJWKSFetcherStrategy{ + cache: dc, + client: retryablehttp.NewClient(), + ttl: time.Hour, + } + + for _, o := range opts { + o(s) + } + + return s +} + +// JKWKSFetcherWithDefaultTTL sets the default TTL for the cache. 
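+// For example, NewDefaultJWKSFetcherStrategy(JKWKSFetcherWithDefaultTTL(5*time.Minute)) caches each fetched key set for five minutes; once a cached entry expires, the next call to Resolve fetches the keys from the remote location again.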
+func JKWKSFetcherWithDefaultTTL(ttl time.Duration) func(*DefaultJWKSFetcherStrategy) { + return func(s *DefaultJWKSFetcherStrategy) { + s.ttl = ttl + } +} + +// JWKSFetcherWithCache sets the cache to use. +func JWKSFetcherWithCache(cache *ristretto.Cache[string, *jose.JSONWebKeySet]) func(*DefaultJWKSFetcherStrategy) { + return func(s *DefaultJWKSFetcherStrategy) { + s.cache = cache + } +} + +// JWKSFetcherWithHTTPClient sets the HTTP client to use. +func JWKSFetcherWithHTTPClient(client *retryablehttp.Client) func(*DefaultJWKSFetcherStrategy) { + return func(s *DefaultJWKSFetcherStrategy) { + s.client = client + } +} + +// JWKSFetcherWithHTTPClientSource sets the HTTP client source function to use. +func JWKSFetcherWithHTTPClientSource(clientSourceFunc func(ctx context.Context) *retryablehttp.Client) func(*DefaultJWKSFetcherStrategy) { + return func(s *DefaultJWKSFetcherStrategy) { + s.clientSourceFunc = clientSourceFunc + } +} + +// Resolve returns the JSON Web Key Set, or an error if something went wrong. The forceRefresh, if true, forces +// the strategy to fetch the key from the remote. If forceRefresh is false, the strategy may use a caching strategy +// to fetch the key. +func (s *DefaultJWKSFetcherStrategy) Resolve(ctx context.Context, location string, ignoreCache bool) (*jose.JSONWebKeySet, error) { + cacheKey := defaultJWKSFetcherStrategyCachePrefix + location + key, ok := s.cache.Get(cacheKey) + if !ok || ignoreCache { + req, err := retryablehttp.NewRequest("GET", location, nil) + if err != nil { + return nil, errorsx.WithStack(ErrServerError.WithHintf("Unable to create HTTP 'GET' request to fetch JSON Web Keys from location '%s'.", location).WithWrap(err).WithDebug(err.Error())) + } + + hc := s.client + if s.clientSourceFunc != nil { + hc = s.clientSourceFunc(ctx) + } + + response, err := hc.Do(req.WithContext(ctx)) + if err != nil { + return nil, errorsx.WithStack(ErrServerError.WithHintf("Unable to fetch JSON Web Keys from location '%s'. Check for typos or other network issues.", location).WithWrap(err).WithDebug(err.Error())) + } + defer func() { + _ = response.Body.Close() + }() + + if response.StatusCode < 200 || response.StatusCode >= 400 { + return nil, errorsx.WithStack(ErrServerError.WithHintf("Expected successful status code in range of 200 - 399 from location '%s' but received code %d.", location, response.StatusCode)) + } + + var set jose.JSONWebKeySet + if err := json.NewDecoder(response.Body).Decode(&set); err != nil { + return nil, errorsx.WithStack(ErrServerError.WithHintf("Unable to decode JSON Web Keys from location '%s'. 
Please check for typos and if the URL returns valid JSON.", location).WithWrap(err).WithDebug(err.Error())) + } + + _ = s.cache.SetWithTTL(cacheKey, &set, 1, s.ttl) + return &set, nil + } + + return key, nil +} + +func (s *DefaultJWKSFetcherStrategy) WaitForCache() { + s.cache.Wait() +} diff --git a/fosite/client_authentication_jwks_strategy_test.go b/fosite/client_authentication_jwks_strategy_test.go new file mode 100644 index 00000000000..32e294e9bb3 --- /dev/null +++ b/fosite/client_authentication_jwks_strategy_test.go @@ -0,0 +1,178 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/dgraph-io/ristretto" + "github.com/hashicorp/go-retryablehttp" + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/go-jose/go-jose/v3" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func initServerWithKey(t *testing.T) *httptest.Server { + var set *jose.JSONWebKeySet + var h http.HandlerFunc = func(w http.ResponseWriter, r *http.Request) { + require.NoError(t, json.NewEncoder(w).Encode(set)) + } + ts := httptest.NewServer(h) + + set = &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "bar", + Use: "sig", + Key: &gen.MustRSAKey().PublicKey, + }, + }, + } + + t.Cleanup(ts.Close) + return ts +} + +var errRoundTrip = errors.New("roundtrip error") + +type failingTripper struct{} + +func (r *failingTripper) RoundTrip(*http.Request) (*http.Response, error) { + return nil, errRoundTrip +} + +func TestDefaultJWKSFetcherStrategy(t *testing.T) { + ctx := context.Background() + var h http.HandlerFunc + + s := NewDefaultJWKSFetcherStrategy() + t.Run("case=fetching", func(t *testing.T) { + var set *jose.JSONWebKeySet + h = func(w http.ResponseWriter, r *http.Request) { + require.NoError(t, json.NewEncoder(w).Encode(set)) + } + ts := httptest.NewServer(h) + defer ts.Close() + + set = &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "foo", + Use: "sig", + Key: &gen.MustRSAKey().PublicKey, + }, + }, + } + + keys, err := s.Resolve(ctx, ts.URL, false) + require.NoError(t, err) + assert.True(t, len(keys.Key("foo")) == 1) + + set = &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "bar", + Use: "sig", + Key: &gen.MustRSAKey().PublicKey, + }, + }, + } + + keys, err = s.Resolve(ctx, ts.URL, false) + require.NoError(t, err) + assert.Len(t, keys.Keys, 1, "%+v", keys) + assert.True(t, len(keys.Key("foo")) == 1) + assert.True(t, len(keys.Key("bar")) == 0) + + keys, err = s.Resolve(ctx, ts.URL, true) + require.NoError(t, err) + assert.True(t, len(keys.Key("foo")) == 0) + assert.True(t, len(keys.Key("bar")) == 1) + }) + + t.Run("JWKSFetcherWithCache", func(t *testing.T) { + ts := initServerWithKey(t) + + cache, _ := ristretto.NewCache(&ristretto.Config[string, *jose.JSONWebKeySet]{NumCounters: 10 * 1000, MaxCost: 1000, BufferItems: 64}) + location := ts.URL + expected := &jose.JSONWebKeySet{} + require.True(t, cache.Set(defaultJWKSFetcherStrategyCachePrefix+location, expected, 1)) + cache.Wait() + + s := NewDefaultJWKSFetcherStrategy(JWKSFetcherWithCache(cache)) + actual, err := s.Resolve(ctx, location, false) + require.NoError(t, err) + assert.Equal(t, expected, actual) + }) + + t.Run("JWKSFetcherWithTTL", func(t *testing.T) { + ts := initServerWithKey(t) + + s := 
NewDefaultJWKSFetcherStrategy(JKWKSFetcherWithDefaultTTL(time.Nanosecond)) + _, err := s.Resolve(ctx, ts.URL, false) + require.NoError(t, err) + s.(*DefaultJWKSFetcherStrategy).cache.Wait() + + _, ok := s.(*DefaultJWKSFetcherStrategy).cache.Get(defaultJWKSFetcherStrategyCachePrefix + ts.URL) + assert.Falsef(t, ok, "expected cache to be empty") + }) + + t.Run("JWKSFetcherWithHTTPClient", func(t *testing.T) { + rt := retryablehttp.NewClient() + rt.RetryMax = 0 + rt.HTTPClient = &http.Client{Transport: new(failingTripper)} + s := NewDefaultJWKSFetcherStrategy(JWKSFetcherWithHTTPClient(rt)) + _, err := s.Resolve(ctx, "https://google.com", false) + require.ErrorIs(t, err, errRoundTrip) + }) + + t.Run("JWKSFetcherWithHTTPClientSource", func(t *testing.T) { + rt := retryablehttp.NewClient() + rt.RetryMax = 0 + rt.HTTPClient = &http.Client{Transport: new(failingTripper)} + s := NewDefaultJWKSFetcherStrategy( + JWKSFetcherWithHTTPClient(retryablehttp.NewClient()), + JWKSFetcherWithHTTPClientSource(func(ctx context.Context) *retryablehttp.Client { + return rt + })) + _, err := s.Resolve(ctx, "https://www.google.com", false) + require.ErrorIs(t, err, errRoundTrip) + }) + + t.Run("case=error_network", func(t *testing.T) { + s := NewDefaultJWKSFetcherStrategy() + h = func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(400) + } + ts := httptest.NewServer(h) + defer ts.Close() + + _, err := s.Resolve(context.Background(), ts.URL, true) + require.Error(t, err) + + _, err = s.Resolve(context.Background(), "$%/19", true) + require.Error(t, err) + }) + + t.Run("case=error_encoding", func(t *testing.T) { + s := NewDefaultJWKSFetcherStrategy() + h = func(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("[]")) + } + ts := httptest.NewServer(h) + defer ts.Close() + + _, err := s.Resolve(context.Background(), ts.URL, true) + require.Error(t, err) + }) +} diff --git a/fosite/client_authentication_test.go b/fosite/client_authentication_test.go new file mode 100644 index 00000000000..6a3b8edf9e4 --- /dev/null +++ b/fosite/client_authentication_test.go @@ -0,0 +1,597 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "crypto/ecdsa" + "crypto/rsa" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "testing" + "time" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/go-jose/go-jose/v3" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite/token/jwt" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/storage" +) + +func mustGenerateRSAAssertion(t *testing.T, claims jwt.MapClaims, key *rsa.PrivateKey, kid string) string { + token := jwt.NewWithClaims(jose.RS256, claims) + token.Header["kid"] = kid + tokenString, err := token.SignedString(key) + require.NoError(t, err) + return tokenString +} + +func mustGenerateECDSAAssertion(t *testing.T, claims jwt.MapClaims, key *ecdsa.PrivateKey, kid string) string { + token := jwt.NewWithClaims(jose.ES256, claims) + token.Header["kid"] = kid + tokenString, err := token.SignedString(key) + require.NoError(t, err) + return tokenString +} + +func mustGenerateHSAssertion(t *testing.T, claims jwt.MapClaims, key *rsa.PrivateKey, kid string) string { + token := jwt.NewWithClaims(jose.HS256, claims) + tokenString, err := token.SignedString([]byte("aaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbcccccccccccccccccccccddddddddddddddddddddddd")) + require.NoError(t, err) + return tokenString +} + +func mustGenerateNoneAssertion(t *testing.T, claims jwt.MapClaims, key *rsa.PrivateKey, kid string) string { + token := jwt.NewWithClaims(jwt.SigningMethodNone, claims) + tokenString, err := token.SignedString(jwt.UnsafeAllowNoneSignatureType) + require.NoError(t, err) + return tokenString +} + +// returns an http basic authorization header, encoded using application/x-www-form-urlencoded +func clientBasicAuthHeader(clientID, clientSecret string) http.Header { + creds := url.QueryEscape(clientID) + ":" + url.QueryEscape(clientSecret) + return http.Header{ + "Authorization": { + "Basic " + base64.StdEncoding.EncodeToString([]byte(creds)), + }, + } +} + +func TestAuthenticateClient(t *testing.T) { + const at = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer" + + hasher := &BCrypt{Config: &Config{HashCost: 6}} + f := &Fosite{ + Store: storage.NewMemoryStore(), + Config: &Config{ + JWKSFetcherStrategy: NewDefaultJWKSFetcherStrategy(), + ClientSecretsHasher: hasher, + TokenURL: "token-url", + HTTPClient: retryablehttp.NewClient(), + }, + } + + barSecret, err := hasher.Hash(context.TODO(), []byte("bar")) + require.NoError(t, err) + + // a secret containing various special characters + complexSecretRaw := "foo %66%6F%6F@$<§!✓" + complexSecret, err := hasher.Hash(context.TODO(), []byte(complexSecretRaw)) + require.NoError(t, err) + + rsaKey := gen.MustRSAKey() + rsaJwks := &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "kid-foo", + Use: "sig", + Key: &rsaKey.PublicKey, + }, + }, + } + + ecdsaKey := gen.MustES256Key() + ecdsaJwks := &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "kid-foo", + Use: "sig", + Key: &ecdsaKey.PublicKey, + }, + }, + } + + var h http.HandlerFunc = func(w http.ResponseWriter, r *http.Request) { + require.NoError(t, json.NewEncoder(w).Encode(rsaJwks)) + } + ts := httptest.NewServer(h) + defer ts.Close() + + for k, tc := range []struct { + d string + client *DefaultOpenIDConnectClient + assertionType string + assertion string + r *http.Request + form url.Values + expectErr error + }{ + { + d: "should fail because authentication can not be determined", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo"}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: new(http.Request), + expectErr: ErrInvalidRequest, + }, + { + d: "should fail because client does not exist", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Public: true}, TokenEndpointAuthMethod: 
"none"}, + form: url.Values{"client_id": []string{"bar"}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should pass because client is public and authentication requirements are met", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Public: true}, TokenEndpointAuthMethod: "none"}, + form: url.Values{"client_id": []string{"foo"}}, + r: new(http.Request), + }, + { + d: "should pass because client is public and client secret is empty in query param", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Public: true}, TokenEndpointAuthMethod: "none"}, + form: url.Values{"client_id": []string{"foo"}, "client_secret": []string{""}}, + r: new(http.Request), + }, + { + d: "should pass because client is public and client secret is empty in basic auth header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Public: true}, TokenEndpointAuthMethod: "none"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "")}, + }, + { + d: "should fail because client requires basic auth and client secret is empty in basic auth header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Public: true}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "")}, + expectErr: ErrInvalidClient, + }, + { + d: "should pass with client credentials containing special characters", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "!foo%20bar", Secret: complexSecret}, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{"client_id": []string{"!foo%20bar"}, "client_secret": []string{complexSecretRaw}}, + r: new(http.Request), + }, + { + d: "should pass with client credentials containing special characters via basic auth", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo — bar! +<&>*", Secret: complexSecret}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo — bar! 
+<&>*", complexSecretRaw)}, + }, + { + d: "should fail because auth method is not none", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Public: true}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{"client_id": []string{"foo"}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should pass because client is confidential and id and secret match in post body", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: []byte("invalid_hash"), RotatedSecrets: [][]byte{barSecret}}, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{"client_id": []string{"foo"}, "client_secret": []string{"bar"}}, + r: new(http.Request), + }, + { + d: "should pass because client is confidential and id and rotated secret match in post body", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{"client_id": []string{"foo"}, "client_secret": []string{"bar"}}, + r: new(http.Request), + }, + { + d: "should fail because client is confidential and secret does not match in post body", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{"client_id": []string{"foo"}, "client_secret": []string{"baz"}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client is confidential and id does not exist in post body", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{"client_id": []string{"foo"}, "client_secret": []string{"bar"}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should pass because client is confidential and id and secret match in header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "bar")}, + }, + { + d: "should pass because client is confidential and id and rotated secret match in header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: []byte("invalid_hash"), RotatedSecrets: [][]byte{barSecret}}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "bar")}, + }, + { + d: "should pass because client is confidential and id and rotated secret match in header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: []byte("invalid_hash"), RotatedSecrets: [][]byte{[]byte("invalid"), barSecret}}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "bar")}, + }, + { + d: "should fail because auth method is not client_secret_basic", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "bar")}, + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client is confidential and secret does not match in header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: 
"client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "baz")}, + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client is confidential and neither secret nor rotated does match in header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret, RotatedSecrets: [][]byte{barSecret}}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: clientBasicAuthHeader("foo", "baz")}, + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client id is not encoded using application/x-www-form-urlencoded", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: http.Header{"Authorization": {"Basic " + base64.StdEncoding.EncodeToString([]byte("%%%%%%:foo"))}}}, + expectErr: ErrInvalidRequest, + }, + { + d: "should fail because client secret is not encoded using application/x-www-form-urlencoded", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: http.Header{"Authorization": {"Basic " + base64.StdEncoding.EncodeToString([]byte("foo:%%%%%%%"))}}}, + expectErr: ErrInvalidRequest, + }, + { + d: "should fail because client is confidential and id does not exist in header", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{}, + r: &http.Request{Header: http.Header{"Authorization": {"Basic " + base64.StdEncoding.EncodeToString([]byte("foo:bar"))}}}, + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client_assertion but client_assertion is missing", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"foo"}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidRequest, + }, + { + d: "should fail because client_assertion_type is unknown", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "foo", Secret: barSecret}, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"foo"}, "client_assertion_type": []string{"foobar"}}, + r: new(http.Request), + expectErr: ErrInvalidRequest, + }, + { + d: "should pass with proper RSA assertion when JWKs are set within the client and client_id is not set in the request", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + }, + { + d: "should pass with proper ECDSA assertion when JWKs are set within the client and client_id is not set in the request", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: ecdsaJwks, TokenEndpointAuthMethod: "private_key_jwt", TokenEndpointAuthSigningAlgorithm: "ES256"}, + form: url.Values{"client_assertion": 
{mustGenerateECDSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, ecdsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + }, + { + d: "should fail because RSA assertion is used, but ECDSA assertion is required", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: ecdsaJwks, TokenEndpointAuthMethod: "private_key_jwt", TokenEndpointAuthSigningAlgorithm: "ES256"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because token auth method is not private_key_jwt, but client_secret_jwt", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "client_secret_jwt"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because token auth method is not private_key_jwt, but none", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "none"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because token auth method is not private_key_jwt, but client_secret_post", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "client_secret_post"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because token auth method is not private_key_jwt, but client_secret_basic", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "client_secret_basic"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because token auth method is not private_key_jwt, but foobar", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "foobar"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", 
+ "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should pass with proper assertion when JWKs are set within the client and client_id is not set in the request (aud is array)", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": []string{"token-url-2", "token-url"}, + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + }, + { + d: "should fail because audience (array) does not match token url", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": []string{"token-url-1", "token-url-2"}, + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should pass with proper assertion when JWKs are set within the client", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + }, + { + d: "should fail because JWT algorithm is HS256", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateHSAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because JWT algorithm is none", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateNoneAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should pass with proper assertion when JWKs URI is set", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeysURI: ts.URL, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": 
time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + }, + { + d: "should fail because client_assertion sub does not match client", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "not-bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client_assertion iss does not match client", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "not-bar", + "jti": "12345", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client_assertion jti is not set", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "aud": "token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + { + d: "should fail because client_assertion aud is not set", + client: &DefaultOpenIDConnectClient{DefaultClient: &DefaultClient{ID: "bar", Secret: barSecret}, JSONWebKeys: rsaJwks, TokenEndpointAuthMethod: "private_key_jwt"}, + form: url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "not-token-url", + }, rsaKey, "kid-foo")}, "client_assertion_type": []string{at}}, + r: new(http.Request), + expectErr: ErrInvalidClient, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { + store := storage.NewMemoryStore() + store.Clients[tc.client.ID] = tc.client + f.Store = store + + c, err := f.AuthenticateClient(context.Background(), tc.r, tc.form) + if tc.expectErr != nil { + require.EqualError(t, err, tc.expectErr.Error()) + return + } + + if err != nil { + var validationError *jwt.ValidationError + var rfcError *RFC6749Error + if errors.As(err, &validationError) { + t.Logf("Error is: %s", validationError.Inner) + } else if errors.As(err, &rfcError) { + t.Logf("DebugField is: %s", rfcError.DebugField) + t.Logf("HintField is: %s", rfcError.HintField) + } + } + require.NoError(t, err) + assert.EqualValues(t, tc.client, c) + }) + } +} + +func TestAuthenticateClientTwice(t *testing.T) { + const at = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer" + + key := gen.MustRSAKey() + client := &DefaultOpenIDConnectClient{ + DefaultClient: &DefaultClient{ + ID: "bar", + Secret: []byte("secret"), + }, + 
JSONWebKeys: &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + KeyID: "kid-foo", + Use: "sig", + Key: &key.PublicKey, + }, + }, + }, + TokenEndpointAuthMethod: "private_key_jwt", + } + store := storage.NewMemoryStore() + store.Clients[client.ID] = client + + hasher := &BCrypt{&Config{HashCost: 6}} + f := &Fosite{ + Store: store, + Config: &Config{ + JWKSFetcherStrategy: NewDefaultJWKSFetcherStrategy(), + ClientSecretsHasher: hasher, + TokenURL: "token-url", + }, + } + + formValues := url.Values{"client_id": []string{"bar"}, "client_assertion": {mustGenerateRSAAssertion(t, jwt.MapClaims{ + "sub": "bar", + "exp": time.Now().Add(time.Hour).Unix(), + "iss": "bar", + "jti": "12345", + "aud": "token-url", + }, key, "kid-foo")}, "client_assertion_type": []string{at}} + + c, err := f.AuthenticateClient(context.Background(), new(http.Request), formValues) + require.NoError(t, err, "%#v", err) + assert.Equal(t, client, c) + + // replay the request and expect it to fail + c, err = f.AuthenticateClient(context.Background(), new(http.Request), formValues) + require.Error(t, err) + assert.EqualError(t, err, ErrJTIKnown.Error()) + assert.Nil(t, c) +} diff --git a/fosite/client_manager.go b/fosite/client_manager.go new file mode 100644 index 00000000000..4e9685dc045 --- /dev/null +++ b/fosite/client_manager.go @@ -0,0 +1,24 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "time" +) + +// ClientManager defines the (persistent) manager interface for clients. +type ClientManager interface { + // GetClient loads the client by its ID or returns an error + // if the client does not exist or another error occurred. + GetClient(ctx context.Context, id string) (Client, error) + // ClientAssertionJWTValid returns an error if the JTI is + // known or the DB check failed and nil if the JTI is not known. + ClientAssertionJWTValid(ctx context.Context, jti string) error + // SetClientAssertionJWT marks a JTI as known for the given + // expiry time. Before inserting the new JTI, it will clean + // up any existing JTIs that have expired as those tokens can + // not be replayed due to the expiry. 
+ SetClientAssertionJWT(ctx context.Context, jti string, exp time.Time) error +} diff --git a/fosite/client_test.go b/fosite/client_test.go new file mode 100644 index 00000000000..d1ce2e3be3c --- /dev/null +++ b/fosite/client_test.go @@ -0,0 +1,42 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDefaultClient(t *testing.T) { + sc := &DefaultClient{ + ID: "1", + Secret: []byte("foobar-"), + RotatedSecrets: [][]byte{[]byte("foobar-1"), []byte("foobar-2")}, + RedirectURIs: []string{"foo", "bar"}, + ResponseTypes: []string{"foo", "bar"}, + GrantTypes: []string{"foo", "bar"}, + Scopes: []string{"fooscope"}, + } + + assert.Equal(t, sc.ID, sc.GetID()) + assert.Equal(t, sc.RedirectURIs, sc.GetRedirectURIs()) + assert.Equal(t, sc.Secret, sc.GetHashedSecret()) + assert.Equal(t, sc.RotatedSecrets, sc.GetRotatedHashes()) + assert.EqualValues(t, sc.ResponseTypes, sc.GetResponseTypes()) + assert.EqualValues(t, sc.GrantTypes, sc.GetGrantTypes()) + assert.EqualValues(t, sc.Scopes, sc.GetScopes()) + + sc.GrantTypes = []string{} + sc.ResponseTypes = []string{} + assert.Equal(t, "code", sc.GetResponseTypes()[0]) + assert.Equal(t, "authorization_code", sc.GetGrantTypes()[0]) + + var _ ClientWithSecretRotation = sc +} + +func TestDefaultResponseModeClient_GetResponseMode(t *testing.T) { + rc := &DefaultResponseModeClient{ResponseModes: []ResponseModeType{ResponseModeFragment}} + assert.Equal(t, []ResponseModeType{ResponseModeFragment}, rc.GetResponseModes()) +} diff --git a/fosite/client_with_custom_token_lifespans.go b/fosite/client_with_custom_token_lifespans.go new file mode 100644 index 00000000000..b46e2ba5457 --- /dev/null +++ b/fosite/client_with_custom_token_lifespans.go @@ -0,0 +1,104 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "time" + +// GetEffectiveLifespan either maps GrantType x TokenType to the client's configured lifespan, or returns the fallback value. +func GetEffectiveLifespan(c Client, gt GrantType, tt TokenType, fallback time.Duration) time.Duration { + if clc, ok := c.(ClientWithCustomTokenLifespans); ok { + return clc.GetEffectiveLifespan(gt, tt, fallback) + } + return fallback +} + +type ClientWithCustomTokenLifespans interface { + // GetEffectiveLifespan either maps GrantType x TokenType to the client's configured lifespan, or returns the fallback value. + GetEffectiveLifespan(gt GrantType, tt TokenType, fallback time.Duration) time.Duration +} + +// ClientLifespanConfig holds default lifespan configuration for the different +// token types that may be issued for the client. This configuration takes +// precedence over fosite's instance-wide default lifespan, but it may be +// overridden by a session's expires_at claim. +// +// The OIDC Hybrid grant type inherits token lifespan configuration from the implicit grant. 
+type ClientLifespanConfig struct { + AuthorizationCodeGrantAccessTokenLifespan *time.Duration `json:"authorization_code_grant_access_token_lifespan"` + AuthorizationCodeGrantIDTokenLifespan *time.Duration `json:"authorization_code_grant_id_token_lifespan"` + AuthorizationCodeGrantRefreshTokenLifespan *time.Duration `json:"authorization_code_grant_refresh_token_lifespan"` + ClientCredentialsGrantAccessTokenLifespan *time.Duration `json:"client_credentials_grant_access_token_lifespan"` + ImplicitGrantAccessTokenLifespan *time.Duration `json:"implicit_grant_access_token_lifespan"` + ImplicitGrantIDTokenLifespan *time.Duration `json:"implicit_grant_id_token_lifespan"` + JwtBearerGrantAccessTokenLifespan *time.Duration `json:"jwt_bearer_grant_access_token_lifespan"` + PasswordGrantAccessTokenLifespan *time.Duration `json:"password_grant_access_token_lifespan"` + PasswordGrantRefreshTokenLifespan *time.Duration `json:"password_grant_refresh_token_lifespan"` + RefreshTokenGrantIDTokenLifespan *time.Duration `json:"refresh_token_grant_id_token_lifespan"` + RefreshTokenGrantAccessTokenLifespan *time.Duration `json:"refresh_token_grant_access_token_lifespan"` + RefreshTokenGrantRefreshTokenLifespan *time.Duration `json:"refresh_token_grant_refresh_token_lifespan"` + //Hybrid grant tokens are not independently configurable, see the comment above. +} + +type DefaultClientWithCustomTokenLifespans struct { + *DefaultClient + TokenLifespans *ClientLifespanConfig `json:"token_lifespans"` +} + +func (c *DefaultClientWithCustomTokenLifespans) GetTokenLifespans() *ClientLifespanConfig { + return c.TokenLifespans +} + +func (c *DefaultClientWithCustomTokenLifespans) SetTokenLifespans(lifespans *ClientLifespanConfig) { + c.TokenLifespans = lifespans +} + +// GetEffectiveLifespan either maps GrantType x TokenType to the client's configured lifespan, or returns the fallback value. 
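+// For example, when RefreshTokenGrantAccessTokenLifespan is configured, access tokens issued through the refresh_token grant use that lifespan; any grant/token combination left nil falls back to the provided default.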
+func (c *DefaultClientWithCustomTokenLifespans) GetEffectiveLifespan(gt GrantType, tt TokenType, fallback time.Duration) time.Duration { + if c.TokenLifespans == nil { + return fallback + } + var cl *time.Duration + if gt == GrantTypeAuthorizationCode { + if tt == AccessToken { + cl = c.TokenLifespans.AuthorizationCodeGrantAccessTokenLifespan + } else if tt == IDToken { + cl = c.TokenLifespans.AuthorizationCodeGrantIDTokenLifespan + } else if tt == RefreshToken { + cl = c.TokenLifespans.AuthorizationCodeGrantRefreshTokenLifespan + } + } else if gt == GrantTypeClientCredentials { + if tt == AccessToken { + cl = c.TokenLifespans.ClientCredentialsGrantAccessTokenLifespan + } + } else if gt == GrantTypeImplicit { + if tt == AccessToken { + cl = c.TokenLifespans.ImplicitGrantAccessTokenLifespan + } else if tt == IDToken { + cl = c.TokenLifespans.ImplicitGrantIDTokenLifespan + } + } else if gt == GrantTypeJWTBearer { + if tt == AccessToken { + cl = c.TokenLifespans.JwtBearerGrantAccessTokenLifespan + } + } else if gt == GrantTypePassword { + if tt == AccessToken { + cl = c.TokenLifespans.PasswordGrantAccessTokenLifespan + } else if tt == RefreshToken { + cl = c.TokenLifespans.PasswordGrantRefreshTokenLifespan + } + } else if gt == GrantTypeRefreshToken { + if tt == AccessToken { + cl = c.TokenLifespans.RefreshTokenGrantAccessTokenLifespan + } else if tt == IDToken { + cl = c.TokenLifespans.RefreshTokenGrantIDTokenLifespan + } else if tt == RefreshToken { + cl = c.TokenLifespans.RefreshTokenGrantRefreshTokenLifespan + } + } + + if cl == nil { + return fallback + } + return *cl +} diff --git a/fosite/client_with_custom_token_lifespans_test.go b/fosite/client_with_custom_token_lifespans_test.go new file mode 100644 index 00000000000..813645c8387 --- /dev/null +++ b/fosite/client_with_custom_token_lifespans_test.go @@ -0,0 +1,29 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDefaultClientWithCustomTokenLifespans(t *testing.T) { + clc := &DefaultClientWithCustomTokenLifespans{ + DefaultClient: &DefaultClient{}, + } + + assert.Equal(t, clc.GetTokenLifespans(), (*ClientLifespanConfig)(nil)) + + require.Equal(t, time.Minute*42, GetEffectiveLifespan(clc, GrantTypeImplicit, IDToken, time.Minute*42)) + + customLifespan := 36 * time.Hour + clc.SetTokenLifespans(&ClientLifespanConfig{ImplicitGrantIDTokenLifespan: &customLifespan}) + assert.NotEqual(t, clc.GetTokenLifespans(), nil) + + require.Equal(t, customLifespan, GetEffectiveLifespan(clc, GrantTypeImplicit, IDToken, time.Minute*42)) + var _ ClientWithCustomTokenLifespans = clc +} diff --git a/fosite/compose/compose.go b/fosite/compose/compose.go new file mode 100644 index 00000000000..b40b85af903 --- /dev/null +++ b/fosite/compose/compose.go @@ -0,0 +1,99 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type Factory func(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} + +// Compose takes a config, a storage, a strategy and handlers to instantiate an OAuth2Provider: +// +// import "github.com/ory/hydra/v2/fosite/compose" +// +// // var storage = new(MyFositeStorage) +// var config = Config { +// AccessTokenLifespan: 
time.Minute * 30, +// // check Config for further configuration options +// } +// +// var strategy = NewOAuth2HMACStrategy(config) +// +// var oauth2Provider = Compose( +// config, +// storage, +// strategy, +// NewOAuth2AuthorizeExplicitHandler, +// OAuth2ClientCredentialsGrantFactory, +// // for a complete list refer to the docs of this package +// ) +// +// Compose makes use of interface{} types in order to be able to handle all types of stores, strategies and handlers. +func Compose(config *fosite.Config, storage fosite.Storage, strategy interface{}, factories ...Factory) fosite.OAuth2Provider { + f := fosite.NewOAuth2Provider(storage, config) + for _, factory := range factories { + res := factory(config, storage, strategy) + if ah, ok := res.(fosite.AuthorizeEndpointHandler); ok { + config.AuthorizeEndpointHandlers.Append(ah) + } + if th, ok := res.(fosite.TokenEndpointHandler); ok { + config.TokenEndpointHandlers.Append(th) + } + if tv, ok := res.(fosite.TokenIntrospector); ok { + config.TokenIntrospectionHandlers.Append(tv) + } + if rh, ok := res.(fosite.RevocationHandler); ok { + config.RevocationHandlers.Append(rh) + } + if ph, ok := res.(fosite.PushedAuthorizeEndpointHandler); ok { + config.PushedAuthorizeEndpointHandlers.Append(ph) + } + if dh, ok := res.(fosite.DeviceEndpointHandler); ok { + config.DeviceEndpointHandlers.Append(dh) + } + } + + return f +} + +// ComposeAllEnabled returns a fosite instance with all OAuth2 and OpenID Connect handlers enabled. +func ComposeAllEnabled(config *fosite.Config, storage fosite.Storage, key interface{}) fosite.OAuth2Provider { + keyGetter := func(context.Context) (interface{}, error) { + return key, nil + } + return Compose( + config, + storage, + &CommonStrategyProvider{ + CoreStrategy: NewOAuth2HMACStrategy(config), + DeviceStrategy: NewDeviceStrategy(config), + OIDCTokenStrategy: NewOpenIDConnectStrategy(keyGetter, config), + Signer: &jwt.DefaultSigner{GetPrivateKey: keyGetter}, + }, + OAuth2AuthorizeExplicitFactory, + OAuth2AuthorizeImplicitFactory, + OAuth2ClientCredentialsGrantFactory, + OAuth2RefreshTokenGrantFactory, + OAuth2ResourceOwnerPasswordCredentialsFactory, + RFC7523AssertionGrantFactory, + RFC8628DeviceFactory, + RFC8628DeviceAuthorizationTokenFactory, + + OpenIDConnectExplicitFactory, + OpenIDConnectImplicitFactory, + OpenIDConnectHybridFactory, + OpenIDConnectRefreshFactory, + OpenIDConnectDeviceFactory, + + OAuth2TokenIntrospectionFactory, + OAuth2TokenRevocationFactory, + + OAuth2PKCEFactory, + PushedAuthorizeHandlerFactory, + ) +} diff --git a/fosite/compose/compose_oauth2.go b/fosite/compose/compose_oauth2.go new file mode 100644 index 00000000000..29023d42407 --- /dev/null +++ b/fosite/compose/compose_oauth2.go @@ -0,0 +1,132 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +// OAuth2AuthorizeExplicitFactory creates an OAuth2 authorize code grant ("authorize explicit flow") handler and registers +// an access token, refresh token and authorize code validator. 
+func OAuth2AuthorizeExplicitFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.AuthorizeExplicitGrantHandler{ + Strategy: strategy.(interface { + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + oauth2.AuthorizeCodeStrategyProvider + }), + Storage: storage.(interface { + oauth2.AuthorizeCodeStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + oauth2.TokenRevocationStorageProvider + }), + Config: config, + } +} + +// OAuth2ClientCredentialsGrantFactory creates an OAuth2 client credentials grant handler and registers +// an access token, refresh token and authorize code validator. +func OAuth2ClientCredentialsGrantFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.ClientCredentialsGrantHandler{ + Strategy: strategy.(oauth2.AccessTokenStrategyProvider), + Storage: storage.(oauth2.AccessTokenStorageProvider), + Config: config, + } +} + +// OAuth2RefreshTokenGrantFactory creates an OAuth2 refresh grant handler and registers +// an access token, refresh token and authorize code validator. +func OAuth2RefreshTokenGrantFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.RefreshTokenGrantHandler{ + Strategy: strategy.(interface { + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + }), + Storage: storage.(interface { + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + oauth2.TokenRevocationStorageProvider + }), + Config: config, + } +} + +// OAuth2AuthorizeImplicitFactory creates an OAuth2 implicit grant ("authorize implicit flow") handler and registers +// an access token, refresh token and authorize code validator. +func OAuth2AuthorizeImplicitFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.AuthorizeImplicitGrantHandler{ + Strategy: strategy.(oauth2.AccessTokenStrategyProvider), + Storage: storage.(oauth2.AccessTokenStorageProvider), + Config: config, + } +} + +// OAuth2ResourceOwnerPasswordCredentialsFactory creates an OAuth2 resource owner password credentials grant handler and registers +// an access token, refresh token and authorize code validator. +// +// Deprecated: This factory is deprecated as a means to communicate that the ROPC grant type is widely discouraged and +// is at the time of this writing going to be omitted in the OAuth 2.1 spec. For more information on why this grant type +// is discouraged see: https://www.scottbrady91.com/oauth/why-the-resource-owner-password-credentials-grant-type-is-not-authentication-nor-suitable-for-modern-applications +func OAuth2ResourceOwnerPasswordCredentialsFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.ResourceOwnerPasswordCredentialsGrantHandler{ + Strategy: strategy.(interface { + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + }), + Storage: storage.(interface { + oauth2.ResourceOwnerPasswordCredentialsGrantStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + }), + Config: config, + } +} + +// OAuth2TokenRevocationFactory creates an OAuth2 token revocation handler. 
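+// The returned handler revokes access and refresh tokens presented at the revocation endpoint (RFC 7009).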
+func OAuth2TokenRevocationFactory(_ fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.TokenRevocationHandler{ + Strategy: strategy.(interface { + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + }), + Storage: storage.(interface { + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + oauth2.TokenRevocationStorageProvider + }), + } +} + +// OAuth2TokenIntrospectionFactory creates an OAuth2 token introspection handler and registers +// an access token and refresh token validator. +func OAuth2TokenIntrospectionFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &oauth2.CoreValidator{ + Strategy: strategy.(interface { + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + }), + Storage: storage.(interface { + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + }), + Config: config, + } +} + +// OAuth2StatelessJWTIntrospectionFactory creates an OAuth2 token introspection handler and +// registers an access token validator. This can only be used to validate JWTs and does so +// statelessly, meaning it uses only the data available in the JWT itself, and does not access the +// storage implementation at all. +// +// Due to the stateless nature of this factory, THE BUILT-IN REVOCATION MECHANISMS WILL NOT WORK. +// If you need revocation, you can validate JWTs statefully, using the other factories. +func OAuth2StatelessJWTIntrospectionFactory(config fosite.Configurator, _ fosite.Storage, strategy interface{}) interface{} { + return &oauth2.StatelessJWTValidator{ + Signer: strategy.(jwt.Signer), + Config: config, + } +} diff --git a/fosite/compose/compose_openid.go b/fosite/compose/compose_openid.go new file mode 100644 index 00000000000..97de813e848 --- /dev/null +++ b/fosite/compose/compose_openid.go @@ -0,0 +1,103 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +// OpenIDConnectExplicitFactory creates an OpenID Connect explicit ("authorize code flow") grant handler. +// +// **Important note:** You must add this handler *after* you have added an OAuth2 authorize code handler! +func OpenIDConnectExplicitFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &openid.ExplicitHandler{ + Storage: storage.(openid.OpenIDConnectRequestStorageProvider), + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: strategy.(openid.OpenIDConnectTokenStrategyProvider), + }, + OpenIDConnectRequestValidator: openid.NewOpenIDConnectRequestValidator(strategy.(jwt.Signer), config), + Config: config, + } +} + +// OpenIDConnectRefreshFactory creates a handler for refreshing openid connect tokens. +// +// **Important note:** You must add this handler *after* you have added an OAuth2 authorize code handler! 
+func OpenIDConnectRefreshFactory(config fosite.Configurator, _ fosite.Storage, strategy interface{}) interface{} { + return &openid.OpenIDConnectRefreshHandler{ + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: strategy.(openid.OpenIDConnectTokenStrategyProvider), + }, + Config: config, + } +} + +// OpenIDConnectImplicitFactory creates an OpenID Connect implicit ("implicit flow") grant handler. +// +// **Important note:** You must add this handler *after* you have added an OAuth2 authorize code handler! +func OpenIDConnectImplicitFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &openid.OpenIDConnectImplicitHandler{ + AuthorizeImplicitGrantTypeHandler: &oauth2.AuthorizeImplicitGrantHandler{ + Strategy: strategy.(oauth2.AccessTokenStrategyProvider), + Storage: storage.(oauth2.AccessTokenStorageProvider), + Config: config, + }, + Config: config, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: strategy.(openid.OpenIDConnectTokenStrategyProvider), + }, + OpenIDConnectRequestValidator: openid.NewOpenIDConnectRequestValidator(strategy.(jwt.Signer), config), + } +} + +// OpenIDConnectHybridFactory creates an OpenID Connect hybrid grant handler. +// +// **Important note:** You must add this handler *after* you have added an OAuth2 authorize code handler! +func OpenIDConnectHybridFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &openid.OpenIDConnectHybridHandler{ + AuthorizeExplicitGrantHandler: &oauth2.AuthorizeExplicitGrantHandler{ + Strategy: strategy.(interface { + oauth2.AuthorizeCodeStrategyProvider + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + }), + Storage: storage.(interface { + oauth2.AuthorizeCodeStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + oauth2.TokenRevocationStorageProvider + }), + Config: config, + }, + Config: config, + AuthorizeImplicitGrantHandler: &oauth2.AuthorizeImplicitGrantHandler{ + Strategy: strategy.(oauth2.AccessTokenStrategyProvider), + Storage: storage.(oauth2.AccessTokenStorageProvider), + Config: config, + }, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: strategy.(openid.OpenIDConnectTokenStrategyProvider), + }, + OpenIDConnectRequestStorage: storage.(openid.OpenIDConnectRequestStorageProvider), + OpenIDConnectRequestValidator: openid.NewOpenIDConnectRequestValidator(strategy.(jwt.Signer), config), + } +} + +// OpenIDConnectDeviceFactory creates an OpenID Connect device ("device code flow") grant handler. +// +// **Important note:** You must add this handler *after* you have added an OAuth2 device authorization handler! 
+func OpenIDConnectDeviceFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &openid.OpenIDConnectDeviceHandler{ + Storage: storage.(openid.OpenIDConnectRequestStorageProvider), + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: strategy.(openid.OpenIDConnectTokenStrategyProvider), + }, + Strategy: strategy.(rfc8628.DeviceCodeStrategyProvider), + Config: config, + } +} diff --git a/fosite/compose/compose_par.go b/fosite/compose/compose_par.go new file mode 100644 index 00000000000..5d117a76726 --- /dev/null +++ b/fosite/compose/compose_par.go @@ -0,0 +1,17 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/par" +) + +// PushedAuthorizeHandlerFactory creates the basic PAR handler +func PushedAuthorizeHandlerFactory(config fosite.Configurator, storage fosite.Storage, _ interface{}) interface{} { + return &par.PushedAuthorizeHandler{ + Storage: storage.(fosite.PARStorageProvider), + Config: config, + } +} diff --git a/fosite/compose/compose_pkce.go b/fosite/compose/compose_pkce.go new file mode 100644 index 00000000000..70d0a8c018a --- /dev/null +++ b/fosite/compose/compose_pkce.go @@ -0,0 +1,19 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/pkce" +) + +// OAuth2PKCEFactory creates a PKCE handler. +func OAuth2PKCEFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &pkce.Handler{ + Strategy: strategy.(oauth2.AuthorizeCodeStrategyProvider), + Storage: storage.(pkce.PKCERequestStorageProvider), + Config: config, + } +} diff --git a/fosite/compose/compose_rfc7523.go b/fosite/compose/compose_rfc7523.go new file mode 100644 index 00000000000..e1454c0b0a2 --- /dev/null +++ b/fosite/compose/compose_rfc7523.go @@ -0,0 +1,23 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/rfc7523" +) + +// RFC7523AssertionGrantFactory creates an OAuth2 Authorize JWT Grant (using JWTs as Authorization Grants) handler +// and registers an access token, refresh token and authorize code validator. +func RFC7523AssertionGrantFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &rfc7523.Handler{ + Strategy: strategy.(oauth2.AccessTokenStrategyProvider), + Storage: storage.(interface { + oauth2.AccessTokenStorageProvider + rfc7523.RFC7523KeyStorageProvider + }), + Config: config, + } +} diff --git a/fosite/compose/compose_rfc8628.go b/fosite/compose/compose_rfc8628.go new file mode 100644 index 00000000000..f4489f19ccb --- /dev/null +++ b/fosite/compose/compose_rfc8628.go @@ -0,0 +1,51 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package compose provides various objects which can be used to +// instantiate OAuth2Providers with different functionality. 
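+//
+// Every factory here follows the same shape: it receives the shared
+// configuration, storage and strategy values, type-asserts only the
+// interfaces it needs, and returns a handler that the caller registers:
+//
+//	func SomeFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{}
+//
+// (SomeFactory is a placeholder name; see the concrete factories in this package.)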
+package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// RFC8628DeviceFactory creates an OAuth2 device code grant ("Device Authorization Grant") handler and registers +// a user code, device code, access token and a refresh token validator. +func RFC8628DeviceFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &rfc8628.DeviceAuthHandler{ + Strategy: strategy.(interface { + rfc8628.DeviceRateLimitStrategyProvider + rfc8628.DeviceCodeStrategyProvider + rfc8628.UserCodeStrategyProvider + }), + Storage: storage.(interface { + rfc8628.DeviceAuthStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + }), + Config: config, + } +} + +// RFC8628DeviceAuthorizationTokenFactory creates an OAuth2 device authorization grant ("Device Authorization Grant") handler and registers +// an access token, refresh token and authorize code validator. +func RFC8628DeviceAuthorizationTokenFactory(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} { + return &rfc8628.DeviceCodeTokenEndpointHandler{ + Strategy: strategy.(interface { + rfc8628.DeviceRateLimitStrategyProvider + rfc8628.DeviceCodeStrategyProvider + rfc8628.UserCodeStrategyProvider + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + }), + Storage: storage.(interface { + rfc8628.DeviceAuthStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + oauth2.TokenRevocationStorageProvider + }), + Config: config, + } +} diff --git a/fosite/compose/compose_strategy.go b/fosite/compose/compose_strategy.go new file mode 100644 index 00000000000..f7831e32758 --- /dev/null +++ b/fosite/compose/compose_strategy.go @@ -0,0 +1,104 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type CommonStrategyProvider struct { + CoreStrategy oauth2.CoreStrategy + AccessTokenStrat oauth2.AccessTokenStrategy + DeviceStrategy *rfc8628.DefaultDeviceStrategy + OIDCTokenStrategy openid.OpenIDConnectTokenStrategy + jwt.Signer +} + +var _ oauth2.AuthorizeCodeStrategyProvider = (*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) AuthorizeCodeStrategy() oauth2.AuthorizeCodeStrategy { + return s.CoreStrategy +} + +var _ oauth2.AccessTokenStrategyProvider = (*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) AccessTokenStrategy() oauth2.AccessTokenStrategy { + if s.AccessTokenStrat != nil { + return s.AccessTokenStrat + } + return s.CoreStrategy +} + +var _ oauth2.RefreshTokenStrategyProvider = (*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) RefreshTokenStrategy() oauth2.RefreshTokenStrategy { + return s.CoreStrategy +} + +var _ openid.OpenIDConnectTokenStrategyProvider = (*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) OpenIDConnectTokenStrategy() openid.OpenIDConnectTokenStrategy { + return s.OIDCTokenStrategy +} + +var _ rfc8628.DeviceRateLimitStrategyProvider = 
(*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) DeviceRateLimitStrategy() rfc8628.DeviceRateLimitStrategy { + return s.DeviceStrategy +} + +var _ rfc8628.DeviceCodeStrategyProvider = (*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) DeviceCodeStrategy() rfc8628.DeviceCodeStrategy { + return s.DeviceStrategy +} + +var _ rfc8628.UserCodeStrategyProvider = (*CommonStrategyProvider)(nil) + +func (s *CommonStrategyProvider) UserCodeStrategy() rfc8628.UserCodeStrategy { + return s.DeviceStrategy +} + +type HMACSHAStrategyConfigurator interface { + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider + fosite.AuthorizeCodeLifespanProvider + fosite.TokenEntropyProvider + fosite.GlobalSecretProvider + fosite.RotatedGlobalSecretsProvider + fosite.HMACHashingProvider + fosite.DeviceAndUserCodeLifespanProvider +} + +func NewOAuth2HMACStrategy(config HMACSHAStrategyConfigurator) *oauth2.HMACSHAStrategy { + return oauth2.NewHMACSHAStrategy(&hmac.HMACStrategy{Config: config}, config) +} + +func NewOAuth2JWTStrategy(keyGetter func(context.Context) (interface{}, error), config fosite.Configurator) *oauth2.DefaultJWTStrategy { + return &oauth2.DefaultJWTStrategy{ + Signer: &jwt.DefaultSigner{GetPrivateKey: keyGetter}, + Config: config, + } +} + +func NewOpenIDConnectStrategy(keyGetter func(context.Context) (interface{}, error), config fosite.Configurator) *openid.DefaultStrategy { + return &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{GetPrivateKey: keyGetter}, + Config: config, + } +} + +func NewDeviceStrategy(config fosite.Configurator) *rfc8628.DefaultDeviceStrategy { + return &rfc8628.DefaultDeviceStrategy{ + Enigma: &hmac.HMACStrategy{Config: config}, + Config: config, + } +} diff --git a/fosite/compose/compose_userinfo_vc.go b/fosite/compose/compose_userinfo_vc.go new file mode 100644 index 00000000000..dcea03f6809 --- /dev/null +++ b/fosite/compose/compose_userinfo_vc.go @@ -0,0 +1,18 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package compose + +import ( + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/verifiable" +) + +// OIDCUserinfoVerifiableCredentialFactory creates a verifiable credentials +// handler. +func OIDCUserinfoVerifiableCredentialFactory(config fosite.Configurator, storage fosite.Storage, strategy any) any { + return &verifiable.Handler{ + NonceManagerProvider: storage.(verifiable.NonceManagerProvider), + Config: config, + } +} diff --git a/fosite/config.go b/fosite/config.go new file mode 100644 index 00000000000..0b7e5cd2d76 --- /dev/null +++ b/fosite/config.go @@ -0,0 +1,331 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "hash" + "html/template" + "net/url" + "time" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +// AuthorizeCodeLifespanProvider returns the provider for configuring the authorization code lifespan. +type AuthorizeCodeLifespanProvider interface { + // GetAuthorizeCodeLifespan returns the authorization code lifespan. + GetAuthorizeCodeLifespan(ctx context.Context) time.Duration +} + +// RefreshTokenLifespanProvider returns the provider for configuring the refresh token lifespan. +type RefreshTokenLifespanProvider interface { + // GetRefreshTokenLifespan returns the refresh token lifespan. 
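+	// A value of -1 is conventionally used for refresh tokens that never expire
+	// (see the RefreshTokenLifespan field on Config).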
+	GetRefreshTokenLifespan(ctx context.Context) time.Duration
+}
+
+// AccessTokenLifespanProvider returns the provider for configuring the access token lifespan.
+type AccessTokenLifespanProvider interface {
+	// GetAccessTokenLifespan returns the access token lifespan.
+	GetAccessTokenLifespan(ctx context.Context) time.Duration
+}
+
+// VerifiableCredentialsNonceLifespanProvider returns the provider for configuring the verifiable credentials nonce lifespan.
+type VerifiableCredentialsNonceLifespanProvider interface {
+	// GetVerifiableCredentialsNonceLifespan returns the verifiable credentials nonce lifespan.
+	GetVerifiableCredentialsNonceLifespan(ctx context.Context) time.Duration
+}
+
+// IDTokenLifespanProvider returns the provider for configuring the ID token lifespan.
+type IDTokenLifespanProvider interface {
+	// GetIDTokenLifespan returns the ID token lifespan.
+	GetIDTokenLifespan(ctx context.Context) time.Duration
+}
+
+// DeviceAndUserCodeLifespanProvider returns the provider for configuring the device and user code lifespan.
+type DeviceAndUserCodeLifespanProvider interface {
+	GetDeviceAndUserCodeLifespan(ctx context.Context) time.Duration
+}
+
+// UserCodeProvider returns the provider for configuring the user code length and allowed symbols.
+type UserCodeProvider interface {
+	GetUserCodeLength(ctx context.Context) int
+	GetUserCodeSymbols(ctx context.Context) []rune
+}
+
+// ScopeStrategyProvider returns the provider for configuring the scope strategy.
+type ScopeStrategyProvider interface {
+	// GetScopeStrategy returns the scope strategy.
+	GetScopeStrategy(ctx context.Context) ScopeStrategy
+}
+
+// AudienceStrategyProvider returns the provider for configuring the audience strategy.
+type AudienceStrategyProvider interface {
+	// GetAudienceStrategy returns the audience strategy.
+	GetAudienceStrategy(ctx context.Context) AudienceMatchingStrategy
+}
+
+// RedirectSecureCheckerProvider returns the provider for configuring the redirect URL security validator.
+type RedirectSecureCheckerProvider interface {
+	// GetRedirectSecureChecker returns the redirect URL security validator.
+	GetRedirectSecureChecker(ctx context.Context) func(context.Context, *url.URL) bool
+}
+
+// RefreshTokenScopesProvider returns the provider for configuring the refresh token scopes.
+type RefreshTokenScopesProvider interface {
+	// GetRefreshTokenScopes returns the refresh token scopes.
+	GetRefreshTokenScopes(ctx context.Context) []string
+}
+
+// DisableRefreshTokenValidationProvider returns the provider for configuring the refresh token validation.
+type DisableRefreshTokenValidationProvider interface {
+	// GetDisableRefreshTokenValidation returns the disable refresh token validation flag.
+	GetDisableRefreshTokenValidation(ctx context.Context) bool
+}
+
+// DeviceProvider returns the provider for configuring the device flow.
+type DeviceProvider interface {
+	GetDeviceVerificationURL(ctx context.Context) string
+	GetDeviceAuthTokenPollingInterval(ctx context.Context) time.Duration
+}
+
+// BCryptCostProvider returns the provider for configuring the BCrypt hash cost.
+type BCryptCostProvider interface {
+	// GetBCryptCost returns the BCrypt hash cost.
+	GetBCryptCost(ctx context.Context) int
+}
+
+// AllowedPromptValuesProvider returns the provider for configuring the allowed prompt values.
+type AllowedPromptValuesProvider interface {
+	// GetAllowedPromptValues returns the allowed prompt values.
+	GetAllowedPromptValues(ctx context.Context) int
+}
+
+// AccessTokenIssuerProvider returns the provider for configuring the JWT issuer.
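+// The issuer is typically the public base URL of the authorization server,
+// for example "https://auth.example.com" (an illustrative value, not a default).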
+type AccessTokenIssuerProvider interface { + // GetAccessTokenIssuer returns the access token issuer. + GetAccessTokenIssuer(ctx context.Context) string +} + +// IDTokenIssuerProvider returns the provider for configuring the ID token issuer. +type IDTokenIssuerProvider interface { + // GetIDTokenIssuer returns the ID token issuer. + GetIDTokenIssuer(ctx context.Context) string +} + +// JWTScopeFieldProvider returns the provider for configuring the JWT scope field. +type JWTScopeFieldProvider interface { + // GetJWTScopeField returns the JWT scope field. + GetJWTScopeField(ctx context.Context) jwt.JWTScopeFieldEnum +} + +// AllowedPromptsProvider returns the provider for configuring the allowed prompts. +type AllowedPromptsProvider interface { + // GetAllowedPrompts returns the allowed prompts. + GetAllowedPrompts(ctx context.Context) []string +} + +// MinParameterEntropyProvider returns the provider for configuring the minimum parameter entropy. +type MinParameterEntropyProvider interface { + // GetMinParameterEntropy returns the minimum parameter entropy. + GetMinParameterEntropy(_ context.Context) int +} + +// SanitationAllowedProvider returns the provider for configuring the sanitation white list. +type SanitationAllowedProvider interface { + // GetSanitationWhiteList is a whitelist of form values that are required by the token endpoint. These values + // are safe for storage in a database (cleartext). + GetSanitationWhiteList(ctx context.Context) []string +} + +// OmitRedirectScopeParamProvider returns the provider for configuring the omit redirect scope param. +type OmitRedirectScopeParamProvider interface { + // GetOmitRedirectScopeParam must be set to true if the scope query param is to be omitted + // in the authorization's redirect URI + GetOmitRedirectScopeParam(ctx context.Context) bool +} + +// EnforcePKCEProvider returns the provider for configuring the enforcement of PKCE. +type EnforcePKCEProvider interface { + // GetEnforcePKCE returns the enforcement of PKCE. + GetEnforcePKCE(ctx context.Context) bool +} + +// EnforcePKCEForPublicClientsProvider returns the provider for configuring the enforcement of PKCE for public clients. +type EnforcePKCEForPublicClientsProvider interface { + // GetEnforcePKCEForPublicClients returns the enforcement of PKCE for public clients. + GetEnforcePKCEForPublicClients(ctx context.Context) bool +} + +// EnablePKCEPlainChallengeMethodProvider returns the provider for configuring the enable PKCE plain challenge method. +type EnablePKCEPlainChallengeMethodProvider interface { + // GetEnablePKCEPlainChallengeMethod returns the enable PKCE plain challenge method. + GetEnablePKCEPlainChallengeMethod(ctx context.Context) bool +} + +// GrantTypeJWTBearerCanSkipClientAuthProvider returns the provider for configuring the grant type JWT bearer can skip client auth. +type GrantTypeJWTBearerCanSkipClientAuthProvider interface { + // GetGrantTypeJWTBearerCanSkipClientAuth returns the grant type JWT bearer can skip client auth. + GetGrantTypeJWTBearerCanSkipClientAuth(ctx context.Context) bool +} + +// GrantTypeJWTBearerIDOptionalProvider returns the provider for configuring the grant type JWT bearer ID optional. +type GrantTypeJWTBearerIDOptionalProvider interface { + // GetGrantTypeJWTBearerIDOptional returns the grant type JWT bearer ID optional. + GetGrantTypeJWTBearerIDOptional(ctx context.Context) bool +} + +// GrantTypeJWTBearerIssuedDateOptionalProvider returns the provider for configuring the grant type JWT bearer issued date optional. 
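+// When the returned value is true, JWT assertions without an "iat" (issued at)
+// claim are accepted; otherwise the claim is required.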
+type GrantTypeJWTBearerIssuedDateOptionalProvider interface { + // GetGrantTypeJWTBearerIssuedDateOptional returns the grant type JWT bearer issued date optional. + GetGrantTypeJWTBearerIssuedDateOptional(ctx context.Context) bool +} + +// GetJWTMaxDurationProvider returns the provider for configuring the JWT max duration. +type GetJWTMaxDurationProvider interface { + // GetJWTMaxDuration returns the JWT max duration. + GetJWTMaxDuration(ctx context.Context) time.Duration +} + +// TokenEntropyProvider returns the provider for configuring the token entropy. +type TokenEntropyProvider interface { + // GetTokenEntropy returns the token entropy. + GetTokenEntropy(ctx context.Context) int +} + +// GlobalSecretProvider returns the provider for configuring the global secret. +type GlobalSecretProvider interface { + // GetGlobalSecret returns the global secret. + GetGlobalSecret(ctx context.Context) ([]byte, error) +} + +// RotatedGlobalSecretsProvider returns the provider for configuring the rotated global secrets. +type RotatedGlobalSecretsProvider interface { + // GetRotatedGlobalSecrets returns the rotated global secrets. + GetRotatedGlobalSecrets(ctx context.Context) ([][]byte, error) +} + +// HMACHashingProvider returns the provider for configuring the hash function. +type HMACHashingProvider interface { + // GetHMACHasher returns the hash function. + GetHMACHasher(ctx context.Context) func() hash.Hash +} + +// GetSecretsHashingProvider provides the client secrets hashing function. +type GetSecretsHashingProvider interface { + // GetSecretsHasher returns the client secrets hashing function. + GetSecretsHasher(ctx context.Context) Hasher +} + +// SendDebugMessagesToClientsProvider returns the provider for configuring the send debug messages to clients. +type SendDebugMessagesToClientsProvider interface { + // GetSendDebugMessagesToClients returns the send debug messages to clients. + GetSendDebugMessagesToClients(ctx context.Context) bool +} + +// JWKSFetcherStrategyProvider returns the provider for configuring the JWKS fetcher strategy. +type JWKSFetcherStrategyProvider interface { + // GetJWKSFetcherStrategy returns the JWKS fetcher strategy. + GetJWKSFetcherStrategy(ctx context.Context) JWKSFetcherStrategy +} + +// HTTPClientProvider returns the provider for configuring the HTTP client. +type HTTPClientProvider interface { + // GetHTTPClient returns the HTTP client provider. + GetHTTPClient(ctx context.Context) *retryablehttp.Client +} + +// ClientAuthenticationStrategyProvider returns the provider for configuring the client authentication strategy. +type ClientAuthenticationStrategyProvider interface { + // GetClientAuthenticationStrategy returns the client authentication strategy. + GetClientAuthenticationStrategy(ctx context.Context) ClientAuthenticationStrategy +} + +// ResponseModeHandlerExtensionProvider returns the provider for configuring the response mode handler extension. +type ResponseModeHandlerExtensionProvider interface { + // GetResponseModeHandlerExtension returns the response mode handler extension. + GetResponseModeHandlerExtension(ctx context.Context) ResponseModeHandler +} + +// MessageCatalogProvider returns the provider for configuring the message catalog. +type MessageCatalogProvider interface { + // GetMessageCatalog returns the message catalog. + GetMessageCatalog(ctx context.Context) i18n.MessageCatalog +} + +// FormPostHTMLTemplateProvider returns the provider for configuring the form post HTML template. 
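+// The template is rendered when an authorization request uses
+// response_mode=form_post (see the FormPostHTMLTemplate field on Config).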
+type FormPostHTMLTemplateProvider interface { + // GetFormPostHTMLTemplate returns the form post HTML template. + GetFormPostHTMLTemplate(ctx context.Context) *template.Template +} + +type TokenURLProvider interface { + // GetTokenURLs returns the token URL. + GetTokenURLs(ctx context.Context) []string +} + +// AuthorizeEndpointHandlersProvider returns the provider for configuring the authorize endpoint handlers. +type AuthorizeEndpointHandlersProvider interface { + // GetAuthorizeEndpointHandlers returns the authorize endpoint handlers. + GetAuthorizeEndpointHandlers(ctx context.Context) AuthorizeEndpointHandlers +} + +// TokenEndpointHandlersProvider returns the provider for configuring the token endpoint handlers. +type TokenEndpointHandlersProvider interface { + // GetTokenEndpointHandlers returns the token endpoint handlers. + GetTokenEndpointHandlers(ctx context.Context) TokenEndpointHandlers +} + +// TokenIntrospectionHandlersProvider returns the provider for configuring the token introspection handlers. +type TokenIntrospectionHandlersProvider interface { + // GetTokenIntrospectionHandlers returns the token introspection handlers. + GetTokenIntrospectionHandlers(ctx context.Context) TokenIntrospectionHandlers +} + +// RevocationHandlersProvider returns the provider for configuring the revocation handlers. +type RevocationHandlersProvider interface { + // GetRevocationHandlers returns the revocation handlers. + GetRevocationHandlers(ctx context.Context) RevocationHandlers +} + +// PushedAuthorizeEndpointHandlersProvider returns the provider for configuring the PAR handlers. +type PushedAuthorizeRequestHandlersProvider interface { + // GetPushedAuthorizeEndpointHandlers returns the handlers. + GetPushedAuthorizeEndpointHandlers(ctx context.Context) PushedAuthorizeEndpointHandlers +} + +// UseLegacyErrorFormatProvider returns the provider for configuring whether to use the legacy error format. +// +// DEPRECATED: Do not use this flag anymore. +type UseLegacyErrorFormatProvider interface { + // GetUseLegacyErrorFormat returns whether to use the legacy error format. + // + // DEPRECATED: Do not use this flag anymore. + GetUseLegacyErrorFormat(ctx context.Context) bool +} + +// PushedAuthorizeRequestConfigProvider is the configuration provider for pushed +// authorization request. +type PushedAuthorizeRequestConfigProvider interface { + // GetPushedAuthorizeRequestURIPrefix is the request URI prefix. This is + // usually 'urn:ietf:params:oauth:request_uri:'. + GetPushedAuthorizeRequestURIPrefix(ctx context.Context) string + + // GetPushedAuthorizeContextLifespan is the lifespan of the short-lived PAR context. + GetPushedAuthorizeContextLifespan(ctx context.Context) time.Duration + + // EnforcePushedAuthorize indicates if PAR is enforced. In this mode, a client + // cannot pass authorize parameters at the 'authorize' endpoint. The 'authorize' endpoint + // must contain the PAR request_uri. + EnforcePushedAuthorize(ctx context.Context) bool +} + +// DeviceEndpointHandlersProvider returns the provider for setting up the Device handlers. +type DeviceEndpointHandlersProvider interface { + // GetDeviceEndpointHandlers returns the handlers. 
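+	// The returned handlers are invoked in order by Fosite.NewDeviceResponse to
+	// populate the device authorization response.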
+ GetDeviceEndpointHandlers(ctx context.Context) DeviceEndpointHandlers +} diff --git a/fosite/config_default.go b/fosite/config_default.go new file mode 100644 index 00000000000..52011112792 --- /dev/null +++ b/fosite/config_default.go @@ -0,0 +1,565 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "hash" + "html/template" + "net/url" + "time" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/x/randx" + + "github.com/ory/hydra/v2/fosite/i18n" +) + +const ( + defaultPARPrefix = "urn:ietf:params:oauth:request_uri:" + defaultPARContextLifetime = 5 * time.Minute + defaultDeviceAndUserCodeLifespan = 10 * time.Minute + defaultAuthTokenPollingInterval = 5 * time.Second +) + +var ( + _ AuthorizeCodeLifespanProvider = (*Config)(nil) + _ RefreshTokenLifespanProvider = (*Config)(nil) + _ AccessTokenLifespanProvider = (*Config)(nil) + _ ScopeStrategyProvider = (*Config)(nil) + _ AudienceStrategyProvider = (*Config)(nil) + _ RedirectSecureCheckerProvider = (*Config)(nil) + _ RefreshTokenScopesProvider = (*Config)(nil) + _ DisableRefreshTokenValidationProvider = (*Config)(nil) + _ AccessTokenIssuerProvider = (*Config)(nil) + _ JWTScopeFieldProvider = (*Config)(nil) + _ AllowedPromptsProvider = (*Config)(nil) + _ OmitRedirectScopeParamProvider = (*Config)(nil) + _ MinParameterEntropyProvider = (*Config)(nil) + _ SanitationAllowedProvider = (*Config)(nil) + _ EnforcePKCEForPublicClientsProvider = (*Config)(nil) + _ EnablePKCEPlainChallengeMethodProvider = (*Config)(nil) + _ EnforcePKCEProvider = (*Config)(nil) + _ GrantTypeJWTBearerCanSkipClientAuthProvider = (*Config)(nil) + _ GrantTypeJWTBearerIDOptionalProvider = (*Config)(nil) + _ GrantTypeJWTBearerIssuedDateOptionalProvider = (*Config)(nil) + _ GetJWTMaxDurationProvider = (*Config)(nil) + _ IDTokenLifespanProvider = (*Config)(nil) + _ IDTokenIssuerProvider = (*Config)(nil) + _ JWKSFetcherStrategyProvider = (*Config)(nil) + _ ClientAuthenticationStrategyProvider = (*Config)(nil) + _ SendDebugMessagesToClientsProvider = (*Config)(nil) + _ ResponseModeHandlerExtensionProvider = (*Config)(nil) + _ MessageCatalogProvider = (*Config)(nil) + _ FormPostHTMLTemplateProvider = (*Config)(nil) + _ TokenURLProvider = (*Config)(nil) + _ GetSecretsHashingProvider = (*Config)(nil) + _ HTTPClientProvider = (*Config)(nil) + _ HMACHashingProvider = (*Config)(nil) + _ AuthorizeEndpointHandlersProvider = (*Config)(nil) + _ TokenEndpointHandlersProvider = (*Config)(nil) + _ TokenIntrospectionHandlersProvider = (*Config)(nil) + _ RevocationHandlersProvider = (*Config)(nil) + _ PushedAuthorizeRequestHandlersProvider = (*Config)(nil) + _ PushedAuthorizeRequestConfigProvider = (*Config)(nil) +) + +type Config struct { + // AccessTokenLifespan sets how long an access token is going to be valid. Defaults to one hour. + AccessTokenLifespan time.Duration + + // VerifiableCredentialsNonceLifespan sets how long a verifiable credentials nonce is going to be valid. Defaults to one hour. + VerifiableCredentialsNonceLifespan time.Duration + + // RefreshTokenLifespan sets how long a refresh token is going to be valid. Defaults to 30 days. Set to -1 for + // refresh tokens that never expire. + RefreshTokenLifespan time.Duration + + // AuthorizeCodeLifespan sets how long an authorize code is going to be valid. Defaults to fifteen minutes. 
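+	// A minimal sketch of overriding the lifespans on this struct (the values
+	// below are illustrative only, not recommendations):
+	//
+	//	cfg := &Config{
+	//		AccessTokenLifespan:   30 * time.Minute,
+	//		RefreshTokenLifespan:  14 * 24 * time.Hour,
+	//		AuthorizeCodeLifespan: 5 * time.Minute,
+	//	}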
+	AuthorizeCodeLifespan time.Duration
+
+	// IDTokenLifespan sets the default id token lifetime. Defaults to one hour.
+	IDTokenLifespan time.Duration
+
+	// IDTokenIssuer sets the default issuer of the ID Token.
+	IDTokenIssuer string
+
+	// DeviceAndUserCodeLifespan sets how long a device code/user code pair is valid for. Defaults to ten minutes.
+	DeviceAndUserCodeLifespan time.Duration
+
+	// DeviceAuthTokenPollingInterval sets the interval at which clients should poll the token endpoint for device code grants.
+	DeviceAuthTokenPollingInterval time.Duration
+
+	// DeviceVerificationURL is the URL of the device verification endpoint; it is included with the device code request responses.
+	DeviceVerificationURL string
+
+	// HashCost sets the cost of the password hashing algorithm (BCrypt). Defaults to 12.
+	HashCost int
+
+	// DisableRefreshTokenValidation sets the introspection endpoint to disable refresh token validation.
+	DisableRefreshTokenValidation bool
+
+	// SendDebugMessagesToClients if set to true, includes error debug messages in response payloads. Be aware that sensitive
+	// data may be exposed, depending on your implementation of Fosite. Such sensitive data might include database error
+	// codes or other information. Proceed with caution!
+	SendDebugMessagesToClients bool
+
+	// ScopeStrategy sets the scope strategy that should be supported, for example fosite.WildcardScopeStrategy.
+	ScopeStrategy ScopeStrategy
+
+	// AudienceMatchingStrategy sets the audience matching strategy that should be supported, defaults to fosite.DefaultAudienceMatchingStrategy.
+	AudienceMatchingStrategy AudienceMatchingStrategy
+
+	// EnforcePKCE, if set to true, requires clients to perform authorize code flows with PKCE. Defaults to false.
+	EnforcePKCE bool
+
+	// EnforcePKCEForPublicClients requires only public clients to use PKCE with the authorize code flow. Defaults to false.
+	EnforcePKCEForPublicClients bool
+
+	// EnablePKCEPlainChallengeMethod sets whether or not to allow the plain challenge method (S256 should be used whenever possible, plain is really discouraged). Defaults to false.
+	EnablePKCEPlainChallengeMethod bool
+
+	// AllowedPromptValues sets which OpenID Connect prompt values the server supports. Defaults to []string{"login", "none", "consent", "select_account"}.
+	AllowedPromptValues []string
+
+	// TokenURL is the URL of the Authorization Server's Token Endpoint. If the authorization server is intended
+	// to be compatible with the private_key_jwt client authentication method (see http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth),
+	// this value MUST be set.
+	TokenURL string
+
+	// JWKSFetcherStrategy is responsible for fetching JSON Web Keys from remote URLs. This is required when the private_key_jwt
+	// client authentication method is used. Defaults to fosite.DefaultJWKSFetcherStrategy.
+	JWKSFetcherStrategy JWKSFetcherStrategy
+
+	// TokenEntropy indicates the entropy of the random string, used as the "message" part of the HMAC token.
+	// Defaults to 32.
+	TokenEntropy int
+
+	// RedirectSecureChecker is a function that returns true if the provided URL can be securely used as a redirect URL.
+	RedirectSecureChecker func(context.Context, *url.URL) bool
+
+	// RefreshTokenScopes defines which OAuth scopes will be given refresh tokens during the authorization code grant exchange. This defaults to "offline" and "offline_access". When set to an empty array, all exchanges will be given refresh tokens.
+	RefreshTokenScopes []string
+
+	// MinParameterEntropy controls the minimum size of state and nonce parameters. 
Defaults to fosite.MinParameterEntropy. + MinParameterEntropy int + + // UseLegacyErrorFormat controls whether the legacy error format (with `error_debug`, `error_hint`, ...) + // should be used or not. + UseLegacyErrorFormat bool + + // GrantTypeJWTBearerCanSkipClientAuth indicates, if client authentication can be skipped, when using jwt as assertion. + GrantTypeJWTBearerCanSkipClientAuth bool + + // GrantTypeJWTBearerIDOptional indicates, if jti (JWT ID) claim required or not in JWT. + GrantTypeJWTBearerIDOptional bool + + // GrantTypeJWTBearerIssuedDateOptional indicates, if "iat" (issued at) claim required or not in JWT. + GrantTypeJWTBearerIssuedDateOptional bool + + // GrantTypeJWTBearerMaxDuration sets the maximum time after JWT issued date, during which the JWT is considered valid. + GrantTypeJWTBearerMaxDuration time.Duration + + // ClientAuthenticationStrategy indicates the Strategy to authenticate client requests + ClientAuthenticationStrategy ClientAuthenticationStrategy + + // ResponseModeHandlerExtension provides a handler for custom response modes + ResponseModeHandlerExtension ResponseModeHandler + + // MessageCatalog is the message bundle used for i18n + MessageCatalog i18n.MessageCatalog + + // FormPostHTMLTemplate sets html template for rendering the authorization response when the request has response_mode=form_post. + FormPostHTMLTemplate *template.Template + + // OmitRedirectScopeParam indicates whether the "scope" parameter should be omitted from the redirect URL. + OmitRedirectScopeParam bool + + // SanitationWhiteList is a whitelist of form values that are required by the token endpoint. These values + // are safe for storage in a database (cleartext). + SanitationWhiteList []string + + // JWTScopeClaimKey defines the claim key to be used to set the scope in. Valid fields are "scope" or "scp" or both. + JWTScopeClaimKey jwt.JWTScopeFieldEnum + + // AccessTokenIssuer is the issuer to be used when generating access tokens. + AccessTokenIssuer string + + // ClientSecretsHasher is the hasher used to hash OAuth2 Client Secrets. + ClientSecretsHasher Hasher + + // HTTPClient is the HTTP client to use for requests. + HTTPClient *retryablehttp.Client + + // AuthorizeEndpointHandlers is a list of handlers that are called before the authorization endpoint is served. + AuthorizeEndpointHandlers AuthorizeEndpointHandlers + + // TokenEndpointHandlers is a list of handlers that are called before the token endpoint is served. + TokenEndpointHandlers TokenEndpointHandlers + + // TokenIntrospectionHandlers is a list of handlers that are called before the token introspection endpoint is served. + TokenIntrospectionHandlers TokenIntrospectionHandlers + + // RevocationHandlers is a list of handlers that are called before the revocation endpoint is served. + RevocationHandlers RevocationHandlers + + // PushedAuthorizeEndpointHandlers is a list of handlers that are called before the PAR endpoint is served. + PushedAuthorizeEndpointHandlers PushedAuthorizeEndpointHandlers + + // GlobalSecret is the global secret used to sign and verify signatures. + GlobalSecret []byte + + // RotatedGlobalSecrets is a list of global secrets that are used to verify signatures. + RotatedGlobalSecrets [][]byte + + // HMACHasher is the hasher used to generate HMAC signatures. + HMACHasher func() hash.Hash + + // PushedAuthorizeRequestURIPrefix is the URI prefix for the PAR request_uri. + // This is defaulted to 'urn:ietf:params:oauth:request_uri:'. 
+ PushedAuthorizeRequestURIPrefix string + + // PushedAuthorizeContextLifespan is the lifespan of the PAR context + PushedAuthorizeContextLifespan time.Duration + + // DeviceEndpointHandlers is a list of handlers that are called before the device endpoint is served. + DeviceEndpointHandlers DeviceEndpointHandlers + + // IsPushedAuthorizeEnforced enforces pushed authorization request for /authorize + IsPushedAuthorizeEnforced bool + + // UserCodeLength defines the length of the user_code + UserCodeLength int + + // UserCodeSymbols defines the symbols that will be used to construct the user_code + UserCodeSymbols []rune +} + +func (c *Config) GetGlobalSecret(ctx context.Context) ([]byte, error) { + return c.GlobalSecret, nil +} + +func (c *Config) GetUseLegacyErrorFormat(ctx context.Context) bool { + return c.UseLegacyErrorFormat +} + +func (c *Config) GetRotatedGlobalSecrets(ctx context.Context) ([][]byte, error) { + return c.RotatedGlobalSecrets, nil +} + +func (c *Config) GetHMACHasher(ctx context.Context) func() hash.Hash { + return c.HMACHasher +} + +func (c *Config) GetAuthorizeEndpointHandlers(ctx context.Context) AuthorizeEndpointHandlers { + return c.AuthorizeEndpointHandlers +} + +func (c *Config) GetTokenEndpointHandlers(ctx context.Context) TokenEndpointHandlers { + return c.TokenEndpointHandlers +} + +func (c *Config) GetTokenIntrospectionHandlers(ctx context.Context) TokenIntrospectionHandlers { + return c.TokenIntrospectionHandlers +} + +// GetDeviceEndpointHandlers return the Device Endpoint Handlers +func (c *Config) GetDeviceEndpointHandlers(ctx context.Context) DeviceEndpointHandlers { + return c.DeviceEndpointHandlers +} + +func (c *Config) GetRevocationHandlers(ctx context.Context) RevocationHandlers { + return c.RevocationHandlers +} + +func (c *Config) GetHTTPClient(ctx context.Context) *retryablehttp.Client { + if c.HTTPClient == nil { + return retryablehttp.NewClient() + } + return c.HTTPClient +} + +func (c *Config) GetSecretsHasher(ctx context.Context) Hasher { + if c.ClientSecretsHasher == nil { + c.ClientSecretsHasher = &BCrypt{Config: c} + } + return c.ClientSecretsHasher +} + +func (c *Config) GetTokenURLs(ctx context.Context) []string { + return []string{c.TokenURL} +} + +func (c *Config) GetFormPostHTMLTemplate(ctx context.Context) *template.Template { + return c.FormPostHTMLTemplate +} + +func (c *Config) GetMessageCatalog(ctx context.Context) i18n.MessageCatalog { + return c.MessageCatalog +} + +func (c *Config) GetResponseModeHandlerExtension(ctx context.Context) ResponseModeHandler { + return c.ResponseModeHandlerExtension +} + +func (c *Config) GetSendDebugMessagesToClients(ctx context.Context) bool { + return c.SendDebugMessagesToClients +} + +func (c *Config) GetIDTokenIssuer(ctx context.Context) string { + return c.IDTokenIssuer +} + +// GetGrantTypeJWTBearerIssuedDateOptional returns the GrantTypeJWTBearerIssuedDateOptional field. +func (c *Config) GetGrantTypeJWTBearerIssuedDateOptional(ctx context.Context) bool { + return c.GrantTypeJWTBearerIssuedDateOptional +} + +// GetGrantTypeJWTBearerIDOptional returns the GrantTypeJWTBearerIDOptional field. +func (c *Config) GetGrantTypeJWTBearerIDOptional(ctx context.Context) bool { + return c.GrantTypeJWTBearerIDOptional +} + +// GetGrantTypeJWTBearerCanSkipClientAuth returns the GrantTypeJWTBearerCanSkipClientAuth field. 
+func (c *Config) GetGrantTypeJWTBearerCanSkipClientAuth(ctx context.Context) bool {
+	return c.GrantTypeJWTBearerCanSkipClientAuth
+}
+
+// GetEnforcePKCE returns whether clients must use PKCE with the authorize code flow.
+func (c *Config) GetEnforcePKCE(ctx context.Context) bool {
+	return c.EnforcePKCE
+}
+
+// GetEnablePKCEPlainChallengeMethod returns whether or not to allow the plain challenge method (S256 should be used whenever possible, plain is really discouraged).
+func (c *Config) GetEnablePKCEPlainChallengeMethod(ctx context.Context) bool {
+	return c.EnablePKCEPlainChallengeMethod
+}
+
+// GetEnforcePKCEForPublicClients returns the value of EnforcePKCEForPublicClients.
+func (c *Config) GetEnforcePKCEForPublicClients(ctx context.Context) bool {
+	return c.EnforcePKCEForPublicClients
+}
+
+// GetSanitationWhiteList returns a list of allowed form values that are required by the token endpoint. These values
+// are safe for storage in a database (cleartext).
+func (c *Config) GetSanitationWhiteList(ctx context.Context) []string {
+	return c.SanitationWhiteList
+}
+
+func (c *Config) GetOmitRedirectScopeParam(ctx context.Context) bool {
+	return c.OmitRedirectScopeParam
+}
+
+func (c *Config) GetAccessTokenIssuer(ctx context.Context) string {
+	return c.AccessTokenIssuer
+}
+
+func (c *Config) GetJWTScopeField(ctx context.Context) jwt.JWTScopeFieldEnum {
+	return c.JWTScopeClaimKey
+}
+
+func (c *Config) GetAllowedPrompts(_ context.Context) []string {
+	return c.AllowedPromptValues
+}
+
+// GetScopeStrategy returns the scope strategy to be used. Defaults to WildcardScopeStrategy.
+func (c *Config) GetScopeStrategy(_ context.Context) ScopeStrategy {
+	if c.ScopeStrategy == nil {
+		c.ScopeStrategy = WildcardScopeStrategy
+	}
+	return c.ScopeStrategy
+}
+
+// GetAudienceStrategy returns the audience matching strategy to be used. Defaults to DefaultAudienceMatchingStrategy.
+func (c *Config) GetAudienceStrategy(_ context.Context) AudienceMatchingStrategy {
+	if c.AudienceMatchingStrategy == nil {
+		c.AudienceMatchingStrategy = DefaultAudienceMatchingStrategy
+	}
+	return c.AudienceMatchingStrategy
+}
+
+// GetAuthorizeCodeLifespan returns how long an authorize code should be valid. Defaults to fifteen minutes.
+func (c *Config) GetAuthorizeCodeLifespan(_ context.Context) time.Duration {
+	if c.AuthorizeCodeLifespan == 0 {
+		return time.Minute * 15
+	}
+	return c.AuthorizeCodeLifespan
+}
+
+// GetIDTokenLifespan returns how long an id token should be valid. Defaults to one hour.
+func (c *Config) GetIDTokenLifespan(_ context.Context) time.Duration {
+	if c.IDTokenLifespan == 0 {
+		return time.Hour
+	}
+	return c.IDTokenLifespan
+}
+
+// GetAccessTokenLifespan returns how long an access token should be valid. Defaults to one hour.
+func (c *Config) GetAccessTokenLifespan(_ context.Context) time.Duration {
+	if c.AccessTokenLifespan == 0 {
+		return time.Hour
+	}
+	return c.AccessTokenLifespan
+}
+
+// GetVerifiableCredentialsNonceLifespan returns how long a verifiable credentials nonce should be valid. Defaults to one hour.
+func (c *Config) GetVerifiableCredentialsNonceLifespan(_ context.Context) time.Duration {
+	if c.VerifiableCredentialsNonceLifespan == 0 {
+		return time.Hour
+	}
+	return c.VerifiableCredentialsNonceLifespan
+}
+
+// GetRefreshTokenLifespan returns how long a refresh token is going to be valid. Defaults to 30 days. Set to -1 for
+// refresh tokens that never expire.
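+// For example, a hypothetical &Config{RefreshTokenLifespan: -1} issues refresh
+// tokens without an expiry, while the zero value falls back to the 30-day default.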
+func (c *Config) GetRefreshTokenLifespan(_ context.Context) time.Duration { + if c.RefreshTokenLifespan == 0 { + return time.Hour * 24 * 30 + } + return c.RefreshTokenLifespan +} + +// GetDeviceAndUserCodeLifespan returns how long the device and user codes should be valid. +// Defaults to 10 minutes +func (c *Config) GetDeviceAndUserCodeLifespan(_ context.Context) time.Duration { + if c.DeviceAndUserCodeLifespan == 0 { + return defaultDeviceAndUserCodeLifespan + } + return c.DeviceAndUserCodeLifespan +} + +// GetBCryptCost returns the bcrypt cost factor. Defaults to 12. +func (c *Config) GetBCryptCost(_ context.Context) int { + if c.HashCost == 0 { + return DefaultBCryptWorkFactor + } + return c.HashCost +} + +// GetJWKSFetcherStrategy returns the JWKSFetcherStrategy. +func (c *Config) GetJWKSFetcherStrategy(_ context.Context) JWKSFetcherStrategy { + if c.JWKSFetcherStrategy == nil { + c.JWKSFetcherStrategy = NewDefaultJWKSFetcherStrategy() + } + return c.JWKSFetcherStrategy +} + +// GetTokenEntropy returns the entropy of the "message" part of a HMAC Token. Defaults to 32. +func (c *Config) GetTokenEntropy(_ context.Context) int { + if c.TokenEntropy == 0 { + return 32 + } + return c.TokenEntropy +} + +// GetRedirectSecureChecker returns the checker to check if redirect URI is secure. Defaults to fosite.IsRedirectURISecure. +func (c *Config) GetRedirectSecureChecker(_ context.Context) func(context.Context, *url.URL) bool { + if c.RedirectSecureChecker == nil { + return IsRedirectURISecure + } + return c.RedirectSecureChecker +} + +// GetRefreshTokenScopes returns which scopes will provide refresh tokens. +func (c *Config) GetRefreshTokenScopes(_ context.Context) []string { + if c.RefreshTokenScopes == nil { + return []string{"offline", "offline_access"} + } + return c.RefreshTokenScopes +} + +// GetMinParameterEntropy returns MinParameterEntropy if set. Defaults to fosite.MinParameterEntropy. +func (c *Config) GetMinParameterEntropy(_ context.Context) int { + if c.MinParameterEntropy == 0 { + return MinParameterEntropy + } else { + return c.MinParameterEntropy + } +} + +// GetJWTMaxDuration specified the maximum amount of allowed `exp` time for a JWT. It compares +// the time with the JWT's `exp` time if the JWT time is larger, will cause the JWT to be invalid. +// +// Defaults to a day. +func (c *Config) GetJWTMaxDuration(_ context.Context) time.Duration { + if c.GrantTypeJWTBearerMaxDuration == 0 { + return time.Hour * 24 + } + return c.GrantTypeJWTBearerMaxDuration +} + +// GetClientAuthenticationStrategy returns the configured client authentication strategy. +// Defaults to nil. +// Note that on a nil strategy `fosite.Fosite` fallbacks to its default client authentication strategy +// `fosite.Fosite.DefaultClientAuthenticationStrategy` +func (c *Config) GetClientAuthenticationStrategy(_ context.Context) ClientAuthenticationStrategy { + return c.ClientAuthenticationStrategy +} + +// GetDisableRefreshTokenValidation returns whether to disable the validation of the refresh token. +func (c *Config) GetDisableRefreshTokenValidation(_ context.Context) bool { + return c.DisableRefreshTokenValidation +} + +// GetPushedAuthorizeEndpointHandlers returns the handlers. +func (c *Config) GetPushedAuthorizeEndpointHandlers(ctx context.Context) PushedAuthorizeEndpointHandlers { + return c.PushedAuthorizeEndpointHandlers +} + +// GetPushedAuthorizeRequestURIPrefix is the request URI prefix. This is +// usually 'urn:ietf:params:oauth:request_uri:'. 
+func (c *Config) GetPushedAuthorizeRequestURIPrefix(ctx context.Context) string { + if c.PushedAuthorizeRequestURIPrefix == "" { + return defaultPARPrefix + } + + return c.PushedAuthorizeRequestURIPrefix +} + +// GetPushedAuthorizeContextLifespan is the lifespan of the short-lived PAR context. +func (c *Config) GetPushedAuthorizeContextLifespan(ctx context.Context) time.Duration { + if c.PushedAuthorizeContextLifespan <= 0 { + return defaultPARContextLifetime + } + + return c.PushedAuthorizeContextLifespan +} + +// EnforcePushedAuthorize indicates if PAR is enforced. In this mode, a client +// cannot pass authorize parameters at the 'authorize' endpoint. The 'authorize' endpoint +// must contain the PAR request_uri. +func (c *Config) EnforcePushedAuthorize(ctx context.Context) bool { + return c.IsPushedAuthorizeEnforced +} + +// GetDeviceVerificationURL returns the device verification URL +func (c *Config) GetDeviceVerificationURL(ctx context.Context) string { + return c.DeviceVerificationURL +} + +// GetDeviceAuthTokenPollingInterval returns configured device token endpoint polling interval +func (c *Config) GetDeviceAuthTokenPollingInterval(ctx context.Context) time.Duration { + if c.DeviceAuthTokenPollingInterval == 0 { + return defaultAuthTokenPollingInterval + } + return c.DeviceAuthTokenPollingInterval +} + +// GetUserCodeLength returns configured user_code length +func (c *Config) GetUserCodeLength(ctx context.Context) int { + if c.UserCodeLength == 0 { + return 8 + } + return c.UserCodeLength +} + +// GetDeviceAuthTokenPollingInterval returns configured user_code allowed symbols +func (c *Config) GetUserCodeSymbols(ctx context.Context) []rune { + if c.UserCodeSymbols == nil { + return []rune(randx.AlphaUpper) + } + return c.UserCodeSymbols +} diff --git a/fosite/context.go b/fosite/context.go new file mode 100644 index 00000000000..34cdce1eee8 --- /dev/null +++ b/fosite/context.go @@ -0,0 +1,22 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "context" + +func NewContext() context.Context { + return context.Background() +} + +type ContextKey string + +const ( + RequestContextKey = ContextKey("request") + AccessRequestContextKey = ContextKey("accessRequest") + AccessResponseContextKey = ContextKey("accessResponse") + AuthorizeRequestContextKey = ContextKey("authorizeRequest") + AuthorizeResponseContextKey = ContextKey("authorizeResponse") + // PushedAuthorizeResponseContextKey is the response context + PushedAuthorizeResponseContextKey = ContextKey("pushedAuthorizeResponse") +) diff --git a/fosite/device_request.go b/fosite/device_request.go new file mode 100644 index 00000000000..35f454ef440 --- /dev/null +++ b/fosite/device_request.go @@ -0,0 +1,43 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +type UserCodeState int16 + +const ( + // User code is active + UserCodeUnused = UserCodeState(0) + // User code has been accepted + UserCodeAccepted = UserCodeState(1) + // User code has been rejected + UserCodeRejected = UserCodeState(2) +) + +// DeviceRequest is an implementation of DeviceRequester +type DeviceRequest struct { + UserCodeState UserCodeState + Request +} + +func (d *DeviceRequest) GetUserCodeState() UserCodeState { + return d.UserCodeState +} + +func (d *DeviceRequest) SetUserCodeState(state UserCodeState) { + d.UserCodeState = state +} + +func (d *DeviceRequest) Sanitize(allowedParameters []string) Requester { + r, _ := d.Request.Sanitize(allowedParameters).(*Request) + 
d.Request = *r + return d +} + +// NewDeviceRequest returns a new device request +func NewDeviceRequest() *DeviceRequest { + return &DeviceRequest{ + UserCodeState: UserCodeUnused, + Request: *NewRequest(), + } +} diff --git a/fosite/device_request_handler.go b/fosite/device_request_handler.go new file mode 100644 index 00000000000..69fb1a7b4ad --- /dev/null +++ b/fosite/device_request_handler.go @@ -0,0 +1,72 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "strings" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" + + "github.com/ory/hydra/v2/fosite/i18n" +) + +// NewDeviceRequest parses an http Request returns a Device request +func (f *Fosite) NewDeviceRequest(ctx context.Context, r *http.Request) (_ DeviceRequester, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewDeviceRequest") + defer otelx.End(span, &err) + + request := NewDeviceRequest() + request.Lang = i18n.GetLangFromRequest(f.Config.GetMessageCatalog(ctx), r) + + if r.Method != http.MethodPost { + return request, errorsx.WithStack(ErrInvalidRequest.WithHintf("HTTP method is '%s', expected 'POST'.", r.Method)) + } + if err := r.ParseForm(); err != nil { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithWrap(err).WithDebug(err.Error())) + } + if len(r.PostForm) == 0 { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("The POST body can not be empty.")) + } + request.Form = r.PostForm + + client, clientErr := f.AuthenticateClient(ctx, r, r.PostForm) + if clientErr != nil { + return request, clientErr + } + if client.GetID() != request.Form.Get("client_id") { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("Provided client_id mismatch.")) + } + request.Client = client + + if !client.GetGrantTypes().Has(string(GrantTypeDeviceCode)) { + return request, errorsx.WithStack(ErrInvalidGrant.WithHint("The requested OAuth 2.0 Client does not have the 'urn:ietf:params:oauth:grant-type:device_code' grant.")) + } + + if err := f.validateDeviceScope(ctx, r, request); err != nil { + return request, err + } + + if err := f.validateAudience(ctx, r, request); err != nil { + return request, err + } + + return request, nil +} + +func (f *Fosite) validateDeviceScope(ctx context.Context, r *http.Request, request *DeviceRequest) error { + scopes := RemoveEmpty(strings.Split(request.Form.Get("scope"), " ")) + scopeStrategy := f.Config.GetScopeStrategy(ctx) + for _, scope := range scopes { + if !scopeStrategy(request.Client.GetScopes(), scope) { + return errorsx.WithStack(ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + request.SetRequestedScopes(scopes) + return nil +} diff --git a/fosite/device_request_handler_test.go b/fosite/device_request_handler_test.go new file mode 100644 index 00000000000..beb5d9cc986 --- /dev/null +++ b/fosite/device_request_handler_test.go @@ -0,0 +1,225 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "net/http" + "net/url" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock 
"go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestNewDeviceRequestWithPublicClient(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + deviceClient := &DefaultClient{ID: "client_id"} + deviceClient.Public = true + deviceClient.Scopes = []string{"17", "42"} + deviceClient.Audience = []string{"aud2"} + deviceClient.GrantTypes = []string{"urn:ietf:params:oauth:grant-type:device_code"} + + authCodeClient := &DefaultClient{ID: "client_id_2"} + authCodeClient.Public = true + authCodeClient.Scopes = []string{"17", "42"} + authCodeClient.GrantTypes = []string{"authorization_code"} + + t.Cleanup(ctrl.Finish) + config := &Config{ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy} + fosite := &Fosite{Store: store, Config: config} + for k, c := range []struct { + header http.Header + form url.Values + method string + expectedError error + mock func() + expect DeviceRequester + description string + }{{ + description: "invalid method", + expectedError: ErrInvalidRequest, + method: "GET", + mock: func() {}, + }, { + description: "empty request", + expectedError: ErrInvalidRequest, + method: "POST", + mock: func() {}, + }, { + description: "invalid client", + form: url.Values{ + "client_id": {"client_id"}, + "scope": {"foo bar"}, + }, + expectedError: ErrInvalidClient, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(nil, errors.New("")) + }, + }, { + description: "fails because scope not allowed", + form: url.Values{ + "client_id": {"client_id"}, + "scope": {"17 42 foo"}, + }, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(deviceClient, nil) + }, + expectedError: ErrInvalidScope, + }, { + description: "fails because audience not allowed", + form: url.Values{ + "client_id": {"client_id"}, + "scope": {"17 42"}, + "audience": {"random_aud"}, + }, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(deviceClient, nil) + }, + expectedError: ErrInvalidRequest, + }, { + description: "fails because it doesn't have the proper grant", + form: url.Values{ + "client_id": {"client_id_2"}, + "scope": {"17 42"}, + }, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id_2")).Return(authCodeClient, nil) + }, + expectedError: ErrInvalidGrant, + }, { + description: "success", + form: url.Values{ + "client_id": {"client_id"}, + "scope": {"17 42"}, + }, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(deviceClient, nil) + }, + }} { + t.Run(fmt.Sprintf("case=%d description=%s", k, c.description), func(t *testing.T) { + c.mock() + r := &http.Request{ + Header: c.header, + PostForm: c.form, + Form: c.form, + Method: c.method, + } + + ar, err := fosite.NewDeviceRequest(context.Background(), r) + require.ErrorIs(t, err, c.expectedError) + if 
c.expectedError == nil { + assert.NotNil(t, ar.GetRequestedAt()) + } + }) + } +} + +func TestNewDeviceRequestWithClientAuthn(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + hasher := internal.NewMockHasher(ctrl) + client := &DefaultClient{ID: "client_id"} + t.Cleanup(ctrl.Finish) + config := &Config{ClientSecretsHasher: hasher, ScopeStrategy: ExactScopeStrategy, AudienceMatchingStrategy: DefaultAudienceMatchingStrategy} + fosite := &Fosite{Store: store, Config: config} + + client.Public = false + client.Secret = []byte("client_secret") + client.Scopes = []string{"foo", "bar"} + client.GrantTypes = []string{"urn:ietf:params:oauth:grant-type:device_code"} + + for k, c := range []struct { + header http.Header + form url.Values + method string + expectedError error + mock func() + expect DeviceRequester + description string + }{ + { + form: url.Values{ + "client_id": {"client_id"}, + "scope": {"foo bar"}, + }, + expectedError: ErrInvalidClient, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(client, nil) + hasher.EXPECT().Compare(gomock.Any(), gomock.Any(), gomock.Any()).Return(errors.New("")) + }, + description: "Should failed becaue no client authn provided.", + }, + { + form: url.Values{ + "client_id": {"client_id2"}, + "scope": {"foo bar"}, + }, + header: http.Header{ + "Authorization": {basicAuth("client_id", "client_secret")}, + }, + expectedError: ErrInvalidRequest, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(client, nil) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("client_secret")), gomock.Eq([]byte("client_secret"))).Return(nil) + }, + description: "should fail because different client is used in authn than in form", + }, + { + form: url.Values{ + "client_id": {"client_id"}, + "scope": {"foo bar"}, + }, + header: http.Header{ + "Authorization": {basicAuth("client_id", "client_secret")}, + }, + method: "POST", + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("client_id")).Return(client, nil) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("client_secret")), gomock.Eq([]byte("client_secret"))).Return(nil) + }, + description: "should succeed", + }, + } { + t.Run(fmt.Sprintf("case=%d description=%s", k, c.description), func(t *testing.T) { + c.mock() + r := &http.Request{ + Header: c.header, + PostForm: c.form, + Form: c.form, + Method: c.method, + } + + req, err := fosite.NewDeviceRequest(context.Background(), r) + require.ErrorIs(t, err, c.expectedError) + if c.expectedError == nil { + assert.NotZero(t, req.GetRequestedAt()) + } + }) + } +} diff --git a/fosite/device_response.go b/fosite/device_response.go new file mode 100644 index 00000000000..95b3993b4e1 --- /dev/null +++ b/fosite/device_response.go @@ -0,0 +1,94 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "net/http" +) + +// DeviceResponse represents the device authorization response +type DeviceResponse struct { + Header http.Header + DeviceCode string `json:"device_code"` + UserCode string `json:"user_code"` + VerificationURI string `json:"verification_uri"` + VerificationURIComplete 
string `json:"verification_uri_complete,omitempty"` + ExpiresIn int64 `json:"expires_in"` + Interval int `json:"interval,omitempty"` +} + +// NewDeviceResponse returns a new DeviceResponse +func NewDeviceResponse() *DeviceResponse { + return &DeviceResponse{} +} + +// GetDeviceCode returns the response's device_code +func (d *DeviceResponse) GetDeviceCode() string { + return d.DeviceCode +} + +// SetDeviceCode sets the response's device_code +func (d *DeviceResponse) SetDeviceCode(code string) { + d.DeviceCode = code +} + +// GetUserCode returns the response's user_code +func (d *DeviceResponse) GetUserCode() string { + return d.UserCode +} + +// SetUserCode sets the response's user_code +func (d *DeviceResponse) SetUserCode(code string) { + d.UserCode = code +} + +// GetVerificationURI returns the response's verification uri +func (d *DeviceResponse) GetVerificationURI() string { + return d.VerificationURI +} + +// SetVerificationURI sets the response's verification uri +func (d *DeviceResponse) SetVerificationURI(uri string) { + d.VerificationURI = uri +} + +// GetVerificationURIComplete returns the response's complete verification uri if set +func (d *DeviceResponse) GetVerificationURIComplete() string { + return d.VerificationURIComplete +} + +// SetVerificationURIComplete sets the response's complete verification uri +func (d *DeviceResponse) SetVerificationURIComplete(uri string) { + d.VerificationURIComplete = uri +} + +// GetExpiresIn returns the response's device code and user code lifetime in seconds if set +func (d *DeviceResponse) GetExpiresIn() int64 { + return d.ExpiresIn +} + +// SetExpiresIn sets the response's device code and user code lifetime in seconds +func (d *DeviceResponse) SetExpiresIn(seconds int64) { + d.ExpiresIn = seconds +} + +// GetInterval returns the response's polling interval if set +func (d *DeviceResponse) GetInterval() int { + return d.Interval +} + +// SetInterval sets the response's polling interval +func (d *DeviceResponse) SetInterval(seconds int) { + d.Interval = seconds +} + +// GetHeader returns the response's headers +func (d *DeviceResponse) GetHeader() http.Header { + return d.Header +} + +// AddHeader adds a header to the response +func (d *DeviceResponse) AddHeader(key, value string) { + d.Header.Add(key, value) +} diff --git a/fosite/device_response_test.go b/fosite/device_response_test.go new file mode 100644 index 00000000000..38e3f841391 --- /dev/null +++ b/fosite/device_response_test.go @@ -0,0 +1,26 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDeviceResponse(t *testing.T) { + r := NewDeviceResponse() + r.SetDeviceCode("device_code") + r.SetUserCode("user_code") + r.SetExpiresIn(5) + r.SetVerificationURI("https://www.example.com") + r.SetVerificationURIComplete("https://www.example.com?code=user_code") + r.SetInterval(5) + assert.Equal(t, "device_code", r.GetDeviceCode()) + assert.Equal(t, "user_code", r.GetUserCode()) + assert.Equal(t, int64(5), r.GetExpiresIn()) + assert.Equal(t, "https://www.example.com", r.GetVerificationURI()) + assert.Equal(t, "https://www.example.com?code=user_code", r.GetVerificationURIComplete()) + assert.Equal(t, 5, r.GetInterval()) +} diff --git a/fosite/device_response_writer.go b/fosite/device_response_writer.go new file mode 100644 index 00000000000..722abbaee7e --- /dev/null +++ b/fosite/device_response_writer.go @@ -0,0 +1,22 @@ +// Copyright © 2025 Ory Corp +// 
SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" +) + +// NewDeviceResponse returns a new DeviceResponder +func (f *Fosite) NewDeviceResponse(ctx context.Context, r DeviceRequester, session Session) (DeviceResponder, error) { + resp := &DeviceResponse{} + + r.SetSession(session) + for _, h := range f.Config.GetDeviceEndpointHandlers(ctx) { + if err := h.HandleDeviceEndpointRequest(ctx, r, resp); err != nil { + return nil, err + } + } + + return resp, nil +} diff --git a/fosite/device_write.go b/fosite/device_write.go new file mode 100644 index 00000000000..b7c45d2e8e3 --- /dev/null +++ b/fosite/device_write.go @@ -0,0 +1,40 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "net/http" +) + +// WriteDeviceResponse writes the device response +func (f *Fosite) WriteDeviceResponse(ctx context.Context, rw http.ResponseWriter, requester DeviceRequester, responder DeviceResponder) { + // Set custom headers, e.g. "X-MySuperCoolCustomHeader" or "X-DONT-CACHE-ME"... + wh := rw.Header() + rh := responder.GetHeader() + for k := range rh { + wh.Set(k, rh.Get(k)) + } + + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + + deviceResponse := &DeviceResponse{ + DeviceCode: responder.GetDeviceCode(), + UserCode: responder.GetUserCode(), + VerificationURI: responder.GetVerificationURI(), + VerificationURIComplete: responder.GetVerificationURIComplete(), + ExpiresIn: responder.GetExpiresIn(), + Interval: responder.GetInterval(), + } + + r, err := json.Marshal(deviceResponse) + _, _ = rw.Write(r) + if err != nil { + http.Error(rw, ErrServerError.WithWrap(err).WithDebug(err.Error()).Error(), http.StatusInternalServerError) + return + } +} diff --git a/fosite/device_write_test.go b/fosite/device_write_test.go new file mode 100644 index 00000000000..aa92d2d96dd --- /dev/null +++ b/fosite/device_write_test.go @@ -0,0 +1,61 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "encoding/json" + "io" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + . 
"github.com/ory/hydra/v2/fosite" +) + +func TestWriteDeviceUserResponse(t *testing.T) { + oauth2 := &Fosite{Config: &Config{ + DeviceAndUserCodeLifespan: time.Minute, + DeviceAuthTokenPollingInterval: time.Minute, + DeviceVerificationURL: "http://ory.sh", + }} + ctx := context.Background() + + rw := httptest.NewRecorder() + ar := &DeviceRequest{} + resp := &DeviceResponse{} + resp.SetUserCode("AAAA") + resp.SetDeviceCode("BBBB") + resp.SetInterval(int( + oauth2.Config.GetDeviceAuthTokenPollingInterval(ctx).Round(time.Second).Seconds(), + )) + resp.SetExpiresIn(int64( + oauth2.Config.GetDeviceAndUserCodeLifespan(ctx), + )) + resp.SetVerificationURI(oauth2.Config.GetDeviceVerificationURL(ctx)) + resp.SetVerificationURIComplete( + oauth2.Config.GetDeviceVerificationURL(ctx) + "?user_code=" + resp.GetUserCode(), + ) + + oauth2.WriteDeviceResponse(context.Background(), rw, ar, resp) + + assert.Equal(t, 200, rw.Code) + + body, err := io.ReadAll(rw.Body) + require.NoError(t, err) + + wroteDeviceResponse := DeviceResponse{} + err = json.Unmarshal(body, &wroteDeviceResponse) + require.NoError(t, err) + + assert.Equal(t, resp.GetUserCode(), wroteDeviceResponse.UserCode) + assert.Equal(t, resp.GetDeviceCode(), wroteDeviceResponse.DeviceCode) + assert.Equal(t, resp.GetVerificationURI(), wroteDeviceResponse.VerificationURI) + assert.Equal(t, resp.GetVerificationURIComplete(), wroteDeviceResponse.VerificationURIComplete) + assert.Equal(t, resp.GetInterval(), wroteDeviceResponse.Interval) + assert.Equal(t, resp.GetExpiresIn(), wroteDeviceResponse.ExpiresIn) +} diff --git a/fosite/docs/authorize-code-grant.gif b/fosite/docs/authorize-code-grant.gif new file mode 100644 index 00000000000..bda5368efb9 Binary files /dev/null and b/fosite/docs/authorize-code-grant.gif differ diff --git a/fosite/docs/how-tos/client_credentials_grant.md b/fosite/docs/how-tos/client_credentials_grant.md new file mode 100644 index 00000000000..3f8dfbeb971 --- /dev/null +++ b/fosite/docs/how-tos/client_credentials_grant.md @@ -0,0 +1,167 @@ +# Client Credentials Grant + +The following example configures a _fosite_ _OAuth2 Provider_ for issuing _JWT_ +_access tokens_ using the _Client Credentials Grant_. This grant allows a client +to request access tokens using only its client credentials at the _Token +Endpoint_(see +[rfc6749 Section 4.4](https://tools.ietf.org/html/rfc6749#section-4.4). For this +aim, this _how-to_ configures: + +- RSA _JWT Strategy_ to sign JWT _access tokens_ +- _Token Endpoint_ http handler +- A `fosite.OAuth2Provider` that provides the following services: + - Create and validate + [_OAuth2 Access Token Requests_](https://tools.ietf.org/html/rfc6749#section-4.1.3) + with _Client Credentials Grant_ + - Create an + [_Access Token Response_](https://tools.ietf.org/html/rfc6749#section-4.1.4) + and + - Sends a [successful](https://tools.ietf.org/html/rfc6749#section-5.1) or + [error](https://tools.ietf.org/html/rfc6749#section-5.2) HTTP response to + client + +## Code Example + +`token_handler.go` + +````golang +package main + +import ( + "net/http" + + "github.com/ory/fosite" + "github.com/ory/fosite/handler/oauth2" +) + +type tokenHandler struct { + oauth fosite.OAuth2Provider +} + +func (t *tokenHandler) TokenHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // A JWT session allows to configure JWT + // header, body and claims for the *access token*. 
+ // Sessions also keeps data between calls in a flow + // but the client credentials flow only uses the Token Endpoint + session := &oauth2.JWTSession{} + + // NewAccessRequest creates an [Access Token Request](https://tools.ietf.org/html/rfc6749#section-4.1.3) + // if the given http request is valid. + ar, err := t.oauth.NewAccessRequest(ctx, r, session) + if err != nil { + t.oauth.WriteAccessError(w, ar, err) + return + } + + // NewAccessResponse creates a [Access Token Response](https://tools.ietf.org/html/rfc6749#section-4.1.4) + // from a *Access Token Request*. + // This response has methods and attributes to setup a valid RFC response + // for Token Endpont, for example: + // + // ``` + // { + // "access_token":"2YotnFZFEjr1zCsicMWpAA", + // "token_type":"example", + // "expires_in":3600, + // "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + // "example_parameter":"example_value" + // } + // ``` + response, err := t.oauth.NewAccessResponse(ctx, ar) + if err != nil { + t.oauth.WriteAccessError(w, ar, err) + return + } + + // WriteAccessResponse writes the Access Token Response + // as a HTTP response + t.oauth.WriteAccessResponse(w, ar, response) +} + +```` + +`main.go` + +```go +package main + +import ( + "crypto/rand" + "crypto/rsa" + "log" + "net/http" + "time" + + "github.com/ory/fosite" + "github.com/ory/fosite/compose" + "github.com/ory/fosite/storage" +) + +func main() { + // Generates a RSA key to sign JWT tokens + key, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + log.Fatalf("Cannot generate RSA key: %v", err) + } + + var storage = storage.NewMemoryStore() + + // Register a test client in the memory store + storage.Clients["test-client"] = &fosite.DefaultClient{ + ID: "test-client", + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + GrantTypes: []string{"client_credentials"}, + } + + // check the api docs of compose.Config for further configuration options + var config = &compose.Config{ + AccessTokenLifespan: time.Minute * 30, + } + + var oauth2Provider = compose.Compose( + config, + storage, + compose.NewOAuth2JWTStrategy( + key, + // HMACStrategy is used to sign refresh token + // therefore not required for our example + nil, + ), + // BCrypt hasher is automatically created when omitted. + // Hasher is used to store hashed client authentication passwords. + nil, + compose.OAuth2ClientCredentialsGrantFactory, + ) + + accessTokenHandler := tokenHandler{oauth: oauth2Provider} + http.HandleFunc("/token", accessTokenHandler.TokenHandler) + log.Println("serving on 0.0.0.0:8080") + if err := http.ListenAndServe("0.0.0.0:8080", nil); err != nil { + log.Fatal(err) + } +} + +``` + +## To run + +In one terminal run the http server as follows: + +```bash +$go run . 
+2021/04/26 12:57:24 serving on 0.0.0.0:8080 +``` + +In a different terminal issue a token as follows: + +```bash +$curl http://localhost:8080/token -d grant_type=client_credentials -d client_id=test-client -d client_secret=foobar +{ + "access_token": "", + "expires_in": 1799, + "scope": "", + "token_type": "bearer" +} +``` diff --git a/fosite/docs/image/banner_fosite.png b/fosite/docs/image/banner_fosite.png new file mode 100644 index 00000000000..924457d8d2d Binary files /dev/null and b/fosite/docs/image/banner_fosite.png differ diff --git a/oauth2/equalKeys.go b/fosite/equalKeys_test.go similarity index 56% rename from oauth2/equalKeys.go rename to fosite/equalKeys_test.go index e16568e078a..5b965461dd8 100644 --- a/oauth2/equalKeys.go +++ b/fosite/equalKeys_test.go @@ -1,7 +1,7 @@ -// Copyright © 2022 Ory Corp +// Copyright © 2025 Ory Corp // SPDX-License-Identifier: Apache-2.0 -package oauth2 +package fosite_test import ( "testing" @@ -11,45 +11,59 @@ import ( "github.com/stretchr/testify/require" ) +func TestAssertObjectsAreEqualByKeys(t *testing.T) { + type foo struct { + Name string + Body int + } + a := &foo{"foo", 1} + b := &foo{"bar", 1} + c := &foo{"baz", 3} + + AssertObjectKeysEqual(t, a, a, "Name", "Body") + AssertObjectKeysNotEqual(t, a, b, "Name") + AssertObjectKeysNotEqual(t, a, c, "Name", "Body") +} + func AssertObjectKeysEqual(t *testing.T, a, b interface{}, keys ...string) { - assert.True(t, len(keys) > 0, "No keys provided.") + assert.True(t, len(keys) > 0, "No key provided.") for _, k := range keys { c, err := reflections.GetField(a, k) - assert.Nil(t, err) + assert.NoError(t, err) d, err := reflections.GetField(b, k) - assert.Nil(t, err) - assert.Equal(t, c, d, "%s", k) + assert.NoError(t, err) + assert.Equal(t, c, d, "field: %s", k) } } func AssertObjectKeysNotEqual(t *testing.T, a, b interface{}, keys ...string) { - assert.True(t, len(keys) > 0, "No keys provided.") + assert.True(t, len(keys) > 0, "No key provided.") for _, k := range keys { c, err := reflections.GetField(a, k) - assert.Nil(t, err) + assert.NoError(t, err) d, err := reflections.GetField(b, k) - assert.Nil(t, err) + assert.NoError(t, err) assert.NotEqual(t, c, d, "%s", k) } } func RequireObjectKeysEqual(t *testing.T, a, b interface{}, keys ...string) { - assert.True(t, len(keys) > 0, "No keys provided.") + assert.True(t, len(keys) > 0, "No key provided.") for _, k := range keys { c, err := reflections.GetField(a, k) - assert.Nil(t, err) + assert.NoError(t, err) d, err := reflections.GetField(b, k) - assert.Nil(t, err) + assert.NoError(t, err) require.Equal(t, c, d, "%s", k) } } func RequireObjectKeysNotEqual(t *testing.T, a, b interface{}, keys ...string) { - assert.True(t, len(keys) > 0, "No keys provided.") + assert.True(t, len(keys) > 0, "No key provided.") for _, k := range keys { c, err := reflections.GetField(a, k) - assert.Nil(t, err) + assert.NoError(t, err) d, err := reflections.GetField(b, k) - assert.Nil(t, err) + assert.NoError(t, err) require.NotEqual(t, c, d, "%s", k) } } diff --git a/fosite/errors.go b/fosite/errors.go new file mode 100644 index 00000000000..3ad2a47b571 --- /dev/null +++ b/fosite/errors.go @@ -0,0 +1,562 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "encoding/json" + stderr "errors" + "fmt" + "net/http" + "net/url" + "strings" + + "golang.org/x/text/language" + + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/x/errorsx" + + 
"github.com/pkg/errors" +) + +var ( + // ErrInvalidatedAuthorizeCode is an error indicating that an authorization code has been + // used previously. + ErrInvalidatedAuthorizeCode = stderr.New("Authorization code has ben invalidated") + // ErrInvalidatedDeviceCode is an error indicating that a device code has benn used previously. + ErrInvalidatedDeviceCode = stderr.New("Device code has been invalidated") + // ErrExistingUserCodeSignature is an error indicating that a row already exists with the provided user_code signature. + ErrExistingUserCodeSignature = stderr.New("User code signature already exists in the database") + // ErrSerializationFailure is an error indicating that the transactional capable storage could not guarantee + // consistency of Update & Delete operations on the same rows between multiple sessions. + ErrSerializationFailure = &RFC6749Error{ + ErrorField: errUnknownErrorName, + DescriptionField: "The request could not be completed because another request is competing for the same resource.", + CodeField: http.StatusConflict, + } + ErrUnknownRequest = &RFC6749Error{ + ErrorField: errUnknownErrorName, + DescriptionField: "The handler is not responsible for this request.", + CodeField: http.StatusBadRequest, + } + ErrRequestForbidden = &RFC6749Error{ + ErrorField: errRequestForbidden, + DescriptionField: "The request is not allowed.", + HintField: "You are not allowed to perform this action.", + CodeField: http.StatusForbidden, + } + ErrInvalidRequest = &RFC6749Error{ + ErrorField: errInvalidRequestName, + DescriptionField: "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed.", + HintField: "Make sure that the various parameters are correct, be aware of case sensitivity and trim your parameters. Make sure that the client you are using has exactly whitelisted the redirect_uri you specified.", + CodeField: http.StatusBadRequest, + } + ErrUnauthorizedClient = &RFC6749Error{ + ErrorField: errUnauthorizedClientName, + DescriptionField: "The client is not authorized to request a token using this method.", + HintField: "Make sure that client id and secret are correctly specified and that the client exists.", + CodeField: http.StatusBadRequest, + } + ErrAccessDenied = &RFC6749Error{ + ErrorField: errAccessDeniedName, + DescriptionField: "The resource owner or authorization server denied the request.", + HintField: "Make sure that the request you are making is valid. 
Maybe the credential or request parameters you are using are limited in scope or otherwise restricted.", + CodeField: http.StatusForbidden, + } + ErrUnsupportedResponseType = &RFC6749Error{ + ErrorField: errUnsupportedResponseTypeName, + DescriptionField: "The authorization server does not support obtaining a token using this method.", + CodeField: http.StatusBadRequest, + } + ErrUnsupportedResponseMode = &RFC6749Error{ + ErrorField: errUnsupportedResponseModeName, + DescriptionField: "The authorization server does not support obtaining a response using this response mode.", + CodeField: http.StatusBadRequest, + } + ErrInvalidScope = &RFC6749Error{ + ErrorField: errInvalidScopeName, + DescriptionField: "The requested scope is invalid, unknown, or malformed.", + CodeField: http.StatusBadRequest, + } + ErrServerError = &RFC6749Error{ + ErrorField: errServerErrorName, + DescriptionField: "The authorization server encountered an unexpected condition that prevented it from fulfilling the request.", + CodeField: http.StatusInternalServerError, + } + ErrTemporarilyUnavailable = &RFC6749Error{ + ErrorField: errTemporarilyUnavailableName, + DescriptionField: "The authorization server is currently unable to handle the request due to a temporary overloading or maintenance of the server.", + CodeField: http.StatusServiceUnavailable, + } + ErrUnsupportedGrantType = &RFC6749Error{ + ErrorField: errUnsupportedGrantTypeName, + DescriptionField: "The authorization grant type is not supported by the authorization server.", + CodeField: http.StatusBadRequest, + } + ErrInvalidGrant = &RFC6749Error{ + ErrorField: errInvalidGrantName, + DescriptionField: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client.", + CodeField: http.StatusBadRequest, + } + ErrInvalidClient = &RFC6749Error{ + ErrorField: errInvalidClientName, + DescriptionField: "Client authentication failed (e.g., unknown client, no client authentication included, or unsupported authentication method).", + CodeField: http.StatusUnauthorized, + } + ErrInvalidState = &RFC6749Error{ + ErrorField: errInvalidStateName, + DescriptionField: "The state is missing or does not have enough characters and is therefore considered too weak.", + CodeField: http.StatusBadRequest, + } + ErrMisconfiguration = &RFC6749Error{ + ErrorField: errMisconfigurationName, + DescriptionField: "The request failed because of an internal error that is probably caused by misconfiguration.", + CodeField: http.StatusInternalServerError, + } + ErrInsufficientEntropy = &RFC6749Error{ + ErrorField: errInsufficientEntropyName, + DescriptionField: "The request used a security parameter (e.g., anti-replay, anti-csrf) with insufficient entropy.", + CodeField: http.StatusBadRequest, + } + ErrNotFound = &RFC6749Error{ + ErrorField: errNotFoundName, + DescriptionField: "Could not find the requested resource(s).", + CodeField: http.StatusNotFound, + } + ErrRequestUnauthorized = &RFC6749Error{ + ErrorField: errRequestUnauthorizedName, + DescriptionField: "The request could not be authorized.", + HintField: "Check that you provided valid credentials in the right format.", + CodeField: http.StatusUnauthorized, + } + ErrTokenSignatureMismatch = &RFC6749Error{ + ErrorField: errTokenSignatureMismatchName, + DescriptionField: "Token signature mismatch.", + HintField: "Check that you provided a valid token in the right 
format.", + CodeField: http.StatusBadRequest, + } + ErrInvalidTokenFormat = &RFC6749Error{ + ErrorField: errInvalidTokenFormatName, + DescriptionField: "Invalid token format.", + HintField: "Check that you provided a valid token in the right format.", + CodeField: http.StatusBadRequest, + } + ErrTokenExpired = &RFC6749Error{ + ErrorField: errTokenExpiredName, + DescriptionField: "Token expired.", + HintField: "The token expired.", + CodeField: http.StatusUnauthorized, + } + ErrScopeNotGranted = &RFC6749Error{ + ErrorField: errScopeNotGrantedName, + DescriptionField: "The token was not granted the requested scope.", + HintField: "The resource owner did not grant the requested scope.", + CodeField: http.StatusForbidden, + } + ErrTokenClaim = &RFC6749Error{ + ErrorField: errTokenClaimName, + DescriptionField: "The token failed validation due to a claim mismatch.", + HintField: "One or more token claims failed validation.", + CodeField: http.StatusUnauthorized, + } + ErrInactiveToken = &RFC6749Error{ + ErrorField: errTokenInactiveName, + DescriptionField: "Token is inactive because it is malformed, expired or otherwise invalid.", + HintField: "Token validation failed.", + CodeField: http.StatusUnauthorized, + } + ErrLoginRequired = &RFC6749Error{ + ErrorField: errLoginRequired, + DescriptionField: "The Authorization Server requires End-User authentication.", + CodeField: http.StatusBadRequest, + } + ErrInteractionRequired = &RFC6749Error{ + DescriptionField: "The Authorization Server requires End-User interaction of some form to proceed.", + ErrorField: errInteractionRequired, + CodeField: http.StatusBadRequest, + } + ErrConsentRequired = &RFC6749Error{ + DescriptionField: "The Authorization Server requires End-User consent.", + ErrorField: errConsentRequired, + CodeField: http.StatusBadRequest, + } + ErrRequestNotSupported = &RFC6749Error{ + DescriptionField: "The OP does not support use of the request parameter.", + ErrorField: errRequestNotSupportedName, + CodeField: http.StatusBadRequest, + } + ErrRequestURINotSupported = &RFC6749Error{ + DescriptionField: "The OP does not support use of the request_uri parameter.", + ErrorField: errRequestURINotSupportedName, + CodeField: http.StatusBadRequest, + } + ErrRegistrationNotSupported = &RFC6749Error{ + DescriptionField: "The OP does not support use of the registration parameter.", + ErrorField: errRegistrationNotSupportedName, + CodeField: http.StatusBadRequest, + } + ErrInvalidRequestURI = &RFC6749Error{ + DescriptionField: "The request_uri in the Authorization Request returns an error or contains invalid data.", + ErrorField: errInvalidRequestURI, + CodeField: http.StatusBadRequest, + } + ErrInvalidRequestObject = &RFC6749Error{ + DescriptionField: "The request parameter contains an invalid Request Object.", + ErrorField: errInvalidRequestObject, + CodeField: http.StatusBadRequest, + } + ErrJTIKnown = &RFC6749Error{ + DescriptionField: "The jti was already used.", + ErrorField: errJTIKnownName, + CodeField: http.StatusBadRequest, + } + ErrAuthorizationPending = &RFC6749Error{ + DescriptionField: "The authorization request is still pending as the end user hasn't yet completed the user-interaction steps.", + ErrorField: errAuthorizationPending, + CodeField: http.StatusBadRequest, + } + ErrSlowDown = &RFC6749Error{ + DescriptionField: "The authorization request was rate-limited to prevent system overload.", + HintField: "Ensure that you don't call the token endpoint sooner than the polling interval", + ErrorField: errSlowDown, + CodeField: 
http.StatusBadRequest, + } + ErrDeviceExpiredToken = &RFC6749Error{ + DescriptionField: "The device_code has expired, and the device authorization session has concluded.", + ErrorField: errDeviceExpiredToken, + CodeField: http.StatusBadRequest, + } +) + +const ( + errInvalidRequestURI = "invalid_request_uri" + errInvalidRequestObject = "invalid_request_object" + errConsentRequired = "consent_required" + errInteractionRequired = "interaction_required" + errLoginRequired = "login_required" + errRequestUnauthorizedName = "request_unauthorized" + errRequestForbidden = "request_forbidden" + errInvalidRequestName = "invalid_request" + errUnauthorizedClientName = "unauthorized_client" + errAccessDeniedName = "access_denied" + errUnsupportedResponseTypeName = "unsupported_response_type" + errUnsupportedResponseModeName = "unsupported_response_mode" + errInvalidScopeName = "invalid_scope" + errServerErrorName = "server_error" + errTemporarilyUnavailableName = "temporarily_unavailable" + errUnsupportedGrantTypeName = "unsupported_grant_type" + errInvalidGrantName = "invalid_grant" + errInvalidClientName = "invalid_client" + errNotFoundName = "not_found" + errInvalidStateName = "invalid_state" + errMisconfigurationName = "misconfiguration" + errInsufficientEntropyName = "insufficient_entropy" + errInvalidTokenFormatName = "invalid_token" + errTokenSignatureMismatchName = "token_signature_mismatch" + errTokenExpiredName = "invalid_token" // https://tools.ietf.org/html/rfc6750#section-3.1 + errScopeNotGrantedName = "scope_not_granted" + errTokenClaimName = "token_claim" + errTokenInactiveName = "token_inactive" + // errAuthorizationCodeInactiveName = "authorization_code_inactive" + errUnknownErrorName = "error" + errRequestNotSupportedName = "request_not_supported" + errRequestURINotSupportedName = "request_uri_not_supported" + errRegistrationNotSupportedName = "registration_not_supported" + errJTIKnownName = "jti_known" + errAuthorizationPending = "authorization_pending" + errSlowDown = "slow_down" + errDeviceExpiredToken = "expired_token" +) + +type ( + RFC6749Error struct { + ErrorField string + DescriptionField string + HintField string + CodeField int + DebugField string + cause error + useLegacyFormat bool + exposeDebug bool + + // Fields for globalization + hintIDField string + hintArgs []interface{} + catalog i18n.MessageCatalog + lang language.Tag + } + stackTracer interface { + StackTrace() errors.StackTrace + } +) + +var ( + _ errorsx.DebugCarrier = new(RFC6749Error) + _ errorsx.ReasonCarrier = new(RFC6749Error) + _ errorsx.RequestIDCarrier = new(RFC6749Error) + _ errorsx.StatusCarrier = new(RFC6749Error) + _ errorsx.StatusCodeCarrier = new(RFC6749Error) + // _ errorsx.DetailsCarrier = new(RFC6749Error) +) + +func ErrorToRFC6749Error(err error) *RFC6749Error { + var e *RFC6749Error + if errors.As(err, &e) { + return e + } + return &RFC6749Error{ + ErrorField: errUnknownErrorName, + DescriptionField: "The error is unrecognizable", + DebugField: err.Error(), + CodeField: http.StatusInternalServerError, + cause: err, + } +} + +// StackTrace returns the error's stack trace. 
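+// The trace is taken from the wrapped cause when that cause itself carries a
+// stack trace (for example, errors created with github.com/pkg/errors);
+// otherwise an empty trace is returned.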
+func (e *RFC6749Error) StackTrace() (trace errors.StackTrace) { + if e.cause == e || e.cause == nil { + return + } + + if st := stackTracer(nil); stderr.As(e.cause, &st) { + trace = st.StackTrace() + } + + return +} + +func (e RFC6749Error) Unwrap() error { + return e.cause +} + +func (e *RFC6749Error) Wrap(err error) { + e.cause = err +} + +func (e RFC6749Error) WithWrap(cause error) *RFC6749Error { + e.cause = cause + return &e +} + +func (e RFC6749Error) WithLegacyFormat(useLegacyFormat bool) *RFC6749Error { + e.useLegacyFormat = useLegacyFormat + return &e +} + +func (e *RFC6749Error) WithTrace(err error) *RFC6749Error { + if st := stackTracer(nil); !stderr.As(e.cause, &st) { + e.Wrap(errorsx.WithStack(err)) + } else { + e.Wrap(err) + } + return e +} + +func (e RFC6749Error) Is(err error) bool { + switch te := err.(type) { + case RFC6749Error: + return e.ErrorField == te.ErrorField && + e.CodeField == te.CodeField + case *RFC6749Error: + return e.ErrorField == te.ErrorField && + e.CodeField == te.CodeField + } + return false +} + +func (e *RFC6749Error) Status() string { + return http.StatusText(e.CodeField) +} + +func (e RFC6749Error) Error() string { + return e.ErrorField +} + +func (e *RFC6749Error) RequestID() string { + return "" +} + +func (e *RFC6749Error) Reason() string { + return e.HintField +} + +func (e *RFC6749Error) StatusCode() int { + return e.CodeField +} + +func (e *RFC6749Error) Cause() error { + return e.cause +} + +func (e *RFC6749Error) WithHintf(hint string, args ...interface{}) *RFC6749Error { + err := *e + if err.hintIDField == "" { + err.hintIDField = hint + } + + err.hintArgs = args + err.HintField = fmt.Sprintf(hint, args...) + return &err +} + +func (e *RFC6749Error) WithHint(hint string) *RFC6749Error { + err := *e + if err.hintIDField == "" { + err.hintIDField = hint + } + + err.HintField = hint + return &err +} + +// WithHintIDOrDefaultf accepts the ID of the hint message +func (e *RFC6749Error) WithHintIDOrDefaultf(ID string, def string, args ...interface{}) *RFC6749Error { + err := *e + err.hintIDField = ID + err.hintArgs = args + err.HintField = fmt.Sprintf(def, args...) + return &err +} + +// WithHintTranslationID accepts the ID of the hint message and should be paired with +// WithHint and WithHintf to add a default message and vaargs. +func (e *RFC6749Error) WithHintTranslationID(ID string) *RFC6749Error { + err := *e + err.hintIDField = ID + return &err +} + +func (e *RFC6749Error) Debug() string { + return e.DebugField +} + +func (e *RFC6749Error) WithDebug(debug string) *RFC6749Error { + err := *e + err.DebugField = debug + return &err +} + +func (e *RFC6749Error) WithDebugf(debug string, args ...interface{}) *RFC6749Error { + return e.WithDebug(fmt.Sprintf(debug, args...)) +} + +func (e *RFC6749Error) WithDescription(description string) *RFC6749Error { + err := *e + err.DescriptionField = description + return &err +} + +func (e *RFC6749Error) WithLocalizer(catalog i18n.MessageCatalog, lang language.Tag) *RFC6749Error { + err := *e + err.catalog = catalog + err.lang = lang + return &err +} + +// Sanitize strips the debug field +// +// Deprecated: Use WithExposeDebug instead. 
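+//
+// A minimal sketch of the non-deprecated approach (illustrative only):
+//
+//	err := ErrInvalidRequest.WithDebug("connection reset")
+//	_ = err.GetDescription()                       // debug is omitted by default
+//	_ = err.WithExposeDebug(true).GetDescription() // debug is included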
+func (e *RFC6749Error) Sanitize() *RFC6749Error { + err := *e + err.DebugField = "" + return &err +} + +// WithExposeDebug if set to true exposes debug messages +func (e *RFC6749Error) WithExposeDebug(exposeDebug bool) *RFC6749Error { + err := *e + err.exposeDebug = exposeDebug + return &err +} + +// GetDescription returns a more description description, combined with hint and debug (when available). +func (e *RFC6749Error) GetDescription() string { + description := i18n.GetMessageOrDefault(e.catalog, e.ErrorField, e.lang, e.DescriptionField) + e.computeHintField() + if e.HintField != "" { + description += " " + e.HintField + } + if e.DebugField != "" && e.exposeDebug { + description += " " + e.DebugField + } + return strings.ReplaceAll(description, "\"", "'") +} + +// RFC6749ErrorJson is a helper struct for JSON encoding/decoding of RFC6749Error. +type RFC6749ErrorJson struct { + Name string `json:"error"` + Description string `json:"error_description"` + Hint string `json:"error_hint,omitempty"` + Code int `json:"status_code,omitempty"` + Debug string `json:"error_debug,omitempty"` +} + +func (e *RFC6749Error) UnmarshalJSON(b []byte) error { + var data RFC6749ErrorJson + + if err := json.Unmarshal(b, &data); err != nil { + return err + } + + e.ErrorField = data.Name + e.CodeField = data.Code + e.DescriptionField = data.Description + + if len(data.Hint+data.Debug) > 0 { + e.HintField = data.Hint + e.DebugField = data.Debug + e.useLegacyFormat = true + } + + return nil +} + +func (e RFC6749Error) MarshalJSON() ([]byte, error) { + if !e.useLegacyFormat { + return json.Marshal(&RFC6749ErrorJson{ + Name: e.ErrorField, + Description: e.GetDescription(), + }) + } + + var debug string + if e.exposeDebug { + debug = e.DebugField + } + + return json.Marshal(&RFC6749ErrorJson{ + Name: e.ErrorField, + Description: e.DescriptionField, + Hint: e.HintField, + Code: e.CodeField, + Debug: debug, + }) +} + +func (e *RFC6749Error) ToValues() url.Values { + values := url.Values{} + values.Set("error", e.ErrorField) + values.Set("error_description", e.GetDescription()) + + if e.useLegacyFormat { + values.Set("error_description", e.DescriptionField) + if e.HintField != "" { + values.Set("error_hint", e.HintField) + } + + if e.DebugField != "" && e.exposeDebug { + values.Set("error_debug", e.DebugField) + } + } + + return values +} + +func (e *RFC6749Error) computeHintField() { + if e.hintIDField == "" { + return + } + + e.HintField = i18n.GetMessageOrDefault(e.catalog, e.hintIDField, e.lang, e.HintField, e.hintArgs...) 
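+	// Note: i18n.GetMessageOrDefault falls back to the already-formatted HintField
+	// when the catalog has no translation for hintIDField in the requested language.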
+} diff --git a/fosite/errors_test.go b/fosite/errors_test.go new file mode 100644 index 00000000000..b0b877a154b --- /dev/null +++ b/fosite/errors_test.go @@ -0,0 +1,90 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" + + "github.com/ory/hydra/v2/fosite/i18n" +) + +func TestRFC6749Error(t *testing.T) { + t.Run("case=wrap", func(t *testing.T) { + orig := errors.New("hi") + wrap := new(RFC6749Error) + wrap.Wrap(orig) + + assert.EqualValues(t, orig.(stackTracer).StackTrace(), wrap.StackTrace()) + }) + + t.Run("case=wrap_self", func(t *testing.T) { + wrap := new(RFC6749Error) + wrap.Wrap(wrap) + + assert.Empty(t, wrap.StackTrace()) + }) +} + +func TestErrorI18N(t *testing.T) { + catalog := i18n.NewDefaultMessageCatalog([]*i18n.DefaultLocaleBundle{ + { + LangTag: "en", + Messages: []*i18n.DefaultMessage{ + { + ID: "access_denied", + FormattedMessage: "The resource owner or authorization server denied the request.", + }, + { + ID: "badRequestMethod", + FormattedMessage: "HTTP method is '%s', expected 'POST'.", + }, + }, + }, + { + LangTag: "es", + Messages: []*i18n.DefaultMessage{ + { + ID: "access_denied", + FormattedMessage: "El propietario del recurso o el servidor de autorización denegó la solicitud.", + }, + { + ID: "HTTP method is '%s', expected 'POST'.", + FormattedMessage: "El método HTTP es '%s', esperado 'POST'.", + }, + { + ID: "Unable to parse HTTP body, make sure to send a properly formatted form request body.", + FormattedMessage: "No se puede analizar el cuerpo HTTP, asegúrese de enviar un cuerpo de solicitud de formulario con el formato adecuado.", + }, + { + ID: "badRequestMethod", + FormattedMessage: "El método HTTP es '%s', esperado 'POST'.", + }, + }, + }, + }) + + t.Run("case=legacy", func(t *testing.T) { + err := ErrAccessDenied.WithLocalizer(catalog, language.Spanish).WithHintf("HTTP method is '%s', expected 'POST'.", "GET") + assert.EqualValues(t, "El propietario del recurso o el servidor de autorización denegó la solicitud. El método HTTP es 'GET', esperado 'POST'.", err.GetDescription()) + }) + + t.Run("case=unsupported_locale_legacy", func(t *testing.T) { + err := ErrAccessDenied.WithLocalizer(catalog, language.Afrikaans).WithHintf("HTTP method is '%s', expected 'POST'.", "GET") + assert.EqualValues(t, "The resource owner or authorization server denied the request. HTTP method is 'GET', expected 'POST'.", err.GetDescription()) + }) + + t.Run("case=simple", func(t *testing.T) { + err := ErrAccessDenied.WithLocalizer(catalog, language.Spanish).WithHintIDOrDefaultf("badRequestMethod", "HTTP method is '%s', expected 'POST'.", "GET") + assert.EqualValues(t, "El propietario del recurso o el servidor de autorización denegó la solicitud. El método HTTP es 'GET', esperado 'POST'.", err.GetDescription()) + }) + + t.Run("case=unsupported_locale", func(t *testing.T) { + err := ErrAccessDenied.WithLocalizer(catalog, language.Afrikaans).WithHintIDOrDefaultf("badRequestMethod", "HTTP method is '%s', expected 'POST'.", "GET") + assert.EqualValues(t, "The resource owner or authorization server denied the request. 
HTTP method is 'GET', expected 'POST'.", err.GetDescription()) + }) +} diff --git a/fosite/fosite.go b/fosite/fosite.go new file mode 100644 index 00000000000..b7ab0547c6c --- /dev/null +++ b/fosite/fosite.go @@ -0,0 +1,180 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "reflect" +) + +const MinParameterEntropy = 8 + +var defaultResponseModeHandler = &DefaultResponseModeHandler{} + +// AuthorizeEndpointHandlers is a list of AuthorizeEndpointHandler +type AuthorizeEndpointHandlers []AuthorizeEndpointHandler + +// Append adds an AuthorizeEndpointHandler to this list. Ignores duplicates based on reflect.TypeOf. +func (a *AuthorizeEndpointHandlers) Append(h AuthorizeEndpointHandler) { + for _, this := range *a { + if reflect.TypeOf(this) == reflect.TypeOf(h) { + return + } + } + + *a = append(*a, h) +} + +// TokenEndpointHandlers is a list of TokenEndpointHandler +type TokenEndpointHandlers []TokenEndpointHandler + +// Append adds an TokenEndpointHandler to this list. Ignores duplicates based on reflect.TypeOf. +func (t *TokenEndpointHandlers) Append(h TokenEndpointHandler) { + for _, this := range *t { + if reflect.TypeOf(this) == reflect.TypeOf(h) { + return + } + } + + *t = append(*t, h) +} + +// TokenIntrospectionHandlers is a list of TokenValidator +type TokenIntrospectionHandlers []TokenIntrospector + +// Append adds an AccessTokenValidator to this list. Ignores duplicates based on reflect.TypeOf. +func (t *TokenIntrospectionHandlers) Append(h TokenIntrospector) { + for _, this := range *t { + if reflect.TypeOf(this) == reflect.TypeOf(h) { + return + } + } + + *t = append(*t, h) +} + +// RevocationHandlers is a list of RevocationHandler +type RevocationHandlers []RevocationHandler + +// Append adds an RevocationHandler to this list. Ignores duplicates based on reflect.TypeOf. +func (t *RevocationHandlers) Append(h RevocationHandler) { + for _, this := range *t { + if reflect.TypeOf(this) == reflect.TypeOf(h) { + return + } + } + + *t = append(*t, h) +} + +// PushedAuthorizeEndpointHandlers is a list of PushedAuthorizeEndpointHandler +type PushedAuthorizeEndpointHandlers []PushedAuthorizeEndpointHandler + +// Append adds an AuthorizeEndpointHandler to this list. Ignores duplicates based on reflect.TypeOf. +func (a *PushedAuthorizeEndpointHandlers) Append(h PushedAuthorizeEndpointHandler) { + for _, this := range *a { + if reflect.TypeOf(this) == reflect.TypeOf(h) { + return + } + } + + *a = append(*a, h) +} + +// DeviceEndpointHandlers is a list of DeviceEndpointHandler +type DeviceEndpointHandlers []DeviceEndpointHandler + +// Append adds an DeviceEndpointHandlers to this list. Ignores duplicates based on reflect.TypeOf. 
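+//
+// A short usage sketch, mirroring the behaviour exercised in fosite_test.go:
+//
+//	var hs DeviceEndpointHandlers
+//	hs.Append(&rfc8628.DeviceAuthHandler{})
+//	hs.Append(&rfc8628.DeviceAuthHandler{}) // ignored: a handler of this type is already registered
+//	// len(hs) == 1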
+func (a *DeviceEndpointHandlers) Append(h DeviceEndpointHandler) { + for _, this := range *a { + if reflect.TypeOf(this) == reflect.TypeOf(h) { + return + } + } + + *a = append(*a, h) +} + +var _ OAuth2Provider = (*Fosite)(nil) + +type Configurator interface { + IDTokenIssuerProvider + IDTokenLifespanProvider + AllowedPromptsProvider + EnforcePKCEProvider + EnforcePKCEForPublicClientsProvider + EnablePKCEPlainChallengeMethodProvider + GrantTypeJWTBearerCanSkipClientAuthProvider + GrantTypeJWTBearerIDOptionalProvider + GrantTypeJWTBearerIssuedDateOptionalProvider + GetJWTMaxDurationProvider + AudienceStrategyProvider + ScopeStrategyProvider + RedirectSecureCheckerProvider + OmitRedirectScopeParamProvider + SanitationAllowedProvider + JWTScopeFieldProvider + AccessTokenIssuerProvider + DisableRefreshTokenValidationProvider + RefreshTokenScopesProvider + AccessTokenLifespanProvider + RefreshTokenLifespanProvider + VerifiableCredentialsNonceLifespanProvider + AuthorizeCodeLifespanProvider + DeviceAndUserCodeLifespanProvider + TokenEntropyProvider + RotatedGlobalSecretsProvider + GlobalSecretProvider + JWKSFetcherStrategyProvider + HTTPClientProvider + ScopeStrategyProvider + AudienceStrategyProvider + MinParameterEntropyProvider + HMACHashingProvider + ClientAuthenticationStrategyProvider + ResponseModeHandlerExtensionProvider + SendDebugMessagesToClientsProvider + JWKSFetcherStrategyProvider + ClientAuthenticationStrategyProvider + ResponseModeHandlerExtensionProvider + MessageCatalogProvider + FormPostHTMLTemplateProvider + TokenURLProvider + GetSecretsHashingProvider + AuthorizeEndpointHandlersProvider + TokenEndpointHandlersProvider + TokenIntrospectionHandlersProvider + RevocationHandlersProvider + UseLegacyErrorFormatProvider + DeviceEndpointHandlersProvider + UserCodeProvider + DeviceProvider +} + +func NewOAuth2Provider(s Storage, c Configurator) *Fosite { + return &Fosite{Store: s, Config: c} +} + +// Fosite implements OAuth2Provider. +type Fosite struct { + Store Storage + + Config Configurator +} + +// GetMinParameterEntropy returns MinParameterEntropy if set. Defaults to fosite.MinParameterEntropy. +func (f *Fosite) GetMinParameterEntropy(ctx context.Context) int { + if mp := f.Config.GetMinParameterEntropy(ctx); mp > 0 { + return mp + } + + return MinParameterEntropy +} + +func (f *Fosite) ResponseModeHandler(ctx context.Context) ResponseModeHandler { + if ext := f.Config.GetResponseModeHandlerExtension(ctx); ext != nil { + return ext + } + return defaultResponseModeHandler +} diff --git a/fosite/fosite.png b/fosite/fosite.png new file mode 100644 index 00000000000..315eb4d38c5 Binary files /dev/null and b/fosite/fosite.png differ diff --git a/fosite/fosite_test.go b/fosite/fosite_test.go new file mode 100644 index 00000000000..cd9e435cbce --- /dev/null +++ b/fosite/fosite_test.go @@ -0,0 +1,77 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/par" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +func TestAuthorizeEndpointHandlers(t *testing.T) { + h := &oauth2.AuthorizeExplicitGrantHandler{} + hs := AuthorizeEndpointHandlers{} + hs.Append(h) + hs.Append(h) + hs.Append(&oauth2.AuthorizeExplicitGrantHandler{}) + assert.Len(t, hs, 1) + assert.Equal(t, hs[0], h) +} + +func TestDeviceAuthorizeEndpointHandlers(t *testing.T) { + h := &rfc8628.DeviceAuthHandler{} + hs := DeviceEndpointHandlers{} + hs.Append(h) + hs.Append(h) + hs.Append(&rfc8628.DeviceAuthHandler{}) + assert.Len(t, hs, 1) + assert.Equal(t, hs[0], h) +} + +func TestTokenEndpointHandlers(t *testing.T) { + h := &oauth2.AuthorizeExplicitGrantHandler{} + hs := TokenEndpointHandlers{} + hs.Append(h) + hs.Append(h) + // do some crazy type things and make sure dupe detection works + var f interface{} = &oauth2.AuthorizeExplicitGrantHandler{} + hs.Append(&oauth2.AuthorizeExplicitGrantHandler{}) + hs.Append(f.(TokenEndpointHandler)) + require.Len(t, hs, 1) + assert.Equal(t, hs[0], h) +} + +func TestAuthorizedRequestValidators(t *testing.T) { + h := &oauth2.CoreValidator{} + hs := TokenIntrospectionHandlers{} + hs.Append(h) + hs.Append(h) + hs.Append(&oauth2.CoreValidator{}) + require.Len(t, hs, 1) + assert.Equal(t, hs[0], h) +} + +func TestPushedAuthorizedRequestHandlers(t *testing.T) { + h := &par.PushedAuthorizeHandler{} + hs := PushedAuthorizeEndpointHandlers{} + hs.Append(h) + hs.Append(h) + require.Len(t, hs, 1) + assert.Equal(t, hs[0], h) +} + +func TestMinParameterEntropy(t *testing.T) { + f := Fosite{Config: new(Config)} + assert.Equal(t, MinParameterEntropy, f.GetMinParameterEntropy(context.Background())) + + f = Fosite{Config: &Config{MinParameterEntropy: 42}} + assert.Equal(t, 42, f.GetMinParameterEntropy(context.Background())) +} diff --git a/fosite/generate-mocks.sh b/fosite/generate-mocks.sh new file mode 100755 index 00000000000..222c8564fae --- /dev/null +++ b/fosite/generate-mocks.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +mockgen -package internal -destination internal/access_request.go github.com/ory/hydra/v2/fosite AccessRequester +mockgen -package internal -destination internal/access_response.go github.com/ory/hydra/v2/fosite AccessResponder +mockgen -package internal -destination internal/access_token_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStorage +mockgen -package internal -destination internal/access_token_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStorageProvider +mockgen -package internal -destination internal/access_token_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStrategy +mockgen -package internal -destination internal/access_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStrategyProvider +mockgen -package internal -destination internal/authorize_code_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStorage +mockgen -package internal -destination internal/authorize_code_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStorageProvider +mockgen -package internal -destination internal/authorize_code_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 
AuthorizeCodeStrategy +mockgen -package internal -destination internal/authorize_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStrategyProvider +mockgen -package internal -destination internal/authorize_endpoint_handler.go github.com/ory/hydra/v2/fosite AuthorizeEndpointHandler +mockgen -package internal -destination internal/authorize_endpoint_handlers_provider.go github.com/ory/hydra/v2/fosite AuthorizeEndpointHandlersProvider +mockgen -package internal -destination internal/authorize_request.go github.com/ory/hydra/v2/fosite AuthorizeRequester +mockgen -package internal -destination internal/authorize_response.go github.com/ory/hydra/v2/fosite AuthorizeResponder +mockgen -package internal -destination internal/client.go github.com/ory/hydra/v2/fosite Client +mockgen -package internal -destination internal/client_manager.go github.com/ory/hydra/v2/fosite ClientManager +mockgen -package internal -destination internal/oauth2_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 CoreStorage +mockgen -package internal -destination internal/oauth2_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 CoreStrategy +mockgen -package internal -destination internal/device_auth_storage.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceAuthStorage +mockgen -package internal -destination internal/device_auth_storage_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceAuthStorageProvider +mockgen -package internal -destination internal/device_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceCodeStrategy +mockgen -package internal -destination internal/device_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceCodeStrategyProvider +mockgen -package internal -destination internal/device_rate_limit_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceRateLimitStrategy +mockgen -package internal -destination internal/device_rate_limit_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceRateLimitStrategyProvider +mockgen -package internal -destination internal/hash.go github.com/ory/hydra/v2/fosite Hasher +mockgen -package internal -destination internal/open_id_connect_token_strategy.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectTokenStrategy +mockgen -package internal -destination internal/open_id_connect_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectTokenStrategyProvider +mockgen -package internal -destination internal/open_id_connect_request_storage.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectRequestStorage +mockgen -package internal -destination internal/open_id_connect_request_storage_provider.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectRequestStorageProvider +mockgen -package internal -destination internal/par_storage.go github.com/ory/hydra/v2/fosite PARStorage +mockgen -package internal -destination internal/par_storage_provider.go github.com/ory/hydra/v2/fosite PARStorageProvider +mockgen -package internal -destination internal/pkce_request_storage.go github.com/ory/hydra/v2/fosite/handler/pkce PKCERequestStorage +mockgen -package internal 
-destination internal/pkce_request_storage_provider.go github.com/ory/hydra/v2/fosite/handler/pkce PKCERequestStorageProvider +mockgen -package internal -destination internal/refresh_token_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStorage +mockgen -package internal -destination internal/refresh_token_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStorageProvider +mockgen -package internal -destination internal/refresh_token_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStrategy +mockgen -package internal -destination internal/refresh_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStrategyProvider +mockgen -package internal -destination internal/request.go github.com/ory/hydra/v2/fosite Requester +mockgen -package internal -destination internal/resource_owner_password_credentials_grant_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 ResourceOwnerPasswordCredentialsGrantStorage +mockgen -package internal -destination internal/resource_owner_password_credentials_grant_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 ResourceOwnerPasswordCredentialsGrantStorageProvider +mockgen -package internal -destination internal/revocation_handler.go github.com/ory/hydra/v2/fosite RevocationHandler +mockgen -package internal -destination internal/revocation_handlers_provider.go github.com/ory/hydra/v2/fosite RevocationHandlersProvider +mockgen -package internal -destination internal/rfc7523_key_storage.go github.com/ory/hydra/v2/fosite/handler/rfc7523 RFC7523KeyStorage +mockgen -package internal -destination internal/rfc7523_key_storage_provider.go github.com/ory/hydra/v2/fosite/handler/rfc7523 RFC7523KeyStorageProvider +mockgen -package internal -destination internal/storage.go github.com/ory/hydra/v2/fosite Storage +mockgen -package internal -destination internal/token_endpoint_handler.go github.com/ory/hydra/v2/fosite TokenEndpointHandler +mockgen -package internal -destination internal/token_introspector.go github.com/ory/hydra/v2/fosite TokenIntrospector +mockgen -package internal -destination internal/token_revocation_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 TokenRevocationStorage +mockgen -package internal -destination internal/token_revocation_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 TokenRevocationStorageProvider +mockgen -package internal -destination internal/transactional.go github.com/ory/hydra/v2/fosite Transactional +mockgen -package internal -destination internal/user_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 UserCodeStrategy +mockgen -package internal -destination internal/user_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 UserCodeStrategyProvider + +goimports -w internal/ diff --git a/fosite/generate.go b/fosite/generate.go new file mode 100644 index 00000000000..1c9af1dad9f --- /dev/null +++ b/fosite/generate.go @@ -0,0 +1,57 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/access_request.go github.com/ory/hydra/v2/fosite AccessRequester 
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/access_response.go github.com/ory/hydra/v2/fosite AccessResponder
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/access_token_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/access_token_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/access_token_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/access_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStrategyProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_code_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_code_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_code_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStrategyProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_endpoint_handler.go github.com/ory/hydra/v2/fosite AuthorizeEndpointHandler
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_endpoint_handlers_provider.go github.com/ory/hydra/v2/fosite AuthorizeEndpointHandlersProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_request.go github.com/ory/hydra/v2/fosite AuthorizeRequester
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/authorize_response.go github.com/ory/hydra/v2/fosite AuthorizeResponder
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/client.go github.com/ory/hydra/v2/fosite Client
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/client_manager.go github.com/ory/hydra/v2/fosite ClientManager
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/oauth2_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 CoreStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/oauth2_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 CoreStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/device_auth_storage.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceAuthStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/device_auth_storage_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceAuthStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/device_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceCodeStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/device_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceCodeStrategyProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/device_rate_limit_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceRateLimitStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/device_rate_limit_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceRateLimitStrategyProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/hash.go github.com/ory/hydra/v2/fosite Hasher
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/open_id_connect_token_strategy.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectTokenStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/open_id_connect_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectTokenStrategyProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/open_id_connect_request_storage.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectRequestStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/open_id_connect_request_storage_provider.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectRequestStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/par_storage.go github.com/ory/hydra/v2/fosite PARStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/par_storage_provider.go github.com/ory/hydra/v2/fosite PARStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/pkce_request_storage.go github.com/ory/hydra/v2/fosite/handler/pkce PKCERequestStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/pkce_request_storage_provider.go github.com/ory/hydra/v2/fosite/handler/pkce PKCERequestStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/refresh_token_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/refresh_token_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/refresh_token_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/refresh_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStrategyProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/request.go github.com/ory/hydra/v2/fosite Requester
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/resource_owner_password_credentials_grant_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 ResourceOwnerPasswordCredentialsGrantStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/resource_owner_password_credentials_grant_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 ResourceOwnerPasswordCredentialsGrantStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/revocation_handler.go github.com/ory/hydra/v2/fosite RevocationHandler
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/revocation_handlers_provider.go github.com/ory/hydra/v2/fosite RevocationHandlersProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/rfc7523_key_storage.go github.com/ory/hydra/v2/fosite/handler/rfc7523 RFC7523KeyStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/rfc7523_key_storage_provider.go github.com/ory/hydra/v2/fosite/handler/rfc7523 RFC7523KeyStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/storage.go github.com/ory/hydra/v2/fosite Storage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/token_endpoint_handler.go github.com/ory/hydra/v2/fosite TokenEndpointHandler
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/token_introspector.go github.com/ory/hydra/v2/fosite TokenIntrospector
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/token_revocation_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 TokenRevocationStorage
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/token_revocation_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 TokenRevocationStorageProvider
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/transactional.go github.com/ory/hydra/v2/fosite Transactional
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/user_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 UserCodeStrategy
+//go:generate go run go.uber.org/mock/mockgen -package internal -destination internal/user_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 UserCodeStrategyProvider
diff --git a/fosite/go.sum b/fosite/go.sum new file mode 100644 index 00000000000..af36927c0cf --- /dev/null +++ b/fosite/go.sum @@ -0,0 +1,852 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
+cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= 
+github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/cristalhq/jwt/v4 v4.0.2 h1:g/AD3h0VicDamtlM70GWGElp8kssQEv+5wYd7L9WOhU= +github.com/cristalhq/jwt/v4 v4.0.2/go.mod h1:HnYraSNKDRag1DZP92rYHyrjyQHnVEHPNqesmzs+miQ= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgraph-io/ristretto v1.0.0 h1:SYG07bONKMlFDUYu5pEu3DGAh8c2OFNzKm6G9J4Si84= +github.com/dgraph-io/ristretto v1.0.0/go.mod h1:jTi2FiYEhQ1NsMmA7DeBykizjOuY88NhKBkepyu1jPc= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/elazarl/goproxy v1.2.1 h1:njjgvO6cRG9rIqN2ebkqy6cQz2Njkx7Fsfv/zIZqgug= +github.com/elazarl/goproxy v1.2.1/go.mod h1:YfEbZtqP4AetfO6d40vWchF3znWX7C7Vd6ZMfdL8z64= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= 
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobuffalo/attrs v1.0.3/go.mod h1:KvDJCE0avbufqS0Bw3UV7RQynESY0jjod+572ctX4t8= +github.com/gobuffalo/envy v1.10.2/go.mod h1:qGAGwdvDsaEtPhfBzb3o0SfDea8ByGn9j8bKmVft9z8= +github.com/gobuffalo/fizz v1.14.4/go.mod h1:9/2fGNXNeIFOXEEgTPJwiK63e44RjG+Nc4hfMm1ArGM= +github.com/gobuffalo/flect v0.3.0/go.mod h1:5pf3aGnsvqvCj50AVni7mJJF8ICxGZ8HomberC3pXLE= 
+github.com/gobuffalo/flect v1.0.0/go.mod h1:l9V6xSb4BlXwsxEMj3FVEub2nkdQjWhPvD8XTTlHPQc= +github.com/gobuffalo/genny/v2 v2.1.0/go.mod h1:4yoTNk4bYuP3BMM6uQKYPvtP6WsXFGm2w2EFYZdRls8= +github.com/gobuffalo/github_flavored_markdown v1.1.3/go.mod h1:IzgO5xS6hqkDmUh91BW/+Qxo/qYnvfzoz3A7uLkg77I= +github.com/gobuffalo/helpers v0.6.7/go.mod h1:j0u1iC1VqlCaJEEVkZN8Ia3TEzfj/zoXANqyJExTMTA= +github.com/gobuffalo/logger v1.0.7/go.mod h1:u40u6Bq3VVvaMcy5sRBclD8SXhBYPS0Qk95ubt+1xJM= +github.com/gobuffalo/nulls v0.4.2/go.mod h1:EElw2zmBYafU2R9W4Ii1ByIj177wA/pc0JdjtD0EsH8= +github.com/gobuffalo/packd v1.0.2/go.mod h1:sUc61tDqGMXON80zpKGp92lDb86Km28jfvX7IAyxFT8= +github.com/gobuffalo/plush/v4 v4.1.16/go.mod h1:6t7swVsarJ8qSLw1qyAH/KbrcSTwdun2ASEQkOznakg= +github.com/gobuffalo/plush/v4 v4.1.18/go.mod h1:xi2tJIhFI4UdzIL8sxZtzGYOd2xbBpcFbLZlIPGGZhU= +github.com/gobuffalo/pop/v6 v6.1.1 h1:eUDBaZcb0gYrmFnKwpuTEUA7t5ZHqNfvS4POqJYXDZY= +github.com/gobuffalo/pop/v6 v6.1.1/go.mod h1:1n7jAmI1i7fxuXPZjZb0VBPQDbksRtCoFnrDV5IsvaI= +github.com/gobuffalo/tags/v3 v3.1.4/go.mod h1:ArRNo3ErlHO8BtdA0REaZxijuWnWzF6PUXngmMXd2I0= +github.com/gobuffalo/validate/v3 v3.3.3/go.mod h1:YC7FsbJ/9hW/VjQdmXPvFqvRis4vrRYFxr69WiNZw6g= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.3.1+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod 
h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= 
+github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.23.0 h1:ad0vkEBuk23VJzZR9nkLVG0YAoN9coASF1GusYX6AlU= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.23.0/go.mod h1:igFoXX2ELCW06bol23DWPB5BEWfZISOzSP5K2sbLea0= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= 
+github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.13.0/go.mod h1:AnowpAqO4CMIIJNZl2VJp+KrkAZciAkhEl0W0JIobpI= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile 
v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.12.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jandelgado/gcov2lcov v1.0.5 h1:rkBt40h0CVK4oCb8Dps950gvfd1rYvQ8+cWa346lVU0= +github.com/jandelgado/gcov2lcov v1.0.5/go.mod h1:NnSxK6TMlg1oGDBfGelGbjgorT5/L3cchlbtgFYZSss= +github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= +github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs= +github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/parsers/json v0.1.0 h1:dzSZl5pf5bBcW0Acnu20Djleto19T0CfHcvZ14NJ6fU= +github.com/knadh/koanf/parsers/json v0.1.0/go.mod h1:ll2/MlXcZ2BfXD6YJcjVFzhG9P0TdJ207aIBKQhV2hY= +github.com/knadh/koanf/providers/rawbytes v0.1.0 
h1:dpzgu2KO6uf6oCb4aP05KDmKmAmI51k5pe8RYKQ0qME= +github.com/knadh/koanf/providers/rawbytes v0.1.0/go.mod h1:mMTB1/IcJ/yE++A2iEZbY1MLygX7vttU+C+S/YmPu9c= +github.com/knadh/koanf/v2 v2.0.1 h1:1dYGITt1I23x8cfx8ZnldtezdyaZtfAuRtIFOiRzK7g= +github.com/knadh/koanf/v2 v2.0.1/go.mod h1:ZeiIlIDXTE7w1lMT6UVcNiRAS2/rCeLn/GdLNvY1Dus= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/luna-duclos/instrumentedsql v1.1.3/go.mod h1:9J1njvFds+zN7y85EDhN9XNQLANWwZt2ULeIC8yMNYs= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= 
+github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/goveralls v0.0.12 h1:PEEeF0k1SsTjOBQ8FOmrOAoCu4ytuMaWCnWe94zxbCg= +github.com/mattn/goveralls v0.0.12/go.mod h1:44ImGEUfmqH8bBtaMrYKsM65LXfNLWmwaxFGjZwgMSQ= +github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/moul/http2curl v0.0.0-20170919181001-9ac6cf4d929b h1:Pip12xNtMvEFUBF4f8/b5yRXj94LLrNdLWELfOr2KcY= +github.com/moul/http2curl v0.0.0-20170919181001-9ac6cf4d929b/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/nyaruka/phonenumbers v1.1.6 h1:DcueYq7QrOArAprAYNoQfDgp0KetO4LqtnBtQC6Wyes= +github.com/nyaruka/phonenumbers v1.1.6/go.mod h1:yShPJHDSH3aTKzCbXyVxNpbl2kA+F+Ne5Pun/MvFRos= +github.com/oleiade/reflections v1.0.1 h1:D1XO3LVEYroYskEsoSiGItp9RUxG6jWnCVvrqH0HHQM= +github.com/oleiade/reflections v1.0.1/go.mod h1:rdFxbxq4QXVZWj0F+e9jqjDkc7dbp97vkRixKo2JR60= +github.com/openzipkin/zipkin-go v0.4.3 h1:9EGwpqkgnwdEIJ+Od7QVSEIH+ocmm5nPat0G7sjsSdg= +github.com/openzipkin/zipkin-go v0.4.3/go.mod h1:M9wCJZFWCo2RiY+o1eBCEMe0Dp2S5LDHcMZmk3RmK7c= +github.com/ory/go-acc v0.2.9-0.20230103102148-6b1c9a70dbbe h1:rvu4obdvqR0fkSIJ8IfgzKOWwZ5kOT2UNfLq81Qk7rc= +github.com/ory/go-acc v0.2.9-0.20230103102148-6b1c9a70dbbe/go.mod h1:z4n3u6as84LbV4YmgjHhnwtccQqzf4cZlSk9f1FhygI= +github.com/ory/go-convenience v0.1.0 h1:zouLKfF2GoSGnJwGq+PE/nJAE6dj2Zj5QlTgmMTsTS8= +github.com/ory/go-convenience v0.1.0/go.mod h1:uEY/a60PL5c12nYz4V5cHY03IBmwIAEm8TWB0yn9KNs= +github.com/ory/herodot v0.10.2 h1:gGvNMHgAwWzdP/eo+roSiT5CGssygHSjDU7MSQNlJ4E= +github.com/ory/herodot v0.10.2/go.mod h1:MMNmY6MG1uB6fnXYFaHoqdV23DTWctlPsmRCeq/2+wc= +github.com/ory/jsonschema/v3 v3.0.8 h1:Ssdb3eJ4lDZ/+XnGkvQS/te0p+EkolqwTsDOCxr/FmU= +github.com/ory/jsonschema/v3 v3.0.8/go.mod h1:ZPzqjDkwd3QTnb2Z6PAS+OTvBE2x5i6m25wCGx54W/0= +github.com/ory/x v0.0.677 h1:ZulzE4EBhNBXNotWmGSmGsVNbgbZpIr4snMURRkski0= +github.com/ory/x v0.0.677/go.mod h1:zJmnDtKje2FCP4EeFvRsKk94XXiqKCSGJMZcirAfhUs= +github.com/parnurzeal/gorequest v0.2.15 h1:oPjDCsF5IkD4gUk6vIgsxYNaSgvAnIh1EJeROn3HdJU= +github.com/parnurzeal/gorequest v0.2.15/go.mod 
h1:3Kh2QUMJoqw3icWAecsyzkpY7UzRfDhbRdTjtNwNiUE= +github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0= +github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761 h1:0b8DF5kR0PhRoRXDiEEdzrgBc8UqVY4JWLkQJCRsLME= +github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761/go.mod h1:/THDZYi7F/BsVEcYzYPqdcWFQ+1C2InkawTKfLOAnzg= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4 
h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= +github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM= +github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= +github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= +github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= +github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc= +github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 
h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= +github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/tidwall/gjson v1.14.3 h1:9jvXn7olKEHU1S9vwoMGliaT8jq1vJ7IH/n9zD9Dnlw= +github.com/tidwall/gjson v1.14.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= +github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.57.0 h1:7F3XCD6WYzDkwbi8I8N+oYJWquPVScnRosKGgqjsR8c= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.57.0/go.mod h1:Dk3C0BfIlZDZ5c6eVS7TYiH2vssuyUU3vUsgbrR+5V4= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.57.0 h1:DheMAlT6POBP+gh8RUH19EOTnQIor5QE0uSRPtzCpSw= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.57.0/go.mod h1:wZcGmeVO9nzP67aYSLDqXNWK87EZWhi7JWj1v7ZXf94= +go.opentelemetry.io/contrib/propagators/b3 v1.32.0 h1:MazJBz2Zf6HTN/nK/s3Ru1qme+VhWU5hm83QxEP+dvw= +go.opentelemetry.io/contrib/propagators/b3 v1.32.0/go.mod h1:B0s70QHYPrJwPOwD1o3V/R8vETNOG9N3qZf4LDYvA30= +go.opentelemetry.io/contrib/propagators/jaeger v1.32.0 h1:K/fOyTMD6GELKTIJBaJ9k3ppF2Njt8MeUGBOwfaWXXA= +go.opentelemetry.io/contrib/propagators/jaeger v1.32.0/go.mod h1:ISE6hda//MTWvtngG7p4et3OCngsrTVfl7c6DjN17f8= +go.opentelemetry.io/contrib/samplers/jaegerremote v0.26.0 
h1:/SKXyZLAnuj981HVc8G5ZylYK3qD2W6AYR6cJx5kIHw= +go.opentelemetry.io/contrib/samplers/jaegerremote v0.26.0/go.mod h1:cOEzME0M2OKeHB45lJiOKfvUCdg/r75mf7YS5w0tbmE= +go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= +go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0 h1:IJFEoHiytixx8cMiVAO+GmHR6Frwu+u5Ur8njpFO6Ac= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0/go.mod h1:3rHrKNtLIoS0oZwkY2vxi+oJcwFRWdtUyRII+so45p8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.32.0 h1:cMyu9O88joYEaI47CnQkxO1XZdpoTF9fEnW2duIddhw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.32.0/go.mod h1:6Am3rn7P9TVVeXYG+wtcGE7IE1tsQ+bP3AuWcKt/gOI= +go.opentelemetry.io/otel/exporters/zipkin v1.32.0 h1:6O8HgLHPXtXE9QEKEWkBImL9mEKCGEl+m+OncVO53go= +go.opentelemetry.io/otel/exporters/zipkin v1.32.0/go.mod h1:+MFvorlowjy0iWnsKaNxC1kzczSxe71mw85h4p8yEvg= +go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= +go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= +go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4= +go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU= +go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= +go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= +go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= +go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU= +go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0= +golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs= +golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= 
+golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools 
v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools 
v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4= +golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= +golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api 
v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
+google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto/googleapis/api v0.0.0-20241104194629-dd2ea8efbc28 h1:M0KvPgPmDZHPlbRbaNU1APr28TvwvvdUPlSv7PUvy8g= +google.golang.org/genproto/googleapis/api v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:dguCy7UOdZhTvLzDyt15+rOrawrpM4q7DD9dQ1P11P4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 h1:XVhgTWWV3kGQlwJHR3upFWZeTsei6Oks1apkZSeonIE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod 
h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= 
+rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
+rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
+rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
diff --git a/fosite/go_mod_indirect_pins.go b/fosite/go_mod_indirect_pins.go
new file mode 100644
index 00000000000..f635123c909
--- /dev/null
+++ b/fosite/go_mod_indirect_pins.go
@@ -0,0 +1,12 @@
+// Copyright © 2025 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+//go:build tools
+
+package fosite
+
+import (
+	_ "github.com/mattn/goveralls"
+
+	_ "github.com/ory/go-acc"
+)
diff --git a/fosite/handler.go b/fosite/handler.go
new file mode 100644
index 00000000000..b98cd7f9d52
--- /dev/null
+++ b/fosite/handler.go
@@ -0,0 +1,79 @@
+// Copyright © 2025 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+package fosite
+
+import (
+	"context"
+)
+
+type AuthorizeEndpointHandler interface {
+	// HandleAuthorizeEndpointRequest handles an authorize endpoint request. To extend the handler's capabilities, the HTTP request
+	// is passed along in case further information retrieval is required. If the handler is not responsible for
+	// the authorize request, it must return nil and must NOT modify the session, the responder, or the requester.
+	//
+	// The following spec is a good example of what HandleAuthorizeEndpointRequest should do.
+	// * https://tools.ietf.org/html/rfc6749#section-3.1.1
+	//   response_type REQUIRED.
+	//   The value MUST be one of "code" for requesting an
+	//   authorization code as described by Section 4.1.1, "token" for
+	//   requesting an access token (implicit grant) as described by
+	//   Section 4.2.1, or a registered extension value as described by Section 8.4.
+	HandleAuthorizeEndpointRequest(ctx context.Context, requester AuthorizeRequester, responder AuthorizeResponder) error
+}
+
+type TokenEndpointHandler interface {
+	// PopulateTokenEndpointResponse is responsible for setting return values and should only be executed if
+	// the handler's HandleTokenEndpointRequest did not return ErrUnknownRequest.
+	PopulateTokenEndpointResponse(ctx context.Context, requester AccessRequester, responder AccessResponder) error
+
+	// HandleTokenEndpointRequest handles a token endpoint request. If the handler is not responsible for handling
+	// the request, this method should return ErrUnknownRequest; otherwise it should handle the request.
+	HandleTokenEndpointRequest(ctx context.Context, requester AccessRequester) error
+
+	// CanSkipClientAuth indicates whether client authentication can be skipped. By default it MUST be false, unless you are
+	// implementing an extension grant type that allows unauthenticated clients. CanSkipClientAuth must be called
+	// before HandleTokenEndpointRequest to decide whether the AccessRequester will contain an authenticated client.
+	CanSkipClientAuth(ctx context.Context, requester AccessRequester) bool
+
+	// CanHandleTokenEndpointRequest indicates whether this TokenEndpointHandler can handle the request. If true,
+	// HandleTokenEndpointRequest can be called.
+	CanHandleTokenEndpointRequest(ctx context.Context, requester AccessRequester) bool
+}
+
+// RevocationHandler is the interface that allows token revocation for an OAuth 2.0 provider.
+// https://tools.ietf.org/html/rfc7009
+//
+// RevokeToken is invoked after a new token revocation request is parsed.
+//
+// https://tools.ietf.org/html/rfc7009#section-2.1
+// If the particular
+// token is a refresh token and the authorization server supports the
+// revocation of access tokens, then the authorization server SHOULD
+// also invalidate all access tokens based on the same authorization
+// grant (see Implementation Note). If the token passed to the request
+// is an access token, the server MAY revoke the respective refresh
+// token as well.
+type RevocationHandler interface {
+	// RevokeToken handles access and refresh token revocation.
+	RevokeToken(ctx context.Context, token string, tokenType TokenType, client Client) error
+}
+
+// PushedAuthorizeEndpointHandler is the interface that handles PAR (https://datatracker.ietf.org/doc/html/rfc9126)
+type PushedAuthorizeEndpointHandler interface {
+	// HandlePushedAuthorizeEndpointRequest handles a pushed authorize endpoint request. To extend the handler's capabilities, the HTTP request
+	// is passed along in case further information retrieval is required. If the handler is not responsible for
+	// the pushed authorize request, it must return nil and must NOT modify the session, the responder, or the requester.
+	HandlePushedAuthorizeEndpointRequest(ctx context.Context, requester AuthorizeRequester, responder PushedAuthorizeResponder) error
+}
+
+// DeviceEndpointHandler is the interface that handles https://tools.ietf.org/html/rfc8628
+type DeviceEndpointHandler interface {
+	// HandleDeviceEndpointRequest handles a device authorize endpoint request. To extend the handler's capabilities, the HTTP request
+	// is passed along in case further information retrieval is required. If the handler is not responsible for
+	// the device authorize request, it must return nil and must NOT modify the session, the responder, or the requester.
+	//
+	// The following spec is a good example of what HandleDeviceEndpointRequest should do.
+	// * https://tools.ietf.org/html/rfc8628#section-3.2
+	HandleDeviceEndpointRequest(ctx context.Context, requester DeviceRequester, responder DeviceResponder) error
+}
diff --git a/fosite/handler/oauth2/export_test.go b/fosite/handler/oauth2/export_test.go
new file mode 100644
index 00000000000..de5687a7b6b
--- /dev/null
+++ b/fosite/handler/oauth2/export_test.go
@@ -0,0 +1,18 @@
+// Copyright © 2025 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+package oauth2
+
+import (
+	"time"
+
+	"github.com/ory/hydra/v2/fosite"
+)
+
+func CallGetExpiresIn(r fosite.Requester, key fosite.TokenType, defaultLifespan time.Duration, now time.Time) time.Duration {
+	return getExpiresIn(r, key, defaultLifespan, now)
+}
+
+func CallSignature(token string) string {
+	return signature(token)
+}
diff --git a/fosite/handler/oauth2/flow_authorize_code_auth.go b/fosite/handler/oauth2/flow_authorize_code_auth.go
new file mode 100644
index 00000000000..af53d573203
--- /dev/null
+++ b/fosite/handler/oauth2/flow_authorize_code_auth.go
@@ -0,0 +1,109 @@
+// Copyright © 2025 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+package oauth2
+
+import (
+	"context"
+	"net/url"
+	"strings"
+	"time"
+
+	"github.com/ory/hydra/v2/fosite"
+	"github.com/ory/x/errorsx"
+)
+
+var (
+	_ fosite.AuthorizeEndpointHandler = (*AuthorizeExplicitGrantHandler)(nil)
+	_ fosite.TokenEndpointHandler     = (*AuthorizeExplicitGrantHandler)(nil)
+)
+
+// AuthorizeExplicitGrantHandler is a response handler for the Authorize Code grant using the explicit grant type
+// as defined in https://tools.ietf.org/html/rfc6749#section-4.1
+type AuthorizeExplicitGrantHandler struct {
+	Storage interface {
+		AuthorizeCodeStorageProvider
+		AccessTokenStorageProvider
+		RefreshTokenStorageProvider
+		TokenRevocationStorageProvider
+	}
+	Strategy CoreStrategyProvider
+	Config   interface {
+		fosite.AuthorizeCodeLifespanProvider
+		fosite.AccessTokenLifespanProvider
+		fosite.RefreshTokenLifespanProvider
+		fosite.ScopeStrategyProvider
+		fosite.AudienceStrategyProvider
+		fosite.RedirectSecureCheckerProvider
+		fosite.RefreshTokenScopesProvider
+		fosite.OmitRedirectScopeParamProvider
+		fosite.SanitationAllowedProvider
+	}
+}
+
+func (c *AuthorizeExplicitGrantHandler) secureChecker(ctx context.Context) func(context.Context, *url.URL) bool {
+	if c.Config.GetRedirectSecureChecker(ctx) == nil {
+		return fosite.IsRedirectURISecure
+	}
+	return c.Config.GetRedirectSecureChecker(ctx)
+}
+
+func (c *AuthorizeExplicitGrantHandler) HandleAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error {
+	// This lets us define multiple response types, for example OpenID Connect's id_token
+	if !ar.GetResponseTypes().ExactOne("code") {
+		return nil
+	}
+
+	ar.SetDefaultResponseMode(fosite.ResponseModeQuery)
+
+	// Disabled because this is already handled at the authorize_request_handler
+	// if !ar.GetClient().GetResponseTypes().Has("code") {
+	// 	return errorsx.WithStack(fosite.ErrInvalidGrant)
+	// }
+
+	if !c.secureChecker(ctx)(ctx, ar.GetRedirectURI()) {
+		return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Redirect URL is using an insecure protocol, http is only allowed for hosts with suffix 'localhost', for example: http://myapp.localhost/."))
+	}
+
+	client := ar.GetClient()
+	for _, scope := range ar.GetRequestedScopes() {
+		if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) {
+			return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client
is not allowed to request scope '%s'.", scope)) + } + } + + if err := c.Config.GetAudienceStrategy(ctx)(client.GetAudience(), ar.GetRequestedAudience()); err != nil { + return err + } + + return c.IssueAuthorizeCode(ctx, ar, resp) +} + +func (c *AuthorizeExplicitGrantHandler) IssueAuthorizeCode(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + code, signature, err := c.Strategy.AuthorizeCodeStrategy().GenerateAuthorizeCode(ctx, ar) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + ar.GetSession().SetExpiresAt(fosite.AuthorizeCode, time.Now().UTC().Add(c.Config.GetAuthorizeCodeLifespan(ctx))) + if err := c.Storage.AuthorizeCodeStorage().CreateAuthorizeCodeSession(ctx, signature, ar.Sanitize(c.GetSanitationWhiteList(ctx))); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + resp.AddParameter("code", code) + resp.AddParameter("state", ar.GetState()) + if !c.Config.GetOmitRedirectScopeParam(ctx) { + resp.AddParameter("scope", strings.Join(ar.GetGrantedScopes(), " ")) + } + + ar.SetResponseTypeHandled("code") + return nil +} + +func (c *AuthorizeExplicitGrantHandler) GetSanitationWhiteList(ctx context.Context) []string { + if allowedList := c.Config.GetSanitationWhiteList(ctx); len(allowedList) > 0 { + return allowedList + } + + return []string{"code", "redirect_uri"} +} diff --git a/fosite/handler/oauth2/flow_authorize_code_auth_test.go b/fosite/handler/oauth2/flow_authorize_code_auth_test.go new file mode 100644 index 00000000000..6463bed6405 --- /dev/null +++ b/fosite/handler/oauth2/flow_authorize_code_auth_test.go @@ -0,0 +1,181 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "net/url" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/storage" +) + +func parseUrl(uu string) *url.URL { + u, _ := url.Parse(uu) + return u +} + +func TestAuthorizeCode_HandleAuthorizeEndpointRequest(t *testing.T) { + for k, strategy := range map[string]oauth2.CoreStrategy{ + "hmac": hmacshaStrategy, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + handler := oauth2.AuthorizeExplicitGrantHandler{ + Storage: store, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: strategy}, + Config: &fosite.Config{ + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + ScopeStrategy: fosite.HierarchicScopeStrategy, + }, + } + for _, c := range []struct { + handler oauth2.AuthorizeExplicitGrantHandler + areq *fosite.AuthorizeRequest + description string + expectErr error + expect func(t *testing.T, areq *fosite.AuthorizeRequest, aresp *fosite.AuthorizeResponse) + }{ + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{""}, + Request: *fosite.NewRequest(), + }, + description: "should pass because not responsible for handling an empty response type", + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"foo"}, + Request: *fosite.NewRequest(), + }, + description: "should pass because not responsible for handling an invalid response type", + }, + { + 
handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"http://asdf.com/cb"}, + }, + }, + RedirectURI: parseUrl("http://asdf.com/cb"), + }, + description: "should fail because redirect uri is not https", + expectErr: fosite.ErrInvalidRequest, + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"https://asdf.com/cb"}, + Audience: []string{"https://www.ory.sh/api"}, + }, + RequestedAudience: []string{"https://www.ory.sh/not-api"}, + }, + RedirectURI: parseUrl("https://asdf.com/cb"), + }, + description: "should fail because audience doesn't match", + expectErr: fosite.ErrInvalidRequest, + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"https://asdf.de/cb"}, + Audience: []string{"https://www.ory.sh/api"}, + }, + RequestedAudience: []string{"https://www.ory.sh/api"}, + GrantedScope: fosite.Arguments{"a", "b"}, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{fosite.AccessToken: time.Now().UTC().Add(time.Hour)}, + }, + RequestedAt: time.Now().UTC(), + }, + State: "superstate", + RedirectURI: parseUrl("https://asdf.de/cb"), + }, + description: "should pass", + expect: func(t *testing.T, areq *fosite.AuthorizeRequest, aresp *fosite.AuthorizeResponse) { + code := aresp.GetParameters().Get("code") + assert.NotEmpty(t, code) + + assert.Equal(t, strings.Join(areq.GrantedScope, " "), aresp.GetParameters().Get("scope")) + assert.Equal(t, areq.State, aresp.GetParameters().Get("state")) + assert.Equal(t, fosite.ResponseModeQuery, areq.GetResponseMode()) + }, + }, + { + handler: oauth2.AuthorizeExplicitGrantHandler{ + Storage: store, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: strategy}, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + OmitRedirectScopeParam: true, + }, + }, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"https://asdf.de/cb"}, + Audience: []string{"https://www.ory.sh/api"}, + }, + RequestedAudience: []string{"https://www.ory.sh/api"}, + GrantedScope: fosite.Arguments{"a", "b"}, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{fosite.AccessToken: time.Now().UTC().Add(time.Hour)}, + }, + RequestedAt: time.Now().UTC(), + }, + State: "superstate", + RedirectURI: parseUrl("https://asdf.de/cb"), + }, + description: "should pass but no scope in redirect uri", + expect: func(t *testing.T, areq *fosite.AuthorizeRequest, aresp *fosite.AuthorizeResponse) { + code := aresp.GetParameters().Get("code") + assert.NotEmpty(t, code) + + assert.Empty(t, aresp.GetParameters().Get("scope")) + assert.Equal(t, areq.State, aresp.GetParameters().Get("state")) + assert.Equal(t, fosite.ResponseModeQuery, areq.GetResponseMode()) + }, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + aresp := fosite.NewAuthorizeResponse() + err := 
c.handler.HandleAuthorizeEndpointRequest(context.Background(), c.areq, aresp) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + + if c.expect != nil { + c.expect(t, c.areq, aresp) + } + }) + } + }) + } +} diff --git a/fosite/handler/oauth2/flow_authorize_code_token.go b/fosite/handler/oauth2/flow_authorize_code_token.go new file mode 100644 index 00000000000..fd014262171 --- /dev/null +++ b/fosite/handler/oauth2/flow_authorize_code_token.go @@ -0,0 +1,198 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" +) + +// HandleTokenEndpointRequest implements +// * https://tools.ietf.org/html/rfc6749#section-4.1.3 (everything) +func (c *AuthorizeExplicitGrantHandler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(errorsx.WithStack(fosite.ErrUnknownRequest)) + } + + if !request.GetClient().GetGrantTypes().Has("authorization_code") { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use authorization grant \"authorization_code\".")) + } + + code := request.GetRequestForm().Get("code") + signature := c.Strategy.AuthorizeCodeStrategy().AuthorizeCodeSignature(ctx, code) + authorizeRequest, err := c.Storage.AuthorizeCodeStorage().GetAuthorizeCodeSession(ctx, signature, request.GetSession()) + if errors.Is(err, fosite.ErrInvalidatedAuthorizeCode) { + if authorizeRequest == nil { + return fosite.ErrServerError. + WithHint("Misconfigured code lead to an error that prohibited the OAuth 2.0 Framework from processing this request."). + WithDebug("GetAuthorizeCodeSession must return a value for \"fosite.Requester\" when returning \"ErrInvalidatedAuthorizeCode\".") + } + + // If an authorize code is used twice, we revoke all refresh and access tokens associated with this request. + reqID := authorizeRequest.GetID() + hint := "The authorization code has already been used." + debug := "" + if revErr := c.Storage.TokenRevocationStorage().RevokeAccessToken(ctx, reqID); revErr != nil { + hint += " Additionally, an error occurred during processing the access token revocation." + debug += "Revocation of access_token lead to error " + revErr.Error() + "." + } + if revErr := c.Storage.TokenRevocationStorage().RevokeRefreshToken(ctx, reqID); revErr != nil { + hint += " Additionally, an error occurred during processing the refresh token revocation." + debug += "Revocation of refresh_token lead to error " + revErr.Error() + "." 
+ } + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint(hint).WithDebug(debug)) + } else if err != nil && errors.Is(err, fosite.ErrNotFound) { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithWrap(err).WithDebug(err.Error())) + } else if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + // The authorization server MUST verify that the authorization code is valid + // This needs to happen after store retrieval for the session to be hydrated properly + if err := c.Strategy.AuthorizeCodeStrategy().ValidateAuthorizeCode(ctx, request, code); err != nil { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithWrap(err).WithDebug(err.Error())) + } + + // Override scopes + request.SetRequestedScopes(authorizeRequest.GetRequestedScopes()) + + // Override audiences + request.SetRequestedAudience(authorizeRequest.GetRequestedAudience()) + + // The authorization server MUST ensure that the authorization code was issued to the authenticated + // confidential client, or if the client is public, ensure that the + // code was issued to "client_id" in the request, + if authorizeRequest.GetClient().GetID() != request.GetClient().GetID() { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client ID from this request does not match the one from the authorize request.")) + } + + // ensure that the "redirect_uri" parameter is present if the + // "redirect_uri" parameter was included in the initial authorization + // request as described in Section 4.1.1, and if included ensure that + // their values are identical. + forcedRedirectURI := authorizeRequest.GetRequestForm().Get("redirect_uri") + if forcedRedirectURI != "" && forcedRedirectURI != request.GetRequestForm().Get("redirect_uri") { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The \"redirect_uri\" from this request does not match the one from the authorize request.")) + } + + // Checking of POST client_id skipped, because: + // If the client type is confidential or the client was issued client + // credentials (or assigned other authentication requirements), the + // client MUST authenticate with the authorization server as described + // in Section 3.2.1. + request.SetSession(authorizeRequest.GetSession()) + request.SetID(authorizeRequest.GetID()) + + atLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeAuthorizationCode, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + request.GetSession().SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan).Round(time.Second)) + + rtLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeAuthorizationCode, fosite.RefreshToken, c.Config.GetRefreshTokenLifespan(ctx)) + if rtLifespan > -1 { + request.GetSession().SetExpiresAt(fosite.RefreshToken, time.Now().UTC().Add(rtLifespan).Round(time.Second)) + } + + return nil +} + +func canIssueRefreshToken(ctx context.Context, c *AuthorizeExplicitGrantHandler, request fosite.Requester) bool { + scope := c.Config.GetRefreshTokenScopes(ctx) + // Require one of the refresh token scopes, if set. + if len(scope) > 0 && !request.GetGrantedScopes().HasOneOf(scope...) { + return false + } + // Do not issue a refresh token to clients that cannot use the refresh token grant type. 
+ if !request.GetClient().GetGrantTypes().Has("refresh_token") { + return false + } + return true +} + +func (c *AuthorizeExplicitGrantHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) (err error) { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + code := requester.GetRequestForm().Get("code") + signature := c.Strategy.AuthorizeCodeStrategy().AuthorizeCodeSignature(ctx, code) + authorizeRequest, err := c.Storage.AuthorizeCodeStorage().GetAuthorizeCodeSession(ctx, signature, requester.GetSession()) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } else if err := c.Strategy.AuthorizeCodeStrategy().ValidateAuthorizeCode(ctx, requester, code); err != nil { + // This needs to happen after store retrieval for the session to be hydrated properly + return errorsx.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithDebug(err.Error())) + } + + for _, scope := range authorizeRequest.GetGrantedScopes() { + requester.GrantScope(scope) + } + + for _, audience := range authorizeRequest.GetGrantedAudience() { + requester.GrantAudience(audience) + } + + access, accessSignature, err := c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + var refresh, refreshSignature string + if canIssueRefreshToken(ctx, c, authorizeRequest) { + refresh, refreshSignature, err = c.Strategy.RefreshTokenStrategy().GenerateRefreshToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + ctx, err = fosite.MaybeBeginTx(ctx, c.Storage) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + defer func() { + if err != nil { + if rollBackTxnErr := fosite.MaybeRollbackTx(ctx, c.Storage); rollBackTxnErr != nil { + err = errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebugf("error: %s; rollback error: %s", err, rollBackTxnErr)) + } + } + }() + + if err = c.Storage.AuthorizeCodeStorage().InvalidateAuthorizeCodeSession(ctx, signature); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } else if err = c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, accessSignature, requester.Sanitize([]string{})); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } else if refreshSignature != "" { + if err = c.Storage.RefreshTokenStorage().CreateRefreshTokenSession(ctx, refreshSignature, accessSignature, requester.Sanitize([]string{})); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + responder.SetAccessToken(access) + responder.SetTokenType("bearer") + atLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), fosite.GrantTypeAuthorizationCode, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + responder.SetExpiresIn(getExpiresIn(requester, fosite.AccessToken, atLifespan, time.Now().UTC())) + responder.SetScopes(requester.GetGrantedScopes()) + if refresh != "" { + responder.SetExtra("refresh_token", refresh) + } + + if err = fosite.MaybeCommitTx(ctx, c.Storage); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + return nil +} + +func 
(c *AuthorizeExplicitGrantHandler) CanSkipClientAuth(_ context.Context, _ fosite.AccessRequester) bool { + return false +} + +func (c *AuthorizeExplicitGrantHandler) CanHandleTokenEndpointRequest(_ context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. + // Value MUST be set to "authorization_code" + return requester.GetGrantTypes().ExactOne("authorization_code") +} diff --git a/fosite/handler/oauth2/flow_authorize_code_token_test.go b/fosite/handler/oauth2/flow_authorize_code_token_test.go new file mode 100644 index 00000000000..5cab8fe03f1 --- /dev/null +++ b/fosite/handler/oauth2/flow_authorize_code_token_test.go @@ -0,0 +1,1000 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "net/url" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/storage" +) + +func TestAuthorizeCode_PopulateTokenEndpointResponse(t *testing.T) { + for k, strategy := range map[string]oauth2.CoreStrategy{ + "hmac": hmacshaStrategy, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + + var h oauth2.AuthorizeExplicitGrantHandler + for _, c := range []struct { + areq *fosite.AccessRequest + description string + setup func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) + check func(t *testing.T, aresp *fosite.AccessResponse) + expectErr error + }{ + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"123"}, + }, + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + description: "should fail because authcode not found", + setup: func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) { + code, _, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form.Set("code", code) + }, + expectErr: fosite.ErrServerError, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{"code": []string{"foo.bar"}}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + description: "should fail because validation failed", + setup: func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) { + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), "bar", areq)) + }, + expectErr: fosite.ErrInvalidRequest, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "refresh_token"}, + }, + GrantedScope: fosite.Arguments{"foo", "offline"}, + Session: 
&fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) { + code, sig, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form.Add("code", code) + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), sig, areq)) + }, + description: "should pass with offline scope and refresh token", + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + assert.Equal(t, "bearer", aresp.TokenType) + assert.NotEmpty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Equal(t, "foo offline", aresp.GetExtra("scope")) + }, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "refresh_token"}, + }, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) { + config.RefreshTokenScopes = []string{} + code, sig, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form.Add("code", code) + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), sig, areq)) + }, + description: "should pass with refresh token always provided", + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + assert.Equal(t, "bearer", aresp.TokenType) + assert.NotEmpty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Equal(t, "foo", aresp.GetExtra("scope")) + }, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + }, + GrantedScope: fosite.Arguments{}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) { + config.RefreshTokenScopes = []string{} + code, sig, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form.Add("code", code) + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), sig, areq)) + }, + description: "should pass with no refresh token", + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + assert.Equal(t, "bearer", aresp.TokenType) + assert.Empty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Empty(t, aresp.GetExtra("scope")) + }, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + }, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, config *fosite.Config) { + code, sig, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form.Add("code", code) + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), 
sig, areq)) + }, + description: "should not have refresh token", + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + assert.Equal(t, "bearer", aresp.TokenType) + assert.Empty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Equal(t, "foo", aresp.GetExtra("scope")) + }, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + config := &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + AccessTokenLifespan: time.Minute, + RefreshTokenScopes: []string{"offline"}, + } + h = oauth2.AuthorizeExplicitGrantHandler{ + Storage: store, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: strategy}, + Config: config, + } + + if c.setup != nil { + c.setup(t, c.areq, config) + } + + aresp := fosite.NewAccessResponse() + err := h.PopulateTokenEndpointResponse(context.Background(), c.areq, aresp) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + } + + if c.check != nil { + c.check(t, aresp) + } + }) + } + }) + } +} + +func TestAuthorizeCode_HandleTokenEndpointRequest(t *testing.T) { + for k, strategy := range map[string]oauth2.CoreStrategy{ + "hmac": hmacshaStrategy, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + + h := oauth2.AuthorizeExplicitGrantHandler{ + Storage: store, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: hmacshaStrategy}, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + AuthorizeCodeLifespan: time.Minute, + }, + } + for i, c := range []struct { + areq *fosite.AccessRequest + authreq *fosite.AuthorizeRequest + description string + setup func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) + check func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) + expectErr error + }{ + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"12345678"}, + }, + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{""}}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + description: "should fail because client is not granted this grant type", + expectErr: fosite.ErrUnauthorizedClient, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{GrantTypes: []string{"authorization_code"}}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + description: "should fail because authcode could not be retrieved (1)", + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) { + token, _, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form = url.Values{"code": {token}} + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{"code": {"foo.bar"}}, + Client: &fosite.DefaultClient{GrantTypes: []string{"authorization_code"}}, + Session: 
&fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + description: "should fail because authcode validation failed", + expectErr: fosite.ErrInvalidGrant, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"authorization_code"}}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.AuthorizeRequest{ + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "bar"}, + RequestedScope: fosite.Arguments{"a", "b"}, + }, + }, + description: "should fail because client mismatch", + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) { + token, signature, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form = url.Values{"code": {token}} + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), signature, authreq)) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"authorization_code"}}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.AuthorizeRequest{ + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"authorization_code"}}, + Form: url.Values{"redirect_uri": []string{"request-redir"}}, + Session: &fosite.DefaultSession{}, + }, + }, + description: "should fail because redirect uri was set during /authorize call, but not in /token call", + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) { + token, signature, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form = url.Values{"code": {token}} + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), signature, authreq)) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"authorization_code"}}, + Form: url.Values{"redirect_uri": []string{"request-redir"}}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.AuthorizeRequest{ + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"authorization_code"}}, + Session: &fosite.DefaultSession{}, + RequestedScope: fosite.Arguments{"a", "b"}, + RequestedAt: time.Now().UTC(), + }, + }, + description: "should pass", + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) { + token, signature, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + + areq.Form = url.Values{"code": {token}} + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), signature, authreq)) + }, + }, + { + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + }, + GrantedScope: fosite.Arguments{"foo", "offline"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + check: func(t *testing.T, areq *fosite.AccessRequest, authreq 
*fosite.AuthorizeRequest) { + assert.Equal(t, time.Now().Add(time.Minute).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.AccessToken)) + assert.Equal(t, time.Now().Add(time.Minute).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.RefreshToken)) + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.AuthorizeRequest) { + code, sig, err := strategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + areq.Form.Add("code", code) + + require.NoError(t, store.CreateAuthorizeCodeSession(context.Background(), sig, areq)) + require.NoError(t, store.InvalidateAuthorizeCodeSession(context.Background(), sig)) + }, + description: "should fail because code has been used already", + expectErr: fosite.ErrInvalidGrant, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", i, c.description), func(t *testing.T) { + if c.setup != nil { + c.setup(t, c.areq, c.authreq) + } + + t.Logf("Processing %+v", c.areq.Client) + + err := h.HandleTokenEndpointRequest(context.Background(), c.areq) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + if c.check != nil { + c.check(t, c.areq, c.authreq) + } + } + }) + } + }) + } +} + +func TestAuthorizeCodeTransactional_HandleTokenEndpointRequest(t *testing.T) { + token, _, err := hmacshaStrategy.GenerateAuthorizeCode(context.Background(), nil) + require.NoError(t, err) + + request := &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "refresh_token"}, + }, + GrantedScope: fosite.Arguments{"offline"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + } + request.Form = url.Values{"code": {token}} + propagatedContext := context.Background() + + for k, c := range []struct { + description string + setup func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) + expectError error + }{ + { + description: "transaction should be committed successfully if no errors occur", + setup: func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + 
refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) { + authorizeCodeStrategyProvider.EXPECT().AuthorizeCodeStrategy().Return(authorizeCodeStrategy).Times(2) + authorizeCodeStrategy.EXPECT().AuthorizeCodeSignature(gomock.Any(), gomock.Any()) + authorizeCodeStrategy.EXPECT().ValidateAuthorizeCode(gomock.Any(), gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the authorize code storage mock + authorizeCodeStorageProvider. + EXPECT(). + AuthorizeCodeStorage(). + Return(authorizeCodeStorage). + Times(2) + + // Set up authorize code storage expectations + authorizeCodeStorage. + EXPECT(). + GetAuthorizeCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(request, nil). + Times(1) + authorizeCodeStorage. + EXPECT(). + InvalidateAuthorizeCodeSession(gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the access token storage mock + accessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(accessTokenStorage). + Times(1) + + // Set up access token storage expectations + accessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the refresh token storage mock + refreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(refreshTokenStorage). + Times(0) + + // Set up refresh token storage expectations + refreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(0) + + // Set up transaction expectations + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(nil). 
+ Times(1) + }, + }, + { + description: "transaction should be rolled back if `InvalidateAuthorizeCodeSession` returns an error", + setup: func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) { + authorizeCodeStrategyProvider.EXPECT().AuthorizeCodeStrategy().Return(authorizeCodeStrategy).Times(2) + authorizeCodeStrategy.EXPECT().AuthorizeCodeSignature(gomock.Any(), gomock.Any()) + authorizeCodeStrategy.EXPECT().ValidateAuthorizeCode(gomock.Any(), gomock.Any(), gomock.Any()) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), gomock.Any()) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the authorize code storage mock + authorizeCodeStorageProvider. + EXPECT(). + AuthorizeCodeStorage(). + Return(authorizeCodeStorage). + Times(2) + + // Set up authorize code storage expectations + authorizeCodeStorage. + EXPECT(). + GetAuthorizeCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(request, nil). + Times(1) + authorizeCodeStorage. + EXPECT(). + InvalidateAuthorizeCodeSession(gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + + // Set up transaction expectations + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). 
+ Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "transaction should be rolled back if `CreateAccessTokenSession` returns an error", + setup: func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) { + authorizeCodeStrategyProvider.EXPECT().AuthorizeCodeStrategy().Return(authorizeCodeStrategy).Times(2) + authorizeCodeStrategy.EXPECT().AuthorizeCodeSignature(gomock.Any(), gomock.Any()) + authorizeCodeStrategy.EXPECT().ValidateAuthorizeCode(gomock.Any(), gomock.Any(), gomock.Any()) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), gomock.Any()) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the authorize code storage mock + authorizeCodeStorageProvider. + EXPECT(). + AuthorizeCodeStorage(). + Return(authorizeCodeStorage). + Times(2) + + // Set up authorize code storage expectations + authorizeCodeStorage. + EXPECT(). + GetAuthorizeCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(request, nil). + Times(1) + authorizeCodeStorage. + EXPECT(). + InvalidateAuthorizeCodeSession(gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + + // Set up CoreStorage to return the access token storage mock + accessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(accessTokenStorage). + Times(1) + + // Set up access token storage expectations + accessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + + // Set up transaction expectations + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). 
+ Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be created", + setup: func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) { + authorizeCodeStrategyProvider.EXPECT().AuthorizeCodeStrategy().Return(authorizeCodeStrategy).Times(2) + authorizeCodeStrategy.EXPECT().AuthorizeCodeSignature(gomock.Any(), gomock.Any()) + authorizeCodeStrategy.EXPECT().ValidateAuthorizeCode(gomock.Any(), gomock.Any(), gomock.Any()) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), gomock.Any()) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the authorize code storage mock + authorizeCodeStorageProvider. + EXPECT(). + AuthorizeCodeStorage(). + Return(authorizeCodeStorage). + Times(1) + + // Set up authorize code storage expectations + authorizeCodeStorage. + EXPECT(). + GetAuthorizeCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(request, nil). + Times(1) + + // Set up transaction expectations + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). 
+ Return(nil, errors.New("Whoops, unable to create transaction!")) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be rolled back", + setup: func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) { + authorizeCodeStrategyProvider.EXPECT().AuthorizeCodeStrategy().Return(authorizeCodeStrategy).Times(2) + authorizeCodeStrategy.EXPECT().AuthorizeCodeSignature(gomock.Any(), gomock.Any()) + authorizeCodeStrategy.EXPECT().ValidateAuthorizeCode(gomock.Any(), gomock.Any(), gomock.Any()) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), gomock.Any()) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the authorize code storage mock + authorizeCodeStorageProvider. + EXPECT(). + AuthorizeCodeStorage(). + Return(authorizeCodeStorage). + Times(2) + + // Set up authorize code storage expectations + authorizeCodeStorage. + EXPECT(). + GetAuthorizeCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(request, nil). + Times(1) + authorizeCodeStorage. + EXPECT(). + InvalidateAuthorizeCodeSession(gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + + // Set up transaction expectations + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(errors.New("Whoops, unable to rollback transaction!")). 
+ Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be committed", + setup: func( + mockTransactional *internal.MockTransactional, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + authorizeCodeStorageProvider *internal.MockAuthorizeCodeStorageProvider, + authorizeCodeStorage *internal.MockAuthorizeCodeStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + authorizeCodeStrategyProvider *internal.MockAuthorizeCodeStrategyProvider, + authorizeCodeStrategy *internal.MockAuthorizeCodeStrategy, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + ) { + authorizeCodeStrategyProvider.EXPECT().AuthorizeCodeStrategy().Return(authorizeCodeStrategy).Times(2) + authorizeCodeStrategy.EXPECT().AuthorizeCodeSignature(gomock.Any(), gomock.Any()) + authorizeCodeStrategy.EXPECT().ValidateAuthorizeCode(gomock.Any(), gomock.Any(), gomock.Any()) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), gomock.Any()) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), gomock.Any()) + + // Set up CoreStorage to return the authorize code storage mock + authorizeCodeStorageProvider. + EXPECT(). + AuthorizeCodeStorage(). + Return(authorizeCodeStorage). + Times(2) + + // Set up authorize code storage expectations + authorizeCodeStorage. + EXPECT(). + GetAuthorizeCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(request, nil). + Times(1) + authorizeCodeStorage. + EXPECT(). + InvalidateAuthorizeCodeSession(gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + + // Set up CoreStorage to return the access token storage mock + accessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(accessTokenStorage). + Times(1) + + // Set up access token storage expectations + accessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + + // Set up CoreStorage to return the refresh token storage mock + refreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(refreshTokenStorage). + Times(0) + + // Set up refresh token storage expectations + refreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(0) + + // Set up transaction expectations + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(errors.New("Whoops, unable to commit transaction!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). 
+ Times(1) + }, + expectError: fosite.ErrServerError, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + // Initialize all mocks + mockTransactional := internal.NewMockTransactional(ctrl) + + tokenRevocationStorageProvider := internal.NewMockTokenRevocationStorageProvider(ctrl) + tokenRevocationStorage := internal.NewMockTokenRevocationStorage(ctrl) + + authorizeCodeStorageProvider := internal.NewMockAuthorizeCodeStorageProvider(ctrl) + authorizeCodeStorage := internal.NewMockAuthorizeCodeStorage(ctrl) + + accessTokenStorageProvider := internal.NewMockAccessTokenStorageProvider(ctrl) + accessTokenStorage := internal.NewMockAccessTokenStorage(ctrl) + + refreshTokenStorageProvider := internal.NewMockRefreshTokenStorageProvider(ctrl) + refreshTokenStorage := internal.NewMockRefreshTokenStorage(ctrl) + + authorizeCodeStrategyProvider := internal.NewMockAuthorizeCodeStrategyProvider(ctrl) + authorizeCodeStrategy := internal.NewMockAuthorizeCodeStrategy(ctrl) + + accessTokenStrategyProvider := internal.NewMockAccessTokenStrategyProvider(ctrl) + accessTokenStrategy := internal.NewMockAccessTokenStrategy(ctrl) + + refreshTokenStrategyProvider := internal.NewMockRefreshTokenStrategyProvider(ctrl) + refreshTokenStrategy := internal.NewMockRefreshTokenStrategy(ctrl) + + // define concrete types + mockStorage := struct { + *internal.MockAuthorizeCodeStorageProvider + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + *internal.MockTokenRevocationStorageProvider + *internal.MockTransactional + }{ + MockAuthorizeCodeStorageProvider: authorizeCodeStorageProvider, + MockAccessTokenStorageProvider: accessTokenStorageProvider, + MockRefreshTokenStorageProvider: refreshTokenStorageProvider, + MockTokenRevocationStorageProvider: tokenRevocationStorageProvider, + MockTransactional: mockTransactional, + } + + mockStrategy := struct { + *internal.MockAuthorizeCodeStrategyProvider + *internal.MockAccessTokenStrategyProvider + *internal.MockRefreshTokenStrategyProvider + }{ + MockAuthorizeCodeStrategyProvider: authorizeCodeStrategyProvider, + MockAccessTokenStrategyProvider: accessTokenStrategyProvider, + MockRefreshTokenStrategyProvider: refreshTokenStrategyProvider, + } + + handler := oauth2.AuthorizeExplicitGrantHandler{ + Storage: mockStorage, + Strategy: mockStrategy, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + AuthorizeCodeLifespan: time.Minute, + }, + } + + // set up mock expectations + c.setup( + mockTransactional, + tokenRevocationStorageProvider, + tokenRevocationStorage, + authorizeCodeStorageProvider, + authorizeCodeStorage, + accessTokenStorageProvider, + accessTokenStorage, + refreshTokenStorageProvider, + refreshTokenStorage, + authorizeCodeStrategyProvider, + authorizeCodeStrategy, + accessTokenStrategyProvider, + accessTokenStrategy, + refreshTokenStrategyProvider, + refreshTokenStrategy, + ) + + // invoke function under test + if err := handler.PopulateTokenEndpointResponse(propagatedContext, request, fosite.NewAccessResponse()); c.expectError != nil { + assert.EqualError(t, err, c.expectError.Error()) + } + }) + } +} diff --git a/fosite/handler/oauth2/flow_authorize_implicit.go b/fosite/handler/oauth2/flow_authorize_implicit.go new file mode 100644 index 00000000000..21d0791dabf --- /dev/null +++ b/fosite/handler/oauth2/flow_authorize_implicit.go @@ -0,0 
+1,91 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "strconv" + "strings" + "time" + + "github.com/ory/x/errorsx" + + "github.com/ory/hydra/v2/fosite" +) + +var _ fosite.AuthorizeEndpointHandler = (*AuthorizeImplicitGrantHandler)(nil) + +// AuthorizeImplicitGrantHandler is a response handler for the implicit grant type +// as defined in https://tools.ietf.org/html/rfc6749#section-4.2 +type AuthorizeImplicitGrantHandler struct { + Strategy AccessTokenStrategyProvider + Storage AccessTokenStorageProvider + Config interface { + fosite.AccessTokenLifespanProvider + fosite.ScopeStrategyProvider + fosite.AudienceStrategyProvider + } +} + +func (c *AuthorizeImplicitGrantHandler) HandleAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + // This lets us define multiple response types, for example OpenID Connect's id_token + if !ar.GetResponseTypes().ExactOne("token") { + return nil + } + + ar.SetDefaultResponseMode(fosite.ResponseModeFragment) + + // Disabled because this is already handled at the authorize_request_handler + // if !ar.GetClient().GetResponseTypes().Has("token") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use response type token")) + // } + + if !ar.GetClient().GetGrantTypes().Has("implicit") { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant 'implicit'.")) + } + + client := ar.GetClient() + for _, scope := range ar.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + + if err := c.Config.GetAudienceStrategy(ctx)(client.GetAudience(), ar.GetRequestedAudience()); err != nil { + return err + } + + // there is no need to check for https, because implicit flow does not require https + // https://tools.ietf.org/html/rfc6819#section-4.4.2 + + return c.IssueImplicitAccessToken(ctx, ar, resp) +} + +func (c *AuthorizeImplicitGrantHandler) IssueImplicitAccessToken(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + // Only override expiry if none is set.
+ atLifespan := fosite.GetEffectiveLifespan(ar.GetClient(), fosite.GrantTypeImplicit, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + if ar.GetSession().GetExpiresAt(fosite.AccessToken).IsZero() { + ar.GetSession().SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan).Round(time.Second)) + } + + // Generate the access token + token, signature, err := c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, ar) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if err := c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, signature, ar.Sanitize([]string{})); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + resp.AddParameter("access_token", token) + resp.AddParameter("expires_in", strconv.FormatInt(int64(getExpiresIn(ar, fosite.AccessToken, atLifespan, time.Now().UTC())/time.Second), 10)) + resp.AddParameter("token_type", "bearer") + resp.AddParameter("state", ar.GetState()) + resp.AddParameter("scope", strings.Join(ar.GetGrantedScopes(), " ")) + + ar.SetResponseTypeHandled("token") + + return nil +} diff --git a/fosite/handler/oauth2/flow_authorize_implicit_test.go b/fosite/handler/oauth2/flow_authorize_implicit_test.go new file mode 100644 index 00000000000..aa176a5f2d5 --- /dev/null +++ b/fosite/handler/oauth2/flow_authorize_implicit_test.go @@ -0,0 +1,180 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestAuthorizeImplicit_EndpointHandler(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + areq := fosite.NewAuthorizeRequest() + areq.Session = new(fosite.DefaultSession) + h, store, provider, chgen, chgenp, aresp := makeAuthorizeImplicitGrantTypeHandler(ctrl) + + for k, c := range []struct { + description string + setup func() + expectErr error + }{ + { + description: "should pass because not responsible for handling the response type", + setup: func() { + areq.ResponseTypes = fosite.Arguments{"a"} + }, + }, + { + description: "should fail because access token generation failed", + setup: func() { + areq.ResponseTypes = fosite.Arguments{"token"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token"}, + } + chgenp.EXPECT().AccessTokenStrategy().Return(chgen).Times(1) + chgen.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return("", "", errors.New("")) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should fail because scope invalid", + setup: func() { + areq.ResponseTypes = fosite.Arguments{"token"} + areq.RequestedScope = fosite.Arguments{"scope"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token"}, + } + }, + expectErr: fosite.ErrInvalidScope, + }, + { + description: "should fail because audience invalid", + setup: func() { + areq.ResponseTypes = fosite.Arguments{"token"} + areq.RequestedScope = fosite.Arguments{"scope"} + areq.RequestedAudience = 
fosite.Arguments{"https://www.ory.sh/not-api"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token"}, + Scopes: []string{"scope"}, + Audience: []string{"https://www.ory.sh/api"}, + } + }, + expectErr: fosite.ErrInvalidRequest, + }, + { + description: "should fail because persistence failed", + setup: func() { + areq.RequestedAudience = fosite.Arguments{"https://www.ory.sh/api"} + chgenp.EXPECT().AccessTokenStrategy().Return(chgen).Times(1) + chgen.EXPECT().GenerateAccessToken(gomock.Any(), areq).AnyTimes().Return("access.ats", "ats", nil) + provider.EXPECT().AccessTokenStorage().Return(store).Times(1) + store.EXPECT().CreateAccessTokenSession(gomock.Any(), "ats", gomock.Eq(areq.Sanitize([]string{}))).Return(errors.New("")) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should pass", + setup: func() { + areq.State = "state" + areq.GrantedScope = fosite.Arguments{"scope"} + chgenp.EXPECT().AccessTokenStrategy().Return(chgen).Times(1) + provider.EXPECT().AccessTokenStorage().Return(store).Times(1) + store.EXPECT().CreateAccessTokenSession(gomock.Any(), "ats", gomock.Eq(areq.Sanitize([]string{}))).AnyTimes().Return(nil) + + aresp.EXPECT().AddParameter("access_token", "access.ats") + aresp.EXPECT().AddParameter("expires_in", gomock.Any()) + aresp.EXPECT().AddParameter("token_type", "bearer") + aresp.EXPECT().AddParameter("state", "state") + aresp.EXPECT().AddParameter("scope", "scope") + }, + expectErr: nil, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.setup() + err := h.HandleAuthorizeEndpointRequest(context.Background(), areq, aresp) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + }) + } +} + +func makeAuthorizeImplicitGrantTypeHandler(ctrl *gomock.Controller) (oauth2.AuthorizeImplicitGrantHandler, + *internal.MockAccessTokenStorage, *internal.MockAccessTokenStorageProvider, *internal.MockAccessTokenStrategy, *internal.MockAccessTokenStrategyProvider, *internal.MockAuthorizeResponder, +) { + store := internal.NewMockAccessTokenStorage(ctrl) + provider := internal.NewMockAccessTokenStorageProvider(ctrl) + chgen := internal.NewMockAccessTokenStrategy(ctrl) + chgenp := internal.NewMockAccessTokenStrategyProvider(ctrl) + aresp := internal.NewMockAuthorizeResponder(ctrl) + + h := oauth2.AuthorizeImplicitGrantHandler{ + Storage: provider, + Strategy: chgenp, + Config: &fosite.Config{ + AccessTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + }, + } + + return h, store, provider, chgen, chgenp, aresp +} + +func TestDefaultResponseMode_AuthorizeImplicit_EndpointHandler(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + areq := fosite.NewAuthorizeRequest() + areq.Session = new(fosite.DefaultSession) + h, store, provider, chgen, chgenp, aresp := makeAuthorizeImplicitGrantTypeHandler(ctrl) + + areq.State = "state" + areq.GrantedScope = fosite.Arguments{"scope"} + areq.ResponseTypes = fosite.Arguments{"token"} + areq.Client = &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token"}, + }, + TokenLifespans: &internal.TestLifespans, + } + + provider.EXPECT().AccessTokenStorage().Return(store).Times(1) + store.EXPECT().CreateAccessTokenSession(gomock.Any(), "ats", 
gomock.Eq(areq.Sanitize([]string{}))).AnyTimes().Return(nil) + + aresp.EXPECT().AddParameter("access_token", "access.ats") + aresp.EXPECT().AddParameter("expires_in", gomock.Any()) + aresp.EXPECT().AddParameter("token_type", "bearer") + aresp.EXPECT().AddParameter("state", "state") + aresp.EXPECT().AddParameter("scope", "scope") + chgenp.EXPECT().AccessTokenStrategy().Return(chgen).Times(1) + chgen.EXPECT().GenerateAccessToken(gomock.Any(), areq).AnyTimes().Return("access.ats", "ats", nil) + + err := h.HandleAuthorizeEndpointRequest(context.Background(), areq, aresp) + assert.NoError(t, err) + assert.Equal(t, fosite.ResponseModeFragment, areq.GetResponseMode()) + + internal.RequireEqualTime(t, time.Now().UTC().Add(*internal.TestLifespans.ImplicitGrantAccessTokenLifespan), areq.Session.GetExpiresAt(fosite.AccessToken), time.Minute) +} diff --git a/fosite/handler/oauth2/flow_client_credentials.go b/fosite/handler/oauth2/flow_client_credentials.go new file mode 100644 index 00000000000..b69ff39d84a --- /dev/null +++ b/fosite/handler/oauth2/flow_client_credentials.go @@ -0,0 +1,100 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/errorsx" +) + +var _ fosite.TokenEndpointHandler = (*ClientCredentialsGrantHandler)(nil) + +type ClientCredentialsGrantHandler struct { + Storage AccessTokenStorageProvider + Strategy AccessTokenStrategyProvider + Config interface { + fosite.ScopeStrategyProvider + fosite.AudienceStrategyProvider + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider + } +} + +// HandleTokenEndpointRequest implements https://tools.ietf.org/html/rfc6749#section-4.4.2 +func (c *ClientCredentialsGrantHandler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + client := request.GetClient() + for _, scope := range request.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + + if err := c.Config.GetAudienceStrategy(ctx)(client.GetAudience(), request.GetRequestedAudience()); err != nil { + return err + } + + // The client MUST authenticate with the authorization server as described in Section 3.2.1. + // This requirement is already fulfilled because fosite requires all token requests to be authenticated as described + // in https://tools.ietf.org/html/rfc6749#section-3.2.1 + if client.IsPublic() { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client is marked as public and is thus not allowed to use authorization grant 'client_credentials'.")) + } + // If the client is not public, it has already been authenticated by the access request handler.
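Aside (not part of the diff): a minimal sketch of the client side of this grant using golang.org/x/oauth2/clientcredentials. The token URL, client ID, secret, and scope below are placeholders rather than values taken from this change.

package main

import (
	"context"
	"fmt"

	"golang.org/x/oauth2/clientcredentials"
)

func main() {
	// Confidential client requesting a token via the client_credentials grant
	// (RFC 6749 section 4.4). All values are hypothetical placeholders.
	conf := &clientcredentials.Config{
		ClientID:     "my-service",
		ClientSecret: "my-secret",
		TokenURL:     "https://auth.example.com/oauth2/token",
		Scopes:       []string{"foo"},
	}
	tok, err := conf.Token(context.Background())
	if err != nil {
		fmt.Println("token request failed:", err)
		return
	}
	// The handler above responds with token_type "bearer" and an expires_in
	// derived from the configured (or client-specific) access token lifespan.
	fmt.Println(tok.TokenType, tok.Expiry)
}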
+ + atLifespan := fosite.GetEffectiveLifespan(client, fosite.GrantTypeClientCredentials, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + request.GetSession().SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan)) + return nil +} + +// PopulateTokenEndpointResponse implements https://tools.ietf.org/html/rfc6749#section-4.4.3 +func (c *ClientCredentialsGrantHandler) PopulateTokenEndpointResponse(ctx context.Context, request fosite.AccessRequester, response fosite.AccessResponder) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !request.GetClient().GetGrantTypes().Has("client_credentials") { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use authorization grant 'client_credentials'.")) + } + + atLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeClientCredentials, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + _, err := c.IssueAccessToken(ctx, atLifespan, request, response) + return err +} + +func (c *ClientCredentialsGrantHandler) IssueAccessToken(ctx context.Context, atLifespan time.Duration, requester fosite.AccessRequester, responder fosite.AccessResponder) (signature string, err error) { + token, signature, err := c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, requester) + if err != nil { + return "", err + } else if err := c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, signature, requester.Sanitize([]string{})); err != nil { + return "", err + } + + if !requester.GetSession().GetExpiresAt(fosite.AccessToken).IsZero() { + atLifespan = time.Duration(requester.GetSession().GetExpiresAt(fosite.AccessToken).UnixNano() - time.Now().UTC().UnixNano()) + } + + responder.SetAccessToken(token) + responder.SetTokenType("bearer") + responder.SetExpiresIn(atLifespan) + responder.SetScopes(requester.GetGrantedScopes()) + + return signature, nil +} + +func (c *ClientCredentialsGrantHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *ClientCredentialsGrantHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. + // Value MUST be set to "client_credentials". 
+ return requester.GetGrantTypes().ExactOne("client_credentials") +} diff --git a/fosite/handler/oauth2/flow_client_credentials_test.go b/fosite/handler/oauth2/flow_client_credentials_test.go new file mode 100644 index 00000000000..e0863a92bf8 --- /dev/null +++ b/fosite/handler/oauth2/flow_client_credentials_test.go @@ -0,0 +1,163 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestClientCredentials_HandleTokenEndpointRequest(t *testing.T) { + ctrl := gomock.NewController(t) + provider := internal.NewMockAccessTokenStorageProvider(ctrl) + chgenp := internal.NewMockAccessTokenStrategyProvider(ctrl) + areq := internal.NewMockAccessRequester(ctrl) + t.Cleanup(ctrl.Finish) + + h := oauth2.ClientCredentialsGrantHandler{ + Storage: provider, + Strategy: chgenp, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + AccessTokenLifespan: time.Hour, + }, + } + for k, c := range []struct { + description string + mock func() + req *http.Request + expectErr error + }{ + { + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + mock: func() { + areq.EXPECT().GetGrantTypes().Return(fosite.Arguments{""}) + }, + }, + { + description: "should fail because audience not valid", + expectErr: fosite.ErrInvalidRequest, + mock: func() { + areq.EXPECT().GetGrantTypes().Return(fosite.Arguments{"client_credentials"}) + areq.EXPECT().GetRequestedScopes().Return([]string{}) + areq.EXPECT().GetRequestedAudience().Return([]string{"https://www.ory.sh/not-api"}) + areq.EXPECT().GetClient().Return(&fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"client_credentials"}, + Audience: []string{"https://www.ory.sh/api"}, + }) + }, + }, + { + description: "should fail because scope not valid", + expectErr: fosite.ErrInvalidScope, + mock: func() { + areq.EXPECT().GetGrantTypes().Return(fosite.Arguments{"client_credentials"}) + areq.EXPECT().GetRequestedScopes().Return([]string{"foo", "bar", "baz.bar"}) + areq.EXPECT().GetClient().Return(&fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"client_credentials"}, + Scopes: []string{"foo"}, + }) + }, + }, + { + description: "should pass", + mock: func() { + areq.EXPECT().GetSession().Return(new(fosite.DefaultSession)) + areq.EXPECT().GetGrantTypes().Return(fosite.Arguments{"client_credentials"}) + areq.EXPECT().GetRequestedScopes().Return([]string{"foo", "bar", "baz.bar"}) + areq.EXPECT().GetRequestedAudience().Return([]string{}) + areq.EXPECT().GetClient().Return(&fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"client_credentials"}, + Scopes: []string{"foo", "bar", "baz"}, + }) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.mock() + err := h.HandleTokenEndpointRequest(context.Background(), areq) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestClientCredentials_PopulateTokenEndpointResponse(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockClientCredentialsGrantStorage(ctrl) + provider := 
internal.NewMockAccessTokenStorageProvider(ctrl) + chgen := internal.NewMockAccessTokenStrategy(ctrl) + chgenp := internal.NewMockAccessTokenStrategyProvider(ctrl) + areq := fosite.NewAccessRequest(new(fosite.DefaultSession)) + aresp := fosite.NewAccessResponse() + t.Cleanup(ctrl.Finish) + + h := oauth2.ClientCredentialsGrantHandler{ + Storage: provider, + Strategy: chgenp, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AccessTokenLifespan: time.Hour, + }, + } + for k, c := range []struct { + description string + mock func() + req *http.Request + expectErr error + }{ + { + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + mock: func() { + areq.GrantTypes = fosite.Arguments{""} + }, + }, + { + description: "should fail because grant_type not allowed", + expectErr: fosite.ErrUnauthorizedClient, + mock: func() { + areq.GrantTypes = fosite.Arguments{"client_credentials"} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"authorization_code"}} + }, + }, + { + description: "should pass", + mock: func() { + areq.GrantTypes = fosite.Arguments{"client_credentials"} + areq.Session = &fosite.DefaultSession{} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"client_credentials"}} + chgenp.EXPECT().AccessTokenStrategy().Return(chgen).Times(1) + chgen.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return("tokenfoo.bar", "bar", nil) + provider.EXPECT().AccessTokenStorage().Return(store).Times(1) + store.EXPECT().CreateAccessTokenSession(gomock.Any(), "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.mock() + err := h.PopulateTokenEndpointResponse(context.Background(), areq, aresp) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/fosite/handler/oauth2/flow_refresh.go b/fosite/handler/oauth2/flow_refresh.go new file mode 100644 index 00000000000..95b8bc091f8 --- /dev/null +++ b/fosite/handler/oauth2/flow_refresh.go @@ -0,0 +1,247 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" +) + +var _ fosite.TokenEndpointHandler = (*RefreshTokenGrantHandler)(nil) + +type RefreshTokenGrantHandler struct { + Strategy interface { + AccessTokenStrategyProvider + RefreshTokenStrategyProvider + } + Storage interface { + TokenRevocationStorageProvider + AccessTokenStorageProvider + RefreshTokenStorageProvider + } + Config interface { + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider + fosite.ScopeStrategyProvider + fosite.AudienceStrategyProvider + fosite.RefreshTokenScopesProvider + } +} + +// HandleTokenEndpointRequest implements https://tools.ietf.org/html/rfc6749#section-6 +func (c *RefreshTokenGrantHandler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !request.GetClient().GetGrantTypes().Has("refresh_token") { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use authorization grant 'refresh_token'.")) + } + + refresh := request.GetRequestForm().Get("refresh_token") + 
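+	// Look up the original session by the token's signature so that reuse, expiry, and
+	// client binding can be validated against the grant that issued the refresh token.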
signature := c.Strategy.RefreshTokenStrategy().RefreshTokenSignature(ctx, refresh) + originalRequest, err := c.Storage.RefreshTokenStorage().GetRefreshTokenSession(ctx, signature, request.GetSession()) + if errors.Is(err, fosite.ErrInactiveToken) { + // Detected refresh token reuse + if rErr := c.handleRefreshTokenReuse(ctx, signature, originalRequest); rErr != nil { + return errorsx.WithStack(rErr) + } + + return fosite.ErrInvalidGrant.WithWrap(err). + WithHint("The refresh token was already used."). + WithDebugf("Refresh token re-use was detected. All related tokens have been revoked.") + } else if errors.Is(err, fosite.ErrNotFound) { + return fosite.ErrInvalidGrant.WithWrap(err). + WithHint("The refresh token is malformed or not valid."). + WithDebug("The refresh token can not be found.") + } else if err != nil { + return fosite.ErrServerError.WithWrap(err).WithDebug(err.Error()) + } + + if err := c.Strategy.RefreshTokenStrategy().ValidateRefreshToken(ctx, originalRequest, refresh); err != nil { + // The authorization server MUST ... validate the refresh token. + // This needs to happen after store retrieval for the session to be hydrated properly + if errors.Is(err, fosite.ErrTokenExpired) { + return fosite.ErrInvalidGrant.WithWrap(err). + WithHint("The refresh token expired.") + } + return fosite.ErrInvalidRequest.WithWrap(err).WithDebug(err.Error()) + } + + if !(len(c.Config.GetRefreshTokenScopes(ctx)) == 0 || originalRequest.GetGrantedScopes().HasOneOf(c.Config.GetRefreshTokenScopes(ctx)...)) { + scopeNames := strings.Join(c.Config.GetRefreshTokenScopes(ctx), " or ") + hint := fmt.Sprintf("The OAuth 2.0 Client was not granted scope %s and may thus not perform the 'refresh_token' authorization grant.", scopeNames) + return errorsx.WithStack(fosite.ErrScopeNotGranted.WithHint(hint)) + } + + // The authorization server MUST ... 
and ensure that the refresh token was issued to the authenticated client + if originalRequest.GetClient().GetID() != request.GetClient().GetID() { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client ID from this request does not match the ID during the initial token issuance.")) + } + + request.SetID(originalRequest.GetID()) + request.SetSession(originalRequest.GetSession().Clone()) + request.SetRequestedScopes(originalRequest.GetRequestedScopes()) + request.SetRequestedAudience(originalRequest.GetRequestedAudience()) + + for _, scope := range originalRequest.GetGrantedScopes() { + if !c.Config.GetScopeStrategy(ctx)(request.GetClient().GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + request.GrantScope(scope) + } + + if err := c.Config.GetAudienceStrategy(ctx)(request.GetClient().GetAudience(), originalRequest.GetGrantedAudience()); err != nil { + return err + } + + for _, audience := range originalRequest.GetGrantedAudience() { + request.GrantAudience(audience) + } + + atLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeRefreshToken, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + request.GetSession().SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan).Round(time.Second)) + + rtLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeRefreshToken, fosite.RefreshToken, c.Config.GetRefreshTokenLifespan(ctx)) + if rtLifespan > -1 { + request.GetSession().SetExpiresAt(fosite.RefreshToken, time.Now().UTC().Add(rtLifespan).Round(time.Second)) + } + + return nil +} + +// PopulateTokenEndpointResponse implements https://tools.ietf.org/html/rfc6749#section-6 +func (c *RefreshTokenGrantHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) (err error) { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + accessToken, accessSignature, err := c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + refreshToken, refreshSignature, err := c.Strategy.RefreshTokenStrategy().GenerateRefreshToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + signature := c.Strategy.RefreshTokenStrategy().RefreshTokenSignature(ctx, requester.GetRequestForm().Get("refresh_token")) + + ctx, err = fosite.MaybeBeginTx(ctx, c.Storage) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + storeReq := requester.Sanitize([]string{}) + storeReq.SetID(requester.GetID()) + + if err = c.Storage.RefreshTokenStorage().RotateRefreshToken(ctx, requester.GetID(), signature); err != nil { + return c.handleRefreshTokenEndpointStorageError(ctx, err) + } + + if err = c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, accessSignature, storeReq); err != nil { + return c.handleRefreshTokenEndpointStorageError(ctx, err) + } + + if err = c.Storage.RefreshTokenStorage().CreateRefreshTokenSession(ctx, refreshSignature, accessSignature, storeReq); err != nil { + return c.handleRefreshTokenEndpointStorageError(ctx, err) + } + + responder.SetAccessToken(accessToken) + responder.SetTokenType("bearer") + atLifespan := 
fosite.GetEffectiveLifespan(requester.GetClient(), fosite.GrantTypeRefreshToken, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + responder.SetExpiresIn(getExpiresIn(requester, fosite.AccessToken, atLifespan, time.Now().UTC())) + responder.SetScopes(requester.GetGrantedScopes()) + responder.SetExtra("refresh_token", refreshToken) + + if err = fosite.MaybeCommitTx(ctx, c.Storage); err != nil { + return c.handleRefreshTokenEndpointStorageError(ctx, err) + } + + return nil +} + +// Reference: https://tools.ietf.org/html/rfc6819#section-5.2.2.3 +// +// The basic idea is to change the refresh token +// value with every refresh request in order to detect attempts to +// obtain access tokens using old refresh tokens. Since the +// authorization server cannot determine whether the attacker or the +// legitimate client is trying to access, in case of such an access +// attempt the valid refresh token and the access authorization +// associated with it are both revoked. +func (c *RefreshTokenGrantHandler) handleRefreshTokenReuse(ctx context.Context, signature string, req fosite.Requester) (err error) { + ctx, err = fosite.MaybeBeginTx(ctx, c.Storage) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + defer func() { + err = c.handleRefreshTokenEndpointStorageError(ctx, err) + }() + + if err = c.Storage.RefreshTokenStorage().DeleteRefreshTokenSession(ctx, signature); err != nil { + return err + } else if err = c.Storage.TokenRevocationStorage().RevokeRefreshToken( + ctx, req.GetID(), + ); err != nil && !errors.Is(err, fosite.ErrNotFound) { + return err + } else if err = c.Storage.TokenRevocationStorage().RevokeAccessToken( + ctx, req.GetID(), + ); err != nil && !errors.Is(err, fosite.ErrNotFound) { + return err + } + + if err = fosite.MaybeCommitTx(ctx, c.Storage); err != nil { + return err + } + + return nil +} + +func (c *RefreshTokenGrantHandler) handleRefreshTokenEndpointStorageError(ctx context.Context, storageErr error) (err error) { + if storageErr == nil { + return nil + } + + defer func() { + if rollBackTxnErr := fosite.MaybeRollbackTx(ctx, c.Storage); rollBackTxnErr != nil { + err = errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebugf("error: %s; rollback error: %s", err, rollBackTxnErr)) + } + }() + + if errors.Is(storageErr, fosite.ErrSerializationFailure) { + return errorsx.WithStack(fosite.ErrInvalidRequest. + WithDebug(storageErr.Error()). + WithWrap(storageErr). + WithHint("Failed to refresh token because of multiple concurrent requests using the same token. Please retry the request.")) + } + + if errors.Is(storageErr, fosite.ErrNotFound) || errors.Is(storageErr, fosite.ErrInactiveToken) { + return errorsx.WithStack(fosite.ErrInvalidRequest. + WithDebug(storageErr.Error()). + WithWrap(storageErr). + WithHint("Failed to refresh token. Please retry the request.")) + } + + return errorsx.WithStack(fosite.ErrServerError.WithWrap(storageErr).WithDebug(storageErr.Error())) +} + +func (c *RefreshTokenGrantHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *RefreshTokenGrantHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. + // Value MUST be set to "refresh_token". 
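+	//
+	// For illustration only, the corresponding token request body (RFC 6749 section 6,
+	// values are placeholders) looks like:
+	//
+	//   grant_type=refresh_token&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA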
+ return requester.GetGrantTypes().ExactOne("refresh_token") +} diff --git a/fosite/handler/oauth2/flow_refresh_test.go b/fosite/handler/oauth2/flow_refresh_test.go new file mode 100644 index 00000000000..7972aae72f1 --- /dev/null +++ b/fosite/handler/oauth2/flow_refresh_test.go @@ -0,0 +1,1060 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "net/url" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/storage" +) + +func TestRefreshFlow_HandleTokenEndpointRequest(t *testing.T) { + var areq *fosite.AccessRequest + sess := &fosite.DefaultSession{Subject: "othersub"} + expiredSess := &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.RefreshToken: time.Now().UTC().Add(-time.Hour), + }, + } + + for k, strategy := range map[string]oauth2.CoreStrategy{ + "hmac": hmacshaStrategy, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + var handler *oauth2.RefreshTokenGrantHandler + for _, c := range []struct { + description string + setup func(config *fosite.Config) + expectErr error + expect func(t *testing.T) + }{ + { + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"123"} + }, + }, + { + description: "should fail because token invalid", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"refresh_token"}} + + areq.Form.Add("refresh_token", "some.refreshtokensig") + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should fail because token is valid but does not exist", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"refresh_token"}} + + token, _, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + areq.Form.Add("refresh_token", token) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should fail because client mismatches", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + } + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + Client: &fosite.DefaultClient{ID: ""}, + GrantedScope: []string{"offline"}, + Session: sess, + }) + require.NoError(t, err) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should fail because token is expired", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + Scopes: []string{"foo", "bar", "offline"}, + } + + token, sig, err 
:= strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo", "offline"}, + RequestedScope: fosite.Arguments{"foo", "bar", "offline"}, + Session: expiredSess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + }) + require.NoError(t, err) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should fail because offline scope has been granted but client no longer allowed to request it", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + } + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo", "offline"}, + RequestedScope: fosite.Arguments{"foo", "offline"}, + Session: sess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + }) + require.NoError(t, err) + }, + expectErr: fosite.ErrInvalidScope, + }, + { + description: "should pass", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + Scopes: []string{"foo", "bar", "offline"}, + } + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + + orReqID := areq.GetID() + "_OR" + areq.Form.Add("or_request_id", orReqID) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + ID: orReqID, + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo", "offline"}, + RequestedScope: fosite.Arguments{"foo", "bar", "offline"}, + Session: sess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + }) + require.NoError(t, err) + }, + expect: func(t *testing.T) { + assert.NotEqual(t, sess, areq.Session) + assert.NotEqual(t, time.Now().UTC().Add(-time.Hour).Round(time.Hour), areq.RequestedAt) + assert.Equal(t, fosite.Arguments{"foo", "offline"}, areq.GrantedScope) + assert.Equal(t, fosite.Arguments{"foo", "bar", "offline"}, areq.RequestedScope) + assert.NotEqual(t, url.Values{"foo": []string{"bar"}}, areq.Form) + assert.Equal(t, time.Now().Add(time.Hour).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.AccessToken)) + assert.Equal(t, time.Now().Add(time.Hour).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.RefreshToken)) + assert.EqualValues(t, areq.Form.Get("or_request_id"), areq.GetID(), "Requester ID should be replaced based on the refresh token session") + }, + }, + { + description: "should pass with custom client lifespans", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + Scopes: []string{"foo", "bar", "offline"}, + }, + } + + 
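+					// Attach per-client lifespan overrides so the handler resolves expiry via
+					// fosite.GetEffectiveLifespan from the client rather than the global config
+					// (compared against internal.TestLifespans in the expectations below).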
areq.Client.(*fosite.DefaultClientWithCustomTokenLifespans).SetTokenLifespans(&internal.TestLifespans) + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo", "offline"}, + RequestedScope: fosite.Arguments{"foo", "bar", "offline"}, + Session: sess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + }) + require.NoError(t, err) + }, + expect: func(t *testing.T) { + assert.NotEqual(t, sess, areq.Session) + assert.NotEqual(t, time.Now().UTC().Add(-time.Hour).Round(time.Hour), areq.RequestedAt) + assert.Equal(t, fosite.Arguments{"foo", "offline"}, areq.GrantedScope) + assert.Equal(t, fosite.Arguments{"foo", "bar", "offline"}, areq.RequestedScope) + assert.NotEqual(t, url.Values{"foo": []string{"bar"}}, areq.Form) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.RefreshTokenGrantAccessTokenLifespan).UTC(), areq.GetSession().GetExpiresAt(fosite.AccessToken), time.Minute) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.RefreshTokenGrantRefreshTokenLifespan).UTC(), areq.GetSession().GetExpiresAt(fosite.RefreshToken), time.Minute) + }, + }, + { + description: "should fail without offline scope", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + Scopes: []string{"foo", "bar"}, + } + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo"}, + RequestedScope: fosite.Arguments{"foo", "bar"}, + Session: sess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + }) + require.NoError(t, err) + }, + expectErr: fosite.ErrScopeNotGranted, + }, + { + description: "should pass without offline scope when configured to allow refresh tokens", + setup: func(config *fosite.Config) { + config.RefreshTokenScopes = []string{} + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + Scopes: []string{"foo", "bar"}, + } + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + err = store.CreateRefreshTokenSession(context.Background(), sig, "", &fosite.Request{ + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo"}, + RequestedScope: fosite.Arguments{"foo", "bar"}, + Session: sess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + }) + require.NoError(t, err) + }, + expect: func(t *testing.T) { + assert.NotEqual(t, sess, areq.Session) + assert.NotEqual(t, time.Now().UTC().Add(-time.Hour).Round(time.Hour), areq.RequestedAt) + assert.Equal(t, fosite.Arguments{"foo"}, areq.GrantedScope) + assert.Equal(t, fosite.Arguments{"foo", "bar"}, areq.RequestedScope) + assert.NotEqual(t, url.Values{"foo": []string{"bar"}}, areq.Form) + assert.Equal(t, 
time.Now().Add(time.Hour).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.AccessToken)) + assert.Equal(t, time.Now().Add(time.Hour).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.RefreshToken)) + }, + }, + { + description: "should deny access on token reuse", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + Scopes: []string{"foo", "bar", "offline"}, + } + + token, sig, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + + areq.Form.Add("refresh_token", token) + req := &fosite.Request{ + Client: areq.Client, + GrantedScope: fosite.Arguments{"foo", "offline"}, + RequestedScope: fosite.Arguments{"foo", "bar", "offline"}, + Session: sess, + Form: url.Values{"foo": []string{"bar"}}, + RequestedAt: time.Now().UTC().Add(-time.Hour).Round(time.Hour), + } + err = store.CreateRefreshTokenSession(context.Background(), sig, "", req) + require.NoError(t, err) + + err = store.RevokeRefreshToken(context.Background(), req.ID) + require.NoError(t, err) + }, + expectErr: fosite.ErrInvalidGrant, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + config := &fosite.Config{ + AccessTokenLifespan: time.Hour, + RefreshTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + RefreshTokenScopes: []string{"offline"}, + } + handler = &oauth2.RefreshTokenGrantHandler{ + Storage: store, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: strategy}, + Config: config, + } + + areq = fosite.NewAccessRequest(&fosite.DefaultSession{}) + areq.Form = url.Values{} + c.setup(config) + + err := handler.HandleTokenEndpointRequest(context.Background(), areq) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + + if c.expect != nil { + c.expect(t) + } + }) + } + }) + } +} + +func TestRefreshFlowTransactional_HandleTokenEndpointRequest(t *testing.T) { + var ( + mockTransactional *internal.MockTransactional + mockTokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider + mockTokenRevocationStorage *internal.MockTokenRevocationStorage + mockAccessTokenStorageProvider *internal.MockAccessTokenStorageProvider + mockRefreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider + mockRefreshTokenStorage *internal.MockRefreshTokenStorage + ) + + request := fosite.NewAccessRequest(&fosite.DefaultSession{}) + propagatedContext := context.Background() + + for _, testCase := range []struct { + description string + setup func() + expectError error + }{ + { + description: "should revoke session on token reuse", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + request.Client = &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"refresh_token"}, + } + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(2) + mockRefreshTokenStorage. + EXPECT(). + GetRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(request, fosite.ErrInactiveToken). + Times(1) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + DeleteRefreshTokenSession(propagatedContext, gomock.Any()). + Return(nil). 
+ Times(1) + mockTokenRevocationStorageProvider. + EXPECT(). + TokenRevocationStorage(). + Return(mockTokenRevocationStorage). + Times(2) + mockTokenRevocationStorage. + EXPECT(). + RevokeRefreshToken(propagatedContext, gomock.Any()). + Return(nil). + Times(1) + mockTokenRevocationStorage. + EXPECT(). + RevokeAccessToken(propagatedContext, gomock.Any()). + Return(nil). + Times(1) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrInvalidGrant, + }, + } { + t.Run(fmt.Sprintf("scenario=%s", testCase.description), func(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + mockTransactional = internal.NewMockTransactional(ctrl) + + mockTokenRevocationStorageProvider = internal.NewMockTokenRevocationStorageProvider(ctrl) + mockTokenRevocationStorage = internal.NewMockTokenRevocationStorage(ctrl) + + mockAccessTokenStorageProvider = internal.NewMockAccessTokenStorageProvider(ctrl) + + mockRefreshTokenStorageProvider = internal.NewMockRefreshTokenStorageProvider(ctrl) + mockRefreshTokenStorage = internal.NewMockRefreshTokenStorage(ctrl) + + // define concrete types + mockStorage := struct { + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + *internal.MockTokenRevocationStorageProvider + *internal.MockTransactional + }{ + MockAccessTokenStorageProvider: mockAccessTokenStorageProvider, + MockRefreshTokenStorageProvider: mockRefreshTokenStorageProvider, + MockTokenRevocationStorageProvider: mockTokenRevocationStorageProvider, + MockTransactional: mockTransactional, + } + + handler := oauth2.RefreshTokenGrantHandler{ + Storage: mockStorage, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: hmacshaStrategy}, + Config: &fosite.Config{ + AccessTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + }, + } + + testCase.setup() + + if err := handler.HandleTokenEndpointRequest(propagatedContext, request); testCase.expectError != nil { + assert.EqualError(t, err, testCase.expectError.Error()) + } + }) + } +} + +func TestRefreshFlow_PopulateTokenEndpointResponse(t *testing.T) { + var areq *fosite.AccessRequest + var aresp *fosite.AccessResponse + + for k, strategy := range map[string]oauth2.CoreStrategy{ + "hmac": hmacshaStrategy, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + + for _, c := range []struct { + description string + setup func(config *fosite.Config) + check func(t *testing.T) + expectErr error + }{ + { + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"313"} + }, + }, + { + description: "should pass", + setup: func(config *fosite.Config) { + areq.ID = "req-id" + areq.GrantTypes = fosite.Arguments{"refresh_token"} + areq.RequestedScope = fosite.Arguments{"foo", "bar"} + areq.GrantedScope = fosite.Arguments{"foo", "bar"} + + token, signature, err := strategy.GenerateRefreshToken(context.Background(), nil) + require.NoError(t, err) + require.NoError(t, store.CreateRefreshTokenSession(context.Background(), signature, "", areq)) + areq.Form.Add("refresh_token", token) + }, + check: func(t *testing.T) { + signature := strategy.RefreshTokenSignature(context.Background(), areq.Form.Get("refresh_token")) + + // The old refresh token should be deleted + _, err := store.GetRefreshTokenSession(context.Background(), signature, 
nil) + require.Error(t, err) + + assert.Equal(t, "req-id", areq.ID) + require.NoError(t, strategy.ValidateAccessToken(context.Background(), areq, aresp.GetAccessToken())) + require.NoError(t, strategy.ValidateRefreshToken(context.Background(), areq, aresp.ToMap()["refresh_token"].(string))) + assert.Equal(t, "bearer", aresp.GetTokenType()) + assert.NotEmpty(t, aresp.ToMap()["expires_in"]) + assert.Equal(t, "foo bar", aresp.ToMap()["scope"]) + }, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + config := &fosite.Config{ + AccessTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + } + h := oauth2.RefreshTokenGrantHandler{ + Storage: store, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: strategy}, + Config: config, + } + areq = fosite.NewAccessRequest(&fosite.DefaultSession{}) + aresp = fosite.NewAccessResponse() + areq.Client = &fosite.DefaultClient{} + areq.Form = url.Values{} + + c.setup(config) + + err := h.PopulateTokenEndpointResponse(context.Background(), areq, aresp) + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + assert.NoError(t, err) + } + + if c.check != nil { + c.check(t) + } + }) + } + }) + } +} + +func TestRefreshFlowTransactional_PopulateTokenEndpointResponse(t *testing.T) { + var ( + mockTransactional *internal.MockTransactional + mockTokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider + mockAccessTokenStorageProvider *internal.MockAccessTokenStorageProvider + mockAccessTokenStorage *internal.MockAccessTokenStorage + mockRefreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider + mockRefreshTokenStorage *internal.MockRefreshTokenStorage + ) + + request := fosite.NewAccessRequest(&fosite.DefaultSession{}) + response := fosite.NewAccessResponse() + propagatedContext := context.Background() + + for _, testCase := range []struct { + description string + setup func() + expectError error + }{ + { + description: "transaction should be committed successfully if no errors occur", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(2) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(nil). + Times(1) + }, + }, + { + description: "transaction should be rolled back if call to `RevokeAccessToken` results in an error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). 
+ RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a fosite.ErrInvalidRequest if call to `RevokeAccessToken` results in a " + + "fosite.ErrSerializationFailure error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(fosite.ErrSerializationFailure). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrInvalidRequest, + }, + { + description: "transaction should be rolled back if call to `RotateRefreshToken` results in an error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a fosite.ErrInvalidRequest if call to `RotateRefreshToken` results in a " + + "fosite.ErrSerializationFailure error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(fosite.ErrSerializationFailure). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrInvalidRequest, + }, + { + description: "should result in a fosite.ErrInvalidRequest if call to `CreateAccessTokenSession` results in " + + "a fosite.ErrSerializationFailure error", + setup: func() { + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(fosite.ErrSerializationFailure). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). 
+ Times(1) + }, + expectError: fosite.ErrInvalidRequest, + }, + { + description: "transaction should be rolled back if call to `CreateAccessTokenSession` results in an error", + setup: func() { + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "transaction should be rolled back if call to `CreateRefreshTokenSession` results in an error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(2) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a fosite.ErrInvalidRequest if call to `CreateRefreshTokenSession` results in " + + "a fosite.ErrSerializationFailure error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(2) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(fosite.ErrSerializationFailure). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrInvalidRequest, + }, + { + description: "should result in a server error if transaction cannot be created", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). 
+ Return(nil, errors.New("Could not create transaction!")). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be rolled back", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(fosite.ErrNotFound). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(errors.New("Could not rollback transaction!")). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be committed", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(2) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(errors.New("Could not commit transaction!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: nil, + }, + { + description: "should result in a `fosite.ErrInvalidRequest` if transaction fails to commit due to a " + + "`fosite.ErrSerializationFailure` error", + setup: func() { + request.GrantTypes = fosite.Arguments{"refresh_token"} + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockRefreshTokenStorageProvider. + EXPECT(). + RefreshTokenStorage(). + Return(mockRefreshTokenStorage). + Times(2) + mockRefreshTokenStorage. + EXPECT(). + RotateRefreshToken(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorageProvider. + EXPECT(). + AccessTokenStorage(). + Return(mockAccessTokenStorage). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(fosite.ErrSerializationFailure). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). 
+ Times(1) + }, + expectError: nil, + }, + } { + t.Run(fmt.Sprintf("scenario=%s", testCase.description), func(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + mockTransactional = internal.NewMockTransactional(ctrl) + + mockTokenRevocationStorageProvider = internal.NewMockTokenRevocationStorageProvider(ctrl) + + mockAccessTokenStorageProvider = internal.NewMockAccessTokenStorageProvider(ctrl) + mockAccessTokenStorage = internal.NewMockAccessTokenStorage(ctrl) + + mockRefreshTokenStorageProvider = internal.NewMockRefreshTokenStorageProvider(ctrl) + mockRefreshTokenStorage = internal.NewMockRefreshTokenStorage(ctrl) + + // define concrete types + mockStorage := struct { + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + *internal.MockTokenRevocationStorageProvider + *internal.MockTransactional + }{ + MockAccessTokenStorageProvider: mockAccessTokenStorageProvider, + MockRefreshTokenStorageProvider: mockRefreshTokenStorageProvider, + MockTokenRevocationStorageProvider: mockTokenRevocationStorageProvider, + MockTransactional: mockTransactional, + } + + handler := oauth2.RefreshTokenGrantHandler{ + Storage: mockStorage, + Strategy: &compose.CommonStrategyProvider{CoreStrategy: hmacshaStrategy}, + Config: &fosite.Config{ + AccessTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + }, + } + + testCase.setup() + + if err := handler.PopulateTokenEndpointResponse(propagatedContext, request, response); testCase.expectError != nil { + assert.EqualError(t, err, testCase.expectError.Error()) + } + }) + } +} diff --git a/fosite/handler/oauth2/flow_resource_owner.go b/fosite/handler/oauth2/flow_resource_owner.go new file mode 100644 index 00000000000..28fdd944f78 --- /dev/null +++ b/fosite/handler/oauth2/flow_resource_owner.go @@ -0,0 +1,153 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "time" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" +) + +var _ fosite.TokenEndpointHandler = (*ResourceOwnerPasswordCredentialsGrantHandler)(nil) + +// Deprecated: This handler is deprecated as a means to communicate that the ROPC grant type is widely discouraged and +// is at the time of this writing going to be omitted in the OAuth 2.1 spec. For more information on why this grant type +// is discouraged see: https://www.scottbrady91.com/oauth/why-the-resource-owner-password-credentials-grant-type-is-not-authentication-nor-suitable-for-modern-applications +type ResourceOwnerPasswordCredentialsGrantHandler struct { + Storage interface { + ResourceOwnerPasswordCredentialsGrantStorageProvider + AccessTokenStorageProvider + RefreshTokenStorageProvider + } + Strategy interface { + AccessTokenStrategyProvider + RefreshTokenStrategyProvider + } + Config interface { + fosite.ScopeStrategyProvider + fosite.AudienceStrategyProvider + fosite.RefreshTokenScopesProvider + fosite.RefreshTokenLifespanProvider + fosite.AccessTokenLifespanProvider + } +} + +type Session interface { + // SetSubject sets the session's subject. 
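+	// The resource owner password credentials handler calls this after a successful
+	// Authenticate to bind the resource owner's subject to the session of the tokens
+	// it issues.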
+ SetSubject(subject string) +} + +// HandleTokenEndpointRequest implements https://tools.ietf.org/html/rfc6749#section-4.3.2 +func (c *ResourceOwnerPasswordCredentialsGrantHandler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !request.GetClient().GetGrantTypes().Has("password") { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The client is not allowed to use authorization grant 'password'.")) + } + + client := request.GetClient() + for _, scope := range request.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + + if err := c.Config.GetAudienceStrategy(ctx)(client.GetAudience(), request.GetRequestedAudience()); err != nil { + return err + } + + username := request.GetRequestForm().Get("username") + password := request.GetRequestForm().Get("password") + if username == "" || password == "" { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Username or password are missing from the POST body.")) + } else if sub, err := c.Storage.ResourceOwnerPasswordCredentialsGrantStorage().Authenticate(ctx, username, password); errors.Is(err, fosite.ErrNotFound) { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("Unable to authenticate the provided username and password credentials.").WithWrap(err).WithDebug(err.Error())) + } else if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } else { + if sess, ok := request.GetSession().(Session); ok { + sess.SetSubject(sub) + } + } + + // Credentials must not be passed around, potentially leaking to the database! + delete(request.GetRequestForm(), "password") + + atLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypePassword, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + request.GetSession().SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan).Round(time.Second)) + + rtLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypePassword, fosite.RefreshToken, c.Config.GetRefreshTokenLifespan(ctx)) + if rtLifespan > -1 { + request.GetSession().SetExpiresAt(fosite.RefreshToken, time.Now().UTC().Add(rtLifespan).Round(time.Second)) + } + + return nil +} + +// PopulateTokenEndpointResponse implements https://tools.ietf.org/html/rfc6749#section-4.3.3 +func (c *ResourceOwnerPasswordCredentialsGrantHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + atLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), fosite.GrantTypePassword, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + accessTokenSignature, err := c.IssueAccessToken(ctx, atLifespan, requester, responder) + if err != nil { + return err + } + + var refresh, refreshSignature string + if len(c.Config.GetRefreshTokenScopes(ctx)) == 0 || requester.GetGrantedScopes().HasOneOf(c.Config.GetRefreshTokenScopes(ctx)...) 
{ + var err error + refresh, refreshSignature, err = c.Strategy.RefreshTokenStrategy().GenerateRefreshToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } else if err := c.Storage.RefreshTokenStorage().CreateRefreshTokenSession(ctx, refreshSignature, accessTokenSignature, requester.Sanitize([]string{})); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + if refresh != "" { + responder.SetExtra("refresh_token", refresh) + } + + return nil +} + +func (c *ResourceOwnerPasswordCredentialsGrantHandler) IssueAccessToken(ctx context.Context, atLifespan time.Duration, requester fosite.AccessRequester, responder fosite.AccessResponder) (signature string, err error) { + token, signature, err := c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, requester) + if err != nil { + return "", err + } else if err := c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, signature, requester.Sanitize([]string{})); err != nil { + return "", err + } + + if !requester.GetSession().GetExpiresAt(fosite.AccessToken).IsZero() { + atLifespan = time.Duration(requester.GetSession().GetExpiresAt(fosite.AccessToken).UnixNano() - time.Now().UTC().UnixNano()) + } + + responder.SetAccessToken(token) + responder.SetTokenType("bearer") + responder.SetExpiresIn(atLifespan) + responder.SetScopes(requester.GetGrantedScopes()) + + return signature, nil +} + +func (c *ResourceOwnerPasswordCredentialsGrantHandler) CanSkipClientAuth(ctx context.Context, _ fosite.AccessRequester) bool { + return false +} + +func (c *ResourceOwnerPasswordCredentialsGrantHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. + // Value MUST be set to "password". + return requester.GetGrantTypes().ExactOne("password") +} diff --git a/fosite/handler/oauth2/flow_resource_owner_storage.go b/fosite/handler/oauth2/flow_resource_owner_storage.go new file mode 100644 index 00000000000..31b1f57a436 --- /dev/null +++ b/fosite/handler/oauth2/flow_resource_owner_storage.go @@ -0,0 +1,18 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" +) + +// ResourceOwnerPasswordCredentialsGrantStorage provides storage for the resource owner password credentials grant. +type ResourceOwnerPasswordCredentialsGrantStorage interface { + Authenticate(ctx context.Context, name string, secret string) (subject string, err error) +} + +// ResourceOwnerPasswordCredentialsGrantStorageProvider provides the resource owner password credentials grant storage. 
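+// The returned storage's Authenticate is expected to verify the supplied username and
+// password and return the resource owner's subject. Returning fosite.ErrNotFound
+// signals invalid credentials, which the handler maps to fosite.ErrInvalidGrant; any
+// other error surfaces as fosite.ErrServerError.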
+type ResourceOwnerPasswordCredentialsGrantStorageProvider interface { + ResourceOwnerPasswordCredentialsGrantStorage() ResourceOwnerPasswordCredentialsGrantStorage +} diff --git a/fosite/handler/oauth2/flow_resource_owner_test.go b/fosite/handler/oauth2/flow_resource_owner_test.go new file mode 100644 index 00000000000..90649b9ee48 --- /dev/null +++ b/fosite/handler/oauth2/flow_resource_owner_test.go @@ -0,0 +1,285 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "net/url" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestResourceOwnerFlow_HandleTokenEndpointRequest(t *testing.T) { + ctrl := gomock.NewController(t) + mockRopcgStorageProvider := internal.NewMockResourceOwnerPasswordCredentialsGrantStorageProvider(ctrl) + mockRopcgStorage := internal.NewMockResourceOwnerPasswordCredentialsGrantStorage(ctrl) + mockAccessTokenStorageProvider := internal.NewMockAccessTokenStorageProvider(ctrl) + mockRefreshTokenStorageProvider := internal.NewMockRefreshTokenStorageProvider(ctrl) + t.Cleanup(ctrl.Finish) + + areq := fosite.NewAccessRequest(new(fosite.DefaultSession)) + areq.Form = url.Values{} + for k, c := range []struct { + description string + setup func(config *fosite.Config) + expectErr error + check func(areq *fosite.AccessRequest) + }{ + { + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"123"} + }, + }, + { + description: "should fail because scope missing", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"password"} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"password"}, Scopes: []string{}} + areq.RequestedScope = []string{"foo-scope"} + }, + expectErr: fosite.ErrInvalidScope, + }, + { + description: "should fail because audience missing", + setup: func(config *fosite.Config) { + areq.RequestedAudience = fosite.Arguments{"https://www.ory.sh/api"} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"password"}, Scopes: []string{"foo-scope"}} + }, + expectErr: fosite.ErrInvalidRequest, + }, + { + description: "should fail because invalid grant_type specified", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"password"} + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"authorization_code"}, Scopes: []string{"foo-scope"}} + }, + expectErr: fosite.ErrUnauthorizedClient, + }, + { + description: "should fail because invalid credentials", + setup: func(config *fosite.Config) { + areq.Form.Set("username", "peter") + areq.Form.Set("password", "pan") + areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"password"}, Scopes: []string{"foo-scope"}, Audience: []string{"https://www.ory.sh/api"}} + + mockRopcgStorageProvider.EXPECT().ResourceOwnerPasswordCredentialsGrantStorage().Return(mockRopcgStorage).Times(1) + mockRopcgStorage.EXPECT().Authenticate(gomock.Any(), "peter", "pan").Return("", fosite.ErrNotFound) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should fail because error on lookup", + setup: func(config 
*fosite.Config) { + mockRopcgStorageProvider.EXPECT().ResourceOwnerPasswordCredentialsGrantStorage().Return(mockRopcgStorage).Times(1) + mockRopcgStorage.EXPECT().Authenticate(gomock.Any(), "peter", "pan").Return("", errors.New("")) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should pass", + setup: func(config *fosite.Config) { + mockRopcgStorageProvider.EXPECT().ResourceOwnerPasswordCredentialsGrantStorage().Return(mockRopcgStorage).Times(1) + mockRopcgStorage.EXPECT().Authenticate(gomock.Any(), "peter", "pan").Return("", nil) + }, + check: func(areq *fosite.AccessRequest) { + // assert.NotEmpty(t, areq.GetSession().GetExpiresAt(fosite.AccessToken)) + assert.Equal(t, time.Now().Add(time.Hour).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.AccessToken)) + assert.Equal(t, time.Now().Add(time.Hour).UTC().Round(time.Second), areq.GetSession().GetExpiresAt(fosite.RefreshToken)) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + config := &fosite.Config{ + AccessTokenLifespan: time.Hour, + RefreshTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + } + mockStorage := struct { + *internal.MockResourceOwnerPasswordCredentialsGrantStorageProvider + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + }{ + MockResourceOwnerPasswordCredentialsGrantStorageProvider: mockRopcgStorageProvider, + MockAccessTokenStorageProvider: mockAccessTokenStorageProvider, + MockRefreshTokenStorageProvider: mockRefreshTokenStorageProvider, + } + h := oauth2.ResourceOwnerPasswordCredentialsGrantHandler{ + Storage: mockStorage, + Config: config, + } + c.setup(config) + err := h.HandleTokenEndpointRequest(context.Background(), areq) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + if c.check != nil { + c.check(areq) + } + } + }) + } +} + +func TestResourceOwnerFlow_PopulateTokenEndpointResponse(t *testing.T) { + var ( + mockRopcgStorageProvider *internal.MockResourceOwnerPasswordCredentialsGrantStorageProvider + mockAccessTokenStorageProvider *internal.MockAccessTokenStorageProvider + mockAccessTokenStorage *internal.MockAccessTokenStorage + mockRefreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider + mockRefreshTokenStorage *internal.MockRefreshTokenStorage + mockAccessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider + mockAccessTokenStrategy *internal.MockAccessTokenStrategy + mockRefreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider + mockRefreshTokenStrategy *internal.MockRefreshTokenStrategy + + areq *fosite.AccessRequest + aresp *fosite.AccessResponse + h oauth2.ResourceOwnerPasswordCredentialsGrantHandler + ) + + mockAT := "accesstoken.foo.bar" + mockRT := "refreshtoken.bar.foo" + + config := &fosite.Config{} + h.Config = config + + for k, c := range []struct { + description string + setup func(*fosite.Config) + expectErr error + expect func() + }{ + { + description: "should fail because not responsible", + expectErr: fosite.ErrUnknownRequest, + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{""} + }, + }, + { + description: "should pass", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"password"} + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + 
mockAccessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return(mockAT, "bar", nil) + mockAccessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(mockAccessTokenStorage).Times(1) + mockAccessTokenStorage.EXPECT().CreateAccessTokenSession(gomock.Any(), "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + }, + expect: func() { + assert.Nil(t, aresp.GetExtra("refresh_token"), "unexpected refresh token") + }, + }, + { + description: "should pass - offline scope", + setup: func(config *fosite.Config) { + areq.GrantTypes = fosite.Arguments{"password"} + areq.GrantScope("offline") + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + mockRefreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), areq).Return(mockRT, "bar", nil) + mockRefreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(mockRefreshTokenStorage).Times(1) + mockRefreshTokenStorage.EXPECT().CreateRefreshTokenSession(gomock.Any(), "bar", "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockAccessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return(mockAT, "bar", nil) + mockAccessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(mockAccessTokenStorage).Times(1) + mockAccessTokenStorage.EXPECT().CreateAccessTokenSession(gomock.Any(), "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + }, + expect: func() { + assert.NotNil(t, aresp.GetExtra("refresh_token"), "expected refresh token") + }, + }, + { + description: "should pass - refresh token without offline scope", + setup: func(config *fosite.Config) { + config.RefreshTokenScopes = []string{} + areq.GrantTypes = fosite.Arguments{"password"} + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockAccessTokenStrategy.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return(mockAT, "bar", nil) + mockAccessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(mockAccessTokenStorage).Times(1) + mockAccessTokenStorage.EXPECT().CreateAccessTokenSession(gomock.Any(), "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + mockRefreshTokenStrategy.EXPECT().GenerateRefreshToken(gomock.Any(), areq).Return(mockRT, "bar", nil) + mockRefreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(mockRefreshTokenStorage).Times(1) + mockRefreshTokenStorage.EXPECT().CreateRefreshTokenSession(gomock.Any(), "bar", "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + }, + expect: func() { + assert.NotNil(t, aresp.GetExtra("refresh_token"), "expected refresh token") + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + areq = fosite.NewAccessRequest(nil) + areq.Session = &fosite.DefaultSession{} + aresp = fosite.NewAccessResponse() + + config := &fosite.Config{ + RefreshTokenScopes: []string{"offline"}, + AccessTokenLifespan: time.Hour, + } + + mockRopcgStorageProvider = internal.NewMockResourceOwnerPasswordCredentialsGrantStorageProvider(ctrl) + mockAccessTokenStorageProvider = internal.NewMockAccessTokenStorageProvider(ctrl) + mockAccessTokenStorage = internal.NewMockAccessTokenStorage(ctrl) + mockRefreshTokenStorageProvider = internal.NewMockRefreshTokenStorageProvider(ctrl) + mockRefreshTokenStorage 
= internal.NewMockRefreshTokenStorage(ctrl) + mockAccessTokenStrategyProvider = internal.NewMockAccessTokenStrategyProvider(ctrl) + mockAccessTokenStrategy = internal.NewMockAccessTokenStrategy(ctrl) + mockRefreshTokenStrategyProvider = internal.NewMockRefreshTokenStrategyProvider(ctrl) + mockRefreshTokenStrategy = internal.NewMockRefreshTokenStrategy(ctrl) + + mockStorage := struct { + *internal.MockResourceOwnerPasswordCredentialsGrantStorageProvider + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + }{ + MockResourceOwnerPasswordCredentialsGrantStorageProvider: mockRopcgStorageProvider, + MockAccessTokenStorageProvider: mockAccessTokenStorageProvider, + MockRefreshTokenStorageProvider: mockRefreshTokenStorageProvider, + } + + mockStrategy := struct { + *internal.MockAccessTokenStrategyProvider + *internal.MockRefreshTokenStrategyProvider + }{ + MockAccessTokenStrategyProvider: mockAccessTokenStrategyProvider, + MockRefreshTokenStrategyProvider: mockRefreshTokenStrategyProvider, + } + + h = oauth2.ResourceOwnerPasswordCredentialsGrantHandler{ + Storage: mockStorage, + Strategy: mockStrategy, + Config: config, + } + + c.setup(config) + + err := h.PopulateTokenEndpointResponse(context.Background(), areq, aresp) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + if c.expect != nil { + c.expect() + } + } + }) + } +} diff --git a/fosite/handler/oauth2/helper.go b/fosite/handler/oauth2/helper.go new file mode 100644 index 00000000000..7bf800abaf3 --- /dev/null +++ b/fosite/handler/oauth2/helper.go @@ -0,0 +1,44 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" +) + +type HandleHelperConfigProvider interface { + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider +} + +type HandleHelper struct { + AccessTokenStrategy AccessTokenStrategy + Storage AccessTokenStorageProvider + Config HandleHelperConfigProvider +} + +func (h *HandleHelper) IssueAccessToken(ctx context.Context, defaultLifespan time.Duration, requester fosite.AccessRequester, responder fosite.AccessResponder) (signature string, err error) { + token, signature, err := h.AccessTokenStrategy.GenerateAccessToken(ctx, requester) + if err != nil { + return "", err + } else if err := h.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, signature, requester.Sanitize([]string{})); err != nil { + return "", err + } + + responder.SetAccessToken(token) + responder.SetTokenType("bearer") + responder.SetExpiresIn(getExpiresIn(requester, fosite.AccessToken, defaultLifespan, time.Now().UTC())) + responder.SetScopes(requester.GetGrantedScopes()) + return signature, nil +} + +func getExpiresIn(r fosite.Requester, key fosite.TokenType, defaultLifespan time.Duration, now time.Time) time.Duration { + if r.GetSession().GetExpiresAt(key).IsZero() { + return defaultLifespan + } + return time.Duration(r.GetSession().GetExpiresAt(key).UnixNano() - now.UnixNano()) +} diff --git a/fosite/handler/oauth2/helper_test.go b/fosite/handler/oauth2/helper_test.go new file mode 100644 index 00000000000..62e1b5bb1d9 --- /dev/null +++ b/fosite/handler/oauth2/helper_test.go @@ -0,0 +1,85 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestGetExpiresIn(t *testing.T) { + now := time.Now().UTC() + r := fosite.NewAccessRequest(&fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.AccessToken: now.Add(time.Hour), + }, + }) + assert.Equal(t, time.Hour, oauth2.CallGetExpiresIn(r, fosite.AccessToken, time.Millisecond, now)) +} + +func TestIssueAccessToken(t *testing.T) { + ctrl := gomock.NewController(t) + areq := &fosite.AccessRequest{} + aresp := &fosite.AccessResponse{Extra: map[string]interface{}{}} + accessStrat := internal.NewMockAccessTokenStrategy(ctrl) + accessStore := internal.NewMockAccessTokenStorage(ctrl) + provider := internal.NewMockAccessTokenStorageProvider(ctrl) + t.Cleanup(ctrl.Finish) + + helper := oauth2.HandleHelper{ + Storage: provider, + AccessTokenStrategy: accessStrat, + Config: &fosite.Config{ + AccessTokenLifespan: time.Hour, + }, + } + + areq.Session = &fosite.DefaultSession{} + for k, c := range []struct { + mock func() + err error + }{ + { + mock: func() { + accessStrat.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return("", "", errors.New("")) + }, + err: errors.New(""), + }, + { + mock: func() { + accessStrat.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return("token", "signature", nil) + provider.EXPECT().AccessTokenStorage().Return(accessStore).Times(1) + accessStore.EXPECT().CreateAccessTokenSession(gomock.Any(), "signature", gomock.Eq(areq.Sanitize([]string{}))).Return(errors.New("")) + }, + err: errors.New(""), + }, + { + mock: func() { + accessStrat.EXPECT().GenerateAccessToken(gomock.Any(), areq).Return("token", "signature", nil) + provider.EXPECT().AccessTokenStorage().Return(accessStore).Times(1) + accessStore.EXPECT().CreateAccessTokenSession(gomock.Any(), "signature", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) + }, + err: nil, + }, + } { + c.mock() + signature, err := helper.IssueAccessToken(context.Background(), helper.Config.GetAccessTokenLifespan(context.TODO()), areq, aresp) + require.Equal(t, err == nil, c.err == nil) + if c.err != nil { + assert.EqualError(t, err, c.err.Error(), "Case %d", k) + } else { + assert.NotEmpty(t, signature, "Case %d", k) + } + } +} diff --git a/fosite/handler/oauth2/introspector.go b/fosite/handler/oauth2/introspector.go new file mode 100644 index 00000000000..a81f03333e9 --- /dev/null +++ b/fosite/handler/oauth2/introspector.go @@ -0,0 +1,105 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/errorsx" +) + +var _ fosite.TokenIntrospector = (*CoreValidator)(nil) + +type CoreValidator struct { + Storage interface { + AccessTokenStorageProvider + RefreshTokenStorageProvider + } + Strategy interface { + AccessTokenStrategyProvider + RefreshTokenStrategyProvider + } + Config interface { + fosite.ScopeStrategyProvider + fosite.DisableRefreshTokenValidationProvider + } +} + +func (c *CoreValidator) IntrospectToken(ctx context.Context, token string, tokenUse fosite.TokenUse, accessRequest fosite.AccessRequester, scopes []string) (fosite.TokenUse, error) { + if c.Config.GetDisableRefreshTokenValidation(ctx) { + if err := c.introspectAccessToken(ctx, token, accessRequest, scopes); err != nil { + return "", err 
+ } + return fosite.AccessToken, nil + } + + var err error + switch tokenUse { + case fosite.RefreshToken: + if err = c.introspectRefreshToken(ctx, token, accessRequest, scopes); err == nil { + return fosite.RefreshToken, nil + } else if err = c.introspectAccessToken(ctx, token, accessRequest, scopes); err == nil { + return fosite.AccessToken, nil + } + return "", err + } + + if err = c.introspectAccessToken(ctx, token, accessRequest, scopes); err == nil { + return fosite.AccessToken, nil + } else if err := c.introspectRefreshToken(ctx, token, accessRequest, scopes); err == nil { + return fosite.RefreshToken, nil + } + + return "", err +} + +func matchScopes(ss fosite.ScopeStrategy, granted, scopes []string) error { + for _, scope := range scopes { + if scope == "" { + continue + } + + if !ss(granted, scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The request scope '%s' has not been granted or is not allowed to be requested.", scope)) + } + } + + return nil +} + +func (c *CoreValidator) introspectAccessToken(ctx context.Context, token string, accessRequest fosite.AccessRequester, scopes []string) error { + sig := c.Strategy.AccessTokenStrategy().AccessTokenSignature(ctx, token) + or, err := c.Storage.AccessTokenStorage().GetAccessTokenSession(ctx, sig, accessRequest.GetSession()) + if err != nil { + return errorsx.WithStack(fosite.ErrRequestUnauthorized.WithWrap(err).WithDebug(err.Error())) + } else if err := c.Strategy.AccessTokenStrategy().ValidateAccessToken(ctx, or, token); err != nil { + return err + } + + if err := matchScopes(c.Config.GetScopeStrategy(ctx), or.GetGrantedScopes(), scopes); err != nil { + return err + } + + accessRequest.Merge(or) + return nil +} + +func (c *CoreValidator) introspectRefreshToken(ctx context.Context, token string, accessRequest fosite.AccessRequester, scopes []string) error { + sig := c.Strategy.RefreshTokenStrategy().RefreshTokenSignature(ctx, token) + or, err := c.Storage.RefreshTokenStorage().GetRefreshTokenSession(ctx, sig, accessRequest.GetSession()) + + if err != nil { + return errorsx.WithStack(fosite.ErrRequestUnauthorized.WithWrap(err).WithDebug(err.Error())) + } else if err := c.Strategy.RefreshTokenStrategy().ValidateRefreshToken(ctx, or, token); err != nil { + return err + } + + if err := matchScopes(c.Config.GetScopeStrategy(ctx), or.GetGrantedScopes(), scopes); err != nil { + return err + } + + accessRequest.Merge(or) + return nil +} diff --git a/fosite/handler/oauth2/introspector_jwt.go b/fosite/handler/oauth2/introspector_jwt.go new file mode 100644 index 00000000000..3516fafbbe0 --- /dev/null +++ b/fosite/handler/oauth2/introspector_jwt.go @@ -0,0 +1,88 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type StatelessJWTValidator struct { + jwt.Signer + Config interface { + fosite.ScopeStrategyProvider + } +} + +// AccessTokenJWTToRequest tries to reconstruct fosite.Request from a JWT. 
+func AccessTokenJWTToRequest(token *jwt.Token) fosite.Requester { + mapClaims := token.Claims + claims := jwt.JWTClaims{} + claims.FromMapClaims(mapClaims) + + requestedAt := claims.IssuedAt + requestedAtClaim, ok := mapClaims["rat"] + if ok { + switch at := requestedAtClaim.(type) { + case float64: + requestedAt = time.Unix(int64(at), 0).UTC() + case int64: + requestedAt = time.Unix(at, 0).UTC() + } + } + + clientId := "" + clientIdClaim, ok := mapClaims["client_id"] + if ok { + switch cid := clientIdClaim.(type) { + case string: + clientId = cid + } + } + + return &fosite.Request{ + RequestedAt: requestedAt, + Client: &fosite.DefaultClient{ + ID: clientId, + }, + // We do not really know which scopes were requested, so we set them to granted. + RequestedScope: claims.Scope, + GrantedScope: claims.Scope, + Session: &JWTSession{ + JWTClaims: &claims, + JWTHeader: &jwt.Headers{ + Extra: token.Header, + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.AccessToken: claims.ExpiresAt, + }, + Subject: claims.Subject, + }, + // We do not really know which audiences were requested, so we set them to granted. + RequestedAudience: claims.Audience, + GrantedAudience: claims.Audience, + } +} + +func (v *StatelessJWTValidator) IntrospectToken(ctx context.Context, token string, _ fosite.TokenUse, accessRequest fosite.AccessRequester, scopes []string) (fosite.TokenUse, error) { + t, err := validate(ctx, v.Signer, token) + if err != nil { + return "", err + } + + // TODO: From here we assume it is an access token, but how do we know it is really and that is not an ID token? + + requester := AccessTokenJWTToRequest(t) + + if err := matchScopes(v.Config.GetScopeStrategy(ctx), requester.GetGrantedScopes(), scopes); err != nil { + return fosite.AccessToken, err + } + + accessRequest.Merge(requester) + + return fosite.AccessToken, nil +} diff --git a/fosite/handler/oauth2/introspector_jwt_test.go b/fosite/handler/oauth2/introspector_jwt_test.go new file mode 100644 index 00000000000..7dbe3689168 --- /dev/null +++ b/fosite/handler/oauth2/introspector_jwt_test.go @@ -0,0 +1,136 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "encoding/base64" + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal/gen" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestIntrospectJWT(t *testing.T) { + rsaKey := gen.MustRSAKey() + + signer := &jwt.DefaultSigner{GetPrivateKey: func(_ context.Context) (interface{}, error) { return rsaKey, nil }} + strat := &oauth2.DefaultJWTStrategy{ + Signer: signer, + Config: &fosite.Config{}, + } + + v := &oauth2.StatelessJWTValidator{ + Signer: signer, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + }, + } + for k, c := range []struct { + description string + token func(t *testing.T) string + expectErr error + scopes []string + }{ + { + description: "should fail because jwt is expired", + token: func(t *testing.T) string { + tok := jwtExpiredCase(fosite.AccessToken) + token, _, err := strat.GenerateAccessToken(t.Context(), tok) + require.NoError(t, err) + return token + }, + expectErr: fosite.ErrTokenExpired, + }, + { + description: "should pass because scope was granted", + token: func(t *testing.T) string { + 
tok := jwtValidCase(fosite.AccessToken) + tok.GrantedScope = []string{"foo", "bar"} + token, _, err := strat.GenerateAccessToken(t.Context(), tok) + require.NoError(t, err) + return token + }, + scopes: []string{"foo"}, + }, + { + description: "should fail because scope was not granted", + token: func(t *testing.T) string { + tok := jwtValidCase(fosite.AccessToken) + token, _, err := strat.GenerateAccessToken(t.Context(), tok) + require.NoError(t, err) + return token + }, + scopes: []string{"foo"}, + expectErr: fosite.ErrInvalidScope, + }, + { + description: "should fail because signature is invalid", + token: func(t *testing.T) string { + tok := jwtValidCase(fosite.AccessToken) + token, _, err := strat.GenerateAccessToken(t.Context(), tok) + require.NoError(t, err) + parts := strings.Split(token, ".") + require.Len(t, parts, 3, "%s - %v", token, parts) + dec, err := base64.RawURLEncoding.DecodeString(parts[1]) + require.NoError(t, err) + s := strings.ReplaceAll(string(dec), "peter", "piper") + parts[1] = base64.RawURLEncoding.EncodeToString([]byte(s)) + return strings.Join(parts, ".") + }, + expectErr: fosite.ErrTokenSignatureMismatch, + }, + { + description: "should pass", + token: func(t *testing.T) string { + tok := jwtValidCase(fosite.AccessToken) + token, _, err := strat.GenerateAccessToken(t.Context(), tok) + require.NoError(t, err) + return token + }, + }, + } { + t.Run(fmt.Sprintf("case=%d:%v", k, c.description), func(t *testing.T) { + if c.scopes == nil { + c.scopes = []string{} + } + + areq := fosite.NewAccessRequest(nil) + _, err := v.IntrospectToken(t.Context(), c.token(t), fosite.AccessToken, areq, c.scopes) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + assert.Equal(t, "peter", areq.Session.GetSubject()) + } + }) + } +} + +func BenchmarkIntrospectJWT(b *testing.B) { + key := gen.MustRSAKey() + signer := &jwt.DefaultSigner{GetPrivateKey: func(_ context.Context) (interface{}, error) { return key, nil }} + strat := &oauth2.DefaultJWTStrategy{Signer: signer, Config: &fosite.Config{}} + + v := &oauth2.StatelessJWTValidator{Signer: signer} + + tok := jwtValidCase(fosite.AccessToken) + token, _, err := strat.GenerateAccessToken(b.Context(), tok) + assert.NoError(b, err) + areq := fosite.NewAccessRequest(nil) + + for n := 0; n < b.N; n++ { + _, err = v.IntrospectToken(b.Context(), token, fosite.AccessToken, areq, []string{}) + } + + assert.NoError(b, err) +} diff --git a/fosite/handler/oauth2/introspector_test.go b/fosite/handler/oauth2/introspector_test.go new file mode 100644 index 00000000000..31d17275024 --- /dev/null +++ b/fosite/handler/oauth2/introspector_test.go @@ -0,0 +1,140 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "net/http" + "testing" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestIntrospectToken(t *testing.T) { + ctrl := gomock.NewController(t) + accessTokenStorageProvider := internal.NewMockAccessTokenStorageProvider(ctrl) + accessTokenStorage := internal.NewMockAccessTokenStorage(ctrl) + refreshTokenStorageProvider := 
internal.NewMockRefreshTokenStorageProvider(ctrl) + refreshTokenStorage := internal.NewMockRefreshTokenStorage(ctrl) + accessTokenStrategyProvider := internal.NewMockAccessTokenStrategyProvider(ctrl) + accessTokenStrategy := internal.NewMockAccessTokenStrategy(ctrl) + refreshTokenStrategyProvider := internal.NewMockRefreshTokenStrategyProvider(ctrl) + refreshTokenStrategy := internal.NewMockRefreshTokenStrategy(ctrl) + areq := fosite.NewAccessRequest(nil) + t.Cleanup(ctrl.Finish) + + mockStorage := struct { + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + }{ + MockAccessTokenStorageProvider: accessTokenStorageProvider, + MockRefreshTokenStorageProvider: refreshTokenStorageProvider, + } + mockStrategy := struct { + *internal.MockAccessTokenStrategyProvider + *internal.MockRefreshTokenStrategyProvider + }{ + MockAccessTokenStrategyProvider: accessTokenStrategyProvider, + MockRefreshTokenStrategyProvider: refreshTokenStrategyProvider, + } + + config := &fosite.Config{} + v := &oauth2.CoreValidator{ + Strategy: mockStrategy, + Storage: mockStorage, + Config: config, + } + httpreq := &http.Request{Header: http.Header{}} + + for k, c := range []struct { + description string + setup func() + expectErr error + expectTU fosite.TokenUse + }{ + { + description: "should fail because no bearer token set", + setup: func() { + httpreq.Header.Set("Authorization", "bearer") + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), "").Return("") + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), "", nil).Return(nil, errors.New("")) + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), "").Return("") + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), "", nil).Return(nil, errors.New("")) + }, + expectErr: fosite.ErrRequestUnauthorized, + }, + { + description: "should fail because retrieval fails", + setup: func() { + httpreq.Header.Set("Authorization", "bearer 1234") + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), "1234").AnyTimes().Return("asdf") + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), "asdf", nil).Return(nil, errors.New("")) + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), "1234").Return("asdf") + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), "asdf", nil).Return(nil, errors.New("")) + }, + expectErr: fosite.ErrRequestUnauthorized, + }, + { + description: "should fail because validation fails", + setup: func() { + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).AnyTimes() + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), "asdf", nil).AnyTimes().Return(areq, nil) + 
accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(2) + accessTokenStrategy.EXPECT().ValidateAccessToken(gomock.Any(), areq, "1234").Return(errorsx.WithStack(fosite.ErrTokenExpired)) + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), "1234").Return("asdf") + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), "asdf", nil).Return(nil, errors.New("")) + }, + expectErr: fosite.ErrTokenExpired, + }, + { + description: "should fail because access token invalid", + setup: func() { + config.DisableRefreshTokenValidation = true + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(2) + accessTokenStrategy.EXPECT().ValidateAccessToken(gomock.Any(), areq, "1234").Return(errorsx.WithStack(fosite.ErrInvalidTokenFormat)) + }, + expectErr: fosite.ErrInvalidTokenFormat, + }, + { + description: "should pass", + setup: func() { + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(2) + accessTokenStrategy.EXPECT().ValidateAccessToken(gomock.Any(), areq, "1234").Return(nil) + }, + expectTU: fosite.AccessToken, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.setup() + tu, err := v.IntrospectToken(context.Background(), fosite.AccessTokenFromRequest(httpreq), fosite.AccessToken, areq, []string{}) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + assert.Equal(t, c.expectTU, tu) + } + }) + } +} diff --git a/fosite/handler/oauth2/providers.go b/fosite/handler/oauth2/providers.go new file mode 100644 index 00000000000..e9233354de8 --- /dev/null +++ b/fosite/handler/oauth2/providers.go @@ -0,0 +1,12 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import "github.com/ory/hydra/v2/fosite" + +type LifespanConfigProvider interface { + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider + fosite.AuthorizeCodeLifespanProvider +} diff --git a/fosite/handler/oauth2/revocation.go b/fosite/handler/oauth2/revocation.go new file mode 100644 index 00000000000..d376d8f4b99 --- /dev/null +++ b/fosite/handler/oauth2/revocation.go @@ -0,0 +1,81 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" +) + +var _ fosite.RevocationHandler = (*TokenRevocationHandler)(nil) + +type TokenRevocationHandler struct { + Storage interface { + TokenRevocationStorageProvider + AccessTokenStorageProvider + RefreshTokenStorageProvider + } + Strategy interface { + AccessTokenStrategyProvider + RefreshTokenStrategyProvider + } +} + +// RevokeToken implements https://tools.ietf.org/html/rfc7009#section-2.1 +// The token type hint indicates which token type check should be performed first. 
+func (r *TokenRevocationHandler) RevokeToken(ctx context.Context, token string, tokenType fosite.TokenType, client fosite.Client) error { + discoveryFuncs := []func() (request fosite.Requester, err error){ + func() (request fosite.Requester, err error) { + // Refresh token + signature := r.Strategy.RefreshTokenStrategy().RefreshTokenSignature(ctx, token) + return r.Storage.RefreshTokenStorage().GetRefreshTokenSession(ctx, signature, nil) + }, + func() (request fosite.Requester, err error) { + // Access token + signature := r.Strategy.AccessTokenStrategy().AccessTokenSignature(ctx, token) + return r.Storage.AccessTokenStorage().GetAccessTokenSession(ctx, signature, nil) + }, + } + + // Token type hinting + if tokenType == fosite.AccessToken { + discoveryFuncs[0], discoveryFuncs[1] = discoveryFuncs[1], discoveryFuncs[0] + } + + var ar fosite.Requester + var err1, err2 error + if ar, err1 = discoveryFuncs[0](); err1 != nil { + ar, err2 = discoveryFuncs[1]() + } + // err2 can only be not nil if first err1 was not nil + if err2 != nil { + return storeErrorsToRevocationError(err1, err2) + } + + if ar.GetClient().GetID() != client.GetID() { + return errorsx.WithStack(fosite.ErrUnauthorizedClient) + } + + requestID := ar.GetID() + err1 = r.Storage.TokenRevocationStorage().RevokeRefreshToken(ctx, requestID) + err2 = r.Storage.TokenRevocationStorage().RevokeAccessToken(ctx, requestID) + + return storeErrorsToRevocationError(err1, err2) +} + +func storeErrorsToRevocationError(err1, err2 error) error { + // both errors are fosite.ErrNotFound and fosite.ErrInactiveToken or nil <=> the token is revoked + if (errors.Is(err1, fosite.ErrNotFound) || errors.Is(err1, fosite.ErrInactiveToken) || err1 == nil) && + (errors.Is(err2, fosite.ErrNotFound) || errors.Is(err2, fosite.ErrInactiveToken) || err2 == nil) { + return nil + } + + // there was an unexpected error => the token may still exist and the client should retry later + return errorsx.WithStack(fosite.ErrTemporarilyUnavailable) +} diff --git a/fosite/handler/oauth2/revocation_storage.go b/fosite/handler/oauth2/revocation_storage.go new file mode 100644 index 00000000000..35a3897fe76 --- /dev/null +++ b/fosite/handler/oauth2/revocation_storage.go @@ -0,0 +1,32 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" +) + +// TokenRevocationStorage provides the storage implementation +// as specified in: https://tools.ietf.org/html/rfc7009 +type TokenRevocationStorage interface { + // RevokeRefreshToken revokes a refresh token as specified in: + // https://tools.ietf.org/html/rfc7009#section-2.1 + // If the particular + // token is a refresh token and the authorization server supports the + // revocation of access tokens, then the authorization server SHOULD + // also invalidate all access tokens based on the same authorization + // grant (see Implementation Note). + RevokeRefreshToken(ctx context.Context, requestID string) error + + // RevokeAccessToken revokes an access token as specified in: + // https://tools.ietf.org/html/rfc7009#section-2.1 + // If the token passed to the request + // is an access token, the server MAY revoke the respective refresh + // token as well. 
+ RevokeAccessToken(ctx context.Context, requestID string) error +} + +type TokenRevocationStorageProvider interface { + TokenRevocationStorage() TokenRevocationStorage +} diff --git a/fosite/handler/oauth2/revocation_test.go b/fosite/handler/oauth2/revocation_test.go new file mode 100644 index 00000000000..1adc0e3e44c --- /dev/null +++ b/fosite/handler/oauth2/revocation_test.go @@ -0,0 +1,575 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestRevokeToken(t *testing.T) { + for k, c := range []struct { + description string + mock func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) + expectErr error + client fosite.Client + }{ + { + description: "should fail - token was issued to another client", + expectErr: fosite.ErrUnauthorizedClient, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "foo"}) + }, + }, + { + description: "should pass - refresh token discovery first; refresh token found", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider 
*internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetID() + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "bar"}) + + tokenRevocationStorageProvider.EXPECT().TokenRevocationStorage().Return(tokenRevocationStorage).Times(2) + tokenRevocationStorage.EXPECT().RevokeRefreshToken(gomock.Any(), gomock.Any()) + tokenRevocationStorage.EXPECT().RevokeAccessToken(gomock.Any(), gomock.Any()) + }, + }, + { + description: "should pass - access token discovery first; access token found", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.AccessToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetID() + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "bar"}) + + tokenRevocationStorageProvider.EXPECT().TokenRevocationStorage().Return(tokenRevocationStorage).Times(2) + tokenRevocationStorage.EXPECT().RevokeRefreshToken(gomock.Any(), gomock.Any()) + tokenRevocationStorage.EXPECT().RevokeAccessToken(gomock.Any(), gomock.Any()) + }, + }, + { + description: "should pass - refresh token discovery first; refresh token not found", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + 
refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.AccessToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetID() + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "bar"}) + + tokenRevocationStorageProvider.EXPECT().TokenRevocationStorage().Return(tokenRevocationStorage).Times(2) + tokenRevocationStorage.EXPECT().RevokeRefreshToken(gomock.Any(), gomock.Any()) + tokenRevocationStorage.EXPECT().RevokeAccessToken(gomock.Any(), gomock.Any()) + }, + }, + { + description: "should pass - access token discovery first; access token not found", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetID() + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "bar"}) + + 
tokenRevocationStorageProvider.EXPECT().TokenRevocationStorage().Return(tokenRevocationStorage).Times(2) + tokenRevocationStorage.EXPECT().RevokeRefreshToken(gomock.Any(), gomock.Any()) + tokenRevocationStorage.EXPECT().RevokeAccessToken(gomock.Any(), gomock.Any()) + }, + }, + { + description: "should pass - refresh token discovery first; both tokens not found", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + }, + }, + { + description: "should pass - access token discovery first; both tokens not found", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.AccessToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + 
refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + }, + }, + { + description: "should pass - refresh token discovery first; refresh token is inactive", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrInactiveToken) + + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + }, + }, + { + description: "should pass - access token discovery first; refresh token is inactive", + expectErr: nil, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.AccessToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + + 
refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrInactiveToken) + }, + }, + { + description: "should fail - store error for access token get", + expectErr: fosite.ErrTemporarilyUnavailable, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.AccessToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fmt.Errorf("random error")) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fosite.ErrNotFound) + }, + }, + { + description: "should fail - store error for refresh token get", + expectErr: fosite.ErrTemporarilyUnavailable, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), 
gomock.Any()).Return(nil, fosite.ErrNotFound) + + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, fmt.Errorf("random error")) + }, + }, + { + description: "should fail - store error for access token revoke", + expectErr: fosite.ErrTemporarilyUnavailable, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.AccessToken + accessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(accessTokenStrategy).Times(1) + accessTokenStrategy.EXPECT().AccessTokenSignature(gomock.Any(), *token) + + accessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(accessTokenStorage).Times(1) + accessTokenStorage.EXPECT().GetAccessTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetID() + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "bar"}) + + tokenRevocationStorageProvider.EXPECT().TokenRevocationStorage().Return(tokenRevocationStorage).Times(2) + tokenRevocationStorage.EXPECT().RevokeRefreshToken(gomock.Any(), gomock.Any()).Return(fosite.ErrNotFound) + tokenRevocationStorage.EXPECT().RevokeAccessToken(gomock.Any(), gomock.Any()).Return(fmt.Errorf("random error")) + }, + }, + { + description: "should fail - store error for refresh token revoke", + expectErr: fosite.ErrTemporarilyUnavailable, + client: &fosite.DefaultClient{ID: "bar"}, + mock: func( + ar *internal.MockAccessRequester, + tokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider, + tokenRevocationStorage *internal.MockTokenRevocationStorage, + accessTokenStorageProvider *internal.MockAccessTokenStorageProvider, + accessTokenStorage *internal.MockAccessTokenStorage, + refreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider, + refreshTokenStorage *internal.MockRefreshTokenStorage, + accessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider, + accessTokenStrategy *internal.MockAccessTokenStrategy, + refreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider, + refreshTokenStrategy *internal.MockRefreshTokenStrategy, + token *string, + tokenType *fosite.TokenType, + ) { + *token = "foo" + *tokenType = fosite.RefreshToken + refreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(refreshTokenStrategy).Times(1) + refreshTokenStrategy.EXPECT().RefreshTokenSignature(gomock.Any(), *token) + + refreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(refreshTokenStorage).Times(1) + 
refreshTokenStorage.EXPECT().GetRefreshTokenSession(gomock.Any(), gomock.Any(), gomock.Any()).Return(ar, nil) + + ar.EXPECT().GetID() + ar.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "bar"}) + + tokenRevocationStorageProvider.EXPECT().TokenRevocationStorage().Return(tokenRevocationStorage).Times(2) + tokenRevocationStorage.EXPECT().RevokeRefreshToken(gomock.Any(), gomock.Any()).Return(fmt.Errorf("random error")) + tokenRevocationStorage.EXPECT().RevokeAccessToken(gomock.Any(), gomock.Any()).Return(fosite.ErrNotFound) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + // define mocks + ar := internal.NewMockAccessRequester(ctrl) + + tokenRevocationStorageProvider := internal.NewMockTokenRevocationStorageProvider(ctrl) + tokenRevocationStorage := internal.NewMockTokenRevocationStorage(ctrl) + + accessTokenStorageProvider := internal.NewMockAccessTokenStorageProvider(ctrl) + accessTokenStorage := internal.NewMockAccessTokenStorage(ctrl) + + refreshTokenStorageProvider := internal.NewMockRefreshTokenStorageProvider(ctrl) + refreshTokenStorage := internal.NewMockRefreshTokenStorage(ctrl) + + accessTokenStrategyProvider := internal.NewMockAccessTokenStrategyProvider(ctrl) + accessTokenStrategy := internal.NewMockAccessTokenStrategy(ctrl) + + refreshTokenStrategyProvider := internal.NewMockRefreshTokenStrategyProvider(ctrl) + refreshTokenStrategy := internal.NewMockRefreshTokenStrategy(ctrl) + + // define concrete types + var token string + var tokenType fosite.TokenType + + mockStorage := struct { + *internal.MockTokenRevocationStorageProvider + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + }{ + MockTokenRevocationStorageProvider: tokenRevocationStorageProvider, + MockAccessTokenStorageProvider: accessTokenStorageProvider, + MockRefreshTokenStorageProvider: refreshTokenStorageProvider, + } + + mockStrategy := struct { + *internal.MockAccessTokenStrategyProvider + *internal.MockRefreshTokenStrategyProvider + }{ + MockAccessTokenStrategyProvider: accessTokenStrategyProvider, + MockRefreshTokenStrategyProvider: refreshTokenStrategyProvider, + } + + h := oauth2.TokenRevocationHandler{ + Storage: mockStorage, + Strategy: mockStrategy, + } + + // set up mock expectations + c.mock( + ar, + tokenRevocationStorageProvider, + tokenRevocationStorage, + accessTokenStorageProvider, + accessTokenStorage, + refreshTokenStorageProvider, + refreshTokenStorage, + accessTokenStrategyProvider, + accessTokenStrategy, + refreshTokenStrategyProvider, + refreshTokenStrategy, + &token, + &tokenType, + ) + + // invoke function under test + err := h.RevokeToken(context.Background(), token, tokenType, c.client) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/fosite/handler/oauth2/storage.go b/fosite/handler/oauth2/storage.go new file mode 100644 index 00000000000..e9e939096a2 --- /dev/null +++ b/fosite/handler/oauth2/storage.go @@ -0,0 +1,57 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +// AuthorizeCodeStorage handles storage requests related to authorization codes. +type AuthorizeCodeStorage interface { + // CreateAuthorizeCodeSession stores the authorization request for a given authorization code. 
+ CreateAuthorizeCodeSession(ctx context.Context, code string, request fosite.Requester) (err error) + + // GetAuthorizeCodeSession hydrates the session based on the given code and returns the authorization request. + // If the authorization code has been invalidated with `InvalidateAuthorizeCodeSession`, this + // method should return the ErrInvalidatedAuthorizeCode error. + // + // Make sure to also return the fosite.Requester value when returning the fosite.ErrInvalidatedAuthorizeCode error! + GetAuthorizeCodeSession(ctx context.Context, code string, session fosite.Session) (request fosite.Requester, err error) + + // InvalidateAuthorizeCodeSession is called when an authorize code is being used. The state of the authorization + // code should be set to invalid and consecutive requests to GetAuthorizeCodeSession should return the + // ErrInvalidatedAuthorizeCode error. + InvalidateAuthorizeCodeSession(ctx context.Context, code string) (err error) +} +type AuthorizeCodeStorageProvider interface { + AuthorizeCodeStorage() AuthorizeCodeStorage +} + +type AccessTokenStorage interface { + CreateAccessTokenSession(ctx context.Context, signature string, request fosite.Requester) (err error) + + GetAccessTokenSession(ctx context.Context, signature string, session fosite.Session) (request fosite.Requester, err error) + + DeleteAccessTokenSession(ctx context.Context, signature string) (err error) +} + +type AccessTokenStorageProvider interface { + AccessTokenStorage() AccessTokenStorage +} + +type RefreshTokenStorage interface { + CreateRefreshTokenSession(ctx context.Context, signature string, accessSignature string, request fosite.Requester) (err error) + + GetRefreshTokenSession(ctx context.Context, signature string, session fosite.Session) (request fosite.Requester, err error) + + DeleteRefreshTokenSession(ctx context.Context, signature string) (err error) + + RotateRefreshToken(ctx context.Context, requestID string, refreshTokenSignature string) (err error) +} + +type RefreshTokenStorageProvider interface { + RefreshTokenStorage() RefreshTokenStorage +} diff --git a/fosite/handler/oauth2/strategy.go b/fosite/handler/oauth2/strategy.go new file mode 100644 index 00000000000..d044ed3c9d2 --- /dev/null +++ b/fosite/handler/oauth2/strategy.go @@ -0,0 +1,47 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +type CoreStrategy interface { + AuthorizeCodeStrategy + AccessTokenStrategy + RefreshTokenStrategy +} +type CoreStrategyProvider interface { + AuthorizeCodeStrategyProvider + AccessTokenStrategyProvider + RefreshTokenStrategyProvider +} + +type AuthorizeCodeStrategy interface { + AuthorizeCodeSignature(ctx context.Context, token string) string + GenerateAuthorizeCode(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) + ValidateAuthorizeCode(ctx context.Context, requester fosite.Requester, token string) (err error) +} +type AuthorizeCodeStrategyProvider interface { + AuthorizeCodeStrategy() AuthorizeCodeStrategy +} +type AccessTokenStrategy interface { + AccessTokenSignature(ctx context.Context, token string) string + GenerateAccessToken(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) + ValidateAccessToken(ctx context.Context, requester fosite.Requester, token string) (err error) +} +type AccessTokenStrategyProvider interface { + AccessTokenStrategy() AccessTokenStrategy +} + +type 
RefreshTokenStrategy interface { + RefreshTokenSignature(ctx context.Context, token string) string + GenerateRefreshToken(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) + ValidateRefreshToken(ctx context.Context, requester fosite.Requester, token string) (err error) +} +type RefreshTokenStrategyProvider interface { + RefreshTokenStrategy() RefreshTokenStrategy +} diff --git a/fosite/handler/oauth2/strategy_hmacsha_plain.go b/fosite/handler/oauth2/strategy_hmacsha_plain.go new file mode 100644 index 00000000000..c6d84fc553e --- /dev/null +++ b/fosite/handler/oauth2/strategy_hmacsha_plain.go @@ -0,0 +1,114 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "time" + + "github.com/ory/x/errorsx" + + "github.com/ory/hydra/v2/fosite" + enigma "github.com/ory/hydra/v2/fosite/token/hmac" +) + +var ( + _ AuthorizeCodeStrategy = (*HMACSHAStrategyUnPrefixed)(nil) + _ AccessTokenStrategy = (*HMACSHAStrategyUnPrefixed)(nil) + _ RefreshTokenStrategy = (*HMACSHAStrategyUnPrefixed)(nil) +) + +type HMACSHAStrategyUnPrefixed struct { + Enigma *enigma.HMACStrategy + Config LifespanConfigProvider +} + +func NewHMACSHAStrategyUnPrefixed( + enigma *enigma.HMACStrategy, + config LifespanConfigProvider, +) *HMACSHAStrategyUnPrefixed { + return &HMACSHAStrategyUnPrefixed{ + Enigma: enigma, + Config: config, + } +} + +func (h *HMACSHAStrategyUnPrefixed) AccessTokenSignature(ctx context.Context, token string) string { + return h.Enigma.Signature(token) +} + +func (h *HMACSHAStrategyUnPrefixed) RefreshTokenSignature(ctx context.Context, token string) string { + return h.Enigma.Signature(token) +} + +func (h *HMACSHAStrategyUnPrefixed) AuthorizeCodeSignature(ctx context.Context, token string) string { + return h.Enigma.Signature(token) +} + +func (h *HMACSHAStrategyUnPrefixed) GenerateAccessToken(ctx context.Context, _ fosite.Requester) (token string, signature string, err error) { + token, sig, err := h.Enigma.Generate(ctx) + if err != nil { + return "", "", err + } + + return token, sig, nil +} + +func (h *HMACSHAStrategyUnPrefixed) ValidateAccessToken(ctx context.Context, r fosite.Requester, token string) (err error) { + exp := r.GetSession().GetExpiresAt(fosite.AccessToken) + if exp.IsZero() && r.GetRequestedAt().Add(h.Config.GetAccessTokenLifespan(ctx)).Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrTokenExpired.WithHintf("Access token expired at '%s'.", r.GetRequestedAt().Add(h.Config.GetAccessTokenLifespan(ctx)))) + } + + if !exp.IsZero() && exp.Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrTokenExpired.WithHintf("Access token expired at '%s'.", exp)) + } + + return h.Enigma.Validate(ctx, token) +} + +func (h *HMACSHAStrategyUnPrefixed) GenerateRefreshToken(ctx context.Context, _ fosite.Requester) (token string, signature string, err error) { + token, sig, err := h.Enigma.Generate(ctx) + if err != nil { + return "", "", err + } + + return token, sig, nil +} + +func (h *HMACSHAStrategyUnPrefixed) ValidateRefreshToken(ctx context.Context, r fosite.Requester, token string) (err error) { + exp := r.GetSession().GetExpiresAt(fosite.RefreshToken) + if exp.IsZero() { + // Unlimited lifetime + return h.Enigma.Validate(ctx, token) + } + + if !exp.IsZero() && exp.Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrTokenExpired.WithHintf("Refresh token expired at '%s'.", exp)) + } + + return h.Enigma.Validate(ctx, token) +} 
+ +func (h *HMACSHAStrategyUnPrefixed) GenerateAuthorizeCode(ctx context.Context, _ fosite.Requester) (token string, signature string, err error) { + token, sig, err := h.Enigma.Generate(ctx) + if err != nil { + return "", "", err + } + + return token, sig, nil +} + +func (h *HMACSHAStrategyUnPrefixed) ValidateAuthorizeCode(ctx context.Context, r fosite.Requester, token string) (err error) { + exp := r.GetSession().GetExpiresAt(fosite.AuthorizeCode) + if exp.IsZero() && r.GetRequestedAt().Add(h.Config.GetAuthorizeCodeLifespan(ctx)).Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrTokenExpired.WithHintf("Authorize code expired at '%s'.", r.GetRequestedAt().Add(h.Config.GetAuthorizeCodeLifespan(ctx)))) + } + + if !exp.IsZero() && exp.Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrTokenExpired.WithHintf("Authorize code expired at '%s'.", exp)) + } + + return h.Enigma.Validate(ctx, token) +} diff --git a/fosite/handler/oauth2/strategy_hmacsha_prefixed.go b/fosite/handler/oauth2/strategy_hmacsha_prefixed.go new file mode 100644 index 00000000000..e243997281e --- /dev/null +++ b/fosite/handler/oauth2/strategy_hmacsha_prefixed.go @@ -0,0 +1,75 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "fmt" + "strings" + + enigma "github.com/ory/hydra/v2/fosite/token/hmac" + + "github.com/ory/hydra/v2/fosite" +) + +var ( + _ AuthorizeCodeStrategy = (*HMACSHAStrategy)(nil) + _ AccessTokenStrategy = (*HMACSHAStrategy)(nil) + _ RefreshTokenStrategy = (*HMACSHAStrategy)(nil) +) + +type HMACSHAStrategy struct { + *HMACSHAStrategyUnPrefixed +} + +func NewHMACSHAStrategy( + enigma *enigma.HMACStrategy, + config LifespanConfigProvider, +) *HMACSHAStrategy { + return &HMACSHAStrategy{ + HMACSHAStrategyUnPrefixed: NewHMACSHAStrategyUnPrefixed(enigma, config), + } +} + +func (h *HMACSHAStrategy) getPrefix(part string) string { + return fmt.Sprintf("ory_%s_", part) +} + +func (h *HMACSHAStrategy) trimPrefix(token, part string) string { + return strings.TrimPrefix(token, h.getPrefix(part)) +} + +func (h *HMACSHAStrategy) setPrefix(token, part string) string { + if token == "" { + return "" + } + return h.getPrefix(part) + token +} + +func (h *HMACSHAStrategy) GenerateAccessToken(ctx context.Context, r fosite.Requester) (token string, signature string, err error) { + token, sig, err := h.HMACSHAStrategyUnPrefixed.GenerateAccessToken(ctx, r) + return h.setPrefix(token, "at"), sig, err +} + +func (h *HMACSHAStrategy) ValidateAccessToken(ctx context.Context, r fosite.Requester, token string) (err error) { + return h.HMACSHAStrategyUnPrefixed.ValidateAccessToken(ctx, r, h.trimPrefix(token, "at")) +} + +func (h *HMACSHAStrategy) GenerateRefreshToken(ctx context.Context, r fosite.Requester) (token string, signature string, err error) { + token, sig, err := h.HMACSHAStrategyUnPrefixed.GenerateRefreshToken(ctx, r) + return h.setPrefix(token, "rt"), sig, err +} + +func (h *HMACSHAStrategy) ValidateRefreshToken(ctx context.Context, r fosite.Requester, token string) (err error) { + return h.HMACSHAStrategyUnPrefixed.ValidateRefreshToken(ctx, r, h.trimPrefix(token, "rt")) +} + +func (h *HMACSHAStrategy) GenerateAuthorizeCode(ctx context.Context, r fosite.Requester) (token string, signature string, err error) { + token, sig, err := h.HMACSHAStrategyUnPrefixed.GenerateAuthorizeCode(ctx, r) + return h.setPrefix(token, "ac"), sig, err +} + +func (h *HMACSHAStrategy) ValidateAuthorizeCode(ctx context.Context, r 
fosite.Requester, token string) (err error) { + return h.HMACSHAStrategyUnPrefixed.ValidateAuthorizeCode(ctx, r, h.trimPrefix(token, "ac")) +} diff --git a/fosite/handler/oauth2/strategy_hmacsha_test.go b/fosite/handler/oauth2/strategy_hmacsha_test.go new file mode 100644 index 00000000000..c5e2842eb28 --- /dev/null +++ b/fosite/handler/oauth2/strategy_hmacsha_test.go @@ -0,0 +1,192 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/token/hmac" +) + +var hmacshaStrategy = oauth2.NewHMACSHAStrategy( + &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobar")}}, + &fosite.Config{ + AccessTokenLifespan: time.Hour * 24, + AuthorizeCodeLifespan: time.Hour * 24, + }, +) + +var hmacshaStrategyUnprefixed = oauth2.NewHMACSHAStrategyUnPrefixed( + &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobar")}}, + &fosite.Config{ + AccessTokenLifespan: time.Hour * 24, + AuthorizeCodeLifespan: time.Hour * 24, + }, +) + +var hmacExpiredCase = fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.AccessToken: time.Now().UTC().Add(-time.Hour), + fosite.AuthorizeCode: time.Now().UTC().Add(-time.Hour), + fosite.RefreshToken: time.Now().UTC().Add(-time.Hour), + }, + }, +} + +var hmacValidCase = fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.AccessToken: time.Now().UTC().Add(time.Hour), + fosite.AuthorizeCode: time.Now().UTC().Add(time.Hour), + fosite.RefreshToken: time.Now().UTC().Add(time.Hour), + }, + }, +} + +func TestHMACAccessToken(t *testing.T) { + for k, c := range []struct { + r fosite.Request + pass bool + strat any + prefix string + }{ + { + r: hmacValidCase, + pass: true, + strat: hmacshaStrategy, + prefix: "ory_at_", + }, + { + r: hmacExpiredCase, + pass: false, + strat: hmacshaStrategy, + prefix: "ory_at_", + }, + { + r: hmacValidCase, + pass: true, + strat: hmacshaStrategyUnprefixed, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + token, signature, err := hmacshaStrategy.GenerateAccessToken(context.Background(), &c.r) + assert.NoError(t, err) + assert.Equal(t, strings.Split(token, ".")[1], signature) + assert.Contains(t, token, c.prefix) + + cases := []string{ + token, + } + if c.prefix != "" { + cases = append(cases, strings.TrimPrefix(token, c.prefix)) + } + + for k, token := range cases { + t.Run(fmt.Sprintf("prefix=%v", k == 0), func(t *testing.T) { + err = hmacshaStrategy.ValidateAccessToken(context.Background(), &c.r, token) + if c.pass { + assert.NoError(t, err) + validate := hmacshaStrategy.Enigma.Signature(token) + assert.Equal(t, signature, validate) + } else { + assert.Error(t, err) + } + }) + } + }) + } +} + +func TestHMACRefreshToken(t *testing.T) { + for k, c := range []struct { + r fosite.Request + pass bool + }{ + { + r: hmacValidCase, + pass: true, + }, + { + r: hmacExpiredCase, + pass: false, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t 
*testing.T) { + token, signature, err := hmacshaStrategy.GenerateRefreshToken(context.Background(), &c.r) + assert.NoError(t, err) + assert.Equal(t, strings.Split(token, ".")[1], signature) + assert.Contains(t, token, "ory_rt_") + + for k, token := range []string{ + token, + strings.TrimPrefix(token, "ory_rt_"), + } { + t.Run(fmt.Sprintf("prefix=%v", k == 0), func(t *testing.T) { + err = hmacshaStrategy.ValidateRefreshToken(context.Background(), &c.r, token) + if c.pass { + assert.NoError(t, err) + validate := hmacshaStrategy.Enigma.Signature(token) + assert.Equal(t, signature, validate) + } else { + assert.Error(t, err) + } + }) + } + }) + } +} + +func TestHMACAuthorizeCode(t *testing.T) { + for k, c := range []struct { + r fosite.Request + pass bool + }{ + { + r: hmacValidCase, + pass: true, + }, + { + r: hmacExpiredCase, + pass: false, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + token, signature, err := hmacshaStrategy.GenerateAuthorizeCode(context.Background(), &c.r) + assert.NoError(t, err) + assert.Equal(t, strings.Split(token, ".")[1], signature) + assert.Contains(t, token, "ory_ac_") + + for k, token := range []string{ + token, + strings.TrimPrefix(token, "ory_ac_"), + } { + t.Run(fmt.Sprintf("prefix=%v", k == 0), func(t *testing.T) { + err = hmacshaStrategy.ValidateAuthorizeCode(context.Background(), &c.r, token) + if c.pass { + assert.NoError(t, err) + validate := hmacshaStrategy.Enigma.Signature(token) + assert.Equal(t, signature, validate) + } else { + assert.Error(t, err) + } + }) + } + }) + } +} diff --git a/fosite/handler/oauth2/strategy_jwt.go b/fosite/handler/oauth2/strategy_jwt.go new file mode 100644 index 00000000000..a55af4a7466 --- /dev/null +++ b/fosite/handler/oauth2/strategy_jwt.go @@ -0,0 +1,110 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/x/errorsx" +) + +// DefaultJWTStrategy is a JWT RS256 strategy. 
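+// It implements AccessTokenStrategy by signing access tokens as JWTs through the configured jwt.Signer;
+// AccessTokenSignature returns the third (signature) segment of the serialized JWS.
+//
+// A minimal construction sketch (illustrative only; rsaKey stands in for your signing key):
+//
+//	strategy := &DefaultJWTStrategy{
+//		Signer: &jwt.DefaultSigner{GetPrivateKey: func(context.Context) (interface{}, error) { return rsaKey, nil }},
+//		Config: &fosite.Config{},
+//	}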
+type DefaultJWTStrategy struct { + Signer jwt.Signer + Config interface { + fosite.AccessTokenIssuerProvider + fosite.JWTScopeFieldProvider + } +} + +var _ AccessTokenStrategy = (*DefaultJWTStrategy)(nil) + +func signature(token string) string { + split := strings.Split(token, ".") + if len(split) != 3 { + return "" + } + + return split[2] +} + +func (h *DefaultJWTStrategy) AccessTokenSignature(_ context.Context, token string) string { + return signature(token) +} + +func (h *DefaultJWTStrategy) GenerateAccessToken(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) { + return h.generate(ctx, fosite.AccessToken, requester) +} + +func (h *DefaultJWTStrategy) ValidateAccessToken(ctx context.Context, _ fosite.Requester, token string) error { + _, err := validate(ctx, h.Signer, token) + return err +} + +func validate(ctx context.Context, jwtStrategy jwt.Signer, token string) (t *jwt.Token, err error) { + t, err = jwtStrategy.Decode(ctx, token) + if err == nil { + err = t.Claims.Valid() + return + } + + var e *jwt.ValidationError + if errors.As(err, &e) { + err = errorsx.WithStack(toRFCErr(e).WithWrap(err).WithDebug(err.Error())) + } + + return +} + +func toRFCErr(v *jwt.ValidationError) *fosite.RFC6749Error { + switch { + case v == nil: + return nil + case v.Has(jwt.ValidationErrorMalformed): + return fosite.ErrInvalidTokenFormat + case v.Has(jwt.ValidationErrorUnverifiable | jwt.ValidationErrorSignatureInvalid): + return fosite.ErrTokenSignatureMismatch + case v.Has(jwt.ValidationErrorExpired): + return fosite.ErrTokenExpired + case v.Has(jwt.ValidationErrorAudience | + jwt.ValidationErrorIssuedAt | + jwt.ValidationErrorIssuer | + jwt.ValidationErrorNotValidYet | + jwt.ValidationErrorId | + jwt.ValidationErrorClaimsInvalid): + return fosite.ErrTokenClaim + default: + return fosite.ErrRequestUnauthorized + } +} + +func (h *DefaultJWTStrategy) generate(ctx context.Context, tokenType fosite.TokenType, requester fosite.Requester) (string, string, error) { + if jwtSession, ok := requester.GetSession().(JWTSessionContainer); !ok { + return "", "", errors.Errorf("Session must be of type JWTSessionContainer but got type: %T", requester.GetSession()) + } else if claims := jwtSession.GetJWTClaims(); claims == nil { + return "", "", errors.New("GetTokenClaims() must not be nil") + } else { + claims. + With( + jwtSession.GetExpiresAt(tokenType), + requester.GetGrantedScopes(), + requester.GetGrantedAudience(), + ). + WithDefaults( + time.Now().UTC(), + h.Config.GetAccessTokenIssuer(ctx), + ). + WithScopeField( + h.Config.GetJWTScopeField(ctx), + ) + + return h.Signer.Generate(ctx, claims.ToMapClaims(), jwtSession.GetJWTHeader()) + } +} diff --git a/fosite/handler/oauth2/strategy_jwt_session.go b/fosite/handler/oauth2/strategy_jwt_session.go new file mode 100644 index 00000000000..5b06d9cba7a --- /dev/null +++ b/fosite/handler/oauth2/strategy_jwt_session.go @@ -0,0 +1,102 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "time" + + "github.com/mohae/deepcopy" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type JWTSessionContainer interface { + // GetJWTClaims returns the claims. + GetJWTClaims() jwt.JWTClaimsContainer + + // GetJWTHeader returns the header. + GetJWTHeader() *jwt.Headers + + fosite.Session +} + +// JWTSession Container for the JWT session. 
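+// It implements JWTSessionContainer; its accessors lazily initialize the claims, the header, and the ExpiresAt map.
+//
+// A minimal setup sketch (illustrative only; values are placeholders taken from the tests):
+//
+//	session := &JWTSession{
+//		JWTClaims: &jwt.JWTClaims{Subject: "peter"},
+//		JWTHeader: &jwt.Headers{},
+//	}
+//	session.SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(time.Hour))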
+type JWTSession struct { + JWTClaims *jwt.JWTClaims + JWTHeader *jwt.Headers + ExpiresAt map[fosite.TokenType]time.Time + Username string + Subject string +} + +func (j *JWTSession) GetJWTClaims() jwt.JWTClaimsContainer { + if j.JWTClaims == nil { + j.JWTClaims = &jwt.JWTClaims{} + } + return j.JWTClaims +} + +func (j *JWTSession) GetJWTHeader() *jwt.Headers { + if j.JWTHeader == nil { + j.JWTHeader = &jwt.Headers{} + } + return j.JWTHeader +} + +func (j *JWTSession) SetExpiresAt(key fosite.TokenType, exp time.Time) { + if j.ExpiresAt == nil { + j.ExpiresAt = make(map[fosite.TokenType]time.Time) + } + j.ExpiresAt[key] = exp +} + +func (j *JWTSession) GetExpiresAt(key fosite.TokenType) time.Time { + if j.ExpiresAt == nil { + j.ExpiresAt = make(map[fosite.TokenType]time.Time) + } + + if _, ok := j.ExpiresAt[key]; !ok { + return time.Time{} + } + return j.ExpiresAt[key] +} + +func (j *JWTSession) GetUsername() string { + if j == nil { + return "" + } + return j.Username +} + +func (j *JWTSession) SetSubject(subject string) { + j.Subject = subject +} + +func (j *JWTSession) GetSubject() string { + if j == nil { + return "" + } + + return j.Subject +} + +func (j *JWTSession) Clone() fosite.Session { + if j == nil { + return nil + } + + return deepcopy.Copy(j).(fosite.Session) +} + +// GetExtraClaims implements ExtraClaimsSession for JWTSession. +// The returned value is a copy of JWTSession claims. +func (s *JWTSession) GetExtraClaims() map[string]interface{} { + if s == nil { + return nil + } + + // We make a clone so that WithScopeField does not change the original value. + return s.Clone().(*JWTSession).GetJWTClaims().WithScopeField(jwt.JWTScopeFieldString).ToMapClaims() +} diff --git a/fosite/handler/oauth2/strategy_jwt_test.go b/fosite/handler/oauth2/strategy_jwt_test.go new file mode 100644 index 00000000000..ce2deef4397 --- /dev/null +++ b/fosite/handler/oauth2/strategy_jwt_test.go @@ -0,0 +1,237 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "strings" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +var ( + rsaKey = gen.MustRSAKey() + j = &oauth2.DefaultJWTStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return rsaKey, nil + }, + }, + Config: &fosite.Config{}, + } +) + +// returns a valid JWT type. The JWTClaims.ExpiresAt time is intentionally +// left empty to ensure it is pulled from the session's ExpiresAt map for +// the given fosite.TokenType. 
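+// The request additionally grants the "email" and "offline" scopes and the "group0" audience so that the scope and audience claims of the generated token can be asserted.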
+var jwtValidCase = func(tokenType fosite.TokenType) *fosite.Request { + r := &fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &oauth2.JWTSession{ + JWTClaims: &jwt.JWTClaims{ + Issuer: "fosite", + Subject: "peter", + IssuedAt: time.Now().UTC(), + NotBefore: time.Now().UTC(), + Extra: map[string]interface{}{"foo": "bar"}, + }, + JWTHeader: &jwt.Headers{ + Extra: make(map[string]interface{}), + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + tokenType: time.Now().UTC().Add(time.Hour), + }, + }, + } + r.SetRequestedScopes([]string{"email", "offline"}) + r.GrantScope("email") + r.GrantScope("offline") + r.SetRequestedAudience([]string{"group0"}) + r.GrantAudience("group0") + return r +} + +var jwtValidCaseWithZeroRefreshExpiry = func(tokenType fosite.TokenType) *fosite.Request { + r := &fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &oauth2.JWTSession{ + JWTClaims: &jwt.JWTClaims{ + Issuer: "fosite", + Subject: "peter", + IssuedAt: time.Now().UTC(), + NotBefore: time.Now().UTC(), + Extra: map[string]interface{}{"foo": "bar"}, + }, + JWTHeader: &jwt.Headers{ + Extra: make(map[string]interface{}), + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + tokenType: time.Now().UTC().Add(time.Hour), + fosite.RefreshToken: {}, + }, + }, + } + r.SetRequestedScopes([]string{"email", "offline"}) + r.GrantScope("email") + r.GrantScope("offline") + r.SetRequestedAudience([]string{"group0"}) + r.GrantAudience("group0") + return r +} + +var jwtValidCaseWithRefreshExpiry = func(tokenType fosite.TokenType) *fosite.Request { + r := &fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &oauth2.JWTSession{ + JWTClaims: &jwt.JWTClaims{ + Issuer: "fosite", + Subject: "peter", + IssuedAt: time.Now().UTC(), + NotBefore: time.Now().UTC(), + Extra: map[string]interface{}{"foo": "bar"}, + }, + JWTHeader: &jwt.Headers{ + Extra: make(map[string]interface{}), + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + tokenType: time.Now().UTC().Add(time.Hour), + fosite.RefreshToken: time.Now().UTC().Add(time.Hour * 2).Round(time.Hour), + }, + }, + } + r.SetRequestedScopes([]string{"email", "offline"}) + r.GrantScope("email") + r.GrantScope("offline") + r.SetRequestedAudience([]string{"group0"}) + r.GrantAudience("group0") + return r +} + +// returns an expired JWT type. The JWTClaims.ExpiresAt time is intentionally +// left empty to ensure it is pulled from the session's ExpiresAt map for +// the given fosite.TokenType. 
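+// Note: besides the expired entry in the session's ExpiresAt map, this case also sets JWTClaims.ExpiresAt to a time in the past.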
+var jwtExpiredCase = func(tokenType fosite.TokenType) *fosite.Request { + r := &fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &oauth2.JWTSession{ + JWTClaims: &jwt.JWTClaims{ + Issuer: "fosite", + Subject: "peter", + IssuedAt: time.Now().UTC(), + NotBefore: time.Now().UTC(), + ExpiresAt: time.Now().UTC().Add(-time.Minute), + Extra: map[string]interface{}{"foo": "bar"}, + }, + JWTHeader: &jwt.Headers{ + Extra: make(map[string]interface{}), + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + tokenType: time.Now().UTC().Add(-time.Hour), + }, + }, + } + r.SetRequestedScopes([]string{"email", "offline"}) + r.GrantScope("email") + r.GrantScope("offline") + r.SetRequestedAudience([]string{"group0"}) + r.GrantAudience("group0") + return r +} + +func TestAccessToken(t *testing.T) { + for s, scopeField := range []jwt.JWTScopeFieldEnum{ + jwt.JWTScopeFieldList, + jwt.JWTScopeFieldString, + jwt.JWTScopeFieldBoth, + } { + for k, c := range []struct { + r *fosite.Request + pass bool + }{ + { + r: jwtValidCase(fosite.AccessToken), + pass: true, + }, + { + r: jwtExpiredCase(fosite.AccessToken), + pass: false, + }, + { + r: jwtValidCaseWithZeroRefreshExpiry(fosite.AccessToken), + pass: true, + }, + { + r: jwtValidCaseWithRefreshExpiry(fosite.AccessToken), + pass: true, + }, + } { + t.Run(fmt.Sprintf("case=%d/%d", s, k), func(t *testing.T) { + j.Config = &fosite.Config{ + JWTScopeClaimKey: scopeField, + } + token, signature, err := j.GenerateAccessToken(context.Background(), c.r) + assert.NoError(t, err) + + parts := strings.Split(token, ".") + require.Len(t, parts, 3, "%s - %v", token, parts) + assert.Equal(t, parts[2], signature) + + rawPayload, err := base64.RawURLEncoding.DecodeString(parts[1]) + require.NoError(t, err) + var payload map[string]interface{} + err = json.Unmarshal(rawPayload, &payload) + require.NoError(t, err) + if scopeField == jwt.JWTScopeFieldList || scopeField == jwt.JWTScopeFieldBoth { + scope, ok := payload["scp"] + require.True(t, ok) + assert.Equal(t, []interface{}{"email", "offline"}, scope) + } + if scopeField == jwt.JWTScopeFieldString || scopeField == jwt.JWTScopeFieldBoth { + scope, ok := payload["scope"] + require.True(t, ok) + assert.Equal(t, "email offline", scope) + } + + extraClaimsSession, ok := c.r.GetSession().(fosite.ExtraClaimsSession) + require.True(t, ok) + claims := extraClaimsSession.GetExtraClaims() + assert.Equal(t, "bar", claims["foo"]) + // Returned, but will be ignored by the introspect handler. + assert.Equal(t, "peter", claims["sub"]) + assert.Equal(t, []string{"group0"}, claims["aud"]) + // Scope field is always a string. 
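+ // GetExtraClaims clones the session and normalizes the scope claim via WithScopeField(jwt.JWTScopeFieldString), hence a single space-delimited string is expected here.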
+ assert.Equal(t, "email offline", claims["scope"]) + + validate := oauth2.CallSignature(token) + err = j.ValidateAccessToken(context.Background(), c.r, token) + if c.pass { + assert.NoError(t, err) + assert.Equal(t, signature, validate) + } else { + assert.Error(t, err) + } + }) + } + } +} diff --git a/fosite/handler/openid/errors.go b/fosite/handler/openid/errors.go new file mode 100644 index 00000000000..81436b95cea --- /dev/null +++ b/fosite/handler/openid/errors.go @@ -0,0 +1,10 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import "github.com/pkg/errors" + +var ( + ErrInvalidSession = errors.New("Session type mismatch") +) diff --git a/fosite/handler/openid/export_test.go b/fosite/handler/openid/export_test.go new file mode 100644 index 00000000000..e18ceabf202 --- /dev/null +++ b/fosite/handler/openid/export_test.go @@ -0,0 +1,15 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" +) + +func CallGenerateIDToken(ctx context.Context, lifespan time.Duration, fosr fosite.Requester, h *IDTokenHandleHelper) (token string, err error) { + return h.generateIDToken(ctx, lifespan, fosr) +} diff --git a/fosite/handler/openid/flow_device_auth.go b/fosite/handler/openid/flow_device_auth.go new file mode 100644 index 00000000000..619e4590933 --- /dev/null +++ b/fosite/handler/openid/flow_device_auth.go @@ -0,0 +1,28 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// OpenIDConnectDeviceHandler a response handler for the Device Authorization Grant with OpenID Connect identity layer +type OpenIDConnectDeviceHandler struct { + Storage OpenIDConnectRequestStorageProvider + Strategy rfc8628.DeviceCodeStrategyProvider + Config interface { + fosite.IDTokenLifespanProvider + } + *IDTokenHandleHelper +} + +func (c *OpenIDConnectDeviceHandler) HandleDeviceEndpointRequest(ctx context.Context, dar fosite.DeviceRequester, resp fosite.DeviceResponder) error { + // We don't want to create the openid session on this call, because we don't know if the user + // will actually complete the flow and give consent. The implementer MUST call the CreateOpenIDConnectSession + // methods when the user logs in to instantiate the session. 
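+ // As a result, this handler intentionally performs no work at the device authorization endpoint; the ID Token is issued later by PopulateTokenEndpointResponse in flow_device_token.go.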
+ return nil +} diff --git a/fosite/handler/openid/flow_device_auth_test.go b/fosite/handler/openid/flow_device_auth_test.go new file mode 100644 index 00000000000..140cd4eb134 --- /dev/null +++ b/fosite/handler/openid/flow_device_auth_test.go @@ -0,0 +1,131 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "testing" + "time" + + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite/internal" + + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestDeviceAuth_HandleDeviceEndpointRequest(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + store := internal.NewMockOpenIDConnectRequestStorageProvider(ctrl) + strategyProvider := internal.NewMockDeviceCodeStrategyProvider(ctrl) + openIDTokenStrategyProvider := internal.NewMockOpenIDConnectTokenStrategyProvider(ctrl) + + config := &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + DeviceAndUserCodeLifespan: time.Hour * 24, + } + + strategy := &rfc8628.DefaultDeviceStrategy{ + Enigma: &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foobar")}}, + Config: config, + } + strategyProvider.EXPECT().DeviceCodeStrategy().Return(strategy).Times(0) + + signer := &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return key, nil + }, + } + + defaultStrategy := &openid.DefaultStrategy{ + Signer: signer, + Config: config, + } + openIDTokenStrategyProvider.EXPECT().OpenIDConnectTokenStrategy().Return(defaultStrategy).Times(0) + + h := openid.OpenIDConnectDeviceHandler{ + Storage: store, + Strategy: strategyProvider, + Config: config, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: openIDTokenStrategyProvider, + }, + } + + session := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + }, + Headers: &jwt.Headers{}, + } + + client := &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + } + + testCases := []struct { + description string + authreq *fosite.DeviceRequest + authresp *fosite.DeviceResponse + setup func(authreq *fosite.DeviceRequest) + expectErr error + }{ + { + description: "should ignore because scope openid is not set", + authreq: &fosite.DeviceRequest{ + Request: fosite.Request{ + RequestedScope: fosite.Arguments{"email"}, + }, + }, + }, + { + description: "should ignore because client grant type is invalid", + authreq: &fosite.DeviceRequest{ + Request: fosite.Request{ + RequestedScope: fosite.Arguments{"openid", "email"}, + Client: &fosite.DefaultClient{ + GrantTypes: []string{"authorization_code"}, + }, + }, + }, + }, + { + description: "should pass", + authreq: &fosite.DeviceRequest{ + Request: fosite.Request{ + RequestedScope: fosite.Arguments{"openid", "email"}, + Client: client, + Session: session, + }, + }, + authresp: &fosite.DeviceResponse{ + DeviceCode: "device_code", + }, + }, + } + + for i, testCase := range testCases { + t.Run(fmt.Sprintf("case=%d/description=%s", i, testCase.description), func(t *testing.T) { + if testCase.setup != nil { + testCase.setup(testCase.authreq) + } + + err := 
h.HandleDeviceEndpointRequest(context.Background(), testCase.authreq, testCase.authresp) + if testCase.expectErr != nil { + require.EqualError(t, err, testCase.expectErr.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + } + }) + } +} diff --git a/fosite/handler/openid/flow_device_token.go b/fosite/handler/openid/flow_device_token.go new file mode 100644 index 00000000000..6c85360577c --- /dev/null +++ b/fosite/handler/openid/flow_device_token.go @@ -0,0 +1,69 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/errorsx" +) + +func (c *OpenIDConnectDeviceHandler) HandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) error { + return errorsx.WithStack(fosite.ErrUnknownRequest) +} + +func (c *OpenIDConnectDeviceHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !requester.GetClient().GetGrantTypes().Has(string(fosite.GrantTypeDeviceCode)) { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant \"urn:ietf:params:oauth:grant-type:device_code\".")) + } + + deviceCode := requester.GetRequestForm().Get("device_code") + signature, _ := c.Strategy.DeviceCodeStrategy().DeviceCodeSignature(ctx, deviceCode) + ar, err := c.Storage.OpenIDConnectRequestStorage().GetOpenIDConnectSession(ctx, signature, requester) + if errors.Is(err, ErrNoSessionFound) { + return errorsx.WithStack(fosite.ErrUnknownRequest.WithWrap(err).WithDebug(err.Error())) + } + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if !ar.GetGrantedScopes().Has("openid") { + return errorsx.WithStack(fosite.ErrMisconfiguration.WithDebug("An OpenID Connect session was found but the openid scope is missing, probably due to a broken code configuration.")) + } + + session, ok := ar.GetSession().(Session) + if !ok { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because session must be of type fosite/handler/openid.Session.")) + } + + claims := session.IDTokenClaims() + if claims.Subject == "" { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because subject is an empty string.")) + } + + err = c.Storage.OpenIDConnectRequestStorage().DeleteOpenIDConnectSession(ctx, deviceCode) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + claims.AccessTokenHash = c.GetAccessTokenHash(ctx, requester, responder) + + idTokenLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), fosite.GrantTypeDeviceCode, fosite.IDToken, c.Config.GetIDTokenLifespan(ctx)) + return c.IssueExplicitIDToken(ctx, idTokenLifespan, ar, responder) +} + +func (c *OpenIDConnectDeviceHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *OpenIDConnectDeviceHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + return requester.GetGrantTypes().ExactOne(string(fosite.GrantTypeDeviceCode)) +} diff --git a/fosite/handler/openid/flow_device_token_test.go 
b/fosite/handler/openid/flow_device_token_test.go new file mode 100644 index 00000000000..c63256d2a65 --- /dev/null +++ b/fosite/handler/openid/flow_device_token_test.go @@ -0,0 +1,276 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "net/url" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/fosite/token/jwt" + + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/fosite" +) + +func TestDeviceToken_HandleTokenEndpointRequest(t *testing.T) { + h := openid.OpenIDConnectDeviceHandler{ + Config: &fosite.Config{}, + } + areq := fosite.NewAccessRequest(nil) + areq.Client = &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + } + + err := h.HandleTokenEndpointRequest(context.Background(), areq) + assert.EqualError(t, err, fosite.ErrUnknownRequest.Error()) +} + +func TestDeviceToken_PopulateTokenEndpointResponse(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + store := internal.NewMockOpenIDConnectRequestStorage(ctrl) + provider := internal.NewMockOpenIDConnectRequestStorageProvider(ctrl) + strategyProvider := internal.NewMockDeviceCodeStrategyProvider(ctrl) + openIDTokenStrategyProvider := internal.NewMockOpenIDConnectTokenStrategyProvider(ctrl) + + config := &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + DeviceAndUserCodeLifespan: time.Hour * 24, + IDTokenLifespan: time.Hour * 24, + } + strategy := &rfc8628.DefaultDeviceStrategy{ + Enigma: &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foobar")}}, + Config: config, + } + strategyProvider.EXPECT().DeviceCodeStrategy().Return(strategy).AnyTimes() + + signer := &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return key, nil + }, + } + + defaultStrategy := &openid.DefaultStrategy{ + Signer: signer, + Config: config, + } + openIDTokenStrategyProvider.EXPECT().OpenIDConnectTokenStrategy().Return(defaultStrategy).AnyTimes() + + h := openid.OpenIDConnectDeviceHandler{ + Storage: provider, + Strategy: strategyProvider, + Config: config, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: openIDTokenStrategyProvider, + }, + } + + session := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + }, + Headers: &jwt.Headers{}, + } + + client := &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + } + + testCases := []struct { + description string + areq *fosite.AccessRequest + aresp *fosite.AccessResponse + setup func(areq *fosite.AccessRequest) + check func(t *testing.T, aresp *fosite.AccessResponse) + expectErr error + }{ + { + description: "should fail because the grant type is invalid", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + aresp: fosite.NewAccessResponse(), + expectErr: fosite.ErrUnknownRequest, + }, + { + description: "should fail because 
session not found", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + aresp: fosite.NewAccessResponse(), + setup: func(areq *fosite.AccessRequest) { + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), gomock.Any(), areq).Return(nil, openid.ErrNoSessionFound) + }, + expectErr: fosite.ErrUnknownRequest, + }, + { + description: "should fail because session lookup fails", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + setup: func(areq *fosite.AccessRequest) { + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), gomock.Any(), areq).Return(nil, errors.New("")) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should fail because auth request grant scope is invalid", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + setup: func(areq *fosite.AccessRequest) { + authreq := &fosite.DeviceRequest{ + Request: fosite.Request{ + Client: client, + GrantedScope: fosite.Arguments{"email"}, + Session: session, + }, + } + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), gomock.Any(), areq).Return(authreq, nil) + }, + expectErr: fosite.ErrMisconfiguration, + }, + { + description: "should fail because auth request is missing session", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + setup: func(areq *fosite.AccessRequest) { + authreq := &fosite.DeviceRequest{ + Request: fosite.Request{ + Client: client, + GrantedScope: fosite.Arguments{"openid", "email"}, + }, + } + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), gomock.Any(), areq).Return(authreq, nil) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should fail because auth request session is missing subject claims", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + setup: func(areq *fosite.AccessRequest) { + authreq := &fosite.DeviceRequest{ + Request: fosite.Request{ + Client: client, + GrantedScope: fosite.Arguments{"openid", "email"}, + Session: openid.NewDefaultSession(), + }, + } + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), gomock.Any(), areq).Return(authreq, nil) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should pass", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: 
fosite.Request{ + Client: client, + Form: url.Values{"device_code": []string{"device_code"}}, + Session: session, + }, + }, + setup: func(areq *fosite.AccessRequest) { + authreq := &fosite.DeviceRequest{ + Request: fosite.Request{ + Client: client, + GrantedScope: fosite.Arguments{"openid", "email"}, + Session: session, + }, + } + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(2) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), gomock.Any(), areq).Return(authreq, nil) + store.EXPECT().DeleteOpenIDConnectSession(gomock.Any(), gomock.Any()).Return(nil) + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.GetExtra("id_token")) + + idToken, _ := aresp.GetExtra("id_token").(string) + decodedIdToken, err := jwt.Parse(idToken, func(token *jwt.Token) (interface{}, error) { + return key.PublicKey, nil + }) + require.NoError(t, err) + + claims := decodedIdToken.Claims + assert.NotEmpty(t, claims["at_hash"]) + + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + internal.RequireEqualTime(t, time.Now().Add(time.Hour*24), *idTokenExp, time.Minute) + }, + }, + } + + for i, testCase := range testCases { + t.Run(fmt.Sprintf("case=%d/description=%s", i, testCase.description), func(t *testing.T) { + if testCase.setup != nil { + testCase.setup(testCase.areq) + } + + aresp := fosite.NewAccessResponse() + err := h.PopulateTokenEndpointResponse(context.Background(), testCase.areq, aresp) + if testCase.expectErr != nil { + require.EqualError(t, err, testCase.expectErr.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + } + + if testCase.check != nil { + testCase.check(t, aresp) + } + }) + } +} diff --git a/fosite/handler/openid/flow_explicit_auth.go b/fosite/handler/openid/flow_explicit_auth.go new file mode 100644 index 00000000000..79b9260129a --- /dev/null +++ b/fosite/handler/openid/flow_explicit_auth.go @@ -0,0 +1,75 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + + "github.com/ory/x/errorsx" + + "github.com/ory/hydra/v2/fosite" +) + +var ( + _ fosite.AuthorizeEndpointHandler = (*ExplicitHandler)(nil) + _ fosite.TokenEndpointHandler = (*ExplicitHandler)(nil) +) + +type ExplicitHandler struct { + Storage OpenIDConnectRequestStorageProvider + OpenIDConnectRequestValidator *OpenIDConnectRequestValidator + + Config interface { + fosite.IDTokenLifespanProvider + } + + *IDTokenHandleHelper +} + +var oidcParameters = []string{ + "grant_type", + "max_age", + "prompt", + "acr_values", + "id_token_hint", + "nonce", +} + +func (c *ExplicitHandler) HandleAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + if !(ar.GetGrantedScopes().Has("openid") && ar.GetResponseTypes().ExactOne("code")) { + return nil + } + + //if !ar.GetClient().GetResponseTypes().Has("id_token", "code") { + // return errorsx.WithStack(fosite.ErrInvalidRequest.WithDebug("The client is not allowed to use response type id_token and code")) + //} + + if len(resp.GetCode()) == 0 { + return errorsx.WithStack(fosite.ErrMisconfiguration.WithDebug("The authorization code has not been issued yet, indicating a broken code configuration.")) + } + + // This ensures that the 'redirect_uri' parameter is present for OpenID Connect 1.0 authorization requests as per: + // + // Authorization Code Flow - https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest + // Implicit Flow - 
https://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthRequest + // Hybrid Flow - https://openid.net/specs/openid-connect-core-1_0.html#HybridAuthRequest + // + // Note: as per the Hybrid Flow documentation the Hybrid Flow has the same requirements as the Authorization Code Flow. + rawRedirectURI := ar.GetRequestForm().Get("redirect_uri") + if len(rawRedirectURI) == 0 { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("The 'redirect_uri' parameter is required when using OpenID Connect 1.0.")) + } + + if err := c.OpenIDConnectRequestValidator.ValidatePrompt(ctx, ar); err != nil { + return err + } + + if err := c.Storage.OpenIDConnectRequestStorage().CreateOpenIDConnectSession(ctx, resp.GetCode(), ar.Sanitize(oidcParameters)); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + // there is no need to check for https, because it has already been checked by core.explicit + + return nil +} diff --git a/fosite/handler/openid/flow_explicit_auth_test.go b/fosite/handler/openid/flow_explicit_auth_test.go new file mode 100644 index 00000000000..aa77cf97949 --- /dev/null +++ b/fosite/handler/openid/flow_explicit_auth_test.go @@ -0,0 +1,154 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "net/url" + "testing" + + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +// expose key to verify id_token +var key = gen.MustRSAKey() + +var oidcParameters = []string{ + "grant_type", + "max_age", + "prompt", + "acr_values", + "id_token_hint", + "nonce", +} + +func makeOpenIDConnectExplicitHandler(ctrl *gomock.Controller, minParameterEntropy int) (openid.ExplicitHandler, *internal.MockOpenIDConnectRequestStorage, *internal.MockOpenIDConnectRequestStorageProvider) { + store := internal.NewMockOpenIDConnectRequestStorage(ctrl) + provider := internal.NewMockOpenIDConnectRequestStorageProvider(ctrl) + openIDTokenStrategyProvider := internal.NewMockOpenIDConnectTokenStrategyProvider(ctrl) + config := &fosite.Config{MinParameterEntropy: minParameterEntropy} + + defaultStrategy := &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: config, + } + openIDTokenStrategyProvider.EXPECT().OpenIDConnectTokenStrategy().Return(defaultStrategy).AnyTimes() + + return openid.ExplicitHandler{ + Storage: provider, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: openIDTokenStrategyProvider, + }, + OpenIDConnectRequestValidator: openid.NewOpenIDConnectRequestValidator(defaultStrategy.Signer, config), + Config: config, + }, store, provider +} + +func TestExplicit_HandleAuthorizeEndpointRequest(t *testing.T) { + ctrl := gomock.NewController(t) + aresp := internal.NewMockAuthorizeResponder(ctrl) + t.Cleanup(ctrl.Finish) + + areq := fosite.NewAuthorizeRequest() + + session := openid.NewDefaultSession() + session.Claims.Subject = "foo" + areq.Session = session + areq.Form = url.Values{ + "redirect_uri": {"https://foobar.com"}, + } + + for k, c := range []struct { + description string + 
setup func() openid.ExplicitHandler + expectErr error + }{ + { + description: "should pass because not responsible for handling an empty response type", + setup: func() openid.ExplicitHandler { + h, _, _ := makeOpenIDConnectExplicitHandler(ctrl, fosite.MinParameterEntropy) + areq.ResponseTypes = fosite.Arguments{""} + return h + }, + }, + { + description: "should pass because scope openid is not set", + setup: func() openid.ExplicitHandler { + h, _, _ := makeOpenIDConnectExplicitHandler(ctrl, fosite.MinParameterEntropy) + areq.ResponseTypes = fosite.Arguments{"code"} + areq.Client = &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + } + areq.RequestedScope = fosite.Arguments{""} + return h + }, + }, + { + description: "should fail because no code set", + setup: func() openid.ExplicitHandler { + h, _, _ := makeOpenIDConnectExplicitHandler(ctrl, fosite.MinParameterEntropy) + areq.GrantedScope = fosite.Arguments{"openid"} + areq.Form.Set("nonce", "11111111111111111111111111111") + aresp.EXPECT().GetCode().Return("") + return h + }, + expectErr: fosite.ErrMisconfiguration, + }, + { + description: "should fail because lookup fails", + setup: func() openid.ExplicitHandler { + h, store, provider := makeOpenIDConnectExplicitHandler(ctrl, fosite.MinParameterEntropy) + aresp.EXPECT().GetCode().AnyTimes().Return("codeexample") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().CreateOpenIDConnectSession(gomock.Any(), "codeexample", gomock.Eq(areq.Sanitize(oidcParameters))).Return(errors.New("")) + return h + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should pass", + setup: func() openid.ExplicitHandler { + h, store, provider := makeOpenIDConnectExplicitHandler(ctrl, fosite.MinParameterEntropy) + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().CreateOpenIDConnectSession(gomock.Any(), "codeexample", gomock.Eq(areq.Sanitize(oidcParameters))).AnyTimes().Return(nil) + return h + }, + }, + { + description: "should fail because redirect url is missing", + setup: func() openid.ExplicitHandler { + areq.Form.Del("redirect_uri") + h, store, _ := makeOpenIDConnectExplicitHandler(ctrl, fosite.MinParameterEntropy) + store.EXPECT().CreateOpenIDConnectSession(gomock.Any(), "codeexample", gomock.Eq(areq.Sanitize(oidcParameters))).AnyTimes().Return(nil) + return h + }, + expectErr: fosite.ErrInvalidRequest, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + h := c.setup() + err := h.HandleAuthorizeEndpointRequest(context.Background(), areq, aresp) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/fosite/handler/openid/flow_explicit_token.go b/fosite/handler/openid/flow_explicit_token.go new file mode 100644 index 00000000000..8685364139c --- /dev/null +++ b/fosite/handler/openid/flow_explicit_token.go @@ -0,0 +1,76 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" +) + +func (c *ExplicitHandler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + return errorsx.WithStack(fosite.ErrUnknownRequest) +} + +func (c *ExplicitHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + if 
!c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + authorizeCode := requester.GetRequestForm().Get("code") + + authorize, err := c.Storage.OpenIDConnectRequestStorage().GetOpenIDConnectSession(ctx, authorizeCode, requester) + if errors.Is(err, ErrNoSessionFound) { + return errorsx.WithStack(fosite.ErrUnknownRequest.WithWrap(err).WithDebug(err.Error())) + } else if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if !authorize.GetGrantedScopes().Has("openid") { + return errorsx.WithStack(fosite.ErrMisconfiguration.WithDebug("An OpenID Connect session was found but the openid scope is missing, probably due to a broken code configuration.")) + } + + if !requester.GetClient().GetGrantTypes().Has("authorization_code") { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant \"authorization_code\".")) + } + + sess, ok := authorize.GetSession().(Session) + if !ok { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because session must be of type fosite/handler/openid.Session.")) + } + + claims := sess.IDTokenClaims() + if claims.Subject == "" { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because subject is an empty string.")) + } + + err = c.Storage.OpenIDConnectRequestStorage().DeleteOpenIDConnectSession(ctx, authorizeCode) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + claims.AccessTokenHash = c.GetAccessTokenHash(ctx, requester, responder) + + // The response type `id_token` is only required when performing the implicit or hybrid flow, see: + // https://openid.net/specs/openid-connect-registration-1_0.html + // + // if !requester.GetClient().GetResponseTypes().Has("id_token") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use response type id_token")) + // } + + idTokenLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), fosite.GrantTypeAuthorizationCode, fosite.IDToken, c.Config.GetIDTokenLifespan(ctx)) + return c.IssueExplicitIDToken(ctx, idTokenLifespan, authorize, responder) +} + +func (c *ExplicitHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *ExplicitHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + return requester.GetGrantTypes().ExactOne("authorization_code") +} diff --git a/fosite/handler/openid/flow_explicit_token_test.go b/fosite/handler/openid/flow_explicit_token_test.go new file mode 100644 index 00000000000..3cf22492a61 --- /dev/null +++ b/fosite/handler/openid/flow_explicit_token_test.go @@ -0,0 +1,257 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestHandleTokenEndpointRequest(t *testing.T) { + h := &openid.ExplicitHandler{Config: 
&fosite.Config{}} + areq := fosite.NewAccessRequest(nil) + areq.Client = &fosite.DefaultClient{ + // ResponseTypes: fosite.Arguments{"id_token"}, + } + assert.EqualError(t, h.HandleTokenEndpointRequest(context.Background(), areq), fosite.ErrUnknownRequest.Error()) +} + +func TestExplicit_PopulateTokenEndpointResponse(t *testing.T) { + for k, c := range []struct { + description string + setup func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) + expectErr error + check func(t *testing.T, aresp *fosite.AccessResponse) + }{ + { + description: "should fail because current request has invalid grant type", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.GrantTypes = fosite.Arguments{"some_other_grant_type"} + }, + expectErr: fosite.ErrUnknownRequest, + }, + { + description: "should fail because storage lookup returns not found", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(nil, openid.ErrNoSessionFound) + }, + expectErr: fosite.ErrUnknownRequest, + }, + { + description: "should fail because storage lookup fails", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(nil, errors.New("")) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should fail because stored request is missing openid scope", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(fosite.NewAuthorizeRequest(), nil) + }, + expectErr: fosite.ErrMisconfiguration, + }, + { + description: "should fail because current request's client does not have authorization_code grant type", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"some_other_grant_type"}, + } + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + storedReq := fosite.NewAuthorizeRequest() + storedReq.GrantedScope = fosite.Arguments{"openid"} + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(storedReq, nil) + }, + expectErr: fosite.ErrUnauthorizedClient, + }, + { + description: "should pass with custom client lifespans", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req 
*fosite.AccessRequest) { + req.Client = &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + }, + TokenLifespans: &internal.TestLifespans, + } + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + storedSession := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{Subject: "peter"}, + } + storedReq := fosite.NewAuthorizeRequest() + storedReq.Session = storedSession + storedReq.GrantedScope = fosite.Arguments{"openid"} + storedReq.Form.Set("nonce", "1111111111111111") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(2) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(storedReq, nil) + store.EXPECT().DeleteOpenIDConnectSession(gomock.Any(), "foobar").Return(nil) + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.GetExtra("id_token")) + idToken, _ := aresp.GetExtra("id_token").(string) + decodedIdToken, err := jwt.Parse(idToken, func(token *jwt.Token) (interface{}, error) { + return key.PublicKey, nil + }) + require.NoError(t, err) + claims := decodedIdToken.Claims + assert.NotEmpty(t, claims["at_hash"]) + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.AuthorizationCodeGrantIDTokenLifespan).UTC(), *idTokenExp, time.Minute) + }, + }, + { + description: "should pass", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + } + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + storedSession := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{Subject: "peter"}, + } + storedReq := fosite.NewAuthorizeRequest() + storedReq.Session = storedSession + storedReq.GrantedScope = fosite.Arguments{"openid"} + storedReq.Form.Set("nonce", "1111111111111111") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(2) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(storedReq, nil) + store.EXPECT().DeleteOpenIDConnectSession(gomock.Any(), "foobar").Return(nil) + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.GetExtra("id_token")) + idToken, _ := aresp.GetExtra("id_token").(string) + decodedIdToken, err := jwt.Parse(idToken, func(token *jwt.Token) (interface{}, error) { + return key.PublicKey, nil + }) + require.NoError(t, err) + claims := decodedIdToken.Claims + assert.NotEmpty(t, claims["at_hash"]) + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + internal.RequireEqualTime(t, time.Now().Add(time.Hour), *idTokenExp, time.Minute) + }, + }, + { + description: "should fail because stored request's session is missing subject claim", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + storedSession := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{Subject: ""}, + } + storedReq := fosite.NewAuthorizeRequest() + storedReq.Session = storedSession + storedReq.GrantedScope = fosite.Arguments{"openid"} + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + 
store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(storedReq, nil) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should fail because stored request is missing session", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + storedReq := fosite.NewAuthorizeRequest() + storedReq.Session = nil + storedReq.GrantScope("openid") + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(1) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(storedReq, nil) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should fail because storage returns error when deleting openid session", + setup: func(provider *internal.MockOpenIDConnectRequestStorageProvider, store *internal.MockOpenIDConnectRequestStorage, req *fosite.AccessRequest) { + req.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code"}, + } + req.GrantTypes = fosite.Arguments{"authorization_code"} + req.Form.Set("code", "foobar") + storedSession := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{Subject: "peter"}, + } + storedReq := fosite.NewAuthorizeRequest() + storedReq.Session = storedSession + storedReq.GrantedScope = fosite.Arguments{"openid"} + provider.EXPECT().OpenIDConnectRequestStorage().Return(store).Times(2) + store.EXPECT().GetOpenIDConnectSession(gomock.Any(), "foobar", req).Return(storedReq, nil) + store.EXPECT().DeleteOpenIDConnectSession(gomock.Any(), "foobar").Return(errors.New("delete openid session err")) + }, + expectErr: fosite.ErrServerError, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockOpenIDConnectRequestStorage(ctrl) + provider := internal.NewMockOpenIDConnectRequestStorageProvider(ctrl) + openIDTokenStrategyProvider := internal.NewMockOpenIDConnectTokenStrategyProvider(ctrl) + t.Cleanup(ctrl.Finish) + + session := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + } + aresp := fosite.NewAccessResponse() + areq := fosite.NewAccessRequest(session) + + j := &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + }, + } + openIDTokenStrategyProvider.EXPECT().OpenIDConnectTokenStrategy().Return(j).AnyTimes() + + h := &openid.ExplicitHandler{ + Storage: provider, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: openIDTokenStrategyProvider, + }, + Config: &fosite.Config{}, + } + + c.setup(provider, store, areq) + err := h.PopulateTokenEndpointResponse(context.Background(), areq, aresp) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + if c.check != nil { + c.check(t, aresp) + } + }) + } +} diff --git a/fosite/handler/openid/flow_hybrid.go b/fosite/handler/openid/flow_hybrid.go new file mode 100644 index 00000000000..778eae8a5bf --- /dev/null +++ b/fosite/handler/openid/flow_hybrid.go @@ -0,0 +1,168 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + "time" + + "github.com/ory/x/errorsx" + + 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type OpenIDConnectHybridHandler struct { + AuthorizeImplicitGrantHandler *oauth2.AuthorizeImplicitGrantHandler + AuthorizeExplicitGrantHandler *oauth2.AuthorizeExplicitGrantHandler + IDTokenHandleHelper *IDTokenHandleHelper + OpenIDConnectRequestValidator *OpenIDConnectRequestValidator + OpenIDConnectRequestStorage OpenIDConnectRequestStorageProvider + + Enigma *jwt.DefaultSigner + + Config interface { + fosite.IDTokenLifespanProvider + fosite.MinParameterEntropyProvider + fosite.ScopeStrategyProvider + } +} + +func (c *OpenIDConnectHybridHandler) HandleAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + if len(ar.GetResponseTypes()) < 2 { + return nil + } + + if !(ar.GetResponseTypes().Matches("token", "id_token", "code") || ar.GetResponseTypes().Matches("token", "code") || ar.GetResponseTypes().Matches("id_token", "code")) { + return nil + } + + ar.SetDefaultResponseMode(fosite.ResponseModeFragment) + + // Disabled because this is already handled at the authorize_request_handler + //if ar.GetResponseTypes().Matches("token") && !ar.GetClient().GetResponseTypes().Has("token") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use the token response type")) + //} else if ar.GetResponseTypes().Matches("code") && !ar.GetClient().GetResponseTypes().Has("code") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use the code response type")) + //} else if ar.GetResponseTypes().Matches("id_token") && !ar.GetClient().GetResponseTypes().Has("id_token") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use the id_token response type")) + //} + + // The nonce is actually not required for hybrid flows. It fails the OpenID Connect Conformity + // Test Module "oidcc-ensure-request-without-nonce-succeeds-for-code-flow" if enabled. + // + nonce := ar.GetRequestForm().Get("nonce") + + if len(nonce) == 0 && ar.GetResponseTypes().Has("id_token") { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Parameter 'nonce' must be set when requesting an ID Token using the OpenID Connect Hybrid Flow.")) + } + + if len(nonce) > 0 && len(nonce) < c.Config.GetMinParameterEntropy(ctx) { + return errorsx.WithStack(fosite.ErrInsufficientEntropy.WithHintf("Parameter 'nonce' is set but does not satisfy the minimum entropy of %d characters.", c.Config.GetMinParameterEntropy(ctx))) + } + + // This ensures that the 'redirect_uri' parameter is present for OpenID Connect 1.0 authorization requests as per: + // + // Authorization Code Flow - https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest + // Implicit Flow - https://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthRequest + // Hybrid Flow - https://openid.net/specs/openid-connect-core-1_0.html#HybridAuthRequest + // + // Note: as per the Hybrid Flow documentation the Hybrid Flow has the same requirements as the Authorization Code Flow. 
+ rawRedirectURI := ar.GetRequestForm().Get("redirect_uri") + if len(rawRedirectURI) == 0 { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("The 'redirect_uri' parameter is required when using OpenID Connect 1.0.")) + } + + sess, ok := ar.GetSession().(Session) + if !ok { + return errorsx.WithStack(ErrInvalidSession) + } + + if err := c.OpenIDConnectRequestValidator.ValidatePrompt(ctx, ar); err != nil { + return err + } + + client := ar.GetClient() + for _, scope := range ar.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + + claims := sess.IDTokenClaims() + if ar.GetResponseTypes().Has("code") { + if !ar.GetClient().GetGrantTypes().Has("authorization_code") { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client is not allowed to use authorization grant 'authorization_code'.")) + } + + code, signature, err := c.AuthorizeExplicitGrantHandler.Strategy.AuthorizeCodeStrategy().GenerateAuthorizeCode(ctx, ar) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + // This is not required because the auth code flow is being handled by oauth2/flow_authorize_code_token which in turn + // sets the proper access/refresh token lifetimes. + // + // if c.AuthorizeExplicitGrantHandler.RefreshTokenLifespan > -1 { + // ar.GetSession().SetExpiresAt(fosite.RefreshToken, time.Now().UTC().Add(c.AuthorizeExplicitGrantHandler.RefreshTokenLifespan).Round(time.Second)) + // } + + // This is required because we must limit the authorize code lifespan. + ar.GetSession().SetExpiresAt(fosite.AuthorizeCode, time.Now().UTC().Add(c.AuthorizeExplicitGrantHandler.Config.GetAuthorizeCodeLifespan(ctx)).Round(time.Second)) + if err := c.AuthorizeExplicitGrantHandler.Storage.AuthorizeCodeStorage().CreateAuthorizeCodeSession(ctx, signature, ar.Sanitize(c.AuthorizeExplicitGrantHandler.GetSanitationWhiteList(ctx))); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + resp.AddParameter("code", code) + ar.SetResponseTypeHandled("code") + + hash, err := c.IDTokenHandleHelper.ComputeHash(ctx, sess, resp.GetParameters().Get("code")) + if err != nil { + return err + } + claims.CodeHash = hash + + if ar.GetGrantedScopes().Has("openid") { + if err := c.OpenIDConnectRequestStorage.OpenIDConnectRequestStorage().CreateOpenIDConnectSession(ctx, resp.GetCode(), ar.Sanitize(oidcParameters)); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + } + + if ar.GetResponseTypes().Has("token") { + if !ar.GetClient().GetGrantTypes().Has("implicit") { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant 'implicit'.")) + } else if err := c.AuthorizeImplicitGrantHandler.IssueImplicitAccessToken(ctx, ar, resp); err != nil { + return errorsx.WithStack(err) + } + ar.SetResponseTypeHandled("token") + + hash, err := c.IDTokenHandleHelper.ComputeHash(ctx, sess, resp.GetParameters().Get("access_token")) + if err != nil { + return err + } + claims.AccessTokenHash = hash + } + + if _, ok := resp.GetParameters()["state"]; !ok { + resp.AddParameter("state", ar.GetState()) + } + + if !ar.GetGrantedScopes().Has("openid") || !ar.GetResponseTypes().Has("id_token") { + ar.SetResponseTypeHandled("id_token") + return 
nil + } + + // Hybrid flow uses implicit flow config for the id token's lifespan + idTokenLifespan := fosite.GetEffectiveLifespan(ar.GetClient(), fosite.GrantTypeImplicit, fosite.IDToken, c.Config.GetIDTokenLifespan(ctx)) + if err := c.IDTokenHandleHelper.IssueImplicitIDToken(ctx, idTokenLifespan, ar, resp); err != nil { + return errorsx.WithStack(err) + } + + ar.SetResponseTypeHandled("id_token") + return nil + // there is no need to check for https, because implicit flow does not require https + // https://tools.ietf.org/html/rfc6819#section-4.4.2 +} diff --git a/fosite/handler/openid/flow_hybrid_test.go b/fosite/handler/openid/flow_hybrid_test.go new file mode 100644 index 00000000000..f8bd40b19fb --- /dev/null +++ b/fosite/handler/openid/flow_hybrid_test.go @@ -0,0 +1,362 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "testing" + "time" + + cristaljwt "github.com/cristalhq/jwt/v4" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/internal/gen" + "github.com/ory/hydra/v2/fosite/storage" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +var hmacStrategy = &compose.CommonStrategyProvider{CoreStrategy: oauth2.NewHMACSHAStrategy( + &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("some-super-cool-secret-that-nobody-knows-nobody-knows")}}, + nil, +)} + +type mockOpenIDConnectTokenStrategyProvider struct { + strategy openid.DefaultStrategy +} + +func (p mockOpenIDConnectTokenStrategyProvider) OpenIDConnectTokenStrategy() openid.OpenIDConnectTokenStrategy { + return p.strategy +} + +func makeOpenIDConnectHybridHandler(minParameterEntropy int) openid.OpenIDConnectHybridHandler { + defaultStrategyProvider := mockOpenIDConnectTokenStrategyProvider{ + strategy: openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return gen.MustRSAKey(), nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: minParameterEntropy, + }, + }, + } + + j := &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: minParameterEntropy, + }, + } + + config := &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + MinParameterEntropy: minParameterEntropy, + AccessTokenLifespan: time.Hour, + AuthorizeCodeLifespan: time.Hour, + RefreshTokenLifespan: time.Hour, + } + return openid.OpenIDConnectHybridHandler{ + AuthorizeExplicitGrantHandler: &oauth2.AuthorizeExplicitGrantHandler{ + Strategy: hmacStrategy, + Storage: storage.NewMemoryStore(), + Config: config, + }, + AuthorizeImplicitGrantHandler: &oauth2.AuthorizeImplicitGrantHandler{ + Config: &fosite.Config{ + AccessTokenLifespan: time.Hour, + }, + Strategy: hmacStrategy, + Storage: storage.NewMemoryStore(), + }, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: defaultStrategyProvider, + 
}, + Config: config, + OpenIDConnectRequestValidator: openid.NewOpenIDConnectRequestValidator(j.Signer, config), + OpenIDConnectRequestStorage: storage.NewMemoryStore(), + } +} + +func TestHybrid_HandleAuthorizeEndpointRequest(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + aresp := fosite.NewAuthorizeResponse() + areq := fosite.NewAuthorizeRequest() + areq.Form = url.Values{"redirect_uri": {"https://foobar.com"}} + + for k, c := range []struct { + description string + setup func() openid.OpenIDConnectHybridHandler + check func() + expectErr error + }{ + { + description: "should not do anything because not a hybrid request", + setup: func() openid.OpenIDConnectHybridHandler { + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should not do anything because not a hybrid request", + setup: func() openid.OpenIDConnectHybridHandler { + areq.ResponseTypes = fosite.Arguments{"token", "id_token"} + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should fail because nonce set but too short", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form = url.Values{ + "redirect_uri": {"https://foobar.com"}, + "nonce": {"short"}, + } + areq.ResponseTypes = fosite.Arguments{"token", "code"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + areq.GrantedScope = fosite.Arguments{"openid"} + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + expectErr: fosite.ErrInsufficientEntropy, + }, + { + description: "should fail because nonce set but too short for non-default min entropy", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form = url.Values{ + "nonce": {"some-foobar-nonce-win"}, + "redirect_uri": {"https://foobar.com"}, + } + areq.ResponseTypes = fosite.Arguments{"token", "code"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + areq.GrantedScope = fosite.Arguments{"openid"} + return makeOpenIDConnectHybridHandler(42) + }, + expectErr: fosite.ErrInsufficientEntropy, + }, + { + description: "should fail because session not given", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form = url.Values{ + "nonce": {"long-enough"}, + "redirect_uri": {"https://foobar.com"}, + } + areq.ResponseTypes = fosite.Arguments{"token", "code"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + areq.GrantedScope = fosite.Arguments{"openid"} + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + expectErr: openid.ErrInvalidSession, + }, + { + description: "should fail because client missing response types", + setup: func() openid.OpenIDConnectHybridHandler { + areq.ResponseTypes = fosite.Arguments{"token", "code", "id_token"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + areq.Session = &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + Subject: "peter", + } + return 
makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should pass with exact one state parameter in response", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form = url.Values{ + "redirect_uri": {"https://foobar.com"}, + "nonce": {"long-enough"}, + "state": {""}, + } + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + check: func() { + params := aresp.GetParameters() + var stateParam []string + for k, v := range params { + if k == "state" { + stateParam = v + break + } + } + assert.Len(t, stateParam, 1) + }, + }, + { + description: "should pass because nonce was set with sufficient entropy", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form.Set("nonce", "some-foobar-nonce-win") + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should pass even if nonce was not set", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should pass because nonce was set with low entropy but also with low min entropy", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form.Set("nonce", "short") + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + } + return makeOpenIDConnectHybridHandler(4) + }, + }, + { + description: "should pass because AuthorizeCode's ExpiresAt is set, even if AuthorizeCodeLifespan is zero", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form.Set("nonce", "some-foobar-nonce-win") + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.True(t, !areq.Session.GetExpiresAt(fosite.AuthorizeCode).IsZero()) + }, + }, + { + description: "should pass", + setup: func() openid.OpenIDConnectHybridHandler { + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("id_token")) + assert.NotEmpty(t, aresp.GetParameters().Get("code")) + assert.NotEmpty(t, aresp.GetParameters().Get("access_token")) + internal.RequireEqualTime(t, time.Now().Add(time.Hour).UTC(), areq.GetSession().GetExpiresAt(fosite.AuthorizeCode), time.Second) + }, + }, + { + description: "should fail if redirect_uri is missing", + setup: func() openid.OpenIDConnectHybridHandler { + areq.Form.Del("redirect_uri") + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + expectErr: fosite.ErrInvalidRequest, + }, + { + description: "should pass with custom client lifespans", + setup: func() openid.OpenIDConnectHybridHandler { + aresp = fosite.NewAuthorizeResponse() + areq = fosite.NewAuthorizeRequest() + areq.Form.Set("nonce", "some-foobar-nonce-win") + 
areq.Form.Set("redirect_uri", "https://foobar.com") + areq.ResponseTypes = fosite.Arguments{"token", "code", "id_token"} + areq.Client = &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"authorization_code", "implicit"}, + ResponseTypes: fosite.Arguments{"token", "code", "id_token"}, + Scopes: []string{"openid"}, + }, + } + areq.GrantedScope = fosite.Arguments{"openid"} + areq.Session = &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + Subject: "peter", + } + areq.GetClient().(*fosite.DefaultClientWithCustomTokenLifespans).SetTokenLifespans(&internal.TestLifespans) + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("code")) + internal.RequireEqualTime(t, time.Now().Add(1*time.Hour).UTC(), areq.GetSession().GetExpiresAt(fosite.AuthorizeCode), time.Second) + + idToken := aresp.GetParameters().Get("id_token") + assert.NotEmpty(t, idToken) + assert.True(t, areq.GetSession().GetExpiresAt(fosite.IDToken).IsZero()) + jwt, err := cristaljwt.ParseNoVerify([]byte(idToken)) + require.NoError(t, err) + claims := &cristaljwt.RegisteredClaims{} + require.NoError(t, json.Unmarshal(jwt.Claims(), claims)) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.ImplicitGrantIDTokenLifespan), claims.ExpiresAt.Time, time.Minute) + + assert.NotEmpty(t, aresp.GetParameters().Get("access_token")) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.ImplicitGrantAccessTokenLifespan).UTC(), areq.GetSession().GetExpiresAt(fosite.AccessToken), time.Second) + }, + }, + { + description: "Default responseMode check", + setup: func() openid.OpenIDConnectHybridHandler { + return makeOpenIDConnectHybridHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("id_token")) + assert.NotEmpty(t, aresp.GetParameters().Get("code")) + assert.NotEmpty(t, aresp.GetParameters().Get("access_token")) + assert.Equal(t, fosite.ResponseModeFragment, areq.GetResponseMode()) + assert.WithinDuration(t, time.Now().Add(time.Hour).UTC(), areq.GetSession().GetExpiresAt(fosite.AuthorizeCode), 5*time.Second) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + h := c.setup() + err := h.HandleAuthorizeEndpointRequest(context.Background(), areq, aresp) + + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + + if c.check != nil { + c.check() + } + }) + } +} diff --git a/fosite/handler/openid/flow_implicit.go b/fosite/handler/openid/flow_implicit.go new file mode 100644 index 00000000000..42bb53289af --- /dev/null +++ b/fosite/handler/openid/flow_implicit.go @@ -0,0 +1,112 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + + "github.com/ory/x/errorsx" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type OpenIDConnectImplicitHandler struct { + *IDTokenHandleHelper + + AuthorizeImplicitGrantTypeHandler *oauth2.AuthorizeImplicitGrantHandler + OpenIDConnectRequestValidator *OpenIDConnectRequestValidator + RS256JWTStrategy *jwt.DefaultSigner + + Config interface { + fosite.IDTokenLifespanProvider + fosite.MinParameterEntropyProvider + fosite.ScopeStrategyProvider + } +} + 
+func (c *OpenIDConnectImplicitHandler) HandleAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + if !(ar.GetGrantedScopes().Has("openid") && (ar.GetResponseTypes().Has("token", "id_token") || ar.GetResponseTypes().ExactOne("id_token"))) { + return nil + } else if ar.GetResponseTypes().Has("code") { + // hybrid flow + return nil + } + + ar.SetDefaultResponseMode(fosite.ResponseModeFragment) + + if !ar.GetClient().GetGrantTypes().Has("implicit") { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant 'implicit'.")) + } + + // Disabled because this is already handled at the authorize_request_handler + //if ar.GetResponseTypes().ExactOne("id_token") && !ar.GetClient().GetResponseTypes().Has("id_token") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use response type id_token")) + //} else if ar.GetResponseTypes().Matches("token", "id_token") && !ar.GetClient().GetResponseTypes().Has("token", "id_token") { + // return errorsx.WithStack(fosite.ErrInvalidGrant.WithDebug("The client is not allowed to use response type token and id_token")) + //} + + // This ensures that the 'redirect_uri' parameter is present for OpenID Connect 1.0 authorization requests as per: + // + // Authorization Code Flow - https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest + // Implicit Flow - https://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthRequest + // Hybrid Flow - https://openid.net/specs/openid-connect-core-1_0.html#HybridAuthRequest + // + // Note: as per the Hybrid Flow documentation the Hybrid Flow has the same requirements as the Authorization Code Flow. + rawRedirectURI := ar.GetRequestForm().Get("redirect_uri") + if len(rawRedirectURI) == 0 { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("The 'redirect_uri' parameter is required when using OpenID Connect 1.0.")) + } + + if nonce := ar.GetRequestForm().Get("nonce"); len(nonce) == 0 { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Parameter 'nonce' must be set when using the OpenID Connect Implicit Flow.")) + } else if len(nonce) < c.Config.GetMinParameterEntropy(ctx) { + return errorsx.WithStack(fosite.ErrInsufficientEntropy.WithHintf("Parameter 'nonce' is set but does not satisfy the minimum entropy of %d characters.", c.Config.GetMinParameterEntropy(ctx))) + } + + client := ar.GetClient() + for _, scope := range ar.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + + sess, ok := ar.GetSession().(Session) + if !ok { + return errorsx.WithStack(ErrInvalidSession) + } + + if err := c.OpenIDConnectRequestValidator.ValidatePrompt(ctx, ar); err != nil { + return err + } + + claims := sess.IDTokenClaims() + if ar.GetResponseTypes().Has("token") { + if err := c.AuthorizeImplicitGrantTypeHandler.IssueImplicitAccessToken(ctx, ar, resp); err != nil { + return errorsx.WithStack(err) + } + + ar.SetResponseTypeHandled("token") + hash, err := c.ComputeHash(ctx, sess, resp.GetParameters().Get("access_token")) + if err != nil { + return err + } + + claims.AccessTokenHash = hash + } else { + resp.AddParameter("state", ar.GetState()) + } + + idTokenLifespan := fosite.GetEffectiveLifespan(ar.GetClient(), fosite.GrantTypeImplicit, fosite.IDToken, 
c.Config.GetIDTokenLifespan(ctx)) + if err := c.IssueImplicitIDToken(ctx, idTokenLifespan, ar, resp); err != nil { + return errorsx.WithStack(err) + } + + // there is no need to check for https, because implicit flow does not require https + // https://tools.ietf.org/html/rfc6819#section-4.4.2 + + ar.SetResponseTypeHandled("id_token") + return nil +} diff --git a/fosite/handler/openid/flow_implicit_test.go b/fosite/handler/openid/flow_implicit_test.go new file mode 100644 index 00000000000..2eb812ac99d --- /dev/null +++ b/fosite/handler/openid/flow_implicit_test.go @@ -0,0 +1,320 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "net/url" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/storage" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func makeOpenIDConnectImplicitHandler(minParameterEntropy int) openid.OpenIDConnectImplicitHandler { + config := &fosite.Config{ + MinParameterEntropy: minParameterEntropy, + AccessTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + } + + defaultStrategyProvider := mockOpenIDConnectTokenStrategyProvider{ + strategy: openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return gen.MustRSAKey(), nil + }, + }, + Config: config, + }, + } + + j := &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: config, + } + + return openid.OpenIDConnectImplicitHandler{ + AuthorizeImplicitGrantTypeHandler: &oauth2.AuthorizeImplicitGrantHandler{ + Config: config, + Strategy: hmacStrategy, + Storage: storage.NewMemoryStore(), + }, + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: defaultStrategyProvider, + }, + OpenIDConnectRequestValidator: openid.NewOpenIDConnectRequestValidator(j.Signer, config), + Config: config, + } +} + +func TestImplicit_HandleAuthorizeEndpointRequest(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + aresp := fosite.NewAuthorizeResponse() + areq := fosite.NewAuthorizeRequest() + areq.Form = url.Values{ + "redirect_uri": {"https://foobar.com"}, + } + areq.Session = new(fosite.DefaultSession) + + for k, c := range []struct { + description string + setup func() openid.OpenIDConnectImplicitHandler + expectErr error + check func() + }{ + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = fosite.Arguments{"id_token"} + areq.State = "foostate" + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = 
fosite.Arguments{"token", "id_token"} + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = fosite.Arguments{} + areq.GrantedScope = fosite.Arguments{"openid"} + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = fosite.Arguments{"token", "id_token"} + areq.RequestedScope = fosite.Arguments{"openid"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{}, + ResponseTypes: fosite.Arguments{}, + Scopes: []string{"openid", "fosite"}, + } + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + expectErr: fosite.ErrInvalidGrant, + }, + // Disabled because this is already handled at the authorize_request_handler + //{ + // description: "should not do anything because request requirements are not met", + // setup: func() OpenIDConnectImplicitHandler { + // areq.ResponseTypes = fosite.Arguments{"token", "id_token"} + // areq.RequestedScope = fosite.Arguments{"openid"} + // areq.Client = &fosite.DefaultClient{ + // GrantTypes: fosite.Arguments{"implicit"}, + // ResponseTypes: fosite.Arguments{}, + // RequestedScope: []string{"openid", "fosite"}, + // } + // return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + // }, + // expectErr: fosite.ErrInvalidGrant, + //}, + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = fosite.Arguments{"id_token"} + areq.RequestedScope = fosite.Arguments{"openid"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + // ResponseTypes: fosite.Arguments{"token", "id_token"}, + Scopes: []string{"openid", "fosite"}, + } + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + expectErr: fosite.ErrInvalidRequest, + }, + { + description: "should not do anything because request requirements are not met", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.Form = url.Values{ + "nonce": {"short"}, + "redirect_uri": {"https://foobar.com"}, + } + areq.ResponseTypes = fosite.Arguments{"id_token"} + areq.RequestedScope = fosite.Arguments{"openid"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token", "id_token"}, + Scopes: []string{"openid", "fosite"}, + } + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + expectErr: fosite.ErrInsufficientEntropy, + }, + { + description: "should fail because session not set", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.Form = url.Values{ + "nonce": {"long-enough"}, + "redirect_uri": {"https://foobar.com"}, + } + areq.ResponseTypes = fosite.Arguments{"id_token"} + areq.RequestedScope = fosite.Arguments{"openid"} + areq.Client = &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token", "id_token"}, + Scopes: []string{"openid", "fosite"}, + } + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + expectErr: openid.ErrInvalidSession, + }, + { + description: "should pass because nonce set", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.Session = 
&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + Subject: "peter", + } + areq.Form.Add("nonce", "some-random-foo-nonce-wow") + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + }, + { + description: "should pass", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = fosite.Arguments{"id_token"} + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("state")) + assert.Empty(t, aresp.GetParameters().Get("access_token")) + + idToken := aresp.GetParameters().Get("id_token") + assert.NotEmpty(t, idToken) + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + internal.RequireEqualTime(t, time.Now().Add(time.Hour), *idTokenExp, time.Minute) + }, + }, + { + description: "should pass with nondefault id token lifespan", + setup: func() openid.OpenIDConnectImplicitHandler { + aresp = fosite.NewAuthorizeResponse() + areq.Session = &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + Subject: "peter", + } + areq.ResponseTypes = fosite.Arguments{"id_token"} + areq.Client = &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"implicit"}, + ResponseTypes: fosite.Arguments{"token", "id_token"}, + Scopes: []string{"openid", "fosite"}, + }, + } + areq.Client.(*fosite.DefaultClientWithCustomTokenLifespans).SetTokenLifespans(&internal.TestLifespans) + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + check: func() { + idToken := aresp.GetParameters().Get("id_token") + assert.NotEmpty(t, idToken) + assert.NotEmpty(t, aresp.GetParameters().Get("state")) + assert.Empty(t, aresp.GetParameters().Get("access_token")) + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.ImplicitGrantIDTokenLifespan), *idTokenExp, time.Minute) + }, + }, + { + description: "should pass", + setup: func() openid.OpenIDConnectImplicitHandler { + aresp = fosite.NewAuthorizeResponse() + areq.ResponseTypes = fosite.Arguments{"token", "id_token"} + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("state")) + + idToken := aresp.GetParameters().Get("id_token") + assert.NotEmpty(t, idToken) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.ImplicitGrantIDTokenLifespan).UTC(), *internal.ExtractJwtExpClaim(t, idToken), time.Minute) + + assert.NotEmpty(t, aresp.GetParameters().Get("access_token")) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.ImplicitGrantAccessTokenLifespan).UTC(), areq.Session.GetExpiresAt(fosite.AccessToken), time.Minute) + }, + }, + { + description: "should pass", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.ResponseTypes = fosite.Arguments{"id_token", "token"} + areq.RequestedScope = fosite.Arguments{"fosite", "openid"} + return makeOpenIDConnectImplicitHandler(fosite.MinParameterEntropy) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("id_token")) + assert.NotEmpty(t, aresp.GetParameters().Get("state")) + assert.NotEmpty(t, aresp.GetParameters().Get("access_token")) + assert.Equal(t, fosite.ResponseModeFragment, areq.GetResponseMode()) + }, + }, + { + description: "should pass with low min entropy", + setup: func() openid.OpenIDConnectImplicitHandler 
{ + areq.Form.Set("nonce", "short") + return makeOpenIDConnectImplicitHandler(4) + }, + check: func() { + assert.NotEmpty(t, aresp.GetParameters().Get("id_token")) + assert.NotEmpty(t, aresp.GetParameters().Get("state")) + assert.NotEmpty(t, aresp.GetParameters().Get("access_token")) + }, + }, + { + description: "should fail without redirect_uri", + setup: func() openid.OpenIDConnectImplicitHandler { + areq.Form.Del("redirect_uri") + return makeOpenIDConnectImplicitHandler(4) + }, + expectErr: fosite.ErrInvalidRequest, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + h := c.setup() + err := h.HandleAuthorizeEndpointRequest(context.Background(), areq, aresp) + + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + assert.NoError(t, err) + if c.check != nil { + c.check() + } + } + }) + } +} diff --git a/fosite/handler/openid/flow_refresh_token.go b/fosite/handler/openid/flow_refresh_token.go new file mode 100644 index 00000000000..18ad4732fa2 --- /dev/null +++ b/fosite/handler/openid/flow_refresh_token.go @@ -0,0 +1,110 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + "time" + + "github.com/google/uuid" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" +) + +type OpenIDConnectRefreshHandler struct { + *IDTokenHandleHelper + + Config interface { + fosite.IDTokenLifespanProvider + } +} + +func (c *OpenIDConnectRefreshHandler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !request.GetGrantedScopes().Has("openid") { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !request.GetClient().GetGrantTypes().Has("refresh_token") { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant \"refresh_token\".")) + } + + // Refresh tokens can only be issued by an authorize_code which in turn disables the need to check if the id_token + // response type is enabled by the client. + // + // if !request.GetClient().GetResponseTypes().Has("id_token") { + // return errorsx.WithStack(fosite.ErrUnknownRequest.WithDebug("The client is not allowed to use response type id_token")) + // } + + sess, ok := request.GetSession().(Session) + if !ok { + return errors.New("Failed to generate id token because session must be of type fosite/handler/openid.Session") + } + + // We need to reset the expires at value as this would be the previous expiry. + sess.IDTokenClaims().ExpiresAt = time.Time{} + + // These will be recomputed in PopulateTokenEndpointResponse + sess.IDTokenClaims().JTI = "" + sess.IDTokenClaims().AccessTokenHash = "" + + // We are not issuing a code so there is no need for this field. 
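+	// The 'c_hash' claim only applies to flows that return an authorization code alongside the ID Token
+	// (see the hybrid handler), so it is cleared before the ID Token is re-issued on refresh.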
+ sess.IDTokenClaims().CodeHash = "" + + return nil +} + +func (c *OpenIDConnectRefreshHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !requester.GetGrantedScopes().Has("openid") { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + if !requester.GetClient().GetGrantTypes().Has("refresh_token") { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client is not allowed to use the authorization grant \"refresh_token\".")) + } + + // Disabled because this is already handled at the authorize_request_handler + // if !requester.GetClient().GetResponseTypes().Has("id_token") { + // return errorsx.WithStack(fosite.ErrUnknownRequest.WithDebug("The client is not allowed to use response type id_token")) + // } + + sess, ok := requester.GetSession().(Session) + if !ok { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because session must be of type fosite/handler/openid.Session.")) + } + + claims := sess.IDTokenClaims() + if claims.Subject == "" { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because subject is an empty string.")) + } + + claims.AccessTokenHash = c.GetAccessTokenHash(ctx, requester, responder) + claims.JTI = uuid.New().String() + claims.CodeHash = "" + claims.IssuedAt = time.Now().Truncate(time.Second) + + idTokenLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), fosite.GrantTypeRefreshToken, fosite.IDToken, c.Config.GetIDTokenLifespan(ctx)) + return c.IssueExplicitIDToken(ctx, idTokenLifespan, requester, responder) +} + +func (c *OpenIDConnectRefreshHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *OpenIDConnectRefreshHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. 
+ // Value MUST be set to "refresh_token" + return requester.GetGrantTypes().ExactOne("refresh_token") +} diff --git a/fosite/handler/openid/flow_refresh_token_test.go b/fosite/handler/openid/flow_refresh_token_test.go new file mode 100644 index 00000000000..bbf4bbe0e5b --- /dev/null +++ b/fosite/handler/openid/flow_refresh_token_test.go @@ -0,0 +1,251 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestOpenIDConnectRefreshHandler_HandleTokenEndpointRequest(t *testing.T) { + h := &openid.OpenIDConnectRefreshHandler{Config: &fosite.Config{}} + for _, c := range []struct { + areq *fosite.AccessRequest + expectedErr error + description string + }{ + { + description: "should not pass because grant_type is wrong", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"foo"}, + }, + expectedErr: fosite.ErrUnknownRequest, + }, + { + description: "should not pass because grant_type is right but scope is missing", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"something"}, + }, + }, + expectedErr: fosite.ErrUnknownRequest, + }, + { + description: "should not pass because client may not execute this grant type", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"openid"}, + Client: &fosite.DefaultClient{}, + }, + }, + expectedErr: fosite.ErrUnauthorizedClient, + }, + { + description: "should pass", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"openid"}, + Client: &fosite.DefaultClient{ + GrantTypes: []string{"refresh_token"}, + // ResponseTypes: []string{"id_token"}, + }, + Session: &openid.DefaultSession{}, + }, + }, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + err := h.HandleTokenEndpointRequest(context.Background(), c.areq) + if c.expectedErr != nil { + require.EqualError(t, err, c.expectedErr.Error(), "%v", err) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestOpenIDConnectRefreshHandler_PopulateTokenEndpointResponse(t *testing.T) { + defaultStrategyProvider := mockOpenIDConnectTokenStrategyProvider{ + strategy: openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(ctx context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + }, + }, + } + + h := &openid.OpenIDConnectRefreshHandler{ + IDTokenHandleHelper: &openid.IDTokenHandleHelper{ + IDTokenStrategy: defaultStrategyProvider, + }, + Config: &fosite.Config{}, + } + for _, c := range []struct { + areq *fosite.AccessRequest + expectedErr error + check func(t *testing.T, aresp *fosite.AccessResponse) + description string + }{ + { + description: "should not pass because grant_type is wrong", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"foo"}, + }, + expectedErr: fosite.ErrUnknownRequest, + }, + { + description: "should not pass because grant_type is right but scope is missing", + areq: &fosite.AccessRequest{ + GrantTypes: 
[]string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"something"}, + }, + }, + expectedErr: fosite.ErrUnknownRequest, + }, + // Disabled because this is already handled at the authorize_request_handler + //{ + // description: "should not pass because client may not ask for id_token", + // areq: &fosite.AccessRequest{ + // GrantTypes: []string{"refresh_token"}, + // Request: fosite.Request{ + // GrantedScope: []string{"openid"}, + // Client: &fosite.DefaultClient{ + // GrantTypes: []string{"refresh_token"}, + // }, + // }, + // }, + // expectedErr: fosite.ErrUnknownRequest, + //}, + { + description: "should pass", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"openid"}, + Client: &fosite.DefaultClient{ + GrantTypes: []string{"refresh_token"}, + // ResponseTypes: []string{"id_token"}, + }, + Session: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + }, + }, + }, + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.GetExtra("id_token")) + idToken, _ := aresp.GetExtra("id_token").(string) + decodedIdToken, err := jwt.Parse(idToken, func(token *jwt.Token) (interface{}, error) { + return key.PublicKey, nil + }) + require.NoError(t, err) + claims := decodedIdToken.Claims + assert.NotEmpty(t, claims["at_hash"]) + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + require.NotEmpty(t, idTokenExp) + internal.RequireEqualTime(t, time.Now().Add(time.Hour).UTC(), *idTokenExp, time.Minute) + }, + }, + { + description: "should pass", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"openid"}, + Client: &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + GrantTypes: []string{"refresh_token"}, + // ResponseTypes: []string{"id_token"}, + }, + TokenLifespans: &internal.TestLifespans, + }, + Session: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + }, + }, + }, + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.GetExtra("id_token")) + idToken, _ := aresp.GetExtra("id_token").(string) + decodedIdToken, err := jwt.Parse(idToken, func(token *jwt.Token) (interface{}, error) { + return key.PublicKey, nil + }) + require.NoError(t, err) + claims := decodedIdToken.Claims + assert.NotEmpty(t, claims["at_hash"]) + idTokenExp := internal.ExtractJwtExpClaim(t, idToken) + require.NotEmpty(t, idTokenExp) + internal.RequireEqualTime(t, time.Now().Add(*internal.TestLifespans.RefreshTokenGrantIDTokenLifespan).UTC(), *idTokenExp, time.Minute) + }, + }, + { + description: "should fail because missing subject claim", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"openid"}, + Client: &fosite.DefaultClient{ + GrantTypes: []string{"refresh_token"}, + // ResponseTypes: []string{"id_token"}, + }, + Session: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{}, + }, + }, + }, + expectedErr: fosite.ErrServerError, + }, + { + description: "should fail because missing session", + areq: &fosite.AccessRequest{ + GrantTypes: []string{"refresh_token"}, + Request: fosite.Request{ + GrantedScope: []string{"openid"}, + Client: &fosite.DefaultClient{ + GrantTypes: []string{"refresh_token"}, + }, + }, + }, + expectedErr: fosite.ErrServerError, + }, + } { + 
t.Run("case="+c.description, func(t *testing.T) { + aresp := fosite.NewAccessResponse() + err := h.PopulateTokenEndpointResponse(context.Background(), c.areq, aresp) + if c.expectedErr != nil { + require.EqualError(t, err, c.expectedErr.Error(), "%v", err) + } else { + require.NoError(t, err) + } + + if c.check != nil { + c.check(t, aresp) + } + }) + } +} diff --git a/fosite/handler/openid/helper.go b/fosite/handler/openid/helper.go new file mode 100644 index 00000000000..db92ac6e227 --- /dev/null +++ b/fosite/handler/openid/helper.go @@ -0,0 +1,97 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "bytes" + "context" + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "strconv" + "time" + + "github.com/ory/hydra/v2/fosite" +) + +type IDTokenHandleHelper struct { + IDTokenStrategy OpenIDConnectTokenStrategyProvider +} + +func (i *IDTokenHandleHelper) GetAccessTokenHash(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) string { + token := responder.GetAccessToken() + // The session should always be a openid.Session but best to safely cast + if session, ok := requester.GetSession().(Session); ok { + val, err := i.ComputeHash(ctx, session, token) + if err != nil { + // this should never happen + panic(err) + } + + return val + } + + buffer := bytes.NewBufferString(token) + hash := sha256.New() + // sha256.digest.Write() always returns nil for err, the panic should never happen + _, err := hash.Write(buffer.Bytes()) + if err != nil { + panic(err) + } + hashBuf := bytes.NewBuffer(hash.Sum([]byte{})) + + return base64.RawURLEncoding.EncodeToString(hashBuf.Bytes()[:hashBuf.Len()/2]) +} + +func (i *IDTokenHandleHelper) generateIDToken(ctx context.Context, lifespan time.Duration, fosr fosite.Requester) (token string, err error) { + token, err = i.IDTokenStrategy.OpenIDConnectTokenStrategy().GenerateIDToken(ctx, lifespan, fosr) + if err != nil { + return "", err + } + + return token, nil +} + +func (i *IDTokenHandleHelper) IssueImplicitIDToken(ctx context.Context, lifespan time.Duration, ar fosite.Requester, resp fosite.AuthorizeResponder) error { + token, err := i.generateIDToken(ctx, lifespan, ar) + if err != nil { + return err + } + resp.AddParameter("id_token", token) + return nil +} + +func (i *IDTokenHandleHelper) IssueExplicitIDToken(ctx context.Context, lifespan time.Duration, ar fosite.Requester, resp fosite.AccessResponder) error { + token, err := i.generateIDToken(ctx, lifespan, ar) + if err != nil { + return err + } + + resp.SetExtra("id_token", token) + return nil +} + +// ComputeHash computes the hash using the alg defined in the id_token header +func (i *IDTokenHandleHelper) ComputeHash(ctx context.Context, sess Session, token string) (string, error) { + var err error + hash := sha256.New() + if alg, ok := sess.IDTokenHeaders().Get("alg").(string); ok && len(alg) > 2 { + if hashSize, err := strconv.Atoi(alg[2:]); err == nil { + if hashSize == 384 { + hash = sha512.New384() + } else if hashSize == 512 { + hash = sha512.New() + } + } + } + + buffer := bytes.NewBufferString(token) + _, err = hash.Write(buffer.Bytes()) + if err != nil { + return "", err + } + hashBuf := bytes.NewBuffer(hash.Sum([]byte{})) + + return base64.RawURLEncoding.EncodeToString(hashBuf.Bytes()[:hashBuf.Len()/2]), nil +} diff --git a/fosite/handler/openid/helper_test.go b/fosite/handler/openid/helper_test.go new file mode 100644 index 00000000000..0b3366bb9b7 --- /dev/null +++ 
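// Illustrative sketch: the at_hash computation performed by IDTokenHandleHelper.ComputeHash and
// GetAccessTokenHash above, reduced to a standalone program. For SHA-256 (the default when the
// id_token "alg" header is absent or unparseable) the access token is hashed, the left-most half
// of the digest is kept, and the result is base64url-encoded without padding. The token below is
// the one used in TestGetAccessTokenHash, so this should print the expected value
// "Zfn_XBitThuDJiETU3OALQ".
package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

func main() {
	accessToken := "7a35f818-9164-48cb-8c8f-e1217f44228431c41102-d410-4ed5-9276-07ba53dfdcd8"
	sum := sha256.Sum256([]byte(accessToken))
	// Keep the left-most 128 bits of the 256-bit digest, then base64url-encode without padding.
	fmt.Println(base64.RawURLEncoding.EncodeToString(sum[:len(sum)/2]))
}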
b/fosite/handler/openid/helper_test.go @@ -0,0 +1,198 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "net/url" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +var stratProvider = mockOpenIDConnectTokenStrategyProvider{ + strategy: openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return gen.MustRSAKey(), nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + }, + }, +} + +var fooErr = errors.New("foo") + +func TestGenerateIDToken(t *testing.T) { + ctrl := gomock.NewController(t) + chgen := internal.NewMockOpenIDConnectTokenStrategy(ctrl) + chgenp := internal.NewMockOpenIDConnectTokenStrategyProvider(ctrl) + t.Cleanup(ctrl.Finish) + + ar := fosite.NewAccessRequest(nil) + sess := &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + } + h := &openid.IDTokenHandleHelper{IDTokenStrategy: chgenp} + + for k, c := range []struct { + description string + setup func() + expectErr error + }{ + { + description: "should fail because generator failed", + setup: func() { + ar.Form.Set("nonce", "11111111111111111111111111111111111") + ar.SetSession(sess) + chgenp.EXPECT().OpenIDConnectTokenStrategy().Return(chgen).Times(1) + chgen.EXPECT().GenerateIDToken(gomock.Any(), time.Duration(0), ar).Return("", fooErr) + }, + expectErr: fooErr, + }, + { + description: "should pass", + setup: func() { + chgenp.EXPECT().OpenIDConnectTokenStrategy().Return(chgen).Times(1) + chgen.EXPECT().GenerateIDToken(gomock.Any(), time.Duration(0), ar).AnyTimes().Return("asdf", nil) + }, + }, + } { + c.setup() + token, err := openid.CallGenerateIDToken(context.Background(), time.Duration(0), ar, h) + assert.True(t, err == c.expectErr, "(%d) %s\n%s\n%s", k, c.description, err, c.expectErr) + if err == nil { + assert.NotEmpty(t, token, "(%d) %s", k, c.description) + } + t.Logf("Passed test case %d", k) + } +} + +func TestIssueExplicitToken(t *testing.T) { + ctrl := gomock.NewController(t) + resp := internal.NewMockAccessResponder(ctrl) + t.Cleanup(ctrl.Finish) + + ar := fosite.NewAuthorizeRequest() + ar.Form = url.Values{"nonce": {"111111111111"}} + ar.SetSession(&openid.DefaultSession{Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, Headers: &jwt.Headers{}}) + + resp.EXPECT().SetExtra("id_token", gomock.Any()) + h := &openid.IDTokenHandleHelper{IDTokenStrategy: stratProvider} + err := h.IssueExplicitIDToken(context.Background(), time.Duration(0), ar, resp) + assert.NoError(t, err) +} + +func TestIssueImplicitToken(t *testing.T) { + ctrl := gomock.NewController(t) + resp := internal.NewMockAuthorizeResponder(ctrl) + t.Cleanup(ctrl.Finish) + + ar := fosite.NewAuthorizeRequest() + ar.Form = url.Values{"nonce": {"111111111111"}} + ar.SetSession(&openid.DefaultSession{Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, Headers: &jwt.Headers{}}) + + resp.EXPECT().AddParameter("id_token", gomock.Any()) + h := &openid.IDTokenHandleHelper{IDTokenStrategy: stratProvider} + err := 
h.IssueImplicitIDToken(context.Background(), time.Duration(0), ar, resp) + assert.NoError(t, err) +} + +func TestGetAccessTokenHash(t *testing.T) { + ctrl := gomock.NewController(t) + req := internal.NewMockAccessRequester(ctrl) + resp := internal.NewMockAccessResponder(ctrl) + + t.Cleanup(ctrl.Finish) + + req.EXPECT().GetSession().Return(nil) + resp.EXPECT().GetAccessToken().Return("7a35f818-9164-48cb-8c8f-e1217f44228431c41102-d410-4ed5-9276-07ba53dfdcd8") + + h := &openid.IDTokenHandleHelper{IDTokenStrategy: stratProvider} + + hash := h.GetAccessTokenHash(context.Background(), req, resp) + assert.Equal(t, "Zfn_XBitThuDJiETU3OALQ", hash) +} + +func TestGetAccessTokenHashWithDifferentKeyLength(t *testing.T) { + ctrl := gomock.NewController(t) + req := internal.NewMockAccessRequester(ctrl) + resp := internal.NewMockAccessResponder(ctrl) + + t.Cleanup(ctrl.Finish) + + headers := &jwt.Headers{ + Extra: map[string]interface{}{ + "alg": "RS384", + }, + } + req.EXPECT().GetSession().Return(&openid.DefaultSession{Headers: headers}) + resp.EXPECT().GetAccessToken().Return("7a35f818-9164-48cb-8c8f-e1217f44228431c41102-d410-4ed5-9276-07ba53dfdcd8") + + h := &openid.IDTokenHandleHelper{IDTokenStrategy: stratProvider} + + hash := h.GetAccessTokenHash(context.Background(), req, resp) + assert.Equal(t, "VNX38yiOyeqBPheW5jDsWQKa6IjJzK66", hash) +} + +func TestGetAccessTokenHashWithBadAlg(t *testing.T) { + ctrl := gomock.NewController(t) + req := internal.NewMockAccessRequester(ctrl) + resp := internal.NewMockAccessResponder(ctrl) + + t.Cleanup(ctrl.Finish) + + headers := &jwt.Headers{ + Extra: map[string]interface{}{ + "alg": "R", + }, + } + req.EXPECT().GetSession().Return(&openid.DefaultSession{Headers: headers}) + resp.EXPECT().GetAccessToken().Return("7a35f818-9164-48cb-8c8f-e1217f44228431c41102-d410-4ed5-9276-07ba53dfdcd8") + + h := &openid.IDTokenHandleHelper{IDTokenStrategy: stratProvider} + + hash := h.GetAccessTokenHash(context.Background(), req, resp) + assert.Equal(t, "Zfn_XBitThuDJiETU3OALQ", hash) +} + +func TestGetAccessTokenHashWithMissingKeyLength(t *testing.T) { + ctrl := gomock.NewController(t) + req := internal.NewMockAccessRequester(ctrl) + resp := internal.NewMockAccessResponder(ctrl) + + t.Cleanup(ctrl.Finish) + + headers := &jwt.Headers{ + Extra: map[string]interface{}{ + "alg": "RS", + }, + } + req.EXPECT().GetSession().Return(&openid.DefaultSession{Headers: headers}) + resp.EXPECT().GetAccessToken().Return("7a35f818-9164-48cb-8c8f-e1217f44228431c41102-d410-4ed5-9276-07ba53dfdcd8") + + h := &openid.IDTokenHandleHelper{IDTokenStrategy: stratProvider} + + hash := h.GetAccessTokenHash(context.Background(), req, resp) + assert.Equal(t, "Zfn_XBitThuDJiETU3OALQ", hash) +} diff --git a/fosite/handler/openid/storage.go b/fosite/handler/openid/storage.go new file mode 100644 index 00000000000..8bfd8bfe642 --- /dev/null +++ b/fosite/handler/openid/storage.go @@ -0,0 +1,30 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +var ErrNoSessionFound = fosite.ErrNotFound + +type OpenIDConnectRequestStorage interface { + // CreateOpenIDConnectSession creates an open id connect session + // for a given authorize code. This is relevant for explicit open id connect flow. 
+ CreateOpenIDConnectSession(ctx context.Context, authorizeCode string, requester fosite.Requester) error + + // GetOpenIDConnectSession returns error + // - nil if a session was found, + // - ErrNoSessionFound if no session was found + // - or an arbitrary error if an error occurred. + GetOpenIDConnectSession(ctx context.Context, authorizeCode string, requester fosite.Requester) (fosite.Requester, error) + + // DeleteOpenIDConnectSession removes an open id connect session from the store. + DeleteOpenIDConnectSession(ctx context.Context, authorizeCode string) error +} +type OpenIDConnectRequestStorageProvider interface { + OpenIDConnectRequestStorage() OpenIDConnectRequestStorage +} diff --git a/fosite/handler/openid/strategy.go b/fosite/handler/openid/strategy.go new file mode 100644 index 00000000000..c0216129f20 --- /dev/null +++ b/fosite/handler/openid/strategy.go @@ -0,0 +1,19 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" +) + +type OpenIDConnectTokenStrategy interface { + GenerateIDToken(ctx context.Context, lifespan time.Duration, requester fosite.Requester) (token string, err error) +} + +type OpenIDConnectTokenStrategyProvider interface { + OpenIDConnectTokenStrategy() OpenIDConnectTokenStrategy +} diff --git a/fosite/handler/openid/strategy_jwt.go b/fosite/handler/openid/strategy_jwt.go new file mode 100644 index 00000000000..38b7e9fa898 --- /dev/null +++ b/fosite/handler/openid/strategy_jwt.go @@ -0,0 +1,233 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + "strconv" + "time" + + "github.com/ory/x/errorsx" + + "github.com/mohae/deepcopy" + "github.com/pkg/errors" + + "github.com/ory/go-convenience/stringslice" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +const defaultExpiryTime = time.Hour + +type Session interface { + // IDTokenClaims returns a pointer to claims which will be modified in-place by handlers. + // Session should store this pointer and return always the same pointer. + IDTokenClaims() *jwt.IDTokenClaims + // IDTokenHeaders returns a pointer to header values which will be modified in-place by handlers. + // Session should store this pointer and return always the same pointer. 
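// Illustrative sketch: a minimal in-memory implementation of the OpenIDConnectRequestStorage
// interface defined above, assuming a single process and no session expiry. It only demonstrates
// the contract (including returning ErrNoSessionFound for unknown codes); it is not the store
// Hydra actually uses.
package main

import (
	"context"
	"fmt"
	"sync"

	"github.com/ory/hydra/v2/fosite"
	"github.com/ory/hydra/v2/fosite/handler/openid"
)

type memoryOIDCStore struct {
	mu       sync.Mutex
	sessions map[string]fosite.Requester
}

var _ openid.OpenIDConnectRequestStorage = (*memoryOIDCStore)(nil)

func newMemoryOIDCStore() *memoryOIDCStore {
	return &memoryOIDCStore{sessions: map[string]fosite.Requester{}}
}

func (s *memoryOIDCStore) CreateOpenIDConnectSession(_ context.Context, authorizeCode string, requester fosite.Requester) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.sessions[authorizeCode] = requester
	return nil
}

func (s *memoryOIDCStore) GetOpenIDConnectSession(_ context.Context, authorizeCode string, _ fosite.Requester) (fosite.Requester, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	req, ok := s.sessions[authorizeCode]
	if !ok {
		return nil, openid.ErrNoSessionFound
	}
	return req, nil
}

func (s *memoryOIDCStore) DeleteOpenIDConnectSession(_ context.Context, authorizeCode string) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	delete(s.sessions, authorizeCode)
	return nil
}

func main() {
	store := newMemoryOIDCStore()
	_ = store.CreateOpenIDConnectSession(context.Background(), "some-authorize-code", fosite.NewRequest())
	req, err := store.GetOpenIDConnectSession(context.Background(), "some-authorize-code", nil)
	fmt.Println(req != nil, err) // true <nil>
}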
+ IDTokenHeaders() *jwt.Headers + + fosite.Session +} + +// IDTokenSession is a session container for the id token +type DefaultSession struct { + Claims *jwt.IDTokenClaims `json:"id_token_claims"` + Headers *jwt.Headers `json:"headers"` + ExpiresAt map[fosite.TokenType]time.Time `json:"expires_at"` + Username string `json:"username,omitempty"` + Subject string `json:"subject,omitempty"` +} + +func NewDefaultSession() *DefaultSession { + return &DefaultSession{ + Claims: &jwt.IDTokenClaims{ + RequestedAt: time.Now().UTC(), + }, + Headers: &jwt.Headers{}, + } +} + +func (s *DefaultSession) Clone() fosite.Session { + if s == nil { + return nil + } + + return deepcopy.Copy(s).(fosite.Session) +} + +func (s *DefaultSession) SetExpiresAt(key fosite.TokenType, exp time.Time) { + if s.ExpiresAt == nil { + s.ExpiresAt = make(map[fosite.TokenType]time.Time) + } + s.ExpiresAt[key] = exp +} + +func (s *DefaultSession) GetExpiresAt(key fosite.TokenType) time.Time { + if s.ExpiresAt == nil { + s.ExpiresAt = make(map[fosite.TokenType]time.Time) + } + + if _, ok := s.ExpiresAt[key]; !ok { + return time.Time{} + } + return s.ExpiresAt[key] +} + +func (s *DefaultSession) GetUsername() string { + if s == nil { + return "" + } + return s.Username +} + +func (s *DefaultSession) SetSubject(subject string) { + s.Subject = subject +} + +func (s *DefaultSession) GetSubject() string { + if s == nil { + return "" + } + + return s.Subject +} + +func (s *DefaultSession) IDTokenHeaders() *jwt.Headers { + if s.Headers == nil { + s.Headers = &jwt.Headers{} + } + return s.Headers +} + +func (s *DefaultSession) IDTokenClaims() *jwt.IDTokenClaims { + if s.Claims == nil { + s.Claims = &jwt.IDTokenClaims{} + } + return s.Claims +} + +type DefaultStrategy struct { + jwt.Signer + + Config interface { + fosite.IDTokenIssuerProvider + fosite.IDTokenLifespanProvider + fosite.MinParameterEntropyProvider + } +} + +// GenerateIDToken returns a JWT string. +// +// lifespan is ignored if requester.GetSession().IDTokenClaims().ExpiresAt is not zero. 
+func (h DefaultStrategy) GenerateIDToken(ctx context.Context, lifespan time.Duration, requester fosite.Requester) (token string, err error) { + if lifespan == 0 { + lifespan = defaultExpiryTime + } + + sess, ok := requester.GetSession().(Session) + if !ok { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because session must be of type fosite/handler/openid.Session.")) + } + + claims := sess.IDTokenClaims() + if claims.Subject == "" { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because subject is an empty string.")) + } + + if requester.GetRequestForm().Get("grant_type") != "refresh_token" { + maxAge, err := strconv.ParseInt(requester.GetRequestForm().Get("max_age"), 10, 64) + if err != nil { + maxAge = 0 + } + + // Adds a bit of wiggle room for timing issues + if claims.AuthTime.After(time.Now().UTC().Add(time.Second * 5)) { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because authentication time is in the future.")) + } + + if maxAge > 0 { + if claims.AuthTime.IsZero() { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because authentication time claim is required when max_age is set.")) + } else if claims.RequestedAt.IsZero() { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because requested at claim is required when max_age is set.")) + } else if claims.AuthTime.Add(time.Second * time.Duration(maxAge)).Before(claims.RequestedAt) { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because authentication time does not satisfy max_age time.")) + } + } + + prompt := requester.GetRequestForm().Get("prompt") + if prompt != "" { + if claims.AuthTime.IsZero() { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Unable to determine validity of prompt parameter because auth_time is missing in id token claims.")) + } + } + + switch prompt { + case "none": + if !claims.AuthTime.Equal(claims.RequestedAt) && claims.AuthTime.After(claims.RequestedAt) { + return "", errorsx.WithStack(fosite.ErrServerError. + WithDebugf("Failed to generate id token because prompt was set to 'none' but auth_time ('%s') happened after the authorization request ('%s') was registered, indicating that the user was logged in during this request which is not allowed.", claims.AuthTime, claims.RequestedAt)) + } + case "login": + if !claims.AuthTime.Equal(claims.RequestedAt) && claims.AuthTime.Before(claims.RequestedAt) { + return "", errorsx.WithStack(fosite.ErrServerError. + WithDebugf("Failed to generate id token because prompt was set to 'login' but auth_time ('%s') happened before the authorization request ('%s') was registered, indicating that the user was not re-authenticated which is forbidden.", claims.AuthTime, claims.RequestedAt)) + } + } + + // If acr_values was requested but no acr value was provided in the ID token, fall back to level 0 which means least + // confidence in authentication. 
+ if requester.GetRequestForm().Get("acr_values") != "" && claims.AuthenticationContextClassReference == "" { + claims.AuthenticationContextClassReference = "0" + } + + if tokenHintString := requester.GetRequestForm().Get("id_token_hint"); tokenHintString != "" { + tokenHint, err := h.Signer.Decode(ctx, tokenHintString) + var ve *jwt.ValidationError + if errors.As(err, &ve) && ve.Has(jwt.ValidationErrorExpired) { + // Expired ID Tokens are allowed as values to id_token_hint + } else if err != nil { + return "", errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebugf("Unable to decode id token from 'id_token_hint' parameter because %s.", err.Error())) + } + + if hintSub, _ := tokenHint.Claims["sub"].(string); hintSub == "" { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Provided id token from 'id_token_hint' does not have a subject.")) + } else if hintSub != claims.Subject { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Subject from authorization mismatches id token subject from 'id_token_hint'.")) + } + } + } + + if claims.ExpiresAt.IsZero() { + claims.ExpiresAt = time.Now().UTC().Add(lifespan) + } + + if claims.ExpiresAt.Before(time.Now().UTC()) { + return "", errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to generate id token because expiry claim can not be in the past.")) + } + + if claims.AuthTime.IsZero() { + claims.AuthTime = time.Now().Truncate(time.Second).UTC() + } + + if claims.Issuer == "" { + claims.Issuer = h.Config.GetIDTokenIssuer(ctx) + } + + // OPTIONAL. String value used to associate a Client session with an ID Token, and to mitigate replay attacks. + if nonce := requester.GetRequestForm().Get("nonce"); len(nonce) == 0 { + } else if len(nonce) > 0 && len(nonce) < h.Config.GetMinParameterEntropy(ctx) { + // We're assuming that using less then, by default, 8 characters for the state can not be considered "unguessable" + return "", errorsx.WithStack(fosite.ErrInsufficientEntropy.WithHintf("Parameter 'nonce' is set but does not satisfy the minimum entropy of %d characters.", h.Config.GetMinParameterEntropy(ctx))) + } else if len(nonce) > 0 { + claims.Nonce = nonce + } + + claims.Audience = stringslice.Unique(append(claims.Audience, requester.GetClient().GetID())) + claims.IssuedAt = time.Now().UTC() + + token, _, err = h.Signer.Generate(ctx, claims.ToMapClaims(), sess.IDTokenHeaders()) + return token, err +} diff --git a/fosite/handler/openid/strategy_jwt_test.go b/fosite/handler/openid/strategy_jwt_test.go new file mode 100644 index 00000000000..be0edd98fc1 --- /dev/null +++ b/fosite/handler/openid/strategy_jwt_test.go @@ -0,0 +1,287 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestJWTStrategy_GenerateIDToken(t *testing.T) { + j := &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + }, + } + + var req *fosite.AccessRequest + for k, c := range []struct { + description string + setup func() + expectErr bool + }{ + { + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: 
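// Illustrative sketch of the max_age rule enforced by GenerateIDToken above: when the request
// carries max_age (in seconds), auth_time plus max_age must not lie before requested_at,
// otherwise issuing the ID token fails and the user has to re-authenticate. Times are made up.
package main

import (
	"fmt"
	"time"
)

// satisfiesMaxAge mirrors the check `claims.AuthTime.Add(maxAge).Before(claims.RequestedAt)`.
func satisfiesMaxAge(authTime, requestedAt time.Time, maxAgeSeconds int64) bool {
	return !authTime.Add(time.Second * time.Duration(maxAgeSeconds)).Before(requestedAt)
}

func main() {
	now := time.Now().UTC()
	// Authenticated 30 minutes ago with max_age=3600: still acceptable.
	fmt.Println(satisfiesMaxAge(now.Add(-30*time.Minute), now, 3600)) // true
	// Authenticated two hours ago with max_age=3600: too old, the handler returns an error.
	fmt.Println(satisfiesMaxAge(now.Add(-2*time.Hour), now, 3600)) // false
}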
&jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("nonce", "some-secure-nonce-state") + }, + expectErr: false, + }, + { + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().UTC(), + RequestedAt: time.Now().UTC(), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("nonce", "some-secure-nonce-state") + req.Form.Set("max_age", "1234") + }, + expectErr: false, + }, + { + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + ExpiresAt: time.Now().UTC().Add(-time.Hour), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("nonce", "some-secure-nonce-state") + }, + expectErr: true, + }, + { + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("nonce", "some-secure-nonce-state") + req.Form.Set("max_age", "1234") + }, + expectErr: true, + }, + { + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{}, + Headers: &jwt.Headers{}, + }) + req.Form.Set("nonce", "some-secure-nonce-state") + }, + expectErr: true, + }, + { + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }) + }, + expectErr: false, + }, + { + description: "should pass because max_age was requested and auth_time happened after initial request time", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().UTC(), + RequestedAt: time.Now().UTC(), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("max_age", "60") + }, + expectErr: false, + }, + { + description: "should fail because max_age was requested and auth_time has expired", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().Add(-time.Hour).UTC(), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("max_age", "60") + }, + expectErr: true, + }, + { + description: "should fail because prompt=none was requested and auth_time indicates fresh login", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("prompt", "none") + }, + expectErr: true, + }, + { + description: "should pass because prompt=none was requested and auth_time indicates fresh login but grant type is refresh_token", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("prompt", "none") + req.Form.Set("grant_type", "refresh_token") + }, + expectErr: false, + }, + { + description: "should pass because prompt=none was requested and auth_time indicates old login", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().Add(-time.Hour).UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("prompt", "none") + }, + expectErr: false, + }, + { + 
description: "should pass because prompt=login was requested and auth_time indicates fresh login", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("prompt", "login") + }, + expectErr: false, + }, + { + description: "should fail because prompt=login was requested and auth_time indicates old login", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().Add(-time.Hour).UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + req.Form.Set("prompt", "login") + }, + expectErr: true, + }, + { + description: "should pass because id_token_hint subject matches subject from claims", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().Add(-time.Hour).UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + token, _ := j.GenerateIDToken(context.TODO(), time.Duration(0), fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + })) + req.Form.Set("id_token_hint", token) + }, + expectErr: false, + }, + { + description: "should pass even though token is expired", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().Add(-time.Hour).UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + token, _ := j.GenerateIDToken(context.TODO(), time.Duration(0), fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + ExpiresAt: time.Now().Add(-time.Hour).UTC(), + }, + Headers: &jwt.Headers{}, + })) + req.Form.Set("id_token_hint", token) + }, + expectErr: false, + }, + { + description: "should fail because id_token_hint subject does not match subject from claims", + setup: func() { + req = fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + AuthTime: time.Now().Add(-time.Hour).UTC(), + RequestedAt: time.Now().Add(-time.Minute), + }, + Headers: &jwt.Headers{}, + }) + token, _ := j.GenerateIDToken(context.TODO(), time.Duration(0), fosite.NewAccessRequest(&openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{Subject: "alice"}, Headers: &jwt.Headers{}, + })) + req.Form.Set("id_token_hint", token) + }, + expectErr: true, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + token, err := j.GenerateIDToken(context.TODO(), time.Duration(0), req) + assert.Equal(t, c.expectErr, err != nil, "%d: %+v", k, err) + if !c.expectErr { + assert.NotEmpty(t, token) + } + }) + } +} diff --git a/fosite/handler/openid/validator.go b/fosite/handler/openid/validator.go new file mode 100644 index 00000000000..d6911b205df --- /dev/null +++ b/fosite/handler/openid/validator.go @@ -0,0 +1,163 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid + +import ( + "context" + "strconv" + "strings" + "time" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/go-convenience/stringslice" + "github.com/ory/hydra/v2/fosite" + 
"github.com/ory/hydra/v2/fosite/token/jwt" +) + +var defaultPrompts = []string{"login", "none", "consent", "select_account"} + +type openIDConnectRequestValidatorConfigProvider interface { + fosite.RedirectSecureCheckerProvider + fosite.AllowedPromptsProvider +} + +type OpenIDConnectRequestValidator struct { + Strategy jwt.Signer + Config interface { + fosite.RedirectSecureCheckerProvider + fosite.AllowedPromptsProvider + } +} + +func NewOpenIDConnectRequestValidator(strategy jwt.Signer, config openIDConnectRequestValidatorConfigProvider) *OpenIDConnectRequestValidator { + return &OpenIDConnectRequestValidator{ + Strategy: strategy, + Config: config, + } +} + +func (v *OpenIDConnectRequestValidator) ValidatePrompt(ctx context.Context, req fosite.AuthorizeRequester) error { + // prompt is case sensitive! + requiredPrompt := fosite.RemoveEmpty(strings.Split(req.GetRequestForm().Get("prompt"), " ")) + + if req.GetClient().IsPublic() { + // Threat: Malicious Client Obtains Existing Authorization by Fraud + // https://tools.ietf.org/html/rfc6819#section-4.2.3 + // + // Authorization servers should not automatically process repeat + // authorizations to public clients unless the client is validated + // using a pre-registered redirect URI + + // Client Impersonation + // https://tools.ietf.org/html/rfc8252#section-8.6# + // + // As stated in Section 10.2 of OAuth 2.0 [RFC6749], the authorization + // server SHOULD NOT process authorization requests automatically + // without user consent or interaction, except when the identity of the + // client can be assured. This includes the case where the user has + // previously approved an authorization request for a given client id -- + // unless the identity of the client can be proven, the request SHOULD + // be processed as if no previous request had been approved. + + checker := v.Config.GetRedirectSecureChecker(ctx) + if stringslice.Has(requiredPrompt, "none") { + if !checker(ctx, req.GetRedirectURI()) { + return errorsx.WithStack(fosite.ErrConsentRequired.WithHint("OAuth 2.0 Client is marked public and redirect uri is not considered secure (https missing), but \"prompt=none\" was requested.")) + } + } + } + + availablePrompts := v.Config.GetAllowedPrompts(ctx) + if len(availablePrompts) == 0 { + availablePrompts = defaultPrompts + } + + if !isWhitelisted(requiredPrompt, availablePrompts) { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHintf("Used unknown value '%s' for prompt parameter", requiredPrompt)) + } + + if stringslice.Has(requiredPrompt, "none") && len(requiredPrompt) > 1 { + // If this parameter contains none with any other value, an error is returned. 
+ return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Parameter 'prompt' was set to 'none', but contains other values as well which is not allowed.")) + } + + maxAge, err := strconv.ParseInt(req.GetRequestForm().Get("max_age"), 10, 64) + if err != nil { + maxAge = 0 + } + + session, ok := req.GetSession().(Session) + if !ok { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because session is not of type fosite/handler/openid.Session.")) + } + + claims := session.IDTokenClaims() + if claims.Subject == "" { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because session subject is empty.")) + } + + // Adds a bit of wiggle room for timing issues + if claims.AuthTime.After(time.Now().UTC().Add(time.Second * 5)) { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because authentication time is in the future.")) + } + + if maxAge > 0 { + if claims.AuthTime.IsZero() { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because authentication time claim is required when max_age is set.")) + } else if claims.RequestedAt.IsZero() { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because requested at claim is required when max_age is set.")) + } else if claims.AuthTime.Add(time.Second * time.Duration(maxAge)).Before(claims.RequestedAt) { + return errorsx.WithStack(fosite.ErrLoginRequired.WithDebug("Failed to validate OpenID Connect request because authentication time does not satisfy max_age time.")) + } + } + + if stringslice.Has(requiredPrompt, "none") { + if claims.AuthTime.IsZero() { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("Failed to validate OpenID Connect request because auth_time is missing from session.")) + } + if !claims.AuthTime.Equal(claims.RequestedAt) && claims.AuthTime.After(claims.RequestedAt) { + // !claims.AuthTime.Truncate(time.Second).Equal(claims.RequestedAt) && claims.AuthTime.Truncate(time.Second).Before(claims.RequestedAt) { + return errorsx.WithStack(fosite.ErrLoginRequired.WithHintf("Failed to validate OpenID Connect request because prompt was set to 'none' but auth_time ('%s') happened after the authorization request ('%s') was registered, indicating that the user was logged in during this request which is not allowed.", claims.AuthTime, claims.RequestedAt)) + } + } + + if stringslice.Has(requiredPrompt, "login") { + if claims.AuthTime.Before(claims.RequestedAt) { + return errorsx.WithStack(fosite.ErrLoginRequired.WithHintf("Failed to validate OpenID Connect request because prompt was set to 'login' but auth_time ('%s') happened before the authorization request ('%s') was registered, indicating that the user was not re-authenticated which is forbidden.", claims.AuthTime, claims.RequestedAt)) + } + } + + idTokenHint := req.GetRequestForm().Get("id_token_hint") + if idTokenHint == "" { + return nil + } + + tokenHint, err := v.Strategy.Decode(ctx, idTokenHint) + var ve *jwt.ValidationError + if errors.As(err, &ve) && ve.Has(jwt.ValidationErrorExpired) { + // Expired tokens are ok + } else if err != nil { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Failed to validate OpenID Connect request as decoding id token from id_token_hint parameter failed.").WithWrap(err).WithDebug(err.Error())) + } + + if hintSub, _ := tokenHint.Claims["sub"].(string); hintSub == "" { 
return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Failed to validate OpenID Connect request because provided id token from id_token_hint does not have a subject.")) + } else if hintSub != claims.Subject { + return errorsx.WithStack(fosite.ErrLoginRequired.WithHint("Failed to validate OpenID Connect request because the subject from provided id token from id_token_hint does not match the current session's subject.")) + } + + return nil +} + +func isWhitelisted(items []string, whiteList []string) bool { + for _, item := range items { + if !stringslice.Has(whiteList, item) { + return false + } + } + return true +} diff --git a/fosite/handler/openid/validator_test.go b/fosite/handler/openid/validator_test.go new file mode 100644 index 00000000000..dbdac9eec42 --- /dev/null +++ b/fosite/handler/openid/validator_test.go @@ -0,0 +1,276 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openid_test + +import ( + "context" + "fmt" + "net/url" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestValidatePrompt(t *testing.T) { + config := &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + } + j := &openid.DefaultStrategy{ + Signer: &jwt.DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return key, nil + }, + }, + Config: &fosite.Config{ + MinParameterEntropy: fosite.MinParameterEntropy, + }, + } + + v := openid.NewOpenIDConnectRequestValidator(j, config) + + genIDToken := func(c jwt.IDTokenClaims) string { + s, _, err := j.Generate(context.TODO(), c.ToMapClaims(), jwt.NewHeaders()) + require.NoError(t, err) + return s + } + + for k, tc := range []struct { + d string + prompt string + redirectURL string + isPublic bool + expectErr bool + idTokenHint string + s *openid.DefaultSession + }{ + { + d: "should fail because prompt=none should not work together with public clients and http non-localhost", + prompt: "none", + isPublic: true, + expectErr: true, + redirectURL: "http://foo-bar/", + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Minute), + }, + }, + }, + { + d: "should pass because prompt=none works for public clients and http localhost", + prompt: "none", + isPublic: true, + expectErr: false, + redirectURL: "http://localhost/", + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Minute), + }, + }, + }, + { + d: "should pass", + prompt: "none", + isPublic: true, + expectErr: false, + redirectURL: "https://foo-bar/", + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Minute), + }, + }, + }, + { + d: "should fail because prompt=none requires an auth time being set", + prompt: "none", + isPublic: false, + expectErr: true, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + }, + }, + }, + { + d: "should fail because prompt=none and auth time is recent (after requested at)", + prompt: "none", + isPublic: false, + expectErr: true, + 
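// Illustrative sketch of how ValidatePrompt above treats the prompt parameter: the value is split
// on spaces (case-sensitive), every entry must be on the allow-list (defaultPrompts unless the
// configuration overrides it), and "none" may not be combined with any other value. This is a
// standalone reduction of those rules, not the validator itself.
package main

import (
	"fmt"
	"slices"
	"strings"
)

var allowed = []string{"login", "none", "consent", "select_account"}

func checkPrompt(prompt string) error {
	values := slices.DeleteFunc(strings.Split(prompt, " "), func(s string) bool { return s == "" })
	for _, v := range values {
		if !slices.Contains(allowed, v) {
			return fmt.Errorf("unknown prompt value %q", v)
		}
	}
	if slices.Contains(values, "none") && len(values) > 1 {
		return fmt.Errorf("prompt 'none' must not be combined with other values")
	}
	return nil
}

func main() {
	fmt.Println(checkPrompt("login consent")) // <nil>
	fmt.Println(checkPrompt("none login"))    // error: 'none' combined with other values
	fmt.Println(checkPrompt("foo"))           // error: unknown prompt value
}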
s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC().Add(-time.Minute), + AuthTime: time.Now().UTC(), + }, + }, + }, + { + d: "should pass because prompt=none and auth time is in the past (before requested at)", + prompt: "none", + isPublic: false, + expectErr: false, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Minute), + }, + }, + }, + { + d: "should fail because prompt=none can not be used together with other prompts", + prompt: "none login", + isPublic: false, + expectErr: true, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC(), + }, + }, + }, + { + d: "should fail because prompt=foo is an unknown value", + prompt: "foo", + isPublic: false, + expectErr: true, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC(), + }, + }, + }, + { + d: "should pass because requesting consent and login works with public clients", + prompt: "login consent", + isPublic: true, + expectErr: false, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC().Add(-time.Second * 5), + AuthTime: time.Now().UTC().Add(-time.Second), + }, + }, + }, + { + d: "should pass because requesting consent and login works with confidential clients", + prompt: "login consent", + isPublic: false, + expectErr: false, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC().Add(-time.Second * 5), + AuthTime: time.Now().UTC().Add(-time.Second), + }, + }, + }, + { + d: "should fail subject from ID token does not match subject from session", + prompt: "login", + isPublic: false, + expectErr: true, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Second), + }, + }, + idTokenHint: genIDToken(jwt.IDTokenClaims{ + Subject: "bar", + RequestedAt: time.Now(), + ExpiresAt: time.Now().Add(time.Hour), + }), + }, + { + d: "should pass subject from ID token matches subject from session", + prompt: "", + isPublic: false, + expectErr: false, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Second), + }, + }, + idTokenHint: genIDToken(jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now(), + ExpiresAt: time.Now().Add(time.Hour), + }), + }, + { + d: "should pass subject from ID token matches subject from session even though id token is expired", + prompt: "", + isPublic: false, + expectErr: false, + s: &openid.DefaultSession{ + Subject: "foo", + Claims: &jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().UTC().Add(-time.Second), + ExpiresAt: time.Now().UTC().Add(-time.Second), + }, + }, + idTokenHint: genIDToken(jwt.IDTokenClaims{ + Subject: "foo", + RequestedAt: time.Now(), + ExpiresAt: time.Now().Add(time.Hour), + }), + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { + t.Logf("%s", tc.idTokenHint) + err := v.ValidatePrompt(context.TODO(), &fosite.AuthorizeRequest{ + Request: fosite.Request{ + 
Form: url.Values{"prompt": {tc.prompt}, "id_token_hint": {tc.idTokenHint}}, + Client: &fosite.DefaultClient{Public: tc.isPublic}, + Session: tc.s, + }, + RedirectURI: parse(tc.redirectURL), + }) + if tc.expectErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func parse(u string) *url.URL { + o, _ := url.Parse(u) + return o +} diff --git a/fosite/handler/par/flow_pushed_authorize.go b/fosite/handler/par/flow_pushed_authorize.go new file mode 100644 index 00000000000..411cecae0af --- /dev/null +++ b/fosite/handler/par/flow_pushed_authorize.go @@ -0,0 +1,87 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package par + +import ( + "context" + "encoding/base64" + "fmt" + "net/url" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/x/errorsx" +) + +const ( + defaultPARKeyLength = 32 +) + +var b64 = base64.URLEncoding.WithPadding(base64.NoPadding) + +// PushedAuthorizeHandler handles the PAR request +type PushedAuthorizeHandler struct { + Storage fosite.PARStorageProvider + Config fosite.Configurator +} + +// HandlePushedAuthorizeEndpointRequest handles a pushed authorize endpoint request. To extend the handler's capabilities, the http request +// is passed along, if further information retrieval is required. If the handler feels that he is not responsible for +// the pushed authorize request, he must return nil and NOT modify session nor responder neither requester. +func (c *PushedAuthorizeHandler) HandlePushedAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.PushedAuthorizeResponder) error { + configProvider, ok := c.Config.(fosite.PushedAuthorizeRequestConfigProvider) + if !ok { + return errorsx.WithStack(fosite.ErrServerError.WithHint(fosite.ErrorPARNotSupported).WithDebug(fosite.DebugPARConfigMissing)) + } + + if !ar.GetResponseTypes().HasOneOf("token", "code", "id_token") { + return nil + } + + if !c.secureChecker(ctx, ar.GetRedirectURI()) { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Redirect URL is using an insecure protocol, http is only allowed for hosts with suffix 'localhost', for example: http://myapp.localhost/.")) + } + + client := ar.GetClient() + for _, scope := range ar.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(client.GetScopes(), scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The OAuth 2.0 Client is not allowed to request scope '%s'.", scope)) + } + } + + if err := c.Config.GetAudienceStrategy(ctx)(client.GetAudience(), ar.GetRequestedAudience()); err != nil { + return err + } + + expiresIn := configProvider.GetPushedAuthorizeContextLifespan(ctx) + if ar.GetSession() != nil { + ar.GetSession().SetExpiresAt(fosite.PushedAuthorizeRequestContext, time.Now().UTC().Add(expiresIn)) + } + + // generate an ID + stateKey, err := hmac.RandomBytes(defaultPARKeyLength) + if err != nil { + return errorsx.WithStack(fosite.ErrInsufficientEntropy.WithHint("Unable to generate the random part of the request_uri.").WithWrap(err).WithDebug(err.Error())) + } + + requestURI := fmt.Sprintf("%s%s", configProvider.GetPushedAuthorizeRequestURIPrefix(ctx), b64.EncodeToString(stateKey)) + + // store + if err = c.Storage.PARStorage().CreatePARSession(ctx, requestURI, ar); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithHint("Unable to store the PAR session").WithWrap(err).WithDebug(err.Error())) + } + + 
resp.SetRequestURI(requestURI) + resp.SetExpiresIn(int(expiresIn.Seconds())) + return nil +} + +func (c *PushedAuthorizeHandler) secureChecker(ctx context.Context, u *url.URL) bool { + isRedirectURISecure := c.Config.GetRedirectSecureChecker(ctx) + if isRedirectURISecure == nil { + isRedirectURISecure = fosite.IsRedirectURISecure + } + return isRedirectURISecure(ctx, u) +} diff --git a/fosite/handler/par/flow_pushed_authorize_test.go b/fosite/handler/par/flow_pushed_authorize_test.go new file mode 100644 index 00000000000..81b6cd3d62e --- /dev/null +++ b/fosite/handler/par/flow_pushed_authorize_test.go @@ -0,0 +1,136 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package par_test + +import ( + "context" + "net/url" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite/storage" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/par" +) + +func parseURL(uu string) *url.URL { + u, _ := url.Parse(uu) + return u +} + +func TestAuthorizeCode_HandleAuthorizeEndpointRequest(t *testing.T) { + requestURIPrefix := "urn:ietf:params:oauth:request_uri_diff:" + store := storage.NewMemoryStore() + handler := par.PushedAuthorizeHandler{ + Storage: store, + Config: &fosite.Config{ + PushedAuthorizeContextLifespan: 30 * time.Minute, + PushedAuthorizeRequestURIPrefix: requestURIPrefix, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + }, + } + for _, c := range []struct { + handler par.PushedAuthorizeHandler + areq *fosite.AuthorizeRequest + description string + expectErr error + expect func(t *testing.T, areq *fosite.AuthorizeRequest, aresp *fosite.PushedAuthorizeResponse) + }{ + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{""}, + Request: *fosite.NewRequest(), + }, + description: "should pass because not responsible for handling an empty response type", + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"foo"}, + Request: *fosite.NewRequest(), + }, + description: "should pass because not responsible for handling an invalid response type", + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"http://asdf.com/cb"}, + }, + }, + RedirectURI: parseURL("http://asdf.com/cb"), + }, + description: "should fail because redirect uri is not https", + expectErr: fosite.ErrInvalidRequest, + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"https://asdf.com/cb"}, + Audience: []string{"https://www.ory.sh/api"}, + }, + RequestedAudience: []string{"https://www.ory.sh/not-api"}, + }, + RedirectURI: parseURL("https://asdf.com/cb"), + }, + description: "should fail because audience doesn't match", + expectErr: fosite.ErrInvalidRequest, + }, + { + handler: handler, + areq: &fosite.AuthorizeRequest{ + ResponseTypes: fosite.Arguments{"code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ResponseTypes: fosite.Arguments{"code"}, + RedirectURIs: []string{"https://asdf.de/cb"}, + 
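// Illustrative sketch of how the PAR handler above builds a request_uri: 32 random octets
// (defaultPARKeyLength) are base64url-encoded without padding and appended to the configured
// prefix (GetPushedAuthorizeRequestURIPrefix). The prefix used below is the conventional RFC 9126
// value and is only an assumption for this example; the handler uses whatever the configuration
// returns.
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

func main() {
	key := make([]byte, 32)
	if _, err := rand.Read(key); err != nil {
		panic(err)
	}
	requestURI := "urn:ietf:params:oauth:request_uri:" +
		base64.URLEncoding.WithPadding(base64.NoPadding).EncodeToString(key)
	fmt.Println(requestURI)
}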
Audience: []string{"https://www.ory.sh/api"}, + }, + RequestedAudience: []string{"https://www.ory.sh/api"}, + GrantedScope: fosite.Arguments{"a", "b"}, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{fosite.AccessToken: time.Now().UTC().Add(time.Hour)}, + }, + RequestedAt: time.Now().UTC(), + }, + State: "superstate", + RedirectURI: parseURL("https://asdf.de/cb"), + }, + description: "should pass", + expect: func(t *testing.T, areq *fosite.AuthorizeRequest, aresp *fosite.PushedAuthorizeResponse) { + requestURI := aresp.RequestURI + assert.NotEmpty(t, requestURI) + assert.True(t, strings.HasPrefix(requestURI, requestURIPrefix), "requestURI does not match: %s", requestURI) + }, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + aresp := &fosite.PushedAuthorizeResponse{} + err := c.handler.HandlePushedAuthorizeEndpointRequest(context.Background(), c.areq, aresp) + if c.expectErr != nil { + require.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + + if c.expect != nil { + c.expect(t, c.areq, aresp) + } + }) + } +} diff --git a/fosite/handler/pkce/export_test.go b/fosite/handler/pkce/export_test.go new file mode 100644 index 00000000000..03db7dc87d0 --- /dev/null +++ b/fosite/handler/pkce/export_test.go @@ -0,0 +1,14 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pkce + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +func CallValidate(ctx context.Context, challenge, method string, client fosite.Client, handler *Handler) error { + return handler.validate(ctx, challenge, method, client) +} diff --git a/fosite/handler/pkce/handler.go b/fosite/handler/pkce/handler.go new file mode 100644 index 00000000000..b72616191cc --- /dev/null +++ b/fosite/handler/pkce/handler.go @@ -0,0 +1,243 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pkce + +import ( + "context" + "crypto/sha256" + "encoding/base64" + "regexp" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" +) + +var _ fosite.TokenEndpointHandler = (*Handler)(nil) + +type Handler struct { + Strategy oauth2.AuthorizeCodeStrategyProvider + Storage PKCERequestStorageProvider + Config interface { + fosite.EnforcePKCEProvider + fosite.EnforcePKCEForPublicClientsProvider + fosite.EnablePKCEPlainChallengeMethodProvider + } +} + +var _ fosite.TokenEndpointHandler = (*Handler)(nil) + +var verifierWrongFormat = regexp.MustCompile("[^\\w\\.\\-~]") + +func (c *Handler) HandleAuthorizeEndpointRequest(ctx context.Context, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) error { + // This lets us define multiple response types, for example open id connect's id_token + if !ar.GetResponseTypes().Has("code") { + return nil + } + + challenge := ar.GetRequestForm().Get("code_challenge") + method := ar.GetRequestForm().Get("code_challenge_method") + client := ar.GetClient() + + if err := c.validate(ctx, challenge, method, client); err != nil { + return err + } + + // We don't need a session if it's not enforced and the PKCE parameters are not provided by the client. 
+ if challenge == "" && method == "" { + return nil + } + + code := resp.GetCode() + if len(code) == 0 { + return errorsx.WithStack(fosite.ErrServerError.WithDebug("The PKCE handler must be loaded after the authorize code handler.")) + } + + signature := c.Strategy.AuthorizeCodeStrategy().AuthorizeCodeSignature(ctx, code) + if err := c.Storage.PKCERequestStorage().CreatePKCERequestSession(ctx, signature, ar.Sanitize([]string{ + "code_challenge", + "code_challenge_method", + })); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + return nil +} + +func (c *Handler) validate(ctx context.Context, challenge, method string, client fosite.Client) error { + if len(challenge) == 0 { + // If the server requires Proof Key for Code Exchange (PKCE) by OAuth + // clients and the client does not send the "code_challenge" in + // the request, the authorization endpoint MUST return the authorization + // error response with the "error" value set to "invalid_request". The + // "error_description" or the response of "error_uri" SHOULD explain the + // nature of error, e.g., code challenge required. + return c.validateNoPKCE(ctx, client) + } + + // If the server supporting PKCE does not support the requested + // transformation, the authorization endpoint MUST return the + // authorization error response with "error" value set to + // "invalid_request". The "error_description" or the response of + // "error_uri" SHOULD explain the nature of error, e.g., transform + // algorithm not supported. + switch method { + case "S256": + break + case "plain": + fallthrough + case "": + if !c.Config.GetEnablePKCEPlainChallengeMethod(ctx) { + return errorsx.WithStack(fosite.ErrInvalidRequest. + WithHint("Clients must use code_challenge_method=S256, plain is not allowed."). + WithDebug("The server is configured in a way that enforces PKCE S256 as challenge method for clients.")) + } + default: + return errorsx.WithStack(fosite.ErrInvalidRequest. + WithHint("The code_challenge_method is not supported, use S256 instead.")) + } + return nil +} + +func (c *Handler) validateNoPKCE(ctx context.Context, client fosite.Client) error { + if c.Config.GetEnforcePKCE(ctx) { + return errorsx.WithStack(fosite.ErrInvalidRequest. + WithHint("Clients must include a code_challenge when performing the authorize code flow, but it is missing."). + WithDebug("The server is configured in a way that enforces PKCE for clients.")) + } + if c.Config.GetEnforcePKCEForPublicClients(ctx) && client.IsPublic() { + return errorsx.WithStack(fosite.ErrInvalidRequest. + WithHint("This client must include a code_challenge when performing the authorize code flow, but it is missing."). + WithDebug("The server is configured in a way that enforces PKCE for this client.")) + } + return nil +} + +func (c *Handler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + // code_verifier + // REQUIRED. Code verifier + // + // The "code_challenge_method" is bound to the Authorization Code when + // the Authorization Code is issued. That is the method that the token + // endpoint MUST use to verify the "code_verifier". 
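// Illustrative sketch of the configuration switches consulted by validate and validateNoPKCE
// above. EnforcePKCE and EnablePKCEPlainChallengeMethod appear as fosite.Config fields in the
// tests below; EnforcePKCEForPublicClients is assumed to exist by analogy with its provider
// interface. With this setup every client must send a code_challenge and only S256 is accepted.
package main

import (
	"fmt"

	"github.com/ory/hydra/v2/fosite"
)

func main() {
	cfg := &fosite.Config{
		EnforcePKCE:                    true,  // every client must use PKCE
		EnforcePKCEForPublicClients:    true,  // redundant when EnforcePKCE is set, but explicit
		EnablePKCEPlainChallengeMethod: false, // reject code_challenge_method=plain
	}
	fmt.Println(cfg.EnforcePKCE, cfg.EnforcePKCEForPublicClients, cfg.EnablePKCEPlainChallengeMethod)
}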
+ verifier := request.GetRequestForm().Get("code_verifier") + + code := request.GetRequestForm().Get("code") + signature := c.Strategy.AuthorizeCodeStrategy().AuthorizeCodeSignature(ctx, code) + pkceRequest, err := c.Storage.PKCERequestStorage().GetPKCERequestSession(ctx, signature, request.GetSession()) + + nv := len(verifier) + + if errors.Is(err, fosite.ErrNotFound) { + if nv == 0 { + return c.validateNoPKCE(ctx, request.GetClient()) + } + + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("Unable to find initial PKCE data tied to this request").WithWrap(err).WithDebug(err.Error())) + } else if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if err := c.Storage.PKCERequestStorage().DeletePKCERequestSession(ctx, signature); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + challenge := pkceRequest.GetRequestForm().Get("code_challenge") + method := pkceRequest.GetRequestForm().Get("code_challenge_method") + client := pkceRequest.GetClient() + if err := c.validate(ctx, challenge, method, client); err != nil { + return err + } + + nc := len(challenge) + + if !c.Config.GetEnforcePKCE(ctx) && nc == 0 && nv == 0 { + return nil + } + + // NOTE: The code verifier SHOULD have enough entropy to make it + // impractical to guess the value. It is RECOMMENDED that the output of + // a suitable random number generator be used to create a 32-octet + // sequence. The octet sequence is then base64url-encoded to produce a + // 43-octet URL safe string to use as the code verifier. + + // Validation + if nv < 43 { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The PKCE code verifier must be at least 43 characters.")) + } else if nv > 128 { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The PKCE code verifier can not be longer than 128 characters.")) + } else if verifierWrongFormat.MatchString(verifier) { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The PKCE code verifier must only contain [a-Z], [0-9], '-', '.', '_', '~'.")) + } else if nc == 0 { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The PKCE code verifier was provided but the code challenge was absent from the authorization request.")) + } + + // Upon receipt of the request at the token endpoint, the server + // verifies it by calculating the code challenge from the received + // "code_verifier" and comparing it with the previously associated + // "code_challenge", after first transforming it according to the + // "code_challenge_method" method specified by the client. + // + // If the "code_challenge_method" from Section 4.3 was "S256", the + // received "code_verifier" is hashed by SHA-256, base64url-encoded, and + // then compared to the "code_challenge", i.e.: + // + // BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) == code_challenge + // + // If the "code_challenge_method" from Section 4.3 was "plain", they are + // compared directly, i.e.: + // + // code_verifier == code_challenge. + // + // If the values are equal, the token endpoint MUST continue processing + // as normal (as defined by OAuth 2.0 [RFC6749]). If the values are not + // equal, an error response indicating "invalid_grant" as described in + // Section 5.2 of [RFC6749] MUST be returned. 
+ switch method { + case "S256": + hash := sha256.New() + if _, err := hash.Write([]byte(verifier)); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if base64.RawURLEncoding.EncodeToString(hash.Sum([]byte{})) != challenge { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The PKCE code challenge did not match the code verifier.")) + } + break + case "plain": + fallthrough + default: + if verifier != challenge { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The PKCE code challenge did not match the code verifier.")) + } + } + + return nil +} + +func (c *Handler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + return nil +} + +func (c *Handler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *Handler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. + // Value MUST be set to "authorization_code" + return requester.GetGrantTypes().ExactOne("authorization_code") +} diff --git a/fosite/handler/pkce/handler_test.go b/fosite/handler/pkce/handler_test.go new file mode 100644 index 00000000000..d960c46a953 --- /dev/null +++ b/fosite/handler/pkce/handler_test.go @@ -0,0 +1,424 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pkce_test + +import ( + "context" + "crypto/sha256" + "encoding/base64" + "fmt" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/pkce" + "github.com/ory/hydra/v2/fosite/storage" +) + +type mockCodeStrategy struct { + signature string +} + +func (m *mockCodeStrategy) AuthorizeCodeSignature(ctx context.Context, token string) string { + return m.signature +} + +func (m *mockCodeStrategy) GenerateAuthorizeCode(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) { + return "", "", nil +} + +func (m *mockCodeStrategy) ValidateAuthorizeCode(ctx context.Context, requester fosite.Requester, token string) (err error) { + return nil +} + +type mockStrategyProvider struct { + strategy oauth2.AuthorizeCodeStrategy +} + +func (p *mockStrategyProvider) AuthorizeCodeStrategy() oauth2.AuthorizeCodeStrategy { + return p.strategy +} + +func TestPKCEHandleAuthorizeEndpointRequest(t *testing.T) { + var config fosite.Config + h := &pkce.Handler{ + Storage: storage.NewMemoryStore(), + Strategy: &compose.CommonStrategyProvider{CoreStrategy: oauth2.NewHMACSHAStrategy(nil, nil)}, + Config: &config, + } + w := fosite.NewAuthorizeResponse() + r := fosite.NewAuthorizeRequest() + c := &fosite.DefaultClient{} + r.Client = c + + w.AddParameter("code", "foo") + + r.Form.Add("code_challenge", "challenge") + r.Form.Add("code_challenge_method", "plain") + + r.ResponseTypes = fosite.Arguments{} + require.NoError(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + r.ResponseTypes = fosite.Arguments{"code"} + require.Error(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + r.ResponseTypes = fosite.Arguments{"code", "id_token"} + require.Error(t, 
h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + c.Public = true + config.EnablePKCEPlainChallengeMethod = true + require.NoError(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + c.Public = false + config.EnablePKCEPlainChallengeMethod = true + require.NoError(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + config.EnablePKCEPlainChallengeMethod = false + require.Error(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + r.Form.Set("code_challenge_method", "S256") + r.Form.Set("code_challenge", "") + config.EnforcePKCE = true + require.Error(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) + + r.Form.Set("code_challenge", "challenge") + require.NoError(t, h.HandleAuthorizeEndpointRequest(context.Background(), r, w)) +} + +func TestPKCEHandlerValidate(t *testing.T) { + s := storage.NewMemoryStore() + ms := &mockCodeStrategy{} + msp := &mockStrategyProvider{strategy: ms} + config := &fosite.Config{} + h := &pkce.Handler{Storage: s, Strategy: msp, Config: config} + pc := &fosite.DefaultClient{Public: true} + + s256verifier := "KGCt4m8AmjUvIR5ArTByrmehjtbxn1A49YpTZhsH8N7fhDr7LQayn9xx6mck" + hash := sha256.New() + hash.Write([]byte(s256verifier)) + s256challenge := base64.RawURLEncoding.EncodeToString(hash.Sum([]byte{})) + + for k, tc := range []struct { + d string + grant string + force bool + enablePlain bool + challenge string + method string + verifier string + code string + expectErr error + expectErrDesc string + client *fosite.DefaultClient + }{ + { + d: "fails because not auth code flow", + grant: "not_authorization_code", + expectErr: fosite.ErrUnknownRequest, + expectErrDesc: "The handler is not responsible for this request.", + }, + { + d: "passes with private client", + grant: "authorization_code", + challenge: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + method: "plain", + client: &fosite.DefaultClient{Public: false}, + enablePlain: true, + force: true, + code: "valid-code-1", + }, + { + d: "fails because invalid code", + grant: "authorization_code", + client: pc, + code: "invalid-code-2", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. Unable to find initial PKCE data tied to this request not_found", + }, + { + d: "passes because auth code flow but pkce is not forced and no challenge given", + grant: "authorization_code", + client: pc, + code: "valid-code-3", + }, + { + d: "fails because auth code flow and pkce challenge given but plain is disabled", + grant: "authorization_code", + challenge: "foo", + client: pc, + code: "valid-code-4", + expectErr: fosite.ErrInvalidRequest, + expectErrDesc: "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. Clients must use code_challenge_method=S256, plain is not allowed. 
The server is configured in a way that enforces PKCE S256 as challenge method for clients.", + }, + { + d: "passes", + grant: "authorization_code", + challenge: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + client: pc, + enablePlain: true, + force: true, + code: "valid-code-5", + }, + { + d: "passes", + grant: "authorization_code", + challenge: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + method: "plain", + client: pc, + enablePlain: true, + force: true, + code: "valid-code-6", + }, + { + d: "fails because challenge and verifier do not match", + grant: "authorization_code", + challenge: "not-foo", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + method: "plain", + client: pc, + enablePlain: true, + code: "valid-code-7", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. The PKCE code challenge did not match the code verifier.", + }, + { + d: "fails because challenge and verifier do not match", + grant: "authorization_code", + challenge: "not-foonot-foonot-foonot-foonot-foonot-foonot-foonot-foo", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + client: pc, + enablePlain: true, + code: "valid-code-8", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. The PKCE code challenge did not match the code verifier.", + }, + { + d: "fails because verifier is too short", + grant: "authorization_code", + challenge: "foo", + verifier: "foo", + method: "S256", + client: pc, + force: true, + code: "valid-code-9", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. The PKCE code verifier must be at least 43 characters.", + }, + { + d: "fails because verifier is too long", + grant: "authorization_code", + challenge: "foo", + verifier: "foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo", + method: "S256", + client: pc, + force: true, + code: "valid-code-10", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. 
The PKCE code verifier can not be longer than 128 characters.", + }, + { + d: "fails because verifier is malformed", + grant: "authorization_code", + challenge: "foo", + verifier: `(!"/$%Z&$T()/)OUZI>$"&=/T(PUOI>"%/)TUOI&/(O/()RGTE>=/(%"/()="$/)(=()=/R/()=))`, + method: "S256", + client: pc, + force: true, + code: "valid-code-11", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. The PKCE code verifier must only contain [a-Z], [0-9], '-', '.', '_', '~'.", + }, + { + d: "fails because challenge and verifier do not match", + grant: "authorization_code", + challenge: "Zm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9v", + verifier: "Zm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9v", + method: "S256", + client: pc, + force: true, + code: "valid-code-12", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. The PKCE code challenge did not match the code verifier.", + }, + { + d: "passes because challenge and verifier match", + grant: "authorization_code", + challenge: s256challenge, + verifier: s256verifier, + method: "S256", + client: pc, + force: true, + code: "valid-code-13", + }, + { + d: "passes when not forced because no challenge or verifier", + grant: "authorization_code", + client: pc, + code: "valid-code-14", + }, + { + d: "fails when not forced because verifier provided when no challenge", + grant: "authorization_code", + client: pc, + code: "valid-code-15", + verifier: "Zm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9vZm9v", + expectErr: fosite.ErrInvalidGrant, + expectErrDesc: "The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client. 
The PKCE code verifier was provided but the code challenge was absent from the authorization request.", + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { + config.EnablePKCEPlainChallengeMethod = tc.enablePlain + config.EnforcePKCE = tc.force + ms.signature = tc.code + ar := fosite.NewAuthorizeRequest() + + if len(tc.challenge) != 0 { + ar.Form.Add("code_challenge", tc.challenge) + } + + if len(tc.method) != 0 { + ar.Form.Add("code_challenge_method", tc.method) + } + + ar.Client = tc.client + + require.NoError(t, s.CreatePKCERequestSession(context.Background(), fmt.Sprintf("valid-code-%d", k), ar)) + + r := fosite.NewAccessRequest(nil) + r.Client = tc.client + r.GrantTypes = fosite.Arguments{tc.grant} + + if len(tc.verifier) != 0 { + r.Form.Add("code_verifier", tc.verifier) + } + + err := h.HandleTokenEndpointRequest(context.Background(), r) + + if tc.expectErr == nil { + assert.NoError(t, err) + } else { + assert.EqualError(t, err, tc.expectErr.Error(), "%+v", err) + + if len(tc.expectErrDesc) != 0 { + assert.EqualError(t, newtesterr(err), tc.expectErrDesc) + } + } + }) + } +} + +func TestPKCEHandleTokenEndpointRequest(t *testing.T) { + for k, tc := range []struct { + d string + force bool + forcePublic bool + enablePlain bool + challenge string + method string + expectErr bool + client *fosite.DefaultClient + }{ + { + d: "should pass because pkce is not enforced", + }, + { + d: "should fail because plain is not enabled and method is empty which defaults to plain", + expectErr: true, + force: true, + }, + { + d: "should fail because force is enabled and no challenge was given", + force: true, + enablePlain: true, + expectErr: true, + method: "S256", + }, + { + d: "should fail because forcePublic is enabled, the client is public, and no challenge was given", + forcePublic: true, + client: &fosite.DefaultClient{Public: true}, + expectErr: true, + method: "S256", + }, + { + d: "should fail because although force is enabled and a challenge was given, plain is disabled", + force: true, + expectErr: true, + method: "plain", + challenge: "challenge", + }, + { + d: "should fail because although force is enabled and a challenge was given, plain is disabled and method is empty", + force: true, + expectErr: true, + challenge: "challenge", + }, + { + d: "should fail because invalid challenge method", + force: true, + expectErr: true, + method: "invalid", + challenge: "challenge", + }, + { + d: "should pass because force is enabled with challenge given and method is S256", + force: true, + method: "S256", + challenge: "challenge", + }, + { + d: "should pass because forcePublic is enabled with challenge given and method is S256", + forcePublic: true, + client: &fosite.DefaultClient{Public: true}, + method: "S256", + challenge: "challenge", + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { + h := &pkce.Handler{ + Config: &fosite.Config{ + EnforcePKCE: tc.force, + EnforcePKCEForPublicClients: tc.forcePublic, + EnablePKCEPlainChallengeMethod: tc.enablePlain, + }, + } + + if tc.expectErr { + assert.Error(t, pkce.CallValidate(context.Background(), tc.challenge, tc.method, tc.client, h)) + } else { + assert.NoError(t, pkce.CallValidate(context.Background(), tc.challenge, tc.method, tc.client, h)) + } + }) + } +} + +func newtesterr(err error) error { + if err == nil { + return nil + } + + var e *fosite.RFC6749Error + if errors.As(err, &e) { + return &testerr{e} + } + + return err +} + +type testerr struct { + *fosite.RFC6749Error +} + 
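+// TestS256ChallengeDerivation is an illustrative sketch added for this review
+// and not part of the upstream change: it shows how a client derives an S256
+// code_challenge from a code_verifier, mirroring the s256challenge fixture
+// above and the comparison performed by the PKCE handler.
+func TestS256ChallengeDerivation(t *testing.T) {
+	verifier := "KGCt4m8AmjUvIR5ArTByrmehjtbxn1A49YpTZhsH8N7fhDr7LQayn9xx6mck"
+	sum := sha256.Sum256([]byte(verifier))
+	challenge := base64.RawURLEncoding.EncodeToString(sum[:])
+
+	// A 32-byte SHA-256 digest always encodes to 43 unpadded base64url characters.
+	assert.Len(t, challenge, 43)
+	// S256 is a one-way transform, so the challenge never equals the verifier.
+	assert.NotEqual(t, verifier, challenge)
+}
+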
+func (e *testerr) Error() string { + return e.RFC6749Error.WithExposeDebug(true).GetDescription() +} diff --git a/fosite/handler/pkce/storage.go b/fosite/handler/pkce/storage.go new file mode 100644 index 00000000000..172a6f84b9e --- /dev/null +++ b/fosite/handler/pkce/storage.go @@ -0,0 +1,21 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pkce + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +type ( + PKCERequestStorage interface { + GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) + CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) error + DeletePKCERequestSession(ctx context.Context, signature string) error + } + PKCERequestStorageProvider interface { + PKCERequestStorage() PKCERequestStorage + } +) diff --git a/fosite/handler/rfc7523/handler.go b/fosite/handler/rfc7523/handler.go new file mode 100644 index 00000000000..aeed4463c72 --- /dev/null +++ b/fosite/handler/rfc7523/handler.go @@ -0,0 +1,346 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc7523 + +import ( + "context" + "strings" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/x/errorsx" + + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/jwt" +) + +// #nosec:gosec G101 - False Positive +const grantTypeJWTBearer = "urn:ietf:params:oauth:grant-type:jwt-bearer" + +var _ fosite.TokenEndpointHandler = (*Handler)(nil) + +type Handler struct { + Storage interface { + oauth2.AccessTokenStorageProvider + RFC7523KeyStorageProvider + } + Strategy oauth2.AccessTokenStrategyProvider + Config interface { + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider + fosite.TokenURLProvider + fosite.GrantTypeJWTBearerCanSkipClientAuthProvider + fosite.GrantTypeJWTBearerIDOptionalProvider + fosite.GrantTypeJWTBearerIssuedDateOptionalProvider + fosite.GetJWTMaxDurationProvider + fosite.AudienceStrategyProvider + fosite.ScopeStrategyProvider + } +} + +// HandleTokenEndpointRequest implements https://tools.ietf.org/html/rfc6749#section-4.1.3 (everything) and +// https://tools.ietf.org/html/rfc7523#section-2.1 (everything) +func (c *Handler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if err := c.CheckRequest(ctx, request); err != nil { + return err + } + + assertion := request.GetRequestForm().Get("assertion") + if assertion == "" { + return errorsx.WithStack(fosite.ErrInvalidRequest.WithHintf("The assertion request parameter must be set when using grant_type of '%s'.", grantTypeJWTBearer)) + } + + token, err := jwt.ParseSigned(assertion) + if err != nil { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("Unable to parse JSON Web Token passed in \"assertion\" request parameter."). + WithWrap(err).WithDebug(err.Error()), + ) + } + + // Check fo required claims in token, so we can later find public key based on them. + if err := c.validateTokenPreRequisites(token); err != nil { + return err + } + + key, err := c.findPublicKeyForToken(ctx, token) + if err != nil { + return err + } + + claims := jwt.Claims{} + if err := token.Claims(key, &claims); err != nil { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("Unable to verify the integrity of the 'assertion' value."). 
+ WithWrap(err).WithDebug(err.Error()), + ) + } + + if err := c.validateTokenClaims(ctx, claims, key); err != nil { + return err + } + + scopes, err := c.Storage.RFC7523KeyStorage().GetPublicKeyScopes(ctx, claims.Issuer, claims.Subject, key.KeyID) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + for _, scope := range request.GetRequestedScopes() { + if !c.Config.GetScopeStrategy(ctx)(scopes, scope) { + return errorsx.WithStack(fosite.ErrInvalidScope.WithHintf("The public key registered for issuer \"%s\" and subject \"%s\" is not allowed to request scope \"%s\".", claims.Issuer, claims.Subject, scope)) + } + } + + if claims.ID != "" { + if err := c.Storage.RFC7523KeyStorage().MarkJWTUsedForTime(ctx, claims.ID, claims.Expiry.Time()); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + for _, scope := range request.GetRequestedScopes() { + request.GrantScope(scope) + } + + for _, audience := range claims.Audience { + request.GrantAudience(audience) + } + + session, err := c.getSessionFromRequest(request) + if err != nil { + return err + } + + atLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeJWTBearer, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + session.SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan).Round(time.Second)) + session.SetSubject(claims.Subject) + + return nil +} + +func (c *Handler) PopulateTokenEndpointResponse(ctx context.Context, request fosite.AccessRequester, response fosite.AccessResponder) error { + if err := c.CheckRequest(ctx, request); err != nil { + return err + } + + atLifespan := fosite.GetEffectiveLifespan(request.GetClient(), fosite.GrantTypeJWTBearer, fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + _, err := c.IssueAccessToken(ctx, atLifespan, request, response) + return err +} + +func (c *Handler) IssueAccessToken(ctx context.Context, atLifespan time.Duration, requester fosite.AccessRequester, responder fosite.AccessResponder) (signature string, err error) { + token, signature, err := c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, requester) + if err != nil { + return "", err + } else if err := c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, signature, requester.Sanitize([]string{})); err != nil { + return "", err + } + + if !requester.GetSession().GetExpiresAt(fosite.AccessToken).IsZero() { + atLifespan = time.Duration(requester.GetSession().GetExpiresAt(fosite.AccessToken).UnixNano() - time.Now().UTC().UnixNano()) + } + + responder.SetAccessToken(token) + responder.SetTokenType("bearer") + responder.SetExpiresIn(atLifespan) + responder.SetScopes(requester.GetGrantedScopes()) + + return signature, nil +} + +func (c *Handler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return c.Config.GetGrantTypeJWTBearerCanSkipClientAuth(ctx) +} + +func (c *Handler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + // grant_type REQUIRED. 
+ // Value MUST be set to "urn:ietf:params:oauth:grant-type:jwt-bearer" + return requester.GetGrantTypes().ExactOne(grantTypeJWTBearer) +} + +func (c *Handler) CheckRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + // Client Authentication is optional: + // + // Authentication of the client is optional, as described in + // Section 3.2.1 of OAuth 2.0 [RFC6749] and consequently, the + // "client_id" is only needed when a form of client authentication that + // relies on the parameter is used. + + // if client is authenticated, check grant types + if !c.CanSkipClientAuth(ctx, request) && !request.GetClient().GetGrantTypes().Has(grantTypeJWTBearer) { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHintf("The OAuth 2.0 Client is not allowed to use authorization grant \"%s\".", grantTypeJWTBearer)) + } + + return nil +} + +func (c *Handler) validateTokenPreRequisites(token *jwt.JSONWebToken) error { + unverifiedClaims := jwt.Claims{} + if err := token.UnsafeClaimsWithoutVerification(&unverifiedClaims); err != nil { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("Looks like there are no claims in JWT in \"assertion\" request parameter."). + WithWrap(err).WithDebug(err.Error()), + ) + } + if unverifiedClaims.Issuer == "" { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The JWT in \"assertion\" request parameter MUST contain an \"iss\" (issuer) claim."), + ) + } + if unverifiedClaims.Subject == "" { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The JWT in \"assertion\" request parameter MUST contain a \"sub\" (subject) claim."), + ) + } + + return nil +} + +func (c *Handler) findPublicKeyForToken(ctx context.Context, token *jwt.JSONWebToken) (*jose.JSONWebKey, error) { + unverifiedClaims := jwt.Claims{} + if err := token.UnsafeClaimsWithoutVerification(&unverifiedClaims); err != nil { + return nil, errorsx.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithDebug(err.Error())) + } + + var keyID string + for _, header := range token.Headers { + if header.KeyID != "" { + keyID = header.KeyID + break + } + } + + keyNotFoundErr := fosite.ErrInvalidGrant.WithHintf( + "No public JWK was registered for issuer \"%s\" and subject \"%s\", and public key is required to check signature of JWT in \"assertion\" request parameter.", + unverifiedClaims.Issuer, + unverifiedClaims.Subject, + ) + if keyID != "" { + key, err := c.Storage.RFC7523KeyStorage().GetPublicKey(ctx, unverifiedClaims.Issuer, unverifiedClaims.Subject, keyID) + if err != nil { + return nil, errorsx.WithStack(keyNotFoundErr.WithWrap(err).WithDebug(err.Error())) + } + return key, nil + } + + keys, err := c.Storage.RFC7523KeyStorage().GetPublicKeys(ctx, unverifiedClaims.Issuer, unverifiedClaims.Subject) + if err != nil { + return nil, errorsx.WithStack(keyNotFoundErr.WithWrap(err).WithDebug(err.Error())) + } + + claims := jwt.Claims{} + for _, key := range keys.Keys { + err := token.Claims(key, &claims) + if err == nil { + return &key, nil + } + } + + return nil, errorsx.WithStack(keyNotFoundErr) +} + +func (c *Handler) validateTokenClaims(ctx context.Context, claims jwt.Claims, key *jose.JSONWebKey) error { + if len(claims.Audience) == 0 { + return errorsx.WithStack(fosite.ErrInvalidGrant. 
+ WithHint("The JWT in \"assertion\" request parameter MUST contain an \"aud\" (audience) claim."), + ) + } + + if !audienceMatchesTokenURLs(claims, c.Config.GetTokenURLs(ctx)) { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHintf( + `The JWT in "assertion" request parameter MUST contain an "aud" (audience) claim containing a value "%s" that identifies the authorization server as an intended audience.`, + strings.Join(c.Config.GetTokenURLs(ctx), `" or "`))) + } + + if claims.Expiry == nil { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The JWT in \"assertion\" request parameter MUST contain an \"exp\" (expiration time) claim."), + ) + } + + if claims.Expiry.Time().Before(time.Now()) { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The JWT in \"assertion\" request parameter expired."), + ) + } + + if claims.NotBefore != nil && !claims.NotBefore.Time().Before(time.Now()) { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHintf( + "The JWT in \"assertion\" request parameter contains an \"nbf\" (not before) claim, that identifies the time '%s' before which the token MUST NOT be accepted.", + claims.NotBefore.Time().Format(time.RFC3339), + ), + ) + } + + if !c.Config.GetGrantTypeJWTBearerIssuedDateOptional(ctx) && claims.IssuedAt == nil { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHint("The JWT in \"assertion\" request parameter MUST contain an \"iat\" (issued at) claim."), + ) + } + + var issuedDate time.Time + if claims.IssuedAt != nil { + issuedDate = claims.IssuedAt.Time() + } else { + issuedDate = time.Now() + } + if claims.Expiry.Time().Sub(issuedDate) > c.Config.GetJWTMaxDuration(ctx) { + return errorsx.WithStack(fosite.ErrInvalidGrant. + WithHintf( + "The JWT in \"assertion\" request parameter contains an \"exp\" (expiration time) claim with value \"%s\" that is unreasonably far in the future, considering token issued at \"%s\".", + claims.Expiry.Time().Format(time.RFC3339), + issuedDate.Format(time.RFC3339), + ), + ) + } + + if !c.Config.GetGrantTypeJWTBearerIDOptional(ctx) && claims.ID == "" { + return errorsx.WithStack(fosite.ErrInvalidGrant. 
+ WithHint("The JWT in \"assertion\" request parameter MUST contain an \"jti\" (JWT ID) claim."), + ) + } + + if claims.ID != "" { + used, err := c.Storage.RFC7523KeyStorage().IsJWTUsed(ctx, claims.ID) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + if used { + return errorsx.WithStack(fosite.ErrJTIKnown) + } + } + + return nil +} + +func audienceMatchesTokenURLs(claims jwt.Claims, tokenURLs []string) bool { + for _, tokenURL := range tokenURLs { + if claims.Audience.Contains(tokenURL) { + return true + } + } + return false +} + +type extendedSession interface { + Session + fosite.Session +} + +func (c *Handler) getSessionFromRequest(requester fosite.AccessRequester) (extendedSession, error) { + session := requester.GetSession() + if jwtSession, ok := session.(extendedSession); !ok { + return nil, errorsx.WithStack( + fosite.ErrServerError.WithHintf("Session must be of type *rfc7523.Session but got type: %T", session), + ) + } else { + return jwtSession, nil + } +} diff --git a/fosite/handler/rfc7523/handler_test.go b/fosite/handler/rfc7523/handler_test.go new file mode 100644 index 00000000000..2f7ace9dd12 --- /dev/null +++ b/fosite/handler/rfc7523/handler_test.go @@ -0,0 +1,1035 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc7523_test + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "errors" + "fmt" + mrand "math/rand" + "net/url" + "strconv" + "strings" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/jwt" + "github.com/stretchr/testify/suite" + "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/rfc7523" + "github.com/ory/hydra/v2/fosite/internal" +) + +// #nosec:gosec G101 - False Positive +const grantTypeJWTBearer = "urn:ietf:params:oauth:grant-type:jwt-bearer" + +// Define the suite, and absorb the built-in basic suite +// functionality from testify - including a T() method which +// returns the current testing context. +type AuthorizeJWTGrantRequestHandlerTestSuite struct { + suite.Suite + + privateKey *rsa.PrivateKey + mockCtrl *gomock.Controller + mockStore *internal.MockRFC7523KeyStorage + mockStoreProvider *internal.MockRFC7523KeyStorageProvider + mockAccessTokenStrategy *internal.MockAccessTokenStrategy + mockAccessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider + mockAccessTokenStore *internal.MockAccessTokenStorage + mockAccessTokenStoreProvider *internal.MockAccessTokenStorageProvider + accessRequest *fosite.AccessRequest + handler *rfc7523.Handler +} + +// Setup before each test in the suite. +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) SetupSuite() { + // This is an insecure, test-only key from RFC 9500, Section 2.1. + // It can be used in tests to avoid slow key generation. 
+ block, _ := pem.Decode([]byte(strings.ReplaceAll( + `-----BEGIN RSA TESTING KEY----- +MIIEowIBAAKCAQEAsPnoGUOnrpiSqt4XynxA+HRP7S+BSObI6qJ7fQAVSPtRkqso +tWxQYLEYzNEx5ZSHTGypibVsJylvCfuToDTfMul8b/CZjP2Ob0LdpYrNH6l5hvFE +89FU1nZQF15oVLOpUgA7wGiHuEVawrGfey92UE68mOyUVXGweJIVDdxqdMoPvNNU +l86BU02vlBiESxOuox+dWmuVV7vfYZ79Toh/LUK43YvJh+rhv4nKuF7iHjVjBd9s +B6iDjj70HFldzOQ9r8SRI+9NirupPTkF5AKNe6kUhKJ1luB7S27ZkvB3tSTT3P59 +3VVJvnzOjaA1z6Cz+4+eRvcysqhrRgFlwI9TEwIDAQABAoIBAEEYiyDP29vCzx/+ +dS3LqnI5BjUuJhXUnc6AWX/PCgVAO+8A+gZRgvct7PtZb0sM6P9ZcLrweomlGezI +FrL0/6xQaa8bBr/ve/a8155OgcjFo6fZEw3Dz7ra5fbSiPmu4/b/kvrg+Br1l77J +aun6uUAs1f5B9wW+vbR7tzbT/mxaUeDiBzKpe15GwcvbJtdIVMa2YErtRjc1/5B2 +BGVXyvlJv0SIlcIEMsHgnAFOp1ZgQ08aDzvilLq8XVMOahAhP1O2A3X8hKdXPyrx +IVWE9bS9ptTo+eF6eNl+d7htpKGEZHUxinoQpWEBTv+iOoHsVunkEJ3vjLP3lyI/ +fY0NQ1ECgYEA3RBXAjgvIys2gfU3keImF8e/TprLge1I2vbWmV2j6rZCg5r/AS0u +pii5CvJ5/T5vfJPNgPBy8B/yRDs+6PJO1GmnlhOkG9JAIPkv0RBZvR0PMBtbp6nT +Y3yo1lwamBVBfY6rc0sLTzosZh2aGoLzrHNMQFMGaauORzBFpY5lU50CgYEAzPHl +u5DI6Xgep1vr8QvCUuEesCOgJg8Yh1UqVoY/SmQh6MYAv1I9bLGwrb3WW/7kqIoD +fj0aQV5buVZI2loMomtU9KY5SFIsPV+JuUpy7/+VE01ZQM5FdY8wiYCQiVZYju9X +Wz5LxMNoz+gT7pwlLCsC4N+R8aoBk404aF1gum8CgYAJ7VTq7Zj4TFV7Soa/T1eE +k9y8a+kdoYk3BASpCHJ29M5R2KEA7YV9wrBklHTz8VzSTFTbKHEQ5W5csAhoL5Fo +qoHzFFi3Qx7MHESQb9qHyolHEMNx6QdsHUn7rlEnaTTyrXh3ifQtD6C0yTmFXUIS +CW9wKApOrnyKJ9nI0HcuZQKBgQCMtoV6e9VGX4AEfpuHvAAnMYQFgeBiYTkBKltQ +XwozhH63uMMomUmtSG87Sz1TmrXadjAhy8gsG6I0pWaN7QgBuFnzQ/HOkwTm+qKw +AsrZt4zeXNwsH7QXHEJCFnCmqw9QzEoZTrNtHJHpNboBuVnYcoueZEJrP8OnUG3r +UjmopwKBgAqB2KYYMUqAOvYcBnEfLDmyZv9BTVNHbR2lKkMYqv5LlvDaBxVfilE0 +2riO4p6BaAdvzXjKeRrGNEKoHNBpOSfYCOM16NjL8hIZB1CaV3WbT5oY+jp7Mzd5 +7d56RZOE+ERK2uz/7JX9VSsM/LbH9pJibd4e8mikDS9ntciqOH/3 +-----END RSA TESTING KEY-----`, "TESTING KEY", "PRIVATE KEY"))) + s.privateKey, _ = x509.ParsePKCS1PrivateKey(block.Bytes) +} + +// Will run after all the tests in the suite have been run. +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TearDownSuite() { +} + +// Will run after each test in the suite. +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TearDownTest() { + s.mockCtrl.Finish() +} + +// Setup before each test. 
+func (s *AuthorizeJWTGrantRequestHandlerTestSuite) SetupTest() { + s.mockCtrl = gomock.NewController(s.T()) + s.mockStore = internal.NewMockRFC7523KeyStorage(s.mockCtrl) + s.mockStoreProvider = internal.NewMockRFC7523KeyStorageProvider(s.mockCtrl) + s.mockAccessTokenStrategy = internal.NewMockAccessTokenStrategy(s.mockCtrl) + s.mockAccessTokenStrategyProvider = internal.NewMockAccessTokenStrategyProvider(s.mockCtrl) + s.mockAccessTokenStore = internal.NewMockAccessTokenStorage(s.mockCtrl) + s.mockAccessTokenStoreProvider = internal.NewMockAccessTokenStorageProvider(s.mockCtrl) + + mockStorage := struct { + *internal.MockAccessTokenStorageProvider + *internal.MockRFC7523KeyStorageProvider + }{ + MockAccessTokenStorageProvider: s.mockAccessTokenStoreProvider, + MockRFC7523KeyStorageProvider: s.mockStoreProvider, + } + + s.accessRequest = fosite.NewAccessRequest(new(fosite.DefaultSession)) + s.accessRequest.Form = url.Values{} + s.accessRequest.Client = &fosite.DefaultClient{GrantTypes: []string{grantTypeJWTBearer}} + s.handler = &rfc7523.Handler{ + Storage: mockStorage, + Strategy: s.mockAccessTokenStrategyProvider, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + TokenURL: "https://www.example.com/token", + GrantTypeJWTBearerCanSkipClientAuth: false, + GrantTypeJWTBearerIDOptional: false, + GrantTypeJWTBearerIssuedDateOptional: false, + GrantTypeJWTBearerMaxDuration: time.Hour * 24 * 30, + }, + } +} + +// In order for 'go test' to run this suite, we need to create +// a normal test function and pass our suite to suite.Run. +func TestAuthorizeJWTGrantRequestHandlerTestSuite(t *testing.T) { + suite.Run(t, new(AuthorizeJWTGrantRequestHandlerTestSuite)) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestRequestWithInvalidGrantType() { + // arrange + s.accessRequest.GrantTypes = []string{"authorization_code"} + + // act + err := s.handler.HandleTokenEndpointRequest(context.Background(), s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrUnknownRequest)) + s.EqualError(err, fosite.ErrUnknownRequest.Error(), "expected error, because of invalid grant type") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestClientIsNotRegisteredForGrantType() { + // arrange + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + s.accessRequest.Client = &fosite.DefaultClient{GrantTypes: []string{"authorization_code"}} + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerCanSkipClientAuth = false + + // act + err := s.handler.HandleTokenEndpointRequest(context.Background(), s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrUnauthorizedClient)) + s.EqualError(err, fosite.ErrUnauthorizedClient.Error(), "expected error, because client is not registered to use this grant type") + s.Equal( + "The OAuth 2.0 Client is not allowed to use authorization grant \"urn:ietf:params:oauth:grant-type:jwt-bearer\".", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestRequestWithoutAssertion() { + // arrange + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + + // act + err := s.handler.HandleTokenEndpointRequest(context.Background(), s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidRequest)) + s.EqualError(err, fosite.ErrInvalidRequest.Error(), "expected error, because of missing assertion") + s.Equal( + "The assertion request parameter must be set when using grant_type of 
'urn:ietf:params:oauth:grant-type:jwt-bearer'.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestRequestWithMalformedAssertion() { + // arrange + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + s.accessRequest.Form.Add("assertion", "fjigjgfkjgkf") + + // act + err := s.handler.HandleTokenEndpointRequest(context.Background(), s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of malformed assertion") + s.Equal( + "Unable to parse JSON Web Token passed in \"assertion\" request parameter.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestRequestAssertionWithoutIssuer() { + // arrange + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + cl := s.createStandardClaim() + cl.Issuer = "" + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + + // act + err := s.handler.HandleTokenEndpointRequest(context.Background(), s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing issuer claim in assertion") + s.Equal( + "The JWT in \"assertion\" request parameter MUST contain an \"iss\" (issuer) claim.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestRequestAssertionWithoutSubject() { + // arrange + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + cl := s.createStandardClaim() + cl.Subject = "" + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + + // act + err := s.handler.HandleTokenEndpointRequest(context.Background(), s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing subject claim in assertion") + s.Equal( + "The JWT in \"assertion\" request parameter MUST contain a \"sub\" (subject) claim.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestNoMatchingPublicKeyToCheckAssertionSignature() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + cl := s.createStandardClaim() + keyID := "my_key" + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(nil, fosite.ErrNotFound) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing public key to check assertion") + s.Equal( + fmt.Sprintf( + "No public JWK was registered for issuer \"%s\" and subject \"%s\", and public key is required to check signature of JWT in \"assertion\" request parameter.", + cl.Issuer, cl.Subject, + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestNoMatchingPublicKeysToCheckAssertionSignature() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "" // provide no hint of what key was used to sign assertion + cl := 
s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKeys(ctx, cl.Issuer, cl.Subject).Return(nil, fosite.ErrNotFound) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing public keys to check assertion") + s.Equal( + fmt.Sprintf( + "No public JWK was registered for issuer \"%s\" and subject \"%s\", and public key is required to check signature of JWT in \"assertion\" request parameter.", + cl.Issuer, cl.Subject, + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestWrongPublicKeyToCheckAssertionSignature() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "wrong_key" + cl := s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + jwk := s.createRandomTestJWK() + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&jwk, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because wrong public key was registered for assertion") + s.Equal("Unable to verify the integrity of the 'assertion' value.", fosite.ErrorToRFC6749Error(err).HintField) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestWrongPublicKeysToCheckAssertionSignature() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "" // provide no hint of what key was used to sign assertion + cl := s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKeys(ctx, cl.Issuer, cl.Subject).Return(s.createJWS(s.createRandomTestJWK(), s.createRandomTestJWK()), nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because wrong public keys was registered for assertion") + s.Equal( + fmt.Sprintf( + "No public JWK was registered for issuer \"%s\" and subject \"%s\", and public key is required to check signature of JWT in \"assertion\" request parameter.", + cl.Issuer, cl.Subject, + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestNoAudienceInAssertion() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.Audience = []string{} + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, 
fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing audience claim in assertion") + s.Equal( + "The JWT in \"assertion\" request parameter MUST contain an \"aud\" (audience) claim.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestNotValidAudienceInAssertion() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.Audience = jwt.Audience{"leela", "fry"} + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of invalid audience claim in assertion") + s.Equal( + fmt.Sprintf( + `The JWT in "assertion" request parameter MUST contain an "aud" (audience) claim containing a value "%s" that identifies the authorization server as an intended audience.`, + strings.Join(s.handler.Config.GetTokenURLs(ctx), `" or "`), + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestNoExpirationInAssertion() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.Expiry = nil + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing expiration claim in assertion") + s.Equal( + "The JWT in \"assertion\" request parameter MUST contain an \"exp\" (expiration time) claim.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestExpiredAssertion() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.Expiry = jwt.NewNumericDate(time.Now().AddDate(0, -1, 0)) + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because assertion expired") + s.Equal( + "The JWT in \"assertion\" request parameter expired.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionNotAcceptedBeforeDate() { + // arrange + ctx := context.Background() + 
s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + nbf := time.Now().AddDate(0, 1, 0) + cl := s.createStandardClaim() + cl.NotBefore = jwt.NewNumericDate(nbf) + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, nbf claim in assertion indicates, that assertion can not be accepted now") + s.Equal( + fmt.Sprintf( + "The JWT in \"assertion\" request parameter contains an \"nbf\" (not before) claim, that identifies the time '%s' before which the token MUST NOT be accepted.", + nbf.Format(time.RFC3339), + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionWithoutRequiredIssueDate() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.IssuedAt = nil + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerIssuedDateOptional = false + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing iat claim in assertion") + s.Equal( + "The JWT in \"assertion\" request parameter MUST contain an \"iat\" (issued at) claim.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionWithIssueDateFarInPast() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + issuedAt := time.Now().AddDate(0, 0, -31) + cl := s.createStandardClaim() + cl.IssuedAt = jwt.NewNumericDate(issuedAt) + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerIssuedDateOptional = false + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerMaxDuration = time.Hour * 24 * 30 + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because assertion was issued far in the past") + s.Equal( + fmt.Sprintf( + "The JWT in \"assertion\" request parameter contains an \"exp\" (expiration time) claim with value \"%s\" that is unreasonably far in the future, considering token issued at \"%s\".", + cl.Expiry.Time().Format(time.RFC3339), + cl.IssuedAt.Time().Format(time.RFC3339), + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s 
*AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionWithExpirationDateFarInFuture() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.IssuedAt = jwt.NewNumericDate(time.Now().AddDate(0, 0, -15)) + cl.Expiry = jwt.NewNumericDate(time.Now().AddDate(0, 0, 20)) + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerIssuedDateOptional = false + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerMaxDuration = time.Hour * 24 * 30 + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because assertion will expire unreasonably far in the future.") + s.Equal( + fmt.Sprintf( + "The JWT in \"assertion\" request parameter contains an \"exp\" (expiration time) claim with value \"%s\" that is unreasonably far in the future, considering token issued at \"%s\".", + cl.Expiry.Time().Format(time.RFC3339), + cl.IssuedAt.Time().Format(time.RFC3339), + ), + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionWithExpirationDateFarInFutureWithNoIssuerDate() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.IssuedAt = nil + cl.Expiry = jwt.NewNumericDate(time.Now().AddDate(0, 0, 31)) + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerIssuedDateOptional = true + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerMaxDuration = time.Hour * 24 * 30 + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because assertion will expire unreasonably far in the future.") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionWithoutRequiredTokenID() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.ID = "" + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(1) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidGrant)) + s.EqualError(err, fosite.ErrInvalidGrant.Error(), "expected error, because of missing jti claim in assertion") + s.Equal( + "The JWT in \"assertion\" request parameter MUST contain an \"jti\" (JWT ID) claim.", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s 
*AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionAlreadyUsed() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(2) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil).Times(1) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(true, nil).Times(1) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrJTIKnown)) + s.EqualError(err, fosite.ErrJTIKnown.Error(), "expected error, because assertion was used") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestErrWhenCheckingIfJWTWasUsed() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(2) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, fosite.ErrServerError) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrServerError)) + s.EqualError(err, fosite.ErrServerError.Error(), "expected error, because error occurred while trying to check if jwt was used") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestErrWhenMarkingJWTAsUsed() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(4) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope"}, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, nil) + s.mockStore.EXPECT().MarkJWTUsedForTime(ctx, cl.ID, cl.Expiry.Time()).Return(fosite.ErrServerError) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrServerError)) + s.EqualError(err, fosite.ErrServerError.Error(), "expected error, because error occurred while trying to mark jwt as used") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestErrWhileFetchingPublicKeyScope() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(3) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{}, fosite.ErrServerError) + s.mockStore.EXPECT().IsJWTUsed(ctx, 
cl.ID).Return(false, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrServerError)) + s.EqualError(err, fosite.ErrServerError.Error(), "expected error, because error occurred while fetching public key scopes") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionWithInvalidScopes() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.accessRequest.RequestedScope = []string{"some_scope"} + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(3) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope"}, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.True(errors.Is(err, fosite.ErrInvalidScope)) + s.EqualError(err, fosite.ErrInvalidScope.Error(), "expected error, because requested scopes don't match allowed scope for this assertion") + s.Equal( + "The public key registered for issuer \"trusted_issuer\" and subject \"some_ro\" is not allowed to request scope \"some_scope\".", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestValidAssertion() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.accessRequest.RequestedScope = []string{"valid_scope"} + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(4) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope", "openid"}, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, nil) + s.mockStore.EXPECT().MarkJWTUsedForTime(ctx, cl.ID, cl.Expiry.Time()).Return(nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.NoError(err, "no error expected, because assertion must be valid") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionIsValidWhenNoScopesPassed() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(4) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope"}, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, nil) + s.mockStore.EXPECT().MarkJWTUsedForTime(ctx, cl.ID, cl.Expiry.Time()).Return(nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.NoError(err, "no error expected, because assertion 
must be valid") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionIsValidWhenJWTIDIsOptional() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerIDOptional = true + cl.ID = "" + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(2) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope"}, nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.NoError(err, "no error expected, because assertion must be valid, when no jti claim and it is allowed by option") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestAssertionIsValidWhenJWTIssuedDateOptional() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + cl.IssuedAt = nil + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerIssuedDateOptional = true + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(4) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope"}, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, nil) + s.mockStore.EXPECT().MarkJWTUsedForTime(ctx, cl.ID, cl.Expiry.Time()).Return(nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.NoError(err, "no error expected, because assertion must be valid, when no iss claim and it is allowed by option") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) TestRequestIsValidWhenClientAuthOptional() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + keyID := "my_key" + pubKey := s.createJWK(s.privateKey.Public(), keyID) + cl := s.createStandardClaim() + s.accessRequest.Client = &fosite.DefaultClient{} + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerCanSkipClientAuth = true + s.accessRequest.Form.Add("assertion", s.createTestAssertion(cl, keyID)) + s.mockStoreProvider.EXPECT().RFC7523KeyStorage().Return(s.mockStore).Times(4) + s.mockStore.EXPECT().GetPublicKey(ctx, cl.Issuer, cl.Subject, keyID).Return(&pubKey, nil) + s.mockStore.EXPECT().GetPublicKeyScopes(ctx, cl.Issuer, cl.Subject, keyID).Return([]string{"valid_scope"}, nil) + s.mockStore.EXPECT().IsJWTUsed(ctx, cl.ID).Return(false, nil) + s.mockStore.EXPECT().MarkJWTUsedForTime(ctx, cl.ID, cl.Expiry.Time()).Return(nil) + + // act + err := s.handler.HandleTokenEndpointRequest(ctx, s.accessRequest) + + // assert + s.NoError(err, "no error expected, because request must be valid, when no client unauthenticated and it is allowed by option") +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) createTestAssertion(cl jwt.Claims, keyID string) string { + jwk := jose.JSONWebKey{Key: s.privateKey, KeyID: keyID, Algorithm: string(jose.RS256)} + sig, err := jose.NewSigner(jose.SigningKey{Algorithm: 
jose.RS256, Key: jwk}, (&jose.SignerOptions{}).WithType("JWT")) + if err != nil { + s.FailNowf("failed to create test assertion", "failed to create signer: %s", err.Error()) + } + + raw, err := jwt.Signed(sig).Claims(cl).CompactSerialize() + if err != nil { + s.FailNowf("failed to create test assertion", "failed to sign assertion: %s", err.Error()) + } + + return raw +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) createStandardClaim() jwt.Claims { + return jwt.Claims{ + Issuer: "trusted_issuer", + Subject: "some_ro", + Audience: jwt.Audience{"https://www.example.com/token", "leela", "fry"}, + Expiry: jwt.NewNumericDate(time.Now().UTC().AddDate(0, 0, 23)), + NotBefore: nil, + IssuedAt: jwt.NewNumericDate(time.Now().UTC().AddDate(0, 0, -7)), + ID: "my_token", + } +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) createRandomTestJWK() jose.JSONWebKey { + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + s.FailNowf("failed to create random test JWK", "failed to generate RSA private key: %s", err.Error()) + } + + return s.createJWK(privateKey.Public(), strconv.Itoa(mrand.Int())) +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) createJWK(key interface{}, keyID string) jose.JSONWebKey { + return jose.JSONWebKey{ + Key: key, + KeyID: keyID, + Algorithm: string(jose.RS256), + Use: "sig", + } +} + +func (s *AuthorizeJWTGrantRequestHandlerTestSuite) createJWS(keys ...jose.JSONWebKey) *jose.JSONWebKeySet { + return &jose.JSONWebKeySet{Keys: keys} +} + +// Define the suite, and absorb the built-in basic suite +// functionality from testify - including a T() method which +// returns the current testing context. +type AuthorizeJWTGrantPopulateTokenEndpointTestSuite struct { + suite.Suite + + privateKey *rsa.PrivateKey + mockCtrl *gomock.Controller + mockStore *internal.MockRFC7523KeyStorage + mockStoreProvider *internal.MockRFC7523KeyStorageProvider + mockAccessTokenStrategy *internal.MockAccessTokenStrategy + mockAccessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider + mockAccessTokenStore *internal.MockAccessTokenStorage + mockAccessTokenStoreProvider *internal.MockAccessTokenStorageProvider + accessRequest *fosite.AccessRequest + accessResponse *fosite.AccessResponse + handler *rfc7523.Handler +} + +// Setup before each test in the suite. +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) SetupSuite() { + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + s.FailNowf("failed to setup test suite", "failed to generate RSA private key: %s", err.Error()) + } + s.privateKey = privateKey +} + +// Will run after all the tests in the suite have been run. +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) TearDownSuite() { +} + +// Will run after each test in the suite. +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) TearDownTest() { + s.mockCtrl.Finish() +} + +// Setup before each test. 
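+// A fresh gomock controller and fresh mock implementations are created for every test so that expectations from one test cannot leak into the next.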
+func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) SetupTest() { + s.mockCtrl = gomock.NewController(s.T()) + s.mockStore = internal.NewMockRFC7523KeyStorage(s.mockCtrl) + s.mockStoreProvider = internal.NewMockRFC7523KeyStorageProvider(s.mockCtrl) + s.mockAccessTokenStrategy = internal.NewMockAccessTokenStrategy(s.mockCtrl) + s.mockAccessTokenStrategyProvider = internal.NewMockAccessTokenStrategyProvider(s.mockCtrl) + s.mockAccessTokenStore = internal.NewMockAccessTokenStorage(s.mockCtrl) + s.mockAccessTokenStoreProvider = internal.NewMockAccessTokenStorageProvider(s.mockCtrl) + + mockStorage := struct { + *internal.MockAccessTokenStorageProvider + *internal.MockRFC7523KeyStorageProvider + }{ + MockAccessTokenStorageProvider: s.mockAccessTokenStoreProvider, + MockRFC7523KeyStorageProvider: s.mockStoreProvider, + } + + s.accessRequest = fosite.NewAccessRequest(new(fosite.DefaultSession)) + s.accessRequest.Form = url.Values{} + s.accessRequest.Client = &fosite.DefaultClient{GrantTypes: []string{grantTypeJWTBearer}} + s.accessResponse = fosite.NewAccessResponse() + s.handler = &rfc7523.Handler{ + Storage: mockStorage, + Strategy: s.mockAccessTokenStrategyProvider, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + TokenURL: "https://www.example.com/token", + GrantTypeJWTBearerCanSkipClientAuth: false, + GrantTypeJWTBearerIDOptional: false, + GrantTypeJWTBearerIssuedDateOptional: false, + GrantTypeJWTBearerMaxDuration: time.Hour * 24 * 30, + AccessTokenLifespan: time.Hour, + }, + } +} + +// In order for 'go test' to run this suite, we need to create +// a normal test function and pass our suite to suite.Run. +func TestAuthorizeJWTGrantPopulateTokenEndpointTestSuite(t *testing.T) { + suite.Run(t, new(AuthorizeJWTGrantPopulateTokenEndpointTestSuite)) +} + +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) TestRequestWithInvalidGrantType() { + // arrange + s.accessRequest.GrantTypes = []string{"authorization_code"} + + // act + err := s.handler.PopulateTokenEndpointResponse(context.Background(), s.accessRequest, s.accessResponse) + + // assert + s.True(errors.Is(err, fosite.ErrUnknownRequest)) + s.EqualError(err, fosite.ErrUnknownRequest.Error(), "expected error, because of invalid grant type") +} + +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) TestClientIsNotRegisteredForGrantType() { + // arrange + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + s.accessRequest.Client = &fosite.DefaultClient{GrantTypes: []string{"authorization_code"}} + s.handler.Config.(*fosite.Config).GrantTypeJWTBearerCanSkipClientAuth = false + + // act + err := s.handler.PopulateTokenEndpointResponse(context.Background(), s.accessRequest, s.accessResponse) + + // assert + s.True(errors.Is(err, fosite.ErrUnauthorizedClient)) + s.EqualError(err, fosite.ErrUnauthorizedClient.Error(), "expected error, because client is not registered to use this grant type") + s.Equal( + "The OAuth 2.0 Client is not allowed to use authorization grant \"urn:ietf:params:oauth:grant-type:jwt-bearer\".", + fosite.ErrorToRFC6749Error(err).HintField, + ) +} + +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) TestAccessTokenIssuedSuccessfully() { + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + token := "token" + sig := "sig" + s.mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(s.mockAccessTokenStrategy).Times(1) + 
s.mockAccessTokenStrategy.EXPECT().GenerateAccessToken(ctx, s.accessRequest).Return(token, sig, nil) + s.mockAccessTokenStoreProvider.EXPECT().AccessTokenStorage().Return(s.mockAccessTokenStore).Times(1) + s.mockAccessTokenStore.EXPECT().CreateAccessTokenSession(ctx, sig, s.accessRequest.Sanitize([]string{})) + + // act + err := s.handler.PopulateTokenEndpointResponse(context.Background(), s.accessRequest, s.accessResponse) + + // assert + s.NoError(err, "no error expected") + s.Equal(s.accessResponse.AccessToken, token, "access token expected in response") + s.Equal(s.accessResponse.TokenType, "bearer", "token type expected to be \"bearer\"") + s.Equal( + s.accessResponse.GetExtra("expires_in"), int64(s.handler.Config.GetAccessTokenLifespan(context.TODO()).Seconds()), + "token expiration time expected in response to be equal to AccessTokenLifespan setting in handler", + ) + s.Equal(s.accessResponse.GetExtra("scope"), "", "no scopes expected in response") + s.Nil(s.accessResponse.GetExtra("refresh_token"), "refresh token not expected in response") +} + +func (s *AuthorizeJWTGrantPopulateTokenEndpointTestSuite) TestAccessTokenIssuedSuccessfullyWithCustomLifespan() { + s.accessRequest.Client = &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + GrantTypes: []string{grantTypeJWTBearer}, + }, + TokenLifespans: &internal.TestLifespans, + } + // arrange + ctx := context.Background() + s.accessRequest.GrantTypes = []string{grantTypeJWTBearer} + token := "token" + sig := "sig" + s.mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(s.mockAccessTokenStrategy).Times(1) + s.mockAccessTokenStrategy.EXPECT().GenerateAccessToken(ctx, s.accessRequest).Return(token, sig, nil) + s.mockAccessTokenStoreProvider.EXPECT().AccessTokenStorage().Return(s.mockAccessTokenStore).Times(1) + s.mockAccessTokenStore.EXPECT().CreateAccessTokenSession(ctx, sig, s.accessRequest.Sanitize([]string{})) + + // act + err := s.handler.PopulateTokenEndpointResponse(context.Background(), s.accessRequest, s.accessResponse) + + // assert + s.NoError(err, "no error expected") + s.Equal(s.accessResponse.AccessToken, token, "access token expected in response") + s.Equal(s.accessResponse.TokenType, "bearer", "token type expected to be \"bearer\"") + s.Equal( + s.accessResponse.GetExtra("expires_in"), int64(internal.TestLifespans.JwtBearerGrantAccessTokenLifespan.Seconds()), + "token expiration time expected in response to be equal to the pertinent AccessTokenLifespan setting in client", + ) + s.Equal(s.accessResponse.GetExtra("scope"), "", "no scopes expected in response") + s.Nil(s.accessResponse.GetExtra("refresh_token"), "refresh token not expected in response") +} diff --git a/fosite/handler/rfc7523/session.go b/fosite/handler/rfc7523/session.go new file mode 100644 index 00000000000..2e445514cd2 --- /dev/null +++ b/fosite/handler/rfc7523/session.go @@ -0,0 +1,10 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc7523 + +// Session must be implemented by the session if RFC7523 is to be supported. +type Session interface { + // SetSubject sets the session's subject. 
+ SetSubject(subject string) +} diff --git a/fosite/handler/rfc7523/storage.go b/fosite/handler/rfc7523/storage.go new file mode 100644 index 00000000000..378a5513e38 --- /dev/null +++ b/fosite/handler/rfc7523/storage.go @@ -0,0 +1,37 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc7523 + +import ( + "context" + "time" + + "github.com/go-jose/go-jose/v3" +) + +// RFC7523KeyStorage holds the information needed to validate JWT assertions in authorization grants. +type RFC7523KeyStorage interface { + // GetPublicKey returns the public key with the given key ID that was issued by 'issuer' and assigned to 'subject'. The key is used to check the + // signature of the JWT assertion in authorization grants. + GetPublicKey(ctx context.Context, issuer string, subject string, keyId string) (*jose.JSONWebKey, error) + + // GetPublicKeys returns the public key set issued by 'issuer' and assigned to 'subject'. + GetPublicKeys(ctx context.Context, issuer string, subject string) (*jose.JSONWebKeySet, error) + + // GetPublicKeyScopes returns the scopes assigned to the public key with the given key ID, issued by 'issuer' and assigned to 'subject'. + GetPublicKeyScopes(ctx context.Context, issuer string, subject string, keyId string) ([]string, error) + + // IsJWTUsed returns true if the JWT with the given jti has already been used and has not yet expired; such + // assertions must be rejected to prevent replay. + IsJWTUsed(ctx context.Context, jti string) (bool, error) + + // MarkJWTUsedForTime marks JWT as used for a time passed in exp parameter. This helps ensure that JWTs are not + // replayed by maintaining the set of used "jti" values for the length of time for which the JWT would be + // considered valid based on the applicable "exp" instant. (https://tools.ietf.org/html/rfc7523#section-3) + MarkJWTUsedForTime(ctx context.Context, jti string, exp time.Time) error +} + +type RFC7523KeyStorageProvider interface { + RFC7523KeyStorage() RFC7523KeyStorage +} diff --git a/fosite/handler/rfc8628/auth_handler.go b/fosite/handler/rfc8628/auth_handler.go new file mode 100644 index 00000000000..4310e09fc56 --- /dev/null +++ b/fosite/handler/rfc8628/auth_handler.go @@ -0,0 +1,95 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628 + +import ( + "context" + "fmt" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" +) + +// MaxAttempts is the maximum number of attempts made to generate a unique user code. 
+const MaxAttempts = 3 + +// DeviceAuthHandler is a response handler for the Device Authorization Grant as +// defined in https://tools.ietf.org/html/rfc8628#section-3.1 +type DeviceAuthHandler struct { + Storage interface { + DeviceAuthStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + } + Strategy interface { + DeviceRateLimitStrategyProvider + DeviceCodeStrategyProvider + UserCodeStrategyProvider + } + Config interface { + fosite.DeviceProvider + fosite.DeviceAndUserCodeLifespanProvider + } +} + +// HandleDeviceEndpointRequest implements https://tools.ietf.org/html/rfc8628#section-3.1 +func (d *DeviceAuthHandler) HandleDeviceEndpointRequest(ctx context.Context, dar fosite.DeviceRequester, resp fosite.DeviceResponder) error { + var err error + + deviceCode, userCode, err := d.handleDeviceAuthSession(ctx, dar) + if err != nil { + return err + } + + // Populate the response fields + resp.SetDeviceCode(deviceCode) + resp.SetUserCode(userCode) + resp.SetVerificationURI(d.Config.GetDeviceVerificationURL(ctx)) + resp.SetVerificationURIComplete(d.Config.GetDeviceVerificationURL(ctx) + "?user_code=" + userCode) + resp.SetExpiresIn(int64(time.Until(dar.GetSession().GetExpiresAt(fosite.UserCode)).Seconds())) + resp.SetInterval(int(d.Config.GetDeviceAuthTokenPollingInterval(ctx).Seconds())) + return nil +} + +func (d *DeviceAuthHandler) handleDeviceAuthSession(ctx context.Context, dar fosite.DeviceRequester) (string, string, error) { + var userCode, userCodeSignature string + + deviceCode, deviceCodeSignature, err := d.Strategy.DeviceCodeStrategy().GenerateDeviceCode(ctx) + if err != nil { + return "", "", errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + dar.GetSession().SetExpiresAt(fosite.UserCode, time.Now().UTC().Add(d.Config.GetDeviceAndUserCodeLifespan(ctx)).Round(time.Second)) + dar.GetSession().SetExpiresAt(fosite.DeviceCode, time.Now().UTC().Add(d.Config.GetDeviceAndUserCodeLifespan(ctx)).Round(time.Second)) + // Note: retries are needed here to guarantee the uniqueness of user codes. + // The chance of generating a duplicate should be negligible, roughly the same as the + // chance of an attacker guessing a valid code with only a few tries. However, used + // codes may remain in storage for some time before they are cleaned up, which makes + // a collision somewhat more likely. Three attempts should be plenty; if all of them + // are exhausted, the entropy of the user code generator is definitely off. 
+ for i := 0; i < MaxAttempts; i++ { + userCode, userCodeSignature, err = d.Strategy.UserCodeStrategy().GenerateUserCode(ctx) + if err != nil { + return "", "", errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + err = d.Storage.DeviceAuthStorage().CreateDeviceAuthSession(ctx, deviceCodeSignature, userCodeSignature, dar.Sanitize(nil).(fosite.DeviceRequester)) + if err == nil { + break + } + if !errors.Is(err, fosite.ErrExistingUserCodeSignature) { + return "", "", errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + if err != nil { + errMsg := fmt.Sprintf("Exceeded user-code generation max attempts %v: %s", MaxAttempts, err.Error()) + return "", "", errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(errMsg)) + } + return deviceCode, userCode, nil +} diff --git a/fosite/handler/rfc8628/auth_handler_test.go b/fosite/handler/rfc8628/auth_handler_test.go new file mode 100644 index 00000000000..26e847a4c1b --- /dev/null +++ b/fosite/handler/rfc8628/auth_handler_test.go @@ -0,0 +1,304 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628_test + +import ( + "context" + "errors" + "fmt" + "testing" + "time" + + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite/internal" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/storage" +) + +type mockStrategyProvider struct { + strategy *rfc8628.DefaultDeviceStrategy +} + +func (p mockStrategyProvider) DeviceRateLimitStrategy() rfc8628.DeviceRateLimitStrategy { + return p.strategy +} + +func (p mockStrategyProvider) DeviceCodeStrategy() rfc8628.DeviceCodeStrategy { + return p.strategy +} + +func (p mockStrategyProvider) UserCodeStrategy() rfc8628.UserCodeStrategy { + return p.strategy +} + +func Test_HandleDeviceEndpointRequest(t *testing.T) { + store := storage.NewMemoryStore() + handler := rfc8628.DeviceAuthHandler{ + Storage: store, + Strategy: mockStrategyProvider{strategy: &hmacshaStrategyDefault}, + Config: &fosite.Config{ + DeviceAndUserCodeLifespan: time.Minute * 10, + DeviceAuthTokenPollingInterval: time.Second * 5, + DeviceVerificationURL: "www.test.com", + AccessTokenLifespan: time.Hour, + RefreshTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + RefreshTokenScopes: []string{"offline"}, + }, + } + + req := &fosite.DeviceRequest{ + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + Audience: []string{"https://www.ory.sh/api"}, + }, + Session: &fosite.DefaultSession{}, + }, + } + resp := fosite.NewDeviceResponse() + err := handler.HandleDeviceEndpointRequest(context.Background(), req, resp) + + require.NoError(t, err) + assert.NotEmpty(t, resp.GetDeviceCode()) + assert.NotEmpty(t, resp.GetUserCode()) + assert.Equal(t, len(resp.GetUserCode()), 8) + assert.Contains(t, resp.GetDeviceCode(), "ory_dc_") + assert.Contains(t, resp.GetDeviceCode(), ".") + assert.Equal(t, resp.GetVerificationURI(), "www.test.com") +} + +func Test_HandleDeviceEndpointRequestWithRetry(t *testing.T) { + var mockDeviceAuthStorage *internal.MockDeviceAuthStorage + var mockDeviceAuthStorageProvider *internal.MockDeviceAuthStorageProvider + var mockAccessTokenStorageProvider 
*internal.MockAccessTokenStorageProvider + var mockRefreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider + var mockDeviceRateLimitStrategyProvider *internal.MockDeviceRateLimitStrategyProvider + var mockDeviceCodeStrategy *internal.MockDeviceCodeStrategy + var mockDeviceCodeStrategyProvider *internal.MockDeviceCodeStrategyProvider + var mockUserCodeStrategy *internal.MockUserCodeStrategy + var mockUserCodeStrategyProvider *internal.MockUserCodeStrategyProvider + + ctx := context.Background() + req := &fosite.DeviceRequest{ + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + Audience: []string{"https://www.ory.sh/api"}, + }, + Session: &fosite.DefaultSession{}, + }, + } + + testCases := []struct { + description string + setup func() + check func(t *testing.T, resp *fosite.DeviceResponse) + expectError error + }{ + { + description: "should pass when generating a unique user code at the first attempt", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy) + mockDeviceCodeStrategy. + EXPECT(). + GenerateDeviceCode(ctx). + Return("deviceCode", "signature", nil) + mockUserCodeStrategyProvider.EXPECT().UserCodeStrategy().Return(mockUserCodeStrategy) + mockUserCodeStrategy. + EXPECT(). + GenerateUserCode(ctx). + Return("userCode", "signature2", nil). + Times(1) + mockDeviceAuthStorageProvider. + EXPECT(). + DeviceAuthStorage(). + Return(mockDeviceAuthStorage). + Times(1) + mockDeviceAuthStorage. + EXPECT(). + CreateDeviceAuthSession(ctx, "signature", "signature2", gomock.Any()). + Return(nil) + }, + check: func(t *testing.T, resp *fosite.DeviceResponse) { + assert.Equal(t, "userCode", resp.GetUserCode()) + }, + }, + { + description: "should pass when generating a unique user code within allowed attempts", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy) + mockDeviceCodeStrategy. + EXPECT(). + GenerateDeviceCode(ctx). + Return("deviceCode", "signature", nil) + gomock.InOrder( + mockUserCodeStrategyProvider.EXPECT().UserCodeStrategy().Return(mockUserCodeStrategy), + mockUserCodeStrategy. + EXPECT(). + GenerateUserCode(ctx). + Return("duplicatedUserCode", "duplicatedSignature", nil), + mockDeviceAuthStorageProvider. + EXPECT(). + DeviceAuthStorage(). + Return(mockDeviceAuthStorage). + Times(1), + mockDeviceAuthStorage. + EXPECT(). + CreateDeviceAuthSession(ctx, "signature", "duplicatedSignature", gomock.Any()). + Return(fosite.ErrExistingUserCodeSignature), + mockUserCodeStrategyProvider.EXPECT().UserCodeStrategy().Return(mockUserCodeStrategy), + mockUserCodeStrategy. + EXPECT(). + GenerateUserCode(ctx). + Return("uniqueUserCode", "uniqueSignature", nil), + mockDeviceAuthStorageProvider. + EXPECT(). + DeviceAuthStorage(). + Return(mockDeviceAuthStorage). + Times(1), + mockDeviceAuthStorage. + EXPECT(). + CreateDeviceAuthSession(ctx, "signature", "uniqueSignature", gomock.Any()). + Return(nil), + ) + }, + check: func(t *testing.T, resp *fosite.DeviceResponse) { + assert.Equal(t, "uniqueUserCode", resp.GetUserCode()) + }, + }, + { + description: "should fail after maximum retries to generate a unique user code", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy) + mockDeviceCodeStrategy. + EXPECT(). + GenerateDeviceCode(ctx). 
+ Return("deviceCode", "signature", nil) + mockUserCodeStrategyProvider.EXPECT().UserCodeStrategy().Return(mockUserCodeStrategy).Times(rfc8628.MaxAttempts) + mockUserCodeStrategy. + EXPECT(). + GenerateUserCode(ctx). + Return("duplicatedUserCode", "duplicatedSignature", nil). + Times(rfc8628.MaxAttempts) + mockDeviceAuthStorageProvider. + EXPECT(). + DeviceAuthStorage(). + Return(mockDeviceAuthStorage). + Times(rfc8628.MaxAttempts) + mockDeviceAuthStorage. + EXPECT(). + CreateDeviceAuthSession(ctx, "signature", "duplicatedSignature", gomock.Any()). + Return(fosite.ErrExistingUserCodeSignature). + Times(rfc8628.MaxAttempts) + }, + check: func(t *testing.T, resp *fosite.DeviceResponse) { + assert.Empty(t, resp.GetUserCode()) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should fail if another error is returned", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy) + mockDeviceCodeStrategy. + EXPECT(). + GenerateDeviceCode(ctx). + Return("deviceCode", "signature", nil) + mockUserCodeStrategyProvider.EXPECT().UserCodeStrategy().Return(mockUserCodeStrategy) + mockUserCodeStrategy. + EXPECT(). + GenerateUserCode(ctx). + Return("userCode", "userCodeSignature", nil) + mockDeviceAuthStorageProvider. + EXPECT(). + DeviceAuthStorage(). + Return(mockDeviceAuthStorage). + Times(1) + mockDeviceAuthStorage. + EXPECT(). + CreateDeviceAuthSession(ctx, "signature", "userCodeSignature", gomock.Any()). + Return(errors.New("some error")) + }, + check: func(t *testing.T, resp *fosite.DeviceResponse) { + assert.Empty(t, resp.GetUserCode()) + }, + expectError: fosite.ErrServerError, + }, + } + + for _, testCase := range testCases { + t.Run(fmt.Sprintf("scenario=%s", testCase.description), func(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + mockDeviceAuthStorage = internal.NewMockDeviceAuthStorage(ctrl) + mockDeviceAuthStorageProvider = internal.NewMockDeviceAuthStorageProvider(ctrl) + mockAccessTokenStorageProvider = internal.NewMockAccessTokenStorageProvider(ctrl) + mockRefreshTokenStorageProvider = internal.NewMockRefreshTokenStorageProvider(ctrl) + mockDeviceRateLimitStrategyProvider = internal.NewMockDeviceRateLimitStrategyProvider(ctrl) + mockDeviceCodeStrategy = internal.NewMockDeviceCodeStrategy(ctrl) + mockDeviceCodeStrategyProvider = internal.NewMockDeviceCodeStrategyProvider(ctrl) + mockUserCodeStrategy = internal.NewMockUserCodeStrategy(ctrl) + mockUserCodeStrategyProvider = internal.NewMockUserCodeStrategyProvider(ctrl) + + mockStorage := struct { + *internal.MockDeviceAuthStorageProvider + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + }{ + MockDeviceAuthStorageProvider: mockDeviceAuthStorageProvider, + MockAccessTokenStorageProvider: mockAccessTokenStorageProvider, + MockRefreshTokenStorageProvider: mockRefreshTokenStorageProvider, + } + + mockStrategy := struct { + *internal.MockDeviceRateLimitStrategyProvider + *internal.MockDeviceCodeStrategyProvider + *internal.MockUserCodeStrategyProvider + }{ + MockDeviceRateLimitStrategyProvider: mockDeviceRateLimitStrategyProvider, + MockDeviceCodeStrategyProvider: mockDeviceCodeStrategyProvider, + MockUserCodeStrategyProvider: mockUserCodeStrategyProvider, + } + + h := rfc8628.DeviceAuthHandler{ + Storage: mockStorage, + Strategy: mockStrategy, + Config: &fosite.Config{ + DeviceAndUserCodeLifespan: time.Minute * 10, + DeviceAuthTokenPollingInterval: time.Second * 5, + DeviceVerificationURL: "www.test.com", 
+ AccessTokenLifespan: time.Hour, + RefreshTokenLifespan: time.Hour, + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + RefreshTokenScopes: []string{"offline"}, + }, + } + + if testCase.setup != nil { + testCase.setup() + } + + resp := fosite.NewDeviceResponse() + err := h.HandleDeviceEndpointRequest(ctx, req, resp) + + if testCase.expectError != nil { + require.EqualError(t, err, testCase.expectError.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + } + + if testCase.check != nil { + testCase.check(t, resp) + } + }) + } +} diff --git a/fosite/handler/rfc8628/storage.go b/fosite/handler/rfc8628/storage.go new file mode 100644 index 00000000000..865f5889d35 --- /dev/null +++ b/fosite/handler/rfc8628/storage.go @@ -0,0 +1,32 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628 + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +// DeviceAuthStorage handles the device auth session storage +type DeviceAuthStorage interface { + // CreateDeviceAuthSession stores the device auth request session. + CreateDeviceAuthSession(ctx context.Context, deviceCodeSignature, userCodeSignature string, request fosite.DeviceRequester) (err error) + + // GetDeviceCodeSession hydrates the session based on the given device code and returns the device request. + // If the device code has been invalidated with `InvalidateDeviceCodeSession`, this + // method should return the ErrInvalidatedDeviceCode error. + // + // Make sure to also return the fosite.Requester value when returning the fosite.ErrInvalidatedDeviceCode error! + GetDeviceCodeSession(ctx context.Context, signature string, session fosite.Session) (request fosite.DeviceRequester, err error) + + // InvalidateDeviceCodeSession is called when a device code is being used. The state of the device + // code should be set to invalid and consecutive requests to GetDeviceCodeSession should return the + // ErrInvalidatedDeviceCode error. 
+ InvalidateDeviceCodeSession(ctx context.Context, signature string) (err error) +} + +type DeviceAuthStorageProvider interface { + DeviceAuthStorage() DeviceAuthStorage +} diff --git a/fosite/handler/rfc8628/strategy.go b/fosite/handler/rfc8628/strategy.go new file mode 100644 index 00000000000..b224b452b90 --- /dev/null +++ b/fosite/handler/rfc8628/strategy.go @@ -0,0 +1,52 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628 + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" +) + +// DeviceRateLimitStrategy handles the rate limiting strategy +type DeviceRateLimitStrategy interface { + // ShouldRateLimit checks whether the token request should be rate-limited + ShouldRateLimit(ctx context.Context, code string) (bool, error) +} + +type DeviceRateLimitStrategyProvider interface { + DeviceRateLimitStrategy() DeviceRateLimitStrategy +} + +// DeviceCodeStrategy handles the device_code strategy +type DeviceCodeStrategy interface { + // DeviceCodeSignature calculates the signature of a device_code + DeviceCodeSignature(ctx context.Context, code string) (signature string, err error) + + // GenerateDeviceCode generates a new device code and signature + GenerateDeviceCode(ctx context.Context) (code string, signature string, err error) + + // ValidateDeviceCode validates the device_code + ValidateDeviceCode(ctx context.Context, r fosite.DeviceRequester, code string) (err error) +} + +type DeviceCodeStrategyProvider interface { + DeviceCodeStrategy() DeviceCodeStrategy +} + +// UserCodeStrategy handles the user_code strategy +type UserCodeStrategy interface { + // UserCodeSignature calculates the signature of a user_code + UserCodeSignature(ctx context.Context, code string) (signature string, err error) + + // GenerateUserCode generates a new user code and signature + GenerateUserCode(ctx context.Context) (code string, signature string, err error) + + // ValidateUserCode validates the user_code + ValidateUserCode(ctx context.Context, r fosite.DeviceRequester, code string) (err error) +} + +type UserCodeStrategyProvider interface { + UserCodeStrategy() UserCodeStrategy +} diff --git a/fosite/handler/rfc8628/strategy_hmacsha.go b/fosite/handler/rfc8628/strategy_hmacsha.go new file mode 100644 index 00000000000..4969b300c8d --- /dev/null +++ b/fosite/handler/rfc8628/strategy_hmacsha.go @@ -0,0 +1,101 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628 + +import ( + "context" + "strings" + "time" + + "github.com/ory/x/errorsx" + + "github.com/ory/x/randx" + + "github.com/ory/hydra/v2/fosite" + enigma "github.com/ory/hydra/v2/fosite/token/hmac" +) + +var ( + _ DeviceRateLimitStrategy = (*DefaultDeviceStrategy)(nil) + _ DeviceCodeStrategy = (*DefaultDeviceStrategy)(nil) + _ UserCodeStrategy = (*DefaultDeviceStrategy)(nil) +) + +// DefaultDeviceStrategy implements the default device strategy +type DefaultDeviceStrategy struct { + Enigma *enigma.HMACStrategy + Config interface { + fosite.DeviceProvider + fosite.DeviceAndUserCodeLifespanProvider + fosite.UserCodeProvider + } +} + +// GenerateUserCode generates a user_code +func (h *DefaultDeviceStrategy) GenerateUserCode(ctx context.Context) (string, string, error) { + seq, err := randx.RuneSequence(h.Config.GetUserCodeLength(ctx), h.Config.GetUserCodeSymbols(ctx)) + if err != nil { + return "", "", err + } + userCode := string(seq) + signUserCode, signErr := h.UserCodeSignature(ctx, userCode) + if 
signErr != nil { + return "", "", signErr + } + return userCode, signUserCode, nil + } + +// UserCodeSignature generates a user_code signature +func (h *DefaultDeviceStrategy) UserCodeSignature(ctx context.Context, token string) (string, error) { + return h.Enigma.GenerateHMACForString(ctx, token) +} + +// ValidateUserCode validates a user_code +// This function only checks if the device request session is active as we cannot verify the authenticity of the token. +// Unlike other tokens, the user_code is of limited length, which means that we cannot include the HMAC signature in the token itself. +// The only way to check the validity of the user_code is to check if its signature is stored in storage. +func (h *DefaultDeviceStrategy) ValidateUserCode(ctx context.Context, r fosite.DeviceRequester, code string) error { + exp := r.GetSession().GetExpiresAt(fosite.UserCode) + if exp.IsZero() && r.GetRequestedAt().Add(h.Config.GetDeviceAndUserCodeLifespan(ctx)).Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrDeviceExpiredToken.WithHintf("User code expired at '%s'.", r.GetRequestedAt().Add(h.Config.GetDeviceAndUserCodeLifespan(ctx)))) + } + if !exp.IsZero() && exp.Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrDeviceExpiredToken.WithHintf("User code expired at '%s'.", exp)) + } + return nil +} + +// GenerateDeviceCode generates a device_code +func (h *DefaultDeviceStrategy) GenerateDeviceCode(ctx context.Context) (string, string, error) { + token, sig, err := h.Enigma.Generate(ctx) + if err != nil { + return "", "", err + } + + return "ory_dc_" + token, sig, nil +} + +// DeviceCodeSignature generates a device_code signature +func (h *DefaultDeviceStrategy) DeviceCodeSignature(ctx context.Context, token string) (string, error) { + return h.Enigma.Signature(token), nil +} + +// ValidateDeviceCode validates a device_code +func (h *DefaultDeviceStrategy) ValidateDeviceCode(ctx context.Context, r fosite.DeviceRequester, code string) error { + exp := r.GetSession().GetExpiresAt(fosite.DeviceCode) + if exp.IsZero() && r.GetRequestedAt().Add(h.Config.GetDeviceAndUserCodeLifespan(ctx)).Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrDeviceExpiredToken.WithHintf("Device code expired at '%s'.", r.GetRequestedAt().Add(h.Config.GetDeviceAndUserCodeLifespan(ctx)))) + } + + if !exp.IsZero() && exp.Before(time.Now().UTC()) { + return errorsx.WithStack(fosite.ErrDeviceExpiredToken.WithHintf("Device code expired at '%s'.", exp)) + } + + return h.Enigma.Validate(ctx, strings.TrimPrefix(code, "ory_dc_")) +} + +// ShouldRateLimit is used to decide whether a request should be rate-limited +func (h *DefaultDeviceStrategy) ShouldRateLimit(context context.Context, code string) (bool, error) { + return false, nil +} diff --git a/fosite/handler/rfc8628/strategy_hmacsha_test.go b/fosite/handler/rfc8628/strategy_hmacsha_test.go new file mode 100644 index 00000000000..b420ce8ec2c --- /dev/null +++ b/fosite/handler/rfc8628/strategy_hmacsha_test.go @@ -0,0 +1,112 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628_test + +import ( + "context" + "fmt" + "regexp" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/token/hmac" +) + +var hmacshaStrategyDefault = rfc8628.DefaultDeviceStrategy{ + Enigma: &hmac.HMACStrategy{Config: 
&fosite.Config{GlobalSecret: []byte("foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobar")}}, + Config: &fosite.Config{ + AccessTokenLifespan: time.Minute * 24, + AuthorizeCodeLifespan: time.Minute * 24, + DeviceAndUserCodeLifespan: time.Minute * 24, + DeviceAuthTokenPollingInterval: 400 * time.Millisecond, + }, +} + +var hmacValidCase = fosite.DeviceRequest{ + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + Secret: []byte("foobarfoobarfoobarfoobar"), + }, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.UserCode: time.Now().UTC().Add(time.Hour), + fosite.DeviceCode: time.Now().UTC().Add(time.Hour), + }, + }, + }, +} + +func TestHMACUserCode(t *testing.T) { + for k, c := range []struct { + r fosite.DeviceRequester + pass bool + }{ + { + r: &hmacValidCase, + pass: true, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + userCode, signature, err := hmacshaStrategyDefault.GenerateUserCode(context.TODO()) + assert.NoError(t, err) + regex := regexp.MustCompile("[ABCDEFGHIJKLMNOPQRSTUVWXYZ]{8}") + assert.Equal(t, len(regex.FindString(userCode)), len(userCode)) + + err = hmacshaStrategyDefault.ValidateUserCode(context.TODO(), c.r, userCode) + if c.pass { + assert.NoError(t, err) + validate, _ := hmacshaStrategyDefault.Enigma.GenerateHMACForString(context.TODO(), userCode) + assert.Equal(t, signature, validate) + testSign, err := hmacshaStrategyDefault.UserCodeSignature(context.TODO(), userCode) + assert.NoError(t, err) + assert.Equal(t, testSign, signature) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestHMACDeviceCode(t *testing.T) { + for k, c := range []struct { + r fosite.DeviceRequester + pass bool + }{ + { + r: &hmacValidCase, + pass: true, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + token, signature, err := hmacshaStrategyDefault.GenerateDeviceCode(context.TODO()) + assert.NoError(t, err) + assert.Equal(t, strings.Split(token, ".")[1], signature) + assert.Contains(t, token, "ory_dc_") + + for k, token := range []string{ + token, + strings.TrimPrefix(token, "ory_dc_"), + } { + t.Run(fmt.Sprintf("prefix=%v", k == 0), func(t *testing.T) { + err = hmacshaStrategyDefault.ValidateDeviceCode(context.TODO(), c.r, token) + if c.pass { + assert.NoError(t, err) + validate := hmacshaStrategyDefault.Enigma.Signature(token) + assert.Equal(t, signature, validate) + testSign, err := hmacshaStrategyDefault.DeviceCodeSignature(context.TODO(), token) + assert.NoError(t, err) + assert.Equal(t, testSign, signature) + } else { + assert.Error(t, err) + } + }) + } + }) + } +} diff --git a/fosite/handler/rfc8628/token_handler.go b/fosite/handler/rfc8628/token_handler.go new file mode 100644 index 00000000000..c4639ca2b12 --- /dev/null +++ b/fosite/handler/rfc8628/token_handler.go @@ -0,0 +1,309 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628 + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/x/errorsx" + + "github.com/ory/hydra/v2/fosite" +) + +var _ fosite.TokenEndpointHandler = (*DeviceCodeTokenEndpointHandler)(nil) + +// DeviceCodeTokenEndpointHandler is a token response handler for +// - the Authorize code grant using the explicit grant type as defined in https://tools.ietf.org/html/rfc6749#section-4.1 +// - the Device Authorization Grant as defined in https://www.rfc-editor.org/rfc/rfc8628 +type 
DeviceCodeTokenEndpointHandler struct { + Storage interface { + DeviceAuthStorageProvider + oauth2.AccessTokenStorageProvider + oauth2.RefreshTokenStorageProvider + oauth2.TokenRevocationStorageProvider + } + Strategy interface { + DeviceRateLimitStrategyProvider + DeviceCodeStrategyProvider + UserCodeStrategyProvider + oauth2.AccessTokenStrategyProvider + oauth2.RefreshTokenStrategyProvider + } + Config interface { + fosite.AccessTokenLifespanProvider + fosite.RefreshTokenLifespanProvider + fosite.RefreshTokenScopesProvider + } +} + +func (c *DeviceCodeTokenEndpointHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + return false +} + +func (c *DeviceCodeTokenEndpointHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + return requester.GetGrantTypes().ExactOne(string(fosite.GrantTypeDeviceCode)) +} + +func (v DeviceCodeTokenEndpointHandler) CanHandleRequest(requester fosite.AccessRequester) bool { + return requester.GetGrantTypes().ExactOne(string(fosite.GrantTypeDeviceCode)) +} + +func (c *DeviceCodeTokenEndpointHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + var code, signature string + var err error + if code, signature, err = c.deviceCode(ctx, requester); err != nil { + return err + } + + var ar fosite.DeviceRequester + if ar, err = c.session(ctx, requester, signature); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if err = c.Strategy.DeviceCodeStrategy().ValidateDeviceCode(ctx, ar, code); err != nil { + return errorsx.WithStack(err) + } + + for _, scope := range ar.GetGrantedScopes() { + requester.GrantScope(scope) + } + + for _, audience := range ar.GetGrantedAudience() { + requester.GrantAudience(audience) + } + + var accessToken, accessTokenSignature string + accessToken, accessTokenSignature, err = c.Strategy.AccessTokenStrategy().GenerateAccessToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + var refreshToken, refreshTokenSignature string + if c.canIssueRefreshToken(ctx, requester) { + refreshToken, refreshTokenSignature, err = c.Strategy.RefreshTokenStrategy().GenerateRefreshToken(ctx, requester) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + ctx, err = fosite.MaybeBeginTx(ctx, c.Storage) + if err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + defer func() { + if err != nil { + if rollBackTxnErr := fosite.MaybeRollbackTx(ctx, c.Storage); rollBackTxnErr != nil { + err = errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebugf("error: %s; rollback error: %s", err, rollBackTxnErr)) + } + } + }() + + if err = c.Storage.DeviceAuthStorage().InvalidateDeviceCodeSession(ctx, signature); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if err = c.Storage.AccessTokenStorage().CreateAccessTokenSession(ctx, accessTokenSignature, requester.Sanitize([]string{})); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + if refreshTokenSignature != "" { + if err = 
c.Storage.RefreshTokenStorage().CreateRefreshTokenSession(ctx, refreshTokenSignature, accessTokenSignature, requester.Sanitize([]string{})); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + } + + lifeSpan := fosite.GetEffectiveLifespan(requester.GetClient(), c.getGrantType(requester), fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + responder.SetAccessToken(accessToken) + responder.SetTokenType("bearer") + responder.SetExpiresIn(getExpiresIn(requester, fosite.AccessToken, lifeSpan, time.Now().UTC())) + responder.SetScopes(requester.GetGrantedScopes()) + if refreshToken != "" { + responder.SetExtra("refresh_token", refreshToken) + } + + if err = fosite.MaybeCommitTx(ctx, c.Storage); err != nil { + return errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + return nil +} + +func (c *DeviceCodeTokenEndpointHandler) HandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, requester) { + return errorsx.WithStack(errorsx.WithStack(fosite.ErrUnknownRequest)) + } + + var err error + if err = c.validateGrantTypes(requester); err != nil { + return err + } + + var code, signature string + if code, signature, err = c.deviceCode(ctx, requester); err != nil { + return err + } + + if err = c.validateCode(ctx, requester, code); err != nil { + return errorsx.WithStack(err) + } + + var ar fosite.DeviceRequester + if ar, err = c.session(ctx, requester, signature); err != nil { + if ar != nil && (errors.Is(err, fosite.ErrInvalidatedAuthorizeCode) || errors.Is(err, fosite.ErrInvalidatedDeviceCode)) { + return c.revokeTokens(ctx, requester.GetID()) + } + + return err + } + + if err = c.Strategy.DeviceCodeStrategy().ValidateDeviceCode(ctx, ar, code); err != nil { + return errorsx.WithStack(err) + } + + // Override scopes + requester.SetRequestedScopes(ar.GetRequestedScopes()) + + // Override audiences + requester.SetRequestedAudience(ar.GetRequestedAudience()) + + // The authorization server MUST ensure that + // the authorization code was issued to the authenticated confidential client, + // or if the client is public, ensure that the code was issued to "client_id" in the request + if ar.GetClient().GetID() != requester.GetClient().GetID() { + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint("The OAuth 2.0 Client ID from this request does not match the one from the authorize request.")) + } + + // Checking of POST client_id skipped, because + // if the client type is confidential or the client was issued client credentials (or assigned other authentication requirements), + // the client MUST authenticate with the authorization server as described in Section 3.2.1. 
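+ // Adopt the session and request ID from the stored device authorization request so the tokens issued here are bound to the original device grant.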
+ requester.SetSession(ar.GetSession()) + requester.SetID(ar.GetID()) + + atLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), c.getGrantType(requester), fosite.AccessToken, c.Config.GetAccessTokenLifespan(ctx)) + requester.GetSession().SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(atLifespan).Round(time.Second)) + + rtLifespan := fosite.GetEffectiveLifespan(requester.GetClient(), c.getGrantType(requester), fosite.RefreshToken, c.Config.GetRefreshTokenLifespan(ctx)) + if rtLifespan > -1 { + requester.GetSession().SetExpiresAt(fosite.RefreshToken, time.Now().UTC().Add(rtLifespan).Round(time.Second)) + } + + return nil +} + +func (c *DeviceCodeTokenEndpointHandler) canIssueRefreshToken(ctx context.Context, requester fosite.Requester) bool { + scopes := c.Config.GetRefreshTokenScopes(ctx) + + // Require one of the refresh token scopes, if set. + if len(scopes) > 0 && !requester.GetGrantedScopes().HasOneOf(scopes...) { + return false + } + + // Do not issue a refresh token to clients that cannot use the refresh token grant type. + if !requester.GetClient().GetGrantTypes().Has("refresh_token") { + return false + } + + return true +} + +func (c *DeviceCodeTokenEndpointHandler) revokeTokens(ctx context.Context, reqId string) error { + hint := "The authorization code has already been used." + var debug strings.Builder + + revokeAndAppendErr := func(tokenType string, revokeFunc func(context.Context, string) error) { + if err := revokeFunc(ctx, reqId); err != nil { + hint += fmt.Sprintf(" Additionally, an error occurred during processing the %s token revocation.", tokenType) + debug.WriteString(fmt.Sprintf("Revocation of %s token lead to error %s.", tokenType, err.Error())) + } + } + + revokeAndAppendErr("access", c.Storage.TokenRevocationStorage().RevokeAccessToken) + revokeAndAppendErr("refresh", c.Storage.TokenRevocationStorage().RevokeRefreshToken) + + return errorsx.WithStack(fosite.ErrInvalidGrant.WithHint(hint).WithDebug(debug.String())) +} + +func (c DeviceCodeTokenEndpointHandler) deviceCode(ctx context.Context, requester fosite.AccessRequester) (code string, signature string, err error) { + code = requester.GetRequestForm().Get("device_code") + + signature, err = c.Strategy.DeviceCodeStrategy().DeviceCodeSignature(ctx, code) + if err != nil { + return "", "", errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + return +} + +func (c DeviceCodeTokenEndpointHandler) validateCode(ctx context.Context, requester fosite.Requester, code string) error { + shouldRateLimit, err := c.Strategy.DeviceRateLimitStrategy().ShouldRateLimit(ctx, code) + if err != nil { + return err + } + if shouldRateLimit { + return errorsx.WithStack(fosite.ErrSlowDown) + } + return nil +} + +func (s DeviceCodeTokenEndpointHandler) session(ctx context.Context, requester fosite.AccessRequester, codeSignature string) (fosite.DeviceRequester, error) { + req, err := s.Storage.DeviceAuthStorage().GetDeviceCodeSession(ctx, codeSignature, requester.GetSession()) + + if err != nil && errors.Is(err, fosite.ErrInvalidatedDeviceCode) { + if req != nil { + return req, err + } + + return req, fosite.ErrServerError. + WithHint("Misconfigured code lead to an error that prohibited the OAuth 2.0 Framework from processing this request."). 
+ WithDebug("\"GetDeviceCodeSession\" must return a value for \"fosite.Requester\" when returning \"ErrInvalidatedDeviceCode\".") + } + + if err != nil && errors.Is(err, fosite.ErrNotFound) { + return nil, errorsx.WithStack(fosite.ErrInvalidGrant.WithWrap(err).WithDebug(err.Error())) + } + + if err != nil { + return nil, errorsx.WithStack(fosite.ErrServerError.WithWrap(err).WithDebug(err.Error())) + } + + state := req.GetUserCodeState() + + if state == fosite.UserCodeUnused { + return nil, fosite.ErrAuthorizationPending + } + if state == fosite.UserCodeRejected { + return nil, fosite.ErrAccessDenied + } + + return req, err +} + +func (v DeviceCodeTokenEndpointHandler) validateGrantTypes(requester fosite.AccessRequester) error { + if !requester.GetClient().GetGrantTypes().Has(string(fosite.GrantTypeDeviceCode)) { + return errorsx.WithStack(fosite.ErrUnauthorizedClient.WithHint("The OAuth 2.0 Client is not allowed to use authorization grant \"urn:ietf:params:oauth:grant-type:device_code\".")) + } + + return nil +} + +func (v DeviceCodeTokenEndpointHandler) getGrantType(requester fosite.AccessRequester) fosite.GrantType { + return fosite.GrantTypeDeviceCode +} + +func getExpiresIn(r fosite.Requester, key fosite.TokenType, defaultLifespan time.Duration, now time.Time) time.Duration { + if r.GetSession().GetExpiresAt(key).IsZero() { + return defaultLifespan + } + return time.Duration(r.GetSession().GetExpiresAt(key).UnixNano() - now.UnixNano()) +} diff --git a/fosite/handler/rfc8628/token_handler_test.go b/fosite/handler/rfc8628/token_handler_test.go new file mode 100644 index 00000000000..338c469dc13 --- /dev/null +++ b/fosite/handler/rfc8628/token_handler_test.go @@ -0,0 +1,1046 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package rfc8628_test + +import ( + "context" + "errors" + "fmt" + "net/url" + "testing" + "time" + + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite/internal" + + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/token/hmac" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/storage" +) + +var hmacshaStrategyOAuth = oauth2.NewHMACSHAStrategy( + &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobar")}}, + &fosite.Config{ + AccessTokenLifespan: time.Hour * 24, + AuthorizeCodeLifespan: time.Hour * 24, + }, +) + +var RFC8628HMACSHAStrategy = rfc8628.DefaultDeviceStrategy{ + Enigma: &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobar")}}, + Config: &fosite.Config{ + DeviceAndUserCodeLifespan: time.Minute * 30, + }, +} + +type mockDeviceCodeStrategyProvider struct { + deviceRateLimitStrategy rfc8628.DeviceRateLimitStrategy + deviceCodeStrategy rfc8628.DeviceCodeStrategy + userCodeStrategy rfc8628.UserCodeStrategy + coreStrategy oauth2.CoreStrategy +} + +func (t *mockDeviceCodeStrategyProvider) DeviceRateLimitStrategy() rfc8628.DeviceRateLimitStrategy { + return t.deviceRateLimitStrategy +} + +func (t *mockDeviceCodeStrategyProvider) DeviceCodeStrategy() rfc8628.DeviceCodeStrategy { + return t.deviceCodeStrategy +} + +func (t *mockDeviceCodeStrategyProvider) UserCodeStrategy() rfc8628.UserCodeStrategy { + return 
t.userCodeStrategy +} + +func (t *mockDeviceCodeStrategyProvider) AccessTokenStrategy() oauth2.AccessTokenStrategy { + return t.coreStrategy +} + +func (t *mockDeviceCodeStrategyProvider) RefreshTokenStrategy() oauth2.RefreshTokenStrategy { + return t.coreStrategy +} + +func TestDeviceUserCode_HandleTokenEndpointRequest(t *testing.T) { + for k, strategy := range map[string]struct { + oauth2.CoreStrategy + rfc8628.DefaultDeviceStrategy + }{ + "hmac": {hmacshaStrategyOAuth, RFC8628HMACSHAStrategy}, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + + h := rfc8628.DeviceCodeTokenEndpointHandler{ + Strategy: &mockDeviceCodeStrategyProvider{ + deviceRateLimitStrategy: &strategy.DefaultDeviceStrategy, + deviceCodeStrategy: &strategy.DefaultDeviceStrategy, + userCodeStrategy: &strategy.DefaultDeviceStrategy, + coreStrategy: strategy.CoreStrategy, + }, + Storage: store, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + DeviceAndUserCodeLifespan: time.Minute, + }, + } + + testCases := []struct { + description string + areq *fosite.AccessRequest + authreq *fosite.DeviceRequest + setup func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest) + check func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest) + expectErr error + }{ + { + description: "should fail because not responsible for handling the request", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + expectErr: fosite.ErrUnknownRequest, + }, + { + description: "should fail because client is not granted the correct grant type", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: []string{""}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + expectErr: fosite.ErrUnauthorizedClient, + }, + { + description: "should fail because device code could not be retrieved", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, _ *fosite.DeviceRequest) { + deviceCode, _, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + areq.Form = url.Values{"device_code": {deviceCode}} + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should fail because user has not completed the browser flow", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeUnused, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ 
+ ID: "foo", + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + RequestedScope: fosite.Arguments{"foo"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.DeviceCode: time.Now().Add(-time.Hour).UTC(), + }, + }, + RequestedAt: time.Now().Add(-2 * time.Hour).UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest) { + code, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + areq.Form.Add("device_code", code) + + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + expectErr: fosite.ErrAuthorizationPending, + }, + { + description: "should fail because device code has expired", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + GrantedScope: fosite.Arguments{"foo", "offline"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}}, + RequestedScope: fosite.Arguments{"foo"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.DeviceCode: time.Now().Add(-time.Hour).UTC(), + }, + }, + RequestedAt: time.Now().Add(-2 * time.Hour).UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest) { + code, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + areq.Form.Add("device_code", code) + + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + expectErr: fosite.ErrDeviceExpiredToken, + }, + { + description: "should fail because client mismatch", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "bar"}, + RequestedScope: fosite.Arguments{"foo"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{ + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.DeviceCode: time.Now().Add(time.Hour).UTC(), + }, + }, + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest) { + token, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + areq.Form = url.Values{"device_code": {token}} + + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, 
userCodeSignature, authreq)) + }, + expectErr: fosite.ErrInvalidGrant, + }, + { + description: "should pass", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + RequestedScope: fosite.Arguments{"foo"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest) { + token, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + + areq.Form = url.Values{"device_code": {token}} + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + }, + } + + for i, testCase := range testCases { + t.Run(fmt.Sprintf("case=%d/description=%s", i, testCase.description), func(t *testing.T) { + if testCase.setup != nil { + testCase.setup(t, testCase.areq, testCase.authreq) + } + + t.Logf("Processing %+v", testCase.areq.Client) + + err := h.HandleTokenEndpointRequest(context.Background(), testCase.areq) + if testCase.expectErr != nil { + require.EqualError(t, err, testCase.expectErr.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + if testCase.check != nil { + testCase.check(t, testCase.areq, testCase.authreq) + } + } + }) + } + }) + } +} + +func TestDeviceUserCode_HandleTokenEndpointRequest_RateLimiting(t *testing.T) { + for k, strategy := range map[string]struct { + oauth2.CoreStrategy + rfc8628.DefaultDeviceStrategy + }{ + "hmac": {hmacshaStrategyOAuth, RFC8628HMACSHAStrategy}, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + + h := rfc8628.DeviceCodeTokenEndpointHandler{ + Strategy: &mockDeviceCodeStrategyProvider{ + deviceRateLimitStrategy: &strategy.DefaultDeviceStrategy, + deviceCodeStrategy: &strategy.DefaultDeviceStrategy, + userCodeStrategy: &strategy.DefaultDeviceStrategy, + coreStrategy: strategy.CoreStrategy, + }, + Storage: store, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + DeviceAndUserCodeLifespan: time.Minute, + }, + } + areq := &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + ID: "foo", + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + } + authreq := &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}}, + RequestedScope: fosite.Arguments{"foo"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + } + + token, signature, err := 
strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + + areq.Form = url.Values{"device_code": {token}} + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + err = h.HandleTokenEndpointRequest(context.Background(), areq) + require.NoError(t, err, "%+v", err) + err = h.HandleTokenEndpointRequest(context.Background(), areq) + require.Error(t, fosite.ErrSlowDown, err) + time.Sleep(10 * time.Second) + err = h.HandleTokenEndpointRequest(context.Background(), areq) + require.NoError(t, err, "%+v", err) + }) + } +} + +func TestDeviceUserCode_PopulateTokenEndpointResponse(t *testing.T) { + for k, strategy := range map[string]struct { + oauth2.CoreStrategy + rfc8628.DefaultDeviceStrategy + }{ + "hmac": {hmacshaStrategyOAuth, RFC8628HMACSHAStrategy}, + } { + t.Run("strategy="+k, func(t *testing.T) { + store := storage.NewMemoryStore() + + testCases := []struct { + description string + areq *fosite.AccessRequest + authreq *fosite.DeviceRequest + setup func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest, config *fosite.Config) + check func(t *testing.T, aresp *fosite.AccessResponse) + expectErr error + }{ + { + description: "should fail because not responsible for handling the request", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"authorization_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + expectErr: fosite.ErrUnknownRequest, + }, + { + description: "should fail because device code cannot be retrieved", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, _ *fosite.DeviceRequest, _ *fosite.Config) { + code, _, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + areq.Form.Set("device_code", code) + }, + expectErr: fosite.ErrServerError, + }, + { + description: "should pass with offline scope and refresh token grant type", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code", "refresh_token"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}}, + RequestedScope: fosite.Arguments{"foo", "bar", "offline"}, + GrantedScope: fosite.Arguments{"foo", "offline"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest, _ *fosite.Config) { + code, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := 
strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + areq.Form.Add("device_code", code) + + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + assert.Equal(t, "bearer", aresp.TokenType) + assert.NotEmpty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Equal(t, "foo offline", aresp.GetExtra("scope")) + }, + }, + { + description: "should pass with refresh token grant type", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code", "refresh_token"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}}, + RequestedScope: fosite.Arguments{"foo", "bar"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest, config *fosite.Config) { + config.RefreshTokenScopes = []string{} + code, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + areq.Form.Add("device_code", code) + + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + assert.Equal(t, "bearer", aresp.TokenType) + assert.NotEmpty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Equal(t, "foo", aresp.GetExtra("scope")) + }, + }, + { + description: "pass and response should not have refresh token", + areq: &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Form: url.Values{}, + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + authreq: &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}}, + RequestedScope: fosite.Arguments{"foo", "bar"}, + GrantedScope: fosite.Arguments{"foo"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + }, + setup: func(t *testing.T, areq *fosite.AccessRequest, authreq *fosite.DeviceRequest, config *fosite.Config) { + code, signature, err := strategy.GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := strategy.GenerateUserCode(context.TODO()) + require.NoError(t, err) + areq.Form.Add("device_code", code) + + require.NoError(t, store.CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + check: func(t *testing.T, aresp *fosite.AccessResponse) { + assert.NotEmpty(t, aresp.AccessToken) + 
assert.Equal(t, "bearer", aresp.TokenType) + assert.Empty(t, aresp.GetExtra("refresh_token")) + assert.NotEmpty(t, aresp.GetExtra("expires_in")) + assert.Equal(t, "foo", aresp.GetExtra("scope")) + }, + }, + } + + for _, testCase := range testCases { + t.Run("case="+testCase.description, func(t *testing.T) { + config := &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + AccessTokenLifespan: time.Minute, + RefreshTokenScopes: []string{"offline"}, + } + + h := rfc8628.DeviceCodeTokenEndpointHandler{ + Strategy: &mockDeviceCodeStrategyProvider{ + deviceRateLimitStrategy: &strategy.DefaultDeviceStrategy, + deviceCodeStrategy: &strategy.DefaultDeviceStrategy, + userCodeStrategy: &strategy.DefaultDeviceStrategy, + coreStrategy: strategy.CoreStrategy, + }, + Storage: store, + Config: config, + } + + if testCase.setup != nil { + testCase.setup(t, testCase.areq, testCase.authreq, config) + } + + aresp := fosite.NewAccessResponse() + err := h.PopulateTokenEndpointResponse(context.TODO(), testCase.areq, aresp) + + if testCase.expectErr != nil { + require.EqualError(t, err, testCase.expectErr.Error(), "%+v", err) + } else { + require.NoError(t, err, "%+v", err) + } + + if testCase.check != nil { + testCase.check(t, aresp) + } + }) + } + }) + } +} + +func TestDeviceUserCodeTransactional_HandleTokenEndpointRequest(t *testing.T) { + var mockTransactional *internal.MockTransactional + + var mockDeviceAuthStorage *internal.MockDeviceAuthStorage + var mockDeviceAuthStorageProvider *internal.MockDeviceAuthStorageProvider + var mockAccessTokenStorage *internal.MockAccessTokenStorage + var mockAccessTokenStorageProvider *internal.MockAccessTokenStorageProvider + var mockRefreshTokenStorage *internal.MockRefreshTokenStorage + var mockRefreshTokenStorageProvider *internal.MockRefreshTokenStorageProvider + var mockTokenRevocationStorageProvider *internal.MockTokenRevocationStorageProvider + + var mockDeviceRateLimitStrategyProvider *internal.MockDeviceRateLimitStrategyProvider + var mockDeviceCodeStrategy *internal.MockDeviceCodeStrategy + var mockDeviceCodeStrategyProvider *internal.MockDeviceCodeStrategyProvider + var mockUserCodeStrategyProvider *internal.MockUserCodeStrategyProvider + var mockAccessTokenStrategy *internal.MockAccessTokenStrategy + var mockAccessTokenStrategyProvider *internal.MockAccessTokenStrategyProvider + var mockRefreshTokenStrategy *internal.MockRefreshTokenStrategy + var mockRefreshTokenStrategyProvider *internal.MockRefreshTokenStrategyProvider + + deviceStrategy := RFC8628HMACSHAStrategy + + authreq := &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ID: "foo", GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}}, + RequestedScope: fosite.Arguments{"foo", "offline"}, + GrantedScope: fosite.Arguments{"foo", "offline"}, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + } + + areq := &fosite.AccessRequest{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code"}, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + GrantTypes: fosite.Arguments{"urn:ietf:params:oauth:grant-type:device_code", "refresh_token"}, + }, + Session: &fosite.DefaultSession{}, + RequestedAt: time.Now().UTC(), + }, + } + aresp := fosite.NewAccessResponse() + propagatedContext := context.Background() + + code, _, err := deviceStrategy.GenerateDeviceCode(context.Background()) + 
require.NoError(t, err) + areq.Form = url.Values{"device_code": {code}} + + testCases := []struct { + description string + setup func() + expectError error + }{ + { + description: "transaction should be committed successfully if no errors occur", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy).Times(2) + mockDeviceAuthStorageProvider.EXPECT().DeviceAuthStorage().Return(mockDeviceAuthStorage).Times(2) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + mockAccessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(mockAccessTokenStorage).Times(1) + mockRefreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(mockRefreshTokenStorage).Times(1) + + mockDeviceCodeStrategy. + EXPECT(). + DeviceCodeSignature(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), nil) + mockDeviceAuthStorage. + EXPECT(). + GetDeviceCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(authreq, nil). + Times(1) + mockDeviceCodeStrategy. + EXPECT(). + ValidateDeviceCode(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + mockAccessTokenStrategy. + EXPECT(). + GenerateAccessToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockRefreshTokenStrategy. + EXPECT(). + GenerateRefreshToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockDeviceAuthStorage. + EXPECT(). + InvalidateDeviceCodeSession(propagatedContext, gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(nil). + Times(1) + }, + }, + { + description: "transaction should be rolled back if `InvalidateDeviceCodeSession` returns an error", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy).Times(2) + mockDeviceAuthStorageProvider.EXPECT().DeviceAuthStorage().Return(mockDeviceAuthStorage).Times(2) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + + mockDeviceCodeStrategy. + EXPECT(). + DeviceCodeSignature(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), nil) + mockDeviceAuthStorage. + EXPECT(). + GetDeviceCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(authreq, nil). + Times(1) + mockDeviceCodeStrategy. + EXPECT(). + ValidateDeviceCode(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + mockAccessTokenStrategy. + EXPECT(). + GenerateAccessToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockRefreshTokenStrategy. + EXPECT(). + GenerateRefreshToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). 
+ Times(1) + mockDeviceAuthStorage. + EXPECT(). + InvalidateDeviceCodeSession(gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "transaction should be rolled back if `CreateAccessTokenSession` returns an error", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy).Times(2) + mockDeviceAuthStorageProvider.EXPECT().DeviceAuthStorage().Return(mockDeviceAuthStorage).Times(2) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + mockAccessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(mockAccessTokenStorage).Times(1) + + mockDeviceCodeStrategy. + EXPECT(). + DeviceCodeSignature(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), nil) + mockDeviceAuthStorage. + EXPECT(). + GetDeviceCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(authreq, nil). + Times(1) + mockDeviceCodeStrategy. + EXPECT(). + ValidateDeviceCode(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + mockAccessTokenStrategy. + EXPECT(). + GenerateAccessToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockRefreshTokenStrategy. + EXPECT(). + GenerateRefreshToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockDeviceAuthStorage. + EXPECT(). + InvalidateDeviceCodeSession(propagatedContext, gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorage. + EXPECT(). + CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be created", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy).Times(2) + mockDeviceAuthStorageProvider.EXPECT().DeviceAuthStorage().Return(mockDeviceAuthStorage).Times(1) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + + mockDeviceCodeStrategy. + EXPECT(). + DeviceCodeSignature(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), nil) + mockDeviceAuthStorage. + EXPECT(). + GetDeviceCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(authreq, nil). + Times(1) + mockDeviceCodeStrategy. + EXPECT(). + ValidateDeviceCode(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + mockAccessTokenStrategy. + EXPECT(). + GenerateAccessToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockRefreshTokenStrategy. + EXPECT(). + GenerateRefreshToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). 
+ Return(nil, errors.New("Whoops, unable to create transaction!")) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be rolled back", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy).Times(2) + mockDeviceAuthStorageProvider.EXPECT().DeviceAuthStorage().Return(mockDeviceAuthStorage).Times(2) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + + mockDeviceCodeStrategy. + EXPECT(). + DeviceCodeSignature(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), nil) + mockDeviceAuthStorage. + EXPECT(). + GetDeviceCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(authreq, nil). + Times(1) + mockDeviceCodeStrategy. + EXPECT(). + ValidateDeviceCode(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + mockAccessTokenStrategy. + EXPECT(). + GenerateAccessToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockRefreshTokenStrategy. + EXPECT(). + GenerateRefreshToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockDeviceAuthStorage. + EXPECT(). + InvalidateDeviceCodeSession(gomock.Any(), gomock.Any()). + Return(errors.New("Whoops, a nasty database error occurred!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(errors.New("Whoops, unable to rollback transaction!")). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + { + description: "should result in a server error if transaction cannot be committed", + setup: func() { + mockDeviceCodeStrategyProvider.EXPECT().DeviceCodeStrategy().Return(mockDeviceCodeStrategy).Times(2) + mockDeviceAuthStorageProvider.EXPECT().DeviceAuthStorage().Return(mockDeviceAuthStorage).Times(2) + mockAccessTokenStrategyProvider.EXPECT().AccessTokenStrategy().Return(mockAccessTokenStrategy).Times(1) + mockRefreshTokenStrategyProvider.EXPECT().RefreshTokenStrategy().Return(mockRefreshTokenStrategy).Times(1) + mockAccessTokenStorageProvider.EXPECT().AccessTokenStorage().Return(mockAccessTokenStorage).Times(1) + mockRefreshTokenStorageProvider.EXPECT().RefreshTokenStorage().Return(mockRefreshTokenStorage).Times(1) + + mockDeviceCodeStrategy. + EXPECT(). + DeviceCodeSignature(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), nil) + mockDeviceAuthStorage. + EXPECT(). + GetDeviceCodeSession(gomock.Any(), gomock.Any(), gomock.Any()). + Return(authreq, nil). + Times(1) + mockDeviceCodeStrategy. + EXPECT(). + ValidateDeviceCode(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + mockAccessTokenStrategy. + EXPECT(). + GenerateAccessToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockRefreshTokenStrategy. + EXPECT(). + GenerateRefreshToken(gomock.Any(), gomock.Any()). + Return(gomock.Any().String(), gomock.Any().String(), nil) + mockTransactional. + EXPECT(). + BeginTX(propagatedContext). + Return(propagatedContext, nil). + Times(1) + mockDeviceAuthStorage. + EXPECT(). + InvalidateDeviceCodeSession(propagatedContext, gomock.Any()). + Return(nil). + Times(1) + mockAccessTokenStorage. + EXPECT(). 
+ CreateAccessTokenSession(propagatedContext, gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockRefreshTokenStorage. + EXPECT(). + CreateRefreshTokenSession(propagatedContext, gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil). + Times(1) + mockTransactional. + EXPECT(). + Commit(propagatedContext). + Return(errors.New("Whoops, unable to commit transaction!")). + Times(1) + mockTransactional. + EXPECT(). + Rollback(propagatedContext). + Return(nil). + Times(1) + }, + expectError: fosite.ErrServerError, + }, + } + + for _, testCase := range testCases { + t.Run(fmt.Sprintf("scenario=%s", testCase.description), func(t *testing.T) { + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + mockTransactional = internal.NewMockTransactional(ctrl) + + mockDeviceAuthStorage = internal.NewMockDeviceAuthStorage(ctrl) + mockDeviceAuthStorageProvider = internal.NewMockDeviceAuthStorageProvider(ctrl) + mockAccessTokenStorage = internal.NewMockAccessTokenStorage(ctrl) + mockAccessTokenStorageProvider = internal.NewMockAccessTokenStorageProvider(ctrl) + mockRefreshTokenStorage = internal.NewMockRefreshTokenStorage(ctrl) + mockRefreshTokenStorageProvider = internal.NewMockRefreshTokenStorageProvider(ctrl) + mockTokenRevocationStorageProvider = internal.NewMockTokenRevocationStorageProvider(ctrl) + + mockDeviceRateLimitStrategyProvider = internal.NewMockDeviceRateLimitStrategyProvider(ctrl) + mockDeviceCodeStrategy = internal.NewMockDeviceCodeStrategy(ctrl) + mockDeviceCodeStrategyProvider = internal.NewMockDeviceCodeStrategyProvider(ctrl) + mockUserCodeStrategyProvider = internal.NewMockUserCodeStrategyProvider(ctrl) + mockAccessTokenStrategy = internal.NewMockAccessTokenStrategy(ctrl) + mockAccessTokenStrategyProvider = internal.NewMockAccessTokenStrategyProvider(ctrl) + mockRefreshTokenStrategy = internal.NewMockRefreshTokenStrategy(ctrl) + mockRefreshTokenStrategyProvider = internal.NewMockRefreshTokenStrategyProvider(ctrl) + + mockStorage := struct { + *internal.MockDeviceAuthStorageProvider + *internal.MockAccessTokenStorageProvider + *internal.MockRefreshTokenStorageProvider + *internal.MockTokenRevocationStorageProvider + *internal.MockTransactional + }{ + MockDeviceAuthStorageProvider: mockDeviceAuthStorageProvider, + MockAccessTokenStorageProvider: mockAccessTokenStorageProvider, + MockRefreshTokenStorageProvider: mockRefreshTokenStorageProvider, + MockTokenRevocationStorageProvider: mockTokenRevocationStorageProvider, + MockTransactional: mockTransactional, + } + + mockStrategy := struct { + *internal.MockDeviceRateLimitStrategyProvider + *internal.MockDeviceCodeStrategyProvider + *internal.MockUserCodeStrategyProvider + *internal.MockAccessTokenStrategyProvider + *internal.MockRefreshTokenStrategyProvider + }{ + MockDeviceRateLimitStrategyProvider: mockDeviceRateLimitStrategyProvider, + MockDeviceCodeStrategyProvider: mockDeviceCodeStrategyProvider, + MockUserCodeStrategyProvider: mockUserCodeStrategyProvider, + MockAccessTokenStrategyProvider: mockAccessTokenStrategyProvider, + MockRefreshTokenStrategyProvider: mockRefreshTokenStrategyProvider, + } + + testCase.setup() + + h := rfc8628.DeviceCodeTokenEndpointHandler{ + Strategy: mockStrategy, + Storage: mockStorage, + Config: &fosite.Config{ + ScopeStrategy: fosite.HierarchicScopeStrategy, + AudienceMatchingStrategy: fosite.DefaultAudienceMatchingStrategy, + DeviceAndUserCodeLifespan: time.Minute, + }, + } + + if err = h.PopulateTokenEndpointResponse(propagatedContext, areq, aresp); testCase.expectError != nil { + 
assert.EqualError(t, err, testCase.expectError.Error()) + } + }) + } +} diff --git a/fosite/handler/verifiable/handler.go b/fosite/handler/verifiable/handler.go new file mode 100644 index 00000000000..805b3eaa2f7 --- /dev/null +++ b/fosite/handler/verifiable/handler.go @@ -0,0 +1,65 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package verifiable + +import ( + "context" + "time" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/errorsx" +) + +const ( + draftScope = "userinfo_credential_draft_00" + draftNonceField = "c_nonce_draft_00" + draftNonceExpField = "c_nonce_expires_in_draft_00" +) + +type Handler struct { + Config interface { + fosite.VerifiableCredentialsNonceLifespanProvider + } + NonceManagerProvider +} + +var _ fosite.TokenEndpointHandler = (*Handler)(nil) + +func (c *Handler) HandleTokenEndpointRequest(ctx context.Context, request fosite.AccessRequester) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + return nil +} + +func (c *Handler) PopulateTokenEndpointResponse( + ctx context.Context, + request fosite.AccessRequester, + response fosite.AccessResponder, +) error { + if !c.CanHandleTokenEndpointRequest(ctx, request) { + return errorsx.WithStack(fosite.ErrUnknownRequest) + } + + lifespan := c.Config.GetVerifiableCredentialsNonceLifespan(ctx) + expiry := time.Now().UTC().Add(lifespan) + nonce, err := c.NonceManager().NewNonce(ctx, response.GetAccessToken(), expiry) + if err != nil { + return err + } + + response.SetExtra(draftNonceField, nonce) + response.SetExtra(draftNonceExpField, int64(lifespan.Seconds())) + + return nil +} + +func (c *Handler) CanSkipClientAuth(context.Context, fosite.AccessRequester) bool { + return false +} + +func (c *Handler) CanHandleTokenEndpointRequest(_ context.Context, requester fosite.AccessRequester) bool { + return requester.GetGrantedScopes().Has("openid", draftScope) +} diff --git a/fosite/handler/verifiable/handler_test.go b/fosite/handler/verifiable/handler_test.go new file mode 100644 index 00000000000..b882e7b18f6 --- /dev/null +++ b/fosite/handler/verifiable/handler_test.go @@ -0,0 +1,77 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package verifiable + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +type mockNonceManagerProvider struct{ n NonceManager } + +func (m mockNonceManagerProvider) NonceManager() NonceManager { return m.n } + +type mockNonceManager struct{ t *testing.T } + +func (m *mockNonceManager) NewNonce(ctx context.Context, accessToken string, expiresAt time.Time) (string, error) { + assert.Equal(m.t, "fake access token", accessToken) + assert.WithinDuration(m.t, time.Now().Add(time.Hour), expiresAt, 5*time.Second) + return "mocked nonce", nil +} + +func (m *mockNonceManager) IsNonceValid(context.Context, string, string) error { + return nil +} + +func TestHandler(t *testing.T) { + t.Parallel() + ctx := context.Background() + + t.Run("case=correct scopes", func(t *testing.T) { + t.Parallel() + handler := newHandler(t) + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + req := internal.NewMockAccessRequester(ctrl) + req.EXPECT().GetGrantedScopes().Return(fosite.Arguments{"openid", draftScope}).AnyTimes() + + resp := 
internal.NewMockAccessResponder(ctrl) + resp.EXPECT().GetAccessToken().Return("fake access token") + resp.EXPECT().SetExtra(gomock.Eq(draftNonceField), gomock.Eq("mocked nonce")) + resp.EXPECT().SetExtra(gomock.Eq(draftNonceExpField), gomock.Any()) + + assert.NoError(t, handler.HandleTokenEndpointRequest(ctx, req)) + assert.NoError(t, handler.PopulateTokenEndpointResponse(ctx, req, resp)) + }) + + t.Run("case=incorrect scopes", func(t *testing.T) { + t.Parallel() + handler := newHandler(t) + ctrl := gomock.NewController(t) + t.Cleanup(ctrl.Finish) + + req := internal.NewMockAccessRequester(ctrl) + req.EXPECT().GetGrantedScopes().Return(fosite.Arguments{"openid"}).AnyTimes() + + resp := internal.NewMockAccessResponder(ctrl) + + assert.ErrorIs(t, handler.HandleTokenEndpointRequest(ctx, req), fosite.ErrUnknownRequest) + assert.ErrorIs(t, handler.PopulateTokenEndpointResponse(ctx, req, resp), fosite.ErrUnknownRequest) + }) +} + +func newHandler(t *testing.T) *Handler { + return &Handler{ + Config: new(fosite.Config), + NonceManagerProvider: mockNonceManagerProvider{n: &mockNonceManager{t: t}}, + } +} diff --git a/fosite/handler/verifiable/nonce.go b/fosite/handler/verifiable/nonce.go new file mode 100644 index 00000000000..4bc548042fc --- /dev/null +++ b/fosite/handler/verifiable/nonce.go @@ -0,0 +1,21 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package verifiable + +import ( + "context" + "time" +) + +type NonceManager interface { + // NewNonce creates a new nonce bound to the access token valid until the given expiry time. + NewNonce(ctx context.Context, accessToken string, expiresAt time.Time) (string, error) + + // IsNonceValid checks if the given nonce is valid for the given access token and not expired. + IsNonceValid(ctx context.Context, accessToken string, nonce string) error +} + +type NonceManagerProvider interface { + NonceManager() NonceManager +} diff --git a/fosite/hash.go b/fosite/hash.go new file mode 100644 index 00000000000..10da2b2b33a --- /dev/null +++ b/fosite/hash.go @@ -0,0 +1,16 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "context" + +// Hasher defines how a oauth2-compatible hasher should look like. +type Hasher interface { + // Compare compares data with a hash and returns an error + // if the two do not match. + Compare(ctx context.Context, hash, data []byte) error + + // Hash creates a hash from data or returns an error. + Hash(ctx context.Context, data []byte) ([]byte, error) +} diff --git a/fosite/hash_bcrypt.go b/fosite/hash_bcrypt.go new file mode 100644 index 00000000000..47a721fb290 --- /dev/null +++ b/fosite/hash_bcrypt.go @@ -0,0 +1,40 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + + "github.com/ory/x/errorsx" + + "golang.org/x/crypto/bcrypt" +) + +const DefaultBCryptWorkFactor = 12 + +// BCrypt implements the Hasher interface by using BCrypt. 
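+// +// A minimal usage sketch (ctx is an assumed context.Context; the work factor falls back to DefaultBCryptWorkFactor when HashCost is zero): +// +//	hasher := &BCrypt{Config: &Config{HashCost: DefaultBCryptWorkFactor}} +//	digest, err := hasher.Hash(ctx, []byte("secret")) +//	if err == nil { +//		// Compare returns nil when the plaintext matches the stored hash. +//		err = hasher.Compare(ctx, digest, []byte("secret")) +//	}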
+type BCrypt struct { + Config interface { + BCryptCostProvider + } +} + +func (b *BCrypt) Hash(ctx context.Context, data []byte) ([]byte, error) { + wf := b.Config.GetBCryptCost(ctx) + if wf == 0 { + wf = DefaultBCryptWorkFactor + } + s, err := bcrypt.GenerateFromPassword(data, wf) + if err != nil { + return nil, errorsx.WithStack(err) + } + return s, nil +} + +func (b *BCrypt) Compare(ctx context.Context, hash, data []byte) error { + if err := bcrypt.CompareHashAndPassword(hash, data); err != nil { + return errorsx.WithStack(err) + } + return nil +} diff --git a/fosite/hash_bcrypt_test.go b/fosite/hash_bcrypt_test.go new file mode 100644 index 00000000000..97196ff91b9 --- /dev/null +++ b/fosite/hash_bcrypt_test.go @@ -0,0 +1,104 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/crypto/bcrypt" +) + +func TestCompare(t *testing.T) { + workfactor := 10 + hasher := &BCrypt{Config: &Config{HashCost: workfactor}} + + expectedPassword := "hello world" + expectedPasswordHash, err := hasher.Hash(context.TODO(), []byte(expectedPassword)) + assert.NoError(t, err) + assert.NotNil(t, expectedPasswordHash) + + testCases := []struct { + testDescription string + providedPassword string + shouldError bool + }{ + { + testDescription: "should not return an error if hash of provided password matches hash of expected password", + providedPassword: expectedPassword, + shouldError: false, + }, + { + testDescription: "should return an error if hash of provided password does not match hash of expected password", + providedPassword: "some invalid password", + shouldError: true, + }, + } + + for _, test := range testCases { + t.Run(test.testDescription, func(t *testing.T) { + hash, err := hasher.Hash(context.TODO(), []byte(test.providedPassword)) + assert.NoError(t, err) + assert.NotNil(t, hash) + + err = hasher.Compare(context.TODO(), expectedPasswordHash, []byte(test.providedPassword)) + if test.shouldError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestHash(t *testing.T) { + validWorkFactor := 10 + invalidWorkFactor := 1000 // this is an invalid work factor that will cause the call to Hash to fail! 
+ password := []byte("bar") + + testCases := []struct { + testDescription string + workFactor int + shouldError bool + }{ + { + testDescription: "should succeed if work factor is valid", + workFactor: validWorkFactor, + shouldError: false, + }, + { + testDescription: "should fail with error if work factor is invalid", + workFactor: invalidWorkFactor, + shouldError: true, + }, + } + + for _, test := range testCases { + t.Run(test.testDescription, func(t *testing.T) { + hasher := &BCrypt{Config: &Config{HashCost: test.workFactor}} + _, err := hasher.Hash(context.TODO(), password) + if test.shouldError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestDefaultWorkFactor(t *testing.T) { + b := &BCrypt{Config: &Config{}} + data := []byte("secrets") + hash, err := b.Hash(context.TODO(), data) + if err != nil { + t.Fatal(err) + } + + cost, err := bcrypt.Cost(hash) + assert.NoError(t, err) + if cost != 12 { + t.Errorf("got cost factor %d", cost) + } +} diff --git a/fosite/helper.go b/fosite/helper.go new file mode 100644 index 00000000000..1f5d6911b35 --- /dev/null +++ b/fosite/helper.go @@ -0,0 +1,45 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "fmt" + "strings" +) + +// StringInSlice returns true if needle exists in haystack +func StringInSlice(needle string, haystack []string) bool { + for _, b := range haystack { + if strings.ToLower(b) == strings.ToLower(needle) { + return true + } + } + return false +} + +func RemoveEmpty(args []string) (ret []string) { + for _, v := range args { + v = strings.TrimSpace(v) + if v != "" { + ret = append(ret, v) + } + } + return +} + +// EscapeJSONString does a poor man's JSON encoding. Useful when we do not want to use full JSON encoding +// because we just had an error doing the JSON encoding. The characters that MUST be escaped: quotation mark, +// reverse solidus, and the control characters (U+0000 through U+001F). +// See: https://tools.ietf.org/html/std90#section-7 +func EscapeJSONString(str string) string { + // Escape reverse solidus. + str = strings.ReplaceAll(str, `\`, `\\`) + // Escape control characters. + for r := rune(0); r < ' '; r++ { + str = strings.ReplaceAll(str, string(r), fmt.Sprintf(`\u%04x`, r)) + } + // Escape quotation mark. 
+ str = strings.ReplaceAll(str, `"`, `\"`) + return str +} diff --git a/fosite/helper_test.go b/fosite/helper_test.go new file mode 100644 index 00000000000..5c026479177 --- /dev/null +++ b/fosite/helper_test.go @@ -0,0 +1,40 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestStringInSlice(t *testing.T) { + for k, c := range []struct { + needle string + haystack []string + ok bool + }{ + {needle: "foo", haystack: []string{"foo", "bar"}, ok: true}, + {needle: "bar", haystack: []string{"foo", "bar"}, ok: true}, + {needle: "baz", haystack: []string{"foo", "bar"}, ok: false}, + {needle: "foo", haystack: []string{"bar"}, ok: false}, + {needle: "bar", haystack: []string{"bar"}, ok: true}, + {needle: "foo", haystack: []string{}, ok: false}, + } { + assert.Equal(t, c.ok, StringInSlice(c.needle, c.haystack), "%d", k) + t.Logf("Passed test case %d", k) + } +} + +func TestEscapeJSONString(t *testing.T) { + for _, str := range []string{"", "foobar", `foo"bar`, `foo\bar`, "foo\n\tbar"} { + escaped := EscapeJSONString(str) + var unmarshaled string + err := json.Unmarshal([]byte(`"`+escaped+`"`), &unmarshaled) + require.NoError(t, err, str) + assert.Equal(t, str, unmarshaled, str) + } +} diff --git a/fosite/i18n/default_catalog.go b/fosite/i18n/default_catalog.go new file mode 100644 index 00000000000..e99df17c596 --- /dev/null +++ b/fosite/i18n/default_catalog.go @@ -0,0 +1,96 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package i18n + +import ( + "net/http" + + "golang.org/x/text/language" + "golang.org/x/text/message" +) + +// DefaultMessage is a single message in the locale bundle +// identified by 'ID'. +type DefaultMessage struct { + ID string `json:"id"` + FormattedMessage string `json:"msg"` +} + +// DefaultLocaleBundle is a bundle of messages for the specified +// locale. The language tag can be arbitrary to allow for +// unsupported/unknown languages used by custom clients. +type DefaultLocaleBundle struct { + LangTag string `json:"lang"` + Messages []*DefaultMessage `json:"messages"` +} + +// defaultMessageCatalog is a catalog of all locale bundles. +type defaultMessageCatalog struct { + Bundles []*DefaultLocaleBundle + + matcher language.Matcher +} + +func NewDefaultMessageCatalog(bundles []*DefaultLocaleBundle) MessageCatalog { + c := &defaultMessageCatalog{ + Bundles: bundles, + } + + for _, v := range c.Bundles { + if err := v.Init(); err != nil { + continue + } + } + + c.makeMatcher() + return c +} + +// Init initializes the default catalog with the +// list of messages. The lang tag must parse, otherwise this +// func will panic. +func (l *DefaultLocaleBundle) Init() error { + tag := language.MustParse(l.LangTag) + for _, m := range l.Messages { + if err := message.SetString(tag, m.ID, m.FormattedMessage); err != nil { + return err + } + } + + return nil +} + +func (c *defaultMessageCatalog) GetMessage(ID string, tag language.Tag, v ...interface{}) string { + matchedTag, _, _ := c.matcher.Match(tag) + p := message.NewPrinter(matchedTag) + + result := p.Sprintf(ID, v...) + if result == ID && tag != language.English { + return c.GetMessage(ID, language.English, v...) 
+ } + + return result +} + +func (c *defaultMessageCatalog) GetLangFromRequest(r *http.Request) language.Tag { + lang, _ := r.Cookie("lang") + accept := r.Header.Get("Accept-Language") + tag, _ := language.MatchStrings(c.matcher, lang.String(), accept) + + return tag +} + +func (c *defaultMessageCatalog) makeMatcher() { + // List English first so it acts as the fallback, then add every other registered language. + result := []language.Tag{language.English} + for _, t := range message.DefaultCatalog.Languages() { + if t != language.English { + result = append(result, t) + } + } + + c.matcher = language.NewMatcher(result) +} diff --git a/fosite/i18n/i18n.go b/fosite/i18n/i18n.go new file mode 100644 index 00000000000..a446ff2b904 --- /dev/null +++ b/fosite/i18n/i18n.go @@ -0,0 +1,46 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package i18n + +import ( + "net/http" + + "golang.org/x/text/language" +) + +// MessageCatalog declares the interface to get globalized messages +type MessageCatalog interface { + GetMessage(ID string, tag language.Tag, v ...interface{}) string + GetLangFromRequest(r *http.Request) language.Tag +} + +// GetMessage is a helper func to get the translated message based on +// the message ID and lang. If no matching message is found, it uses +// ID as the message itself. +func GetMessage(c MessageCatalog, ID string, tag language.Tag, v ...interface{}) string { + return GetMessageOrDefault(c, ID, tag, ID, v...) +} + +// GetMessageOrDefault is a helper func to get the translated message based on +// the message ID and lang. If no matching message is found, it returns the +// 'def' message. +func GetMessageOrDefault(c MessageCatalog, ID string, tag language.Tag, def string, v ...interface{}) string { + if c != nil { + if s := c.GetMessage(ID, tag, v...); s != ID { + return s + } + } + + return def +} + +// GetLangFromRequest is a helper func to get the language tag based on the +// HTTP request and the constructed message catalog.
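+// +// A minimal sketch, assuming a catalog built with NewDefaultMessageCatalog that registers a "badRequestMethod" message, and an incoming *http.Request r: +// +//	tag := GetLangFromRequest(catalog, r) +//	msg := GetMessage(catalog, "badRequestMethod", tag, r.Method)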
+func GetLangFromRequest(c MessageCatalog, r *http.Request) language.Tag { + if c != nil { + return c.GetLangFromRequest(r) + } + + return language.English +} diff --git a/fosite/i18n/i18n_test.go b/fosite/i18n/i18n_test.go new file mode 100644 index 00000000000..94a185a814d --- /dev/null +++ b/fosite/i18n/i18n_test.go @@ -0,0 +1,48 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package i18n + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestSimpleTranslation(t *testing.T) { + catalog := NewDefaultMessageCatalog([]*DefaultLocaleBundle{ + { + LangTag: "en", + Messages: []*DefaultMessage{ + { + ID: "badRequestMethod", + FormattedMessage: "HTTP method is '%s', expected 'POST'.", + }, + { + ID: "badRequestBody", + FormattedMessage: "Unable to parse HTTP body, make sure to send a properly formatted form request body.", + }, + }, + }, + { + LangTag: "es", + Messages: []*DefaultMessage{ + { + ID: "badRequestMethod", + FormattedMessage: "El método HTTP es '%s', esperado 'POST'.", + }, + { + ID: "badRequestBody", + FormattedMessage: "No se puede analizar el cuerpo HTTP, asegúrese de enviar un cuerpo de solicitud de formulario con el formato adecuado.", + }, + }, + }, + }) + + msg := GetMessage(catalog, "badRequestMethod", language.Spanish, "GET") + assert.Equal(t, msg, "El método HTTP es 'GET', esperado 'POST'.") + + msg = GetMessage(catalog, "badRequestBody", language.English, "GET") + assert.Equal(t, msg, "Unable to parse HTTP body, make sure to send a properly formatted form request body.") +} diff --git a/fosite/i18n_helper.go b/fosite/i18n_helper.go new file mode 100644 index 00000000000..7a51573fe7b --- /dev/null +++ b/fosite/i18n_helper.go @@ -0,0 +1,48 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "github.com/pkg/errors" + "golang.org/x/text/language" + + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/x/errorsx" +) + +// AddLocalizerToErr augments the error object with the localizer +// based on the language set in the requester object. This is primarily +// required for response writers like introspection that do not take in +// the requester in the Write* function that produces the translated +// message. +// See - WriteIntrospectionError, for example. +func AddLocalizerToErr(catalog i18n.MessageCatalog, err error, requester Requester) error { + return AddLocalizerToErrWithLang(catalog, getLangFromRequester(requester), err) +} + +// AddLocalizerToErrWithLang augments the error object with the localizer +// based on the language passed in. This is primarily +// required for response writers like introspection that do not take in +// the requester in the Write* function that produces the translated +// message. +// See - WriteIntrospectionError, for example. 
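+// +// A minimal sketch (catalog is an assumed i18n.MessageCatalog): +// +//	localizedErr := AddLocalizerToErrWithLang(catalog, language.Spanish, ErrInvalidRequest)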
+func AddLocalizerToErrWithLang(catalog i18n.MessageCatalog, lang language.Tag, err error) error { + var e RFC6749Error + if errors.As(err, &e) { + return e.WithLocalizer(catalog, lang) + } else if errors.As(errorsx.Cause(err), &e) { + return e.WithLocalizer(catalog, lang) + } + return err +} + +func getLangFromRequester(requester Requester) language.Tag { + lang := language.English + g11nContext, ok := requester.(G11NContext) + if ok { + lang = g11nContext.GetLang() + } + + return lang +} diff --git a/fosite/i18n_helper_test.go b/fosite/i18n_helper_test.go new file mode 100644 index 00000000000..d4dcaf7e020 --- /dev/null +++ b/fosite/i18n_helper_test.go @@ -0,0 +1,52 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" + + "github.com/ory/hydra/v2/fosite/i18n" +) + +func TestErrorTranslation(t *testing.T) { + catalog := i18n.NewDefaultMessageCatalog([]*i18n.DefaultLocaleBundle{ + { + LangTag: "en", + Messages: []*i18n.DefaultMessage{ + { + ID: "badRequestMethod", + FormattedMessage: "HTTP method is '%s', expected 'POST'.", + }, + { + ID: "invalid_request", + FormattedMessage: "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed.", + }, + }, + }, + { + LangTag: "es", + Messages: []*i18n.DefaultMessage{ + { + ID: "badRequestMethod", + FormattedMessage: "El método HTTP es '%s', esperado 'POST'.", + }, + { + ID: "invalid_request", + FormattedMessage: "A la solicitud le falta un parámetro obligatorio, incluye un valor de parámetro no válido, incluye un parámetro más de una vez o tiene un formato incorrecto.", + }, + }, + }, + }) + + errWithNoCatalog := ErrInvalidRequest.WithHintIDOrDefaultf("badRequestMethod", "HTTP method is '%s', expected 'POST'.", "GET") + errWithCatalog := errWithNoCatalog.WithLocalizer(catalog, language.Spanish) + + assert.Equal(t, "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. HTTP method is 'GET', expected 'POST'.", + errWithNoCatalog.GetDescription(), "Message does not match when no catalog is specified") + assert.Equal(t, "A la solicitud le falta un parámetro obligatorio, incluye un valor de parámetro no válido, incluye un parámetro más de una vez o tiene un formato incorrecto. 
El método HTTP es 'GET', esperado 'POST'.", + errWithCatalog.GetDescription(), "Message does not match when catalog is specified") +} diff --git a/fosite/integration/authorize_code_grant_public_client_pkce_test.go b/fosite/integration/authorize_code_grant_public_client_pkce_test.go new file mode 100644 index 00000000000..72fbb2ae17e --- /dev/null +++ b/fosite/integration/authorize_code_grant_public_client_pkce_test.go @@ -0,0 +1,127 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "testing" + + "github.com/magiconair/properties/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" // "github.com/stretchr/testify/assert" +) + +func TestAuthorizeCodeFlowWithPublicClientAndPKCE(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runAuthorizeCodeGrantWithPublicClientAndPKCETest(t, strategy) + } +} + +func runAuthorizeCodeGrantWithPublicClientAndPKCETest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + c := new(fosite.Config) + c.EnforcePKCE = true + c.EnablePKCEPlainChallengeMethod = true + f := compose.Compose(c, fositeStore, strategy, compose.OAuth2AuthorizeExplicitFactory, compose.OAuth2PKCEFactory, compose.OAuth2TokenIntrospectionFactory) + ts := mockServer(t, f, &fosite.DefaultSession{}) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + oauthClient.ClientSecret = "" + oauthClient.ClientID = "public-client" + fositeStore.Clients["public-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + var authCodeUrl string + var verifier string + for k, c := range []struct { + description string + setup func() + authStatusCode int + tokenStatusCode int + }{ + { + description: "should fail because no challenge was given", + setup: func() { + authCodeUrl = oauthClient.AuthCodeURL("12345678901234567890") + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should pass", + setup: func() { + verifier = "somechallengesomechallengesomechallengesomechallengesomechallengesomechallenge" + authCodeUrl = oauthClient.AuthCodeURL("12345678901234567890") + "&code_challenge=somechallengesomechallengesomechallengesomechallengesomechallengesomechallenge" + }, + authStatusCode: http.StatusOK, + }, + { + description: "should fail because the verifier is mismatching", + setup: func() { + verifier = "failchallengefailchallengefailchallengefailchallengefailchallengefailchallengefailchallengefailchallenge" + authCodeUrl = oauthClient.AuthCodeURL("12345678901234567890") + "&code_challenge=somechallengesomechallengesomechallengesomechallengesomechallengesomechallengesomechallengesomechallenge" + }, + authStatusCode: http.StatusOK, + tokenStatusCode: http.StatusBadRequest, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + t.Logf("Got url: %s", authCodeUrl) + + resp, err := http.Get(authCodeUrl) + require.NoError(t, err) + require.Equal(t, resp.StatusCode, c.authStatusCode) + + if resp.StatusCode == http.StatusOK { + // This should fail because no verifier was given + // _, err := oauthClient.Exchange(goauth.NoContext, resp.Request.URL.Query().Get("code")) + // require.Error(t, err) + // 
require.Empty(t, token.AccessToken) + t.Logf("Got redirect url: %s", resp.Request.URL) + + resp, err := http.PostForm(ts.URL+"/token", url.Values{ + "code": {resp.Request.URL.Query().Get("code")}, + "grant_type": {"authorization_code"}, + "client_id": {"public-client"}, + "redirect_uri": {ts.URL + "/callback"}, + "code_verifier": {verifier}, + }) + require.NoError(t, err) + defer func(Body io.ReadCloser) { _ = Body.Close() }(resp.Body) + + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + + if c.tokenStatusCode != 0 { + require.Equal(t, c.tokenStatusCode, resp.StatusCode) + token := goauth.Token{} + require.NoError(t, json.Unmarshal(body, &token)) + require.Empty(t, token.AccessToken) + return + } + + assert.Equal(t, resp.StatusCode, http.StatusOK) + token := goauth.Token{} + require.NoError(t, json.Unmarshal(body, &token)) + + require.NotEmpty(t, token.AccessToken, "Got body: %s", string(body)) + + httpClient := oauthClient.Client(goauth.NoContext, &token) + resp, err = httpClient.Get(ts.URL + "/info") + require.NoError(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + } + }) + } +} diff --git a/fosite/integration/authorize_code_grant_public_client_test.go b/fosite/integration/authorize_code_grant_public_client_test.go new file mode 100644 index 00000000000..555ef7300ef --- /dev/null +++ b/fosite/integration/authorize_code_grant_public_client_test.go @@ -0,0 +1,113 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +func TestAuthorizeCodeFlowWithPublicClient(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runAuthorizeCodeGrantWithPublicClientTest(t, strategy) + } +} + +func runAuthorizeCodeGrantWithPublicClientTest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2AuthorizeExplicitFactory, compose.OAuth2TokenIntrospectionFactory) + ts := mockServer(t, f, &fosite.DefaultSession{Subject: "foo-sub"}) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + oauthClient.ClientSecret = "" + oauthClient.ClientID = "public-client" + fositeStore.Clients["public-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + var state string + for k, c := range []struct { + description string + setup func() + check func(t *testing.T, r *http.Response) + params []goauth.AuthCodeOption + authStatusCode int + }{ + { + description: "should fail because of audience", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("audience", "https://www.ory.sh/not-api")}, + setup: func() { + state = "12345678901234567890" + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should fail because of scope", + params: []goauth.AuthCodeOption{}, + setup: func() { + oauthClient.Scopes = []string{"not-exist"} + state = "12345678901234567890" + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should pass with proper audience", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("audience", "https://www.ory.sh/api")}, + setup: func() { + state = "12345678901234567890" + 
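+ // Reset the requested scope to "fosite"; the previous case left it set to a non-existent scope.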
oauthClient.Scopes = []string{"fosite"} + }, + check: func(t *testing.T, r *http.Response) { + var b fosite.AccessRequest + b.Client = new(fosite.DefaultClient) + b.Session = new(defaultSession) + require.NoError(t, json.NewDecoder(r.Body).Decode(&b)) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.RequestedAudience) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.GrantedAudience) + assert.EqualValues(t, "foo-sub", b.Session.(*defaultSession).Subject) + }, + authStatusCode: http.StatusOK, + }, + { + description: "should pass", + setup: func() { + state = "12345678901234567890" + }, + authStatusCode: http.StatusOK, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + resp, err := http.Get(oauthClient.AuthCodeURL(state, c.params...)) + require.NoError(t, err) + require.Equal(t, c.authStatusCode, resp.StatusCode) + + if resp.StatusCode == http.StatusOK { + token, err := oauthClient.Exchange(goauth.NoContext, resp.Request.URL.Query().Get("code")) + require.NoError(t, err) + require.NotEmpty(t, token.AccessToken) + + httpClient := oauthClient.Client(goauth.NoContext, token) + resp, err := httpClient.Get(ts.URL + "/info") + require.NoError(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + if c.check != nil { + c.check(t, resp) + } + } + }) + } +} diff --git a/fosite/integration/authorize_code_grant_test.go b/fosite/integration/authorize_code_grant_test.go new file mode 100644 index 00000000000..c4bd67eeee2 --- /dev/null +++ b/fosite/integration/authorize_code_grant_test.go @@ -0,0 +1,183 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestAuthorizeCodeFlow(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runAuthorizeCodeGrantTest(t, strategy) + } +} + +func TestAuthorizeCodeFlowDupeCode(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runAuthorizeCodeGrantDupeCodeTest(t, strategy) + } +} + +func runAuthorizeCodeGrantTest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2AuthorizeExplicitFactory, compose.OAuth2TokenIntrospectionFactory) + ts := mockServer(t, f, &openid.DefaultSession{Subject: "foo-sub"}) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + fositeStore.Clients["custom-lifespan-client"].(*fosite.DefaultClientWithCustomTokenLifespans).RedirectURIs[0] = ts.URL + "/callback" + + var state string + for k, c := range []struct { + description string + setup func() + check func(t *testing.T, r *http.Response, token *goauth.Token) + params []goauth.AuthCodeOption + authStatusCode int + }{ + { + description: "should fail because of audience", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("audience", 
"https://www.ory.sh/not-api")}, + setup: func() { + oauthClient = newOAuth2Client(ts) + state = "12345678901234567890" + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should fail because of scope", + params: []goauth.AuthCodeOption{}, + setup: func() { + oauthClient = newOAuth2Client(ts) + oauthClient.Scopes = []string{"not-exist"} + state = "12345678901234567890" + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should pass with proper audience", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("audience", "https://www.ory.sh/api")}, + setup: func() { + oauthClient = newOAuth2Client(ts) + state = "12345678901234567890" + }, + check: func(t *testing.T, r *http.Response, _ *goauth.Token) { + var b fosite.AccessRequest + b.Client = new(fosite.DefaultClient) + b.Session = new(defaultSession) + require.NoError(t, json.NewDecoder(r.Body).Decode(&b)) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.RequestedAudience) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.GrantedAudience) + assert.EqualValues(t, "foo-sub", b.Session.(*defaultSession).Subject) + }, + authStatusCode: http.StatusOK, + }, + { + description: "should pass", + setup: func() { + oauthClient = newOAuth2Client(ts) + state = "12345678901234567890" + }, + authStatusCode: http.StatusOK, + }, + { + description: "should pass with custom client token lifespans", + setup: func() { + oauthClient = newOAuth2Client(ts) + oauthClient.ClientID = "custom-lifespan-client" + oauthClient.Scopes = []string{"fosite", "offline"} + state = "12345678901234567890" + }, + check: func(t *testing.T, r *http.Response, token *goauth.Token) { + var b fosite.AccessRequest + b.Client = new(fosite.DefaultClient) + b.Session = new(defaultSession) + require.NoError(t, json.NewDecoder(r.Body).Decode(&b)) + atExp := b.Session.GetExpiresAt(fosite.AccessToken) + internal.RequireEqualTime(t, time.Now().UTC().Add(*internal.TestLifespans.AuthorizationCodeGrantAccessTokenLifespan), atExp, time.Minute) + atExpIn := time.Duration(token.Extra("expires_in").(float64)) * time.Second + internal.RequireEqualDuration(t, *internal.TestLifespans.AuthorizationCodeGrantAccessTokenLifespan, atExpIn, time.Minute) + rtExp := b.Session.GetExpiresAt(fosite.RefreshToken) + internal.RequireEqualTime(t, time.Now().UTC().Add(*internal.TestLifespans.AuthorizationCodeGrantRefreshTokenLifespan), rtExp, time.Minute) + }, + authStatusCode: http.StatusOK, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + resp, err := http.Get(oauthClient.AuthCodeURL(state, c.params...)) + require.NoError(t, err) + require.Equal(t, c.authStatusCode, resp.StatusCode) + + if resp.StatusCode == http.StatusOK { + token, err := oauthClient.Exchange(goauth.NoContext, resp.Request.URL.Query().Get("code")) + require.NoError(t, err) + require.NotEmpty(t, token.AccessToken) + + httpClient := oauthClient.Client(goauth.NoContext, token) + resp, err := httpClient.Get(ts.URL + "/info") + require.NoError(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + if c.check != nil { + c.check(t, resp, token) + } + } + }) + } +} + +func runAuthorizeCodeGrantDupeCodeTest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2AuthorizeExplicitFactory, compose.OAuth2TokenIntrospectionFactory) + ts := mockServer(t, f, &fosite.DefaultSession{}) + defer ts.Close() + + newOAuth2Client(ts) + 
fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + oauthClient := newOAuth2Client(ts) + state := "12345678901234567890" + + resp, err := http.Get(oauthClient.AuthCodeURL(state)) + require.NoError(t, err) + require.Equal(t, http.StatusOK, resp.StatusCode) + + token, err := oauthClient.Exchange(goauth.NoContext, resp.Request.URL.Query().Get("code")) + require.NoError(t, err) + require.NotEmpty(t, token.AccessToken) + + req, err := http.NewRequest("GET", ts.URL+"/info", nil) + require.NoError(t, err) + req.Header.Set("Authorization", "Bearer "+token.AccessToken) + + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, http.StatusOK, resp.StatusCode) + + _, err = oauthClient.Exchange(goauth.NoContext, resp.Request.URL.Query().Get("code")) + require.Error(t, err) + + resp, err = http.DefaultClient.Get(ts.URL + "/info") + require.NoError(t, err) + require.Equal(t, http.StatusUnauthorized, resp.StatusCode) +} diff --git a/fosite/integration/authorize_device_grant_request_test.go b/fosite/integration/authorize_device_grant_request_test.go new file mode 100644 index 00000000000..d8ec764d567 --- /dev/null +++ b/fosite/integration/authorize_device_grant_request_test.go @@ -0,0 +1,243 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/x/uuidx" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/internal/gen" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestDeviceFlow(t *testing.T) { + t.Run("device auth", deviceAuth) + t.Run("exchange for access token", exchangeForAccessToken) +} + +func deviceAuth(t *testing.T) { + session := &fosite.DefaultSession{} + + fc := &fosite.Config{ + DeviceVerificationURL: "https://example.com/", + RefreshTokenLifespan: -1, + GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-"), + } + f := compose.ComposeAllEnabled(fc, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + defer ts.Close() + + oauthClient := &goauth.Config{ + ClientID: "device-client", + ClientSecret: "foobar", + Endpoint: goauth.Endpoint{ + TokenURL: ts.URL + tokenRelativePath, + DeviceAuthURL: ts.URL + deviceAuthRelativePath, + }, + } + for _, c := range []struct { + description string + setup func() + err bool + check func(t *testing.T, token *goauth.DeviceAuthResponse, err error) + cleanUp func() + }{ + { + description: "should fail with invalid_grant", + setup: func() { + fositeStore.Clients["device-client"].(*fosite.DefaultClient).GrantTypes = []string{"authorization_code"} + }, + err: true, + check: func(t *testing.T, token *goauth.DeviceAuthResponse, err error) { + assert.ErrorContains(t, err, "invalid_grant") + }, + cleanUp: func() { + fositeStore.Clients["device-client"].(*fosite.DefaultClient).GrantTypes = []string{"urn:ietf:params:oauth:grant-type:device_code", "refresh_token"} + }, + }, + { + description: "should fail with invalid_scope", + setup: func() { + oauthClient.Scopes = []string{"openid"} + fositeStore.Clients["device-client"].(*fosite.DefaultClient).Scopes = []string{"profile"} + }, + err: true, + check: func(t *testing.T, token *goauth.DeviceAuthResponse, err error) { + assert.ErrorContains(t, 
err, "invalid_scope") + }, + cleanUp: func() { + oauthClient.Scopes = []string{} + fositeStore.Clients["device-client"].(*fosite.DefaultClient).Scopes = []string{"fosite", "offline", "openid"} + }, + }, + { + description: "should fail with invalid_client", + setup: func() { + oauthClient.ClientID = "123" + }, + err: true, + check: func(t *testing.T, token *goauth.DeviceAuthResponse, err error) { + assert.ErrorContains(t, err, "invalid_client") + }, + cleanUp: func() { + oauthClient.ClientID = "device-client" + }, + }, + { + description: "should pass", + setup: func() {}, + err: false, + }, + } { + t.Run(fmt.Sprintf("description=%s", c.description), func(t *testing.T) { + c.setup() + + resp, err := oauthClient.DeviceAuth(t.Context()) + require.Equalf(t, c.err, err != nil, "got %+v", err) + if !c.err { + assert.NotEmpty(t, resp.DeviceCode) + assert.NotEmpty(t, resp.UserCode) + assert.NotEmpty(t, resp.Interval) + assert.NotEmpty(t, resp.VerificationURI) + assert.NotEmpty(t, resp.VerificationURIComplete) + } + + if c.check != nil { + c.check(t, resp, err) + } + + if c.cleanUp != nil { + c.cleanUp() + } + }) + } +} + +func exchangeForAccessToken(t *testing.T) { + session := newIDSession(&jwt.IDTokenClaims{Subject: "peter"}) + + fc := &fosite.Config{ + DeviceVerificationURL: "https://example.com/", + RefreshTokenLifespan: -1, + GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-"), + DeviceAuthTokenPollingInterval: -1, + } + f := compose.ComposeAllEnabled(fc, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + + for _, c := range []struct { + description string + updateClients func(*fosite.DefaultClient, *goauth.Config) + params []goauth.AuthCodeOption + check func(t *testing.T, token *goauth.Token, cl *goauth.Config, err error) + }{ + { + description: "should fail with invalid grant type", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("grant_type", "invalid_grant_type")}, + check: func(t *testing.T, _ *goauth.Token, _ *goauth.Config, err error) { + assert.ErrorContains(t, err, "invalid_request") + }, + }, + { + description: "should fail with wrong grant type", + updateClients: func(cl *fosite.DefaultClient, _ *goauth.Config) { + cl.GrantTypes = []string{"authorization_code"} + }, + params: []goauth.AuthCodeOption{}, + check: func(t *testing.T, _ *goauth.Token, _ *goauth.Config, err error) { + assert.ErrorContains(t, err, "unauthorized_client") + }, + }, + { + description: "should fail with invalid device code", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("device_code", "invalid_device_code")}, + check: func(t *testing.T, _ *goauth.Token, _ *goauth.Config, err error) { + assert.ErrorContains(t, err, "invalid_grant") + }, + }, + { + description: "should fail with invalid client id", + updateClients: func(_ *fosite.DefaultClient, cl *goauth.Config) { + cl.ClientID = uuidx.NewV4().String() + }, + check: func(t *testing.T, _ *goauth.Token, _ *goauth.Config, err error) { + assert.ErrorContains(t, err, "invalid_client") + }, + }, + { + description: "should pass", + check: func(t *testing.T, token *goauth.Token, cl *goauth.Config, err error) { + assert.Equal(t, "bearer", token.TokenType) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.RefreshToken) + assert.NotEmpty(t, token.Extra("id_token")) + + tokenSource := cl.TokenSource(t.Context(), token) + refreshed, err := tokenSource.Token() + require.NoError(t, err) + + assert.NotEmpty(t, refreshed.AccessToken) + assert.NotEmpty(t, refreshed.RefreshToken) + assert.NotEmpty(t, 
refreshed.Extra("id_token")) + }, + }, + } { + t.Run(fmt.Sprintf("description=%s", c.description), func(t *testing.T) { + clientID := uuidx.NewV4().String() + fCl := &fosite.DefaultClient{ + ID: clientID, + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code", "refresh_token"}, + Scopes: []string{"fosite", "offline", "openid"}, + Audience: []string{tokenURL}, + Public: true, + } + cl := &goauth.Config{ + ClientID: clientID, + ClientSecret: "foobar", + Endpoint: goauth.Endpoint{ + TokenURL: ts.URL + tokenRelativePath, + DeviceAuthURL: ts.URL + deviceAuthRelativePath, + }, + Scopes: []string{"openid", "fosite", "offline"}, + } + + fositeStore.Clients[fCl.ID] = fCl + + resp, err := cl.DeviceAuth(t.Context()) + require.NoError(t, err) + + if c.updateClients != nil { + c.updateClients(fCl, cl) + fositeStore.Clients[fCl.ID] = fCl + } + + resp.Interval = 1 // speed up tests + deviceCodeSignature, err := compose.NewDeviceStrategy(fc).DeviceCodeSignature(t.Context(), resp.DeviceCode) + require.NoError(t, err) + + req, err := fositeStore.GetDeviceCodeSession(t.Context(), deviceCodeSignature, nil) + require.NoError(t, err) + require.NoError(t, fositeStore.CreateOpenIDConnectSession(t.Context(), deviceCodeSignature, req)) + + d := fositeStore.DeviceAuths[deviceCodeSignature] + d.SetUserCodeState(fosite.UserCodeAccepted) + fositeStore.DeviceAuths[deviceCodeSignature] = d + + t.Parallel() + + token, err := cl.DeviceAccessToken(t.Context(), resp, c.params...) + + c.check(t, token, cl, err) + }) + } +} diff --git a/fosite/integration/authorize_form_post_test.go b/fosite/integration/authorize_form_post_test.go new file mode 100644 index 00000000000..52c61f8b94f --- /dev/null +++ b/fosite/integration/authorize_form_post_test.go @@ -0,0 +1,221 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" + "testing" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/jwt" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" +) + +type formPostTestCase struct { + description string + setup func() + check checkFunc + responseType string +} + +type checkFunc func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) + +func TestAuthorizeFormPostResponseMode(t *testing.T) { + session := &defaultSession{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }, + } + config := &fosite.Config{ResponseModeHandlerExtension: &decoratedFormPostResponse{}, GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-")} + f := compose.ComposeAllEnabled(config, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + defaultClient := fositeStore.Clients["my-client"].(*fosite.DefaultClient) + defaultClient.RedirectURIs[0] = ts.URL + "/callback" + responseModeClient := 
&fosite.DefaultResponseModeClient{ + DefaultClient: defaultClient, + ResponseModes: []fosite.ResponseModeType{fosite.ResponseModeFormPost, fosite.ResponseModeFormPost, "decorated_form_post"}, + } + fositeStore.Clients["response-mode-client"] = responseModeClient + oauthClient.ClientID = "response-mode-client" + + var state string + for k, c := range []formPostTestCase{ + { + description: "implicit grant #1 test with form_post", + responseType: "id_token%20token", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, token.TokenType) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.Expiry) + assert.NotEmpty(t, iDToken) + }, + }, + { + description: "implicit grant #2 test with form_post", + responseType: "id_token", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, iDToken) + }, + }, + { + description: "Authorization code grant test with form_post", + responseType: "code", + setup: func() { + state = "12345678901234567890" + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + }, + }, + { + description: "Hybrid #1 grant test with form_post", + responseType: "token%20code", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + assert.NotEmpty(t, token.TokenType) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.Expiry) + }, + }, + { + description: "Hybrid #2 grant test with form_post", + responseType: "token%20id_token%20code", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + assert.NotEmpty(t, iDToken) + assert.NotEmpty(t, token.TokenType) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.Expiry) + }, + }, + { + description: "Hybrid #3 grant test with form_post", + responseType: "id_token%20code", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + assert.NotEmpty(t, iDToken) + }, + }, + { + description: "error message test for form_post response", + responseType: "foo", + setup: func() { + state = "12345678901234567890" + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + 
assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, err["ErrorField"]) + assert.NotEmpty(t, err["DescriptionField"]) + }, + }, + } { + // Test canonical form_post + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), testFormPost(&state, false, c, oauthClient, "form_post")) + + // Test decorated form_post response + c.check = decorateCheck(c.check) + t.Run(fmt.Sprintf("case=%d/description=decorated_%s", k, c.description), testFormPost(&state, true, c, oauthClient, "decorated_form_post")) + } +} + +func testFormPost(state *string, customResponse bool, c formPostTestCase, oauthClient *goauth.Config, responseMode string) func(t *testing.T) { + return func(t *testing.T) { + c.setup() + authURL := strings.Replace(oauthClient.AuthCodeURL(*state, goauth.SetAuthURLParam("response_mode", responseMode), goauth.SetAuthURLParam("nonce", "111111111")), "response_type=code", "response_type="+c.responseType, -1) + client := &http.Client{ + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return errors.New("Dont follow redirects") + }, + } + resp, err := client.Get(authURL) + require.NoError(t, err) + require.Equal(t, http.StatusOK, resp.StatusCode) + code, state, token, iDToken, cparam, errResp := internal.ParseFormPostResponse(t, fositeStore.Clients["response-mode-client"].GetRedirectURIs()[0], resp.Body) + c.check(t, state, code, iDToken, token, cparam, errResp) + } +} + +func decorateCheck(cf checkFunc) checkFunc { + return func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, cparam url.Values, err map[string]string) { + cf(t, stateFromServer, code, token, iDToken, cparam, err) + if len(err) > 0 { + assert.Contains(t, cparam, "custom_err_param") + return + } + assert.Contains(t, cparam, "custom_param") + } +} + +// This test type provides an example implementation +// of a custom response mode handler. 
+// In this case it decorates the `form_post` response mode +// with some additional custom parameters +type decoratedFormPostResponse struct { +} + +func (m *decoratedFormPostResponse) ResponseModes() fosite.ResponseModeTypes { + return fosite.ResponseModeTypes{"decorated_form_post"} +} + +func (m *decoratedFormPostResponse) WriteAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, ar fosite.AuthorizeRequester, resp fosite.AuthorizeResponder) { + rw.Header().Add("Content-Type", "text/html;charset=UTF-8") + resp.AddParameter("custom_param", "foo") + fosite.WriteAuthorizeFormPostResponse(ar.GetRedirectURI().String(), resp.GetParameters(), fosite.GetPostFormHTMLTemplate(ctx, + fosite.NewOAuth2Provider(nil, new(fosite.Config))), rw) +} + +func (m *decoratedFormPostResponse) WriteAuthorizeError(ctx context.Context, rw http.ResponseWriter, ar fosite.AuthorizeRequester, err error) { + rfcerr := fosite.ErrorToRFC6749Error(err) + errors := rfcerr.ToValues() + errors.Set("state", ar.GetState()) + errors.Add("custom_err_param", "bar") + fosite.WriteAuthorizeFormPostResponse(ar.GetRedirectURI().String(), errors, fosite.GetPostFormHTMLTemplate(ctx, + fosite.NewOAuth2Provider(nil, new(fosite.Config))), rw) +} diff --git a/fosite/integration/authorize_implicit_grant_test.go b/fosite/integration/authorize_implicit_grant_test.go new file mode 100644 index 00000000000..6f2541fdd38 --- /dev/null +++ b/fosite/integration/authorize_implicit_grant_test.go @@ -0,0 +1,130 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "strings" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +func TestAuthorizeImplicitFlow(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runTestAuthorizeImplicitGrant(t, strategy) + } +} + +func runTestAuthorizeImplicitGrant(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2AuthorizeImplicitFactory, compose.OAuth2TokenIntrospectionFactory) + ts := mockServer(t, f, &fosite.DefaultSession{}) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + var state string + for k, c := range []struct { + description string + setup func() + check func(t *testing.T, r *http.Response) + params []goauth.AuthCodeOption + authStatusCode int + }{ + { + description: "should fail because of audience", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("audience", "https://www.ory.sh/not-api")}, + setup: func() { + state = "12345678901234567890" + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should fail because of scope", + params: []goauth.AuthCodeOption{}, + setup: func() { + oauthClient.Scopes = []string{"not-exist"} + state = "12345678901234567890" + }, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should pass with proper audience", + params: []goauth.AuthCodeOption{goauth.SetAuthURLParam("audience", "https://www.ory.sh/api")}, + setup: func() { + state = 
"12345678901234567890" + oauthClient.Scopes = []string{"fosite"} + }, + check: func(t *testing.T, r *http.Response) { + var b fosite.AccessRequest + b.Client = new(fosite.DefaultClient) + b.Session = new(defaultSession) + require.NoError(t, json.NewDecoder(r.Body).Decode(&b)) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.RequestedAudience) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.GrantedAudience) + assert.EqualValues(t, "foo-sub", b.Session.(*defaultSession).Subject) + }, + authStatusCode: http.StatusOK, + }, + { + description: "should pass", + setup: func() { + state = "12345678901234567890" + }, + authStatusCode: http.StatusOK, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + var callbackURL *url.URL + authURL := strings.Replace(oauthClient.AuthCodeURL(state, c.params...), "response_type=code", "response_type=token", -1) + client := &http.Client{ + CheckRedirect: func(req *http.Request, via []*http.Request) error { + callbackURL = req.URL + return errors.New("Dont follow redirects") + }, + } + resp, err := client.Get(authURL) + require.Error(t, err) + + if resp.StatusCode == http.StatusOK { + fragment, err := url.ParseQuery(callbackURL.Fragment) + require.NoError(t, err) + expires, err := strconv.Atoi(fragment.Get("expires_in")) + require.NoError(t, err) + token := &goauth.Token{ + AccessToken: fragment.Get("access_token"), + TokenType: fragment.Get("token_type"), + RefreshToken: fragment.Get("refresh_token"), + Expiry: time.Now().UTC().Add(time.Duration(expires) * time.Second), + } + + httpClient := oauthClient.Client(goauth.NoContext, token) + resp, err := httpClient.Get(ts.URL + "/info") + require.NoError(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + if c.check != nil { + c.check(t, resp) + } + } + }) + } +} diff --git a/fosite/integration/authorize_jwt_bearer_required_iat_test.go b/fosite/integration/authorize_jwt_bearer_required_iat_test.go new file mode 100644 index 00000000000..6704ced42a4 --- /dev/null +++ b/fosite/integration/authorize_jwt_bearer_required_iat_test.go @@ -0,0 +1,112 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/go-jose/go-jose/v3/jwt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/integration/clients" +) + +type authorizeJWTBearerRequiredIATSuite struct { + suite.Suite + + client *clients.JWTBearer +} + +func (s *authorizeJWTBearerRequiredIATSuite) TestBadResponseWithoutIssuedAt() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + ID: uuid.New().String(), + }, + }, []string{"fosite"}) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerRequiredIATSuite) TestSuccessResponseWithIssuedAt() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: 
&jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + ID: uuid.New().String(), + }, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerRequiredIATSuite) getClient() *clients.JWTBearer { + client := *s.client + + return &client +} + +func (s *authorizeJWTBearerRequiredIATSuite) assertSuccessResponse(t *testing.T, token *clients.Token, err error) { + require.NoError(t, err) + require.NotNil(t, token) + + assert.Equal(t, token.TokenType, "bearer") + assert.Empty(t, token.RefreshToken) + assert.NotEmpty(t, token.ExpiresIn) + assert.NotEmpty(t, token.AccessToken) +} + +func (s *authorizeJWTBearerRequiredIATSuite) assertBadResponse(t *testing.T, token *clients.Token, err error) { + assert.Nil(t, token) + assert.NotNil(t, err) + + retrieveError, ok := err.(*clients.RequestError) + assert.True(t, ok) + assert.Equal(t, retrieveError.Response.StatusCode, http.StatusBadRequest) +} + +func TestAuthorizeJWTBearerRequiredIATSuite(t *testing.T) { + provider := compose.Compose( + &fosite.Config{ + GrantTypeJWTBearerCanSkipClientAuth: true, + GrantTypeJWTBearerIDOptional: true, + GrantTypeJWTBearerIssuedDateOptional: false, + TokenURL: tokenURL, + }, + fositeStore, + jwtStrategyProvider, + compose.OAuth2ClientCredentialsGrantFactory, + compose.RFC7523AssertionGrantFactory, + ) + testServer := mockServer(t, provider, &fosite.DefaultSession{}) + defer testServer.Close() + + client := newJWTBearerAppClient(testServer) + if err := client.SetPrivateKey(firstKeyID, firstPrivateKey); err != nil { + assert.Nil(t, err) + } + + suite.Run(t, &authorizeJWTBearerRequiredIATSuite{ + client: client, + }) +} diff --git a/fosite/integration/authorize_jwt_bearer_required_jti_test.go b/fosite/integration/authorize_jwt_bearer_required_jti_test.go new file mode 100644 index 00000000000..1f15797b3a0 --- /dev/null +++ b/fosite/integration/authorize_jwt_bearer_required_jti_test.go @@ -0,0 +1,110 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "net/http" + "testing" + "time" + + "github.com/go-jose/go-jose/v3/jwt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/integration/clients" +) + +type authorizeJWTBearerRequiredJtiSuite struct { + suite.Suite + + client *clients.JWTBearer +} + +func (s *authorizeJWTBearerRequiredJtiSuite) TestBadResponseWithoutJTI() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite"}) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerRequiredJtiSuite) TestSuccessResponseWithJTI() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: 
[]string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + ID: uuid.New().String(), + }, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerRequiredJtiSuite) getClient() *clients.JWTBearer { + client := *s.client + + return &client +} + +func (s *authorizeJWTBearerRequiredJtiSuite) assertSuccessResponse(t *testing.T, token *clients.Token, err error) { + assert.Nil(t, err) + assert.NotNil(t, token) + + assert.Equal(t, token.TokenType, "bearer") + assert.Empty(t, token.RefreshToken) + assert.NotEmpty(t, token.ExpiresIn) + assert.NotEmpty(t, token.AccessToken) +} + +func (s *authorizeJWTBearerRequiredJtiSuite) assertBadResponse(t *testing.T, token *clients.Token, err error) { + assert.Nil(t, token) + assert.NotNil(t, err) + + retrieveError, ok := err.(*clients.RequestError) + assert.True(t, ok) + assert.Equal(t, retrieveError.Response.StatusCode, http.StatusBadRequest) +} + +func TestAuthorizeJWTBearerRequiredJtiSuite(t *testing.T) { + provider := compose.Compose( + &fosite.Config{ + GrantTypeJWTBearerCanSkipClientAuth: true, + GrantTypeJWTBearerIDOptional: false, + GrantTypeJWTBearerIssuedDateOptional: true, + TokenURL: tokenURL, + }, + fositeStore, + jwtStrategyProvider, + compose.OAuth2ClientCredentialsGrantFactory, + compose.RFC7523AssertionGrantFactory, + ) + testServer := mockServer(t, provider, &fosite.DefaultSession{}) + defer testServer.Close() + + client := newJWTBearerAppClient(testServer) + if err := client.SetPrivateKey(firstKeyID, firstPrivateKey); err != nil { + assert.Nil(t, err) + } + + suite.Run(t, &authorizeJWTBearerRequiredJtiSuite{ + client: client, + }) +} diff --git a/fosite/integration/authorize_jwt_bearer_test.go b/fosite/integration/authorize_jwt_bearer_test.go new file mode 100644 index 00000000000..d04aca7a727 --- /dev/null +++ b/fosite/integration/authorize_jwt_bearer_test.go @@ -0,0 +1,433 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "errors" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/go-jose/go-jose/v3/jwt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/integration/clients" +) + +type authorizeJWTBearerSuite struct { + suite.Suite + + client *clients.JWTBearer +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithRequiredParamsOnly() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithMultipleAudienceInAssertion() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL, "https://example.com/oauth"}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: 
jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithMultipleScopesInRequest() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite", "gitlab"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithoutScopes() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithExtraClaim() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + PrivateClaims: map[string]interface{}{"extraClaim": "extraClaimValue"}, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithNotBeforeClaim() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + NotBefore: jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseWithJTIClaim() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + ID: uuid.New().String(), + }, + }, []string{"fosite"}) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponse() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL, "example.com"}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + NotBefore: jwt.NewNumericDate(time.Now().Add(-time.Hour)), + ID: uuid.New().String(), + }, + PrivateClaims: map[string]interface{}{"random": "random"}, + }, nil) + + s.assertSuccessResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithExpiredJWT() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, 
&clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(-time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite"}) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithExpiryMaxDuration() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(365 * 24 * time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite"}) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithInvalidPrivateKey() { + ctx := context.Background() + client := s.getClient() + wrongPrivateKey := secondPrivateKey + + if err := client.SetPrivateKey(firstKeyID, wrongPrivateKey); err != nil { + assert.Nil(s.T(), err) + } + + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithInvalidKeyID() { + ctx := context.Background() + client := s.getClient() + + if err := client.SetPrivateKey("wrongKeyID", firstPrivateKey); err != nil { + assert.Nil(s.T(), err) + } + + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithInvalidAudience() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{"https://example.com/oauth"}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseForSecondRequestWithSameJTI() { + ctx := context.Background() + client := s.getClient() + config := &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + ID: uuid.New().String(), + }, + } + + _, err := client.GetToken(ctx, config, nil) + require.NoError(s.T(), err) + token2, err := client.GetToken(ctx, config, nil) + + s.assertBadResponse(s.T(), token2, err) +} + +func (s *authorizeJWTBearerSuite) TestSuccessResponseForSecondRequestWithSameJTIAfterFirstExpired() { + ctx := context.Background() + client := s.getClient() + config := &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Second)), + IssuedAt: 
jwt.NewNumericDate(time.Now().Add(-time.Hour)), + ID: uuid.New().String(), + }, + } + + _, err := client.GetToken(ctx, config, nil) + require.NoError(s.T(), err) + + time.Sleep(time.Second) + config.Expiry = jwt.NewNumericDate(time.Now().Add(time.Hour)) + + token2, err := client.GetToken(ctx, config, nil) + + s.assertSuccessResponse(s.T(), token2, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithNotBeforeLaterThenIssueAt() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + NotBefore: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithoutSubject() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: "", + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithWrongSubject() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: "wrong_subject", + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithWrongIssuer() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: "wrong_issuer", + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, nil) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) TestBadResponseWithWrongScope() { + ctx := context.Background() + client := s.getClient() + token, err := client.GetToken(ctx, &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + }, []string{"fosite", "permission"}) + + s.assertBadResponse(s.T(), token, err) +} + +func (s *authorizeJWTBearerSuite) getClient() *clients.JWTBearer { + client := *s.client + + return &client +} + +func (s *authorizeJWTBearerSuite) assertSuccessResponse(t *testing.T, token *clients.Token, err error) { + assert.Nil(t, err) + require.NotNil(t, token) + + assert.Equal(t, token.TokenType, "bearer") + assert.Empty(t, token.RefreshToken) + assert.NotEmpty(t, token.ExpiresIn) + assert.NotEmpty(t, token.AccessToken) +} + +func (s *authorizeJWTBearerSuite) assertBadResponse(t *testing.T, token *clients.Token, err error) { + assert.Nil(t, token) + assert.NotNil(t, err) + + var retrieveError *clients.RequestError + ok := errors.As(err, &retrieveError) + assert.True(t, ok) + assert.Equal(t, retrieveError.Response.StatusCode, 
http.StatusBadRequest) +} + +func TestAuthorizeJWTBearerSuite(t *testing.T) { + provider := compose.Compose( + &fosite.Config{ + GrantTypeJWTBearerCanSkipClientAuth: true, + GrantTypeJWTBearerIDOptional: true, + GrantTypeJWTBearerIssuedDateOptional: true, + GrantTypeJWTBearerMaxDuration: 24 * time.Hour, + TokenURL: tokenURL, + }, + fositeStore, + jwtStrategyProvider, + compose.OAuth2ClientCredentialsGrantFactory, + compose.RFC7523AssertionGrantFactory, + ) + testServer := mockServer(t, provider, &fosite.DefaultSession{}) + defer testServer.Close() + + client := newJWTBearerAppClient(testServer) + if err := client.SetPrivateKey(firstKeyID, firstPrivateKey); err != nil { + assert.Nil(t, err) + } + + suite.Run(t, &authorizeJWTBearerSuite{ + client: client, + }) +} diff --git a/fosite/integration/authorize_response_mode_test.go b/fosite/integration/authorize_response_mode_test.go new file mode 100644 index 00000000000..6b87a667a87 --- /dev/null +++ b/fosite/integration/authorize_response_mode_test.go @@ -0,0 +1,280 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "fmt" + "net/http" + "net/url" + "strconv" + "strings" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/stretchr/testify/assert" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/token/jwt" + + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" +) + +func TestAuthorizeResponseModes(t *testing.T) { + session := &defaultSession{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }, + } + f := compose.ComposeAllEnabled(&fosite.Config{ + UseLegacyErrorFormat: true, + GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-"), + }, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + defaultClient := fositeStore.Clients["my-client"].(*fosite.DefaultClient) + defaultClient.RedirectURIs[0] = ts.URL + "/callback" + responseModeClient := &fosite.DefaultResponseModeClient{ + DefaultClient: defaultClient, + ResponseModes: []fosite.ResponseModeType{}, + } + fositeStore.Clients["response-mode-client"] = responseModeClient + oauthClient.ClientID = "response-mode-client" + + var state string + for k, c := range []struct { + description string + setup func() + check func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) + responseType string + responseMode string + }{ + { + description: "Should give err because implicit grant with response mode query", + responseType: "id_token%20token", + responseMode: "query", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeQuery} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + assert.NotEmpty(t, err["ErrorField"]) + assert.NotEmpty(t, err["DescriptionField"]) + assert.Equal(t, "Insecure response_mode 'query' for the response_type '[id_token token]'.", 
err["HintField"]) + }, + }, + { + description: "Should pass implicit grant with response mode form_post", + responseType: "id_token%20token", + responseMode: "form_post", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeFormPost} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, token.TokenType) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.Expiry) + assert.NotEmpty(t, iDToken) + }, + }, + { + description: "Should fail because response mode form_post is not allowed by the client", + responseType: "id_token%20token", + responseMode: "form_post", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeQuery} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + assert.NotEmpty(t, err["ErrorField"]) + assert.NotEmpty(t, err["DescriptionField"]) + assert.Equal(t, "The client is not allowed to request response_mode 'form_post'.", err["HintField"]) + }, + }, + { + description: "Should fail because response mode form_post is not allowed by the client without legacy format", + responseType: "id_token%20token", + responseMode: "form_post", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeQuery} + f.(*fosite.Fosite).Config.(*fosite.Config).UseLegacyErrorFormat = false + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + f.(*fosite.Fosite).Config.(*fosite.Config).UseLegacyErrorFormat = true // reset + assert.NotEmpty(t, err["ErrorField"]) + assert.Contains(t, err["DescriptionField"], "The client is not allowed to request response_mode 'form_post'.") + assert.Empty(t, err["HintField"]) + }, + }, + { + description: "Should pass Authorization code grant test with response mode fragment", + responseType: "code", + responseMode: "fragment", + setup: func() { + state = "12345678901234567890" + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeFragment} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + }, + }, + { + description: "Should pass Authorization code grant test with response mode form_post", + responseType: "code", + responseMode: "form_post", + setup: func() { + state = "12345678901234567890" + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeFormPost} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + }, + }, + { + description: "Should fail Hybrid grant test with query", + responseType: "token%20code", + responseMode: "query", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeQuery} + }, + check: func(t *testing.T, 
stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + //assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, err["ErrorField"]) + assert.NotEmpty(t, err["DescriptionField"]) + assert.Equal(t, "Insecure response_mode 'query' for the response_type '[token code]'.", err["HintField"]) + }, + }, + { + description: "Should fail Hybrid grant test with query without legacy fields", + responseType: "token%20code", + responseMode: "query", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeQuery} + f.(*fosite.Fosite).Config.(*fosite.Config).UseLegacyErrorFormat = false + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + f.(*fosite.Fosite).Config.(*fosite.Config).UseLegacyErrorFormat = true // reset + + //assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, err["ErrorField"]) + assert.Contains(t, err["DescriptionField"], "Insecure response_mode 'query' for the response_type '[token code]'.") + assert.Empty(t, err["HintField"]) + assert.Empty(t, err["DebugField"]) + }, + }, + { + description: "Should pass Hybrid grant test with form_post", + responseType: "token%20code", + responseMode: "form_post", + setup: func() { + state = "12345678901234567890" + oauthClient.Scopes = []string{"openid"} + responseModeClient.ResponseModes = []fosite.ResponseModeType{fosite.ResponseModeFormPost} + }, + check: func(t *testing.T, stateFromServer string, code string, token goauth.Token, iDToken string, err map[string]string) { + assert.EqualValues(t, state, stateFromServer) + assert.NotEmpty(t, code) + assert.NotEmpty(t, token.TokenType) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.Expiry) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + authURL := strings.Replace(oauthClient.AuthCodeURL(state, goauth.SetAuthURLParam("response_mode", c.responseMode), goauth.SetAuthURLParam("nonce", "111111111")), "response_type=code", "response_type="+c.responseType, -1) + + var ( + callbackURL *url.URL + redirErr = errors.New("Dont follow redirects") + ) + + client := &http.Client{ + CheckRedirect: func(req *http.Request, via []*http.Request) error { + callbackURL = req.URL + return redirErr + }, + } + + var ( + code, state, iDToken string + token goauth.Token + errResp map[string]string + ) + + resp, err := client.Get(authURL) + if fosite.ResponseModeType(c.responseMode) == fosite.ResponseModeFragment { + // fragment + require.EqualError(t, errors.Unwrap(err), redirErr.Error()) + fragment, err := url.ParseQuery(callbackURL.Fragment) + require.NoError(t, err) + code, state, iDToken, token, errResp = getParameters(t, fragment) + } else if fosite.ResponseModeType(c.responseMode) == fosite.ResponseModeQuery { + // query + require.EqualError(t, errors.Unwrap(err), redirErr.Error()) + query, err := url.ParseQuery(callbackURL.RawQuery) + require.NoError(t, err) + code, state, iDToken, token, errResp = getParameters(t, query) + } else if fosite.ResponseModeType(c.responseMode) == fosite.ResponseModeFormPost { + // form_post + require.NoError(t, err) + code, state, iDToken, token, _, errResp = internal.ParseFormPostResponse(t, fositeStore.Clients["response-mode-client"].GetRedirectURIs()[0], resp.Body) + } else { + t.FailNow() + } + + c.check(t, state, code, token, iDToken, 
errResp) + }) + } +} + +func getParameters(t *testing.T, param url.Values) (code, state, iDToken string, token goauth.Token, errResp map[string]string) { + errResp = make(map[string]string) + if param.Get("error") != "" { + errResp["ErrorField"] = param.Get("error") + errResp["DescriptionField"] = param.Get("error_description") + errResp["HintField"] = param.Get("error_hint") + } else { + code = param.Get("code") + state = param.Get("state") + iDToken = param.Get("id_token") + token = goauth.Token{ + AccessToken: param.Get("access_token"), + TokenType: param.Get("token_type"), + RefreshToken: param.Get("refresh_token"), + } + if param.Get("expires_in") != "" { + expires, err := strconv.Atoi(param.Get("expires_in")) + require.NoError(t, err) + token.Expiry = time.Now().UTC().Add(time.Duration(expires) * time.Second) + } + } + return +} diff --git a/fosite/integration/client_credentials_grant_test.go b/fosite/integration/client_credentials_grant_test.go new file mode 100644 index 00000000000..bba3b546004 --- /dev/null +++ b/fosite/integration/client_credentials_grant_test.go @@ -0,0 +1,150 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestClientCredentialsFlow(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runClientCredentialsGrantTest(t, strategy) + } +} + +func introspect(t *testing.T, ts *httptest.Server, token string, p interface{}, username, password string) { + req, err := http.NewRequest("POST", ts.URL+"/introspect", strings.NewReader(url.Values{"token": {token}}.Encode())) + require.NoError(t, err) + req.SetBasicAuth(username, password) + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + r, err := http.DefaultClient.Do(req) + require.NoError(t, err) + defer func(Body io.ReadCloser) { _ = Body.Close() }(r.Body) + body, err := io.ReadAll(r.Body) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, r.StatusCode, "%s", body) + require.NoError(t, json.Unmarshal(body, p)) +} + +func runClientCredentialsGrantTest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2ClientCredentialsGrantFactory, compose.OAuth2TokenIntrospectionFactory) + ts := mockServer(t, f, &fosite.DefaultSession{}) + defer ts.Close() + + oauthClient := newOAuth2AppClient(ts) + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + fositeStore.Clients["custom-lifespan-client"].(*fosite.DefaultClientWithCustomTokenLifespans).RedirectURIs[0] = ts.URL + "/callback" + for k, c := range []struct { + description string + setup func() + err bool + check func(t *testing.T, token *goauth.Token) + params url.Values + }{ + { + description: "should fail because of ungranted scopes", + setup: func() { + oauthClient.Scopes = []string{"unknown"} + }, + err: true, + }, + { + description: "should fail because of ungranted audience", + 
params: url.Values{"audience": {"https://www.ory.sh/not-api"}}, + setup: func() { + oauthClient.Scopes = []string{"fosite"} + }, + err: true, + }, + { + params: url.Values{"audience": {"https://www.ory.sh/api"}}, + description: "should pass", + setup: func() { + }, + check: func(t *testing.T, token *goauth.Token) { + var j json.RawMessage + introspect(t, ts, token.AccessToken, &j, oauthClient.ClientID, oauthClient.ClientSecret) + assert.Equal(t, oauthClient.ClientID, gjson.GetBytes(j, "client_id").String()) + assert.Equal(t, "fosite", gjson.GetBytes(j, "scope").String()) + }, + }, + { + description: "should pass", + setup: func() { + }, + check: func(t *testing.T, token *goauth.Token) { + var j json.RawMessage + introspect(t, ts, token.AccessToken, &j, oauthClient.ClientID, oauthClient.ClientSecret) + introspect(t, ts, token.AccessToken, &j, oauthClient.ClientID, oauthClient.ClientSecret) + assert.Equal(t, oauthClient.ClientID, gjson.GetBytes(j, "client_id").String()) + assert.Equal(t, "fosite", gjson.GetBytes(j, "scope").String()) + atReq, ok := fositeStore.AccessTokens[strings.Split(token.AccessToken, ".")[1]] + require.True(t, ok) + atExp := atReq.GetSession().GetExpiresAt(fosite.AccessToken) + internal.RequireEqualTime(t, time.Now().UTC().Add(time.Hour), atExp, time.Minute) + atExpIn := time.Duration(token.Extra("expires_in").(float64)) * time.Second + internal.RequireEqualDuration(t, time.Hour, atExpIn, time.Minute) + }, + }, + { + description: "should pass with custom client token lifespans", + setup: func() { + oauthClient.ClientID = "custom-lifespan-client" + }, + check: func(t *testing.T, token *goauth.Token) { + var j json.RawMessage + introspect(t, ts, token.AccessToken, &j, oauthClient.ClientID, oauthClient.ClientSecret) + introspect(t, ts, token.AccessToken, &j, oauthClient.ClientID, oauthClient.ClientSecret) + assert.Equal(t, oauthClient.ClientID, gjson.GetBytes(j, "client_id").String()) + assert.Equal(t, "fosite", gjson.GetBytes(j, "scope").String()) + + atReq, ok := fositeStore.AccessTokens[strings.Split(token.AccessToken, ".")[1]] + require.True(t, ok) + atExp := atReq.GetSession().GetExpiresAt(fosite.AccessToken) + internal.RequireEqualTime(t, time.Now().UTC().Add(*internal.TestLifespans.ClientCredentialsGrantAccessTokenLifespan), atExp, time.Minute) + atExpIn := time.Duration(token.Extra("expires_in").(float64)) * time.Second + internal.RequireEqualDuration(t, *internal.TestLifespans.ClientCredentialsGrantAccessTokenLifespan, atExpIn, time.Minute) + rtExp := atReq.GetSession().GetExpiresAt(fosite.RefreshToken) + internal.RequireEqualTime(t, time.Time{}, rtExp, time.Minute) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.setup() + + oauthClient.EndpointParams = c.params + token, err := oauthClient.Token(t.Context()) + require.Equal(t, c.err, err != nil, "(%d) %s\n%s\n%s", k, c.description, c.err, err) + if !c.err { + assert.NotEmpty(t, token.AccessToken, "(%d) %s\n%s", k, c.description, token) + } + + if c.check != nil { + c.check(t, token) + } + + t.Logf("Passed test case %d", k) + }) + } +} diff --git a/fosite/integration/clients/error.go b/fosite/integration/clients/error.go new file mode 100644 index 00000000000..46dc0a350fc --- /dev/null +++ b/fosite/integration/clients/error.go @@ -0,0 +1,18 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package clients + +import ( + "fmt" + "net/http" +) + +type RequestError struct { + Response *http.Response + Body []byte +} + +func (r *RequestError) Error() string { + 
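+ // The status line and the raw response body are both included so that failed token and
+ // introspection requests surface the server's error payload directly in test output.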
return fmt.Sprintf("oauth2: cannot fetch token: %v\nResponse: %s", r.Response.Status, r.Body) +} diff --git a/fosite/integration/clients/introspect.go b/fosite/integration/clients/introspect.go new file mode 100644 index 00000000000..9124a359e1c --- /dev/null +++ b/fosite/integration/clients/introspect.go @@ -0,0 +1,102 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/url" + "strings" +) + +type IntrospectForm struct { + Token string + Scopes []string +} + +type IntrospectResponse struct { + Active bool `json:"active"` + ClientID string `json:"client_id,omitempty"` + Scope string `json:"scope,omitempty"` + Audience []string `json:"aud,omitempty"` + ExpiresAt int64 `json:"exp,omitempty"` + IssuedAt int64 `json:"iat,omitempty"` + Subject string `json:"sub,omitempty"` + Username string `json:"username,omitempty"` +} + +type Introspect struct { + endpointURL string + client *http.Client +} + +func (c *Introspect) IntrospectToken( + ctx context.Context, + form IntrospectForm, + header map[string]string, +) (*IntrospectResponse, error) { + data := url.Values{} + data.Set("token", form.Token) + data.Set("scope", strings.Join(form.Scopes, " ")) + + request, err := c.getRequest(ctx, data, header) + if err != nil { + return nil, err + } + + response, err := c.client.Do(request) + if err != nil { + return nil, err + } + + defer func(Body io.ReadCloser) { _ = Body.Close() }(response.Body) + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, err + } + + if c := response.StatusCode; c < 200 || c > 299 { + return nil, &RequestError{ + Response: response, + Body: body, + } + } + + result := &IntrospectResponse{} + + if err := json.Unmarshal(body, result); err != nil { + return nil, err + } + + return result, nil +} + +func (c *Introspect) getRequest( + ctx context.Context, + data url.Values, + header map[string]string, +) (*http.Request, error) { + request, err := http.NewRequestWithContext(ctx, "POST", c.endpointURL, strings.NewReader(data.Encode())) + if err != nil { + return nil, err + } + + request.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + for header, value := range header { + request.Header.Set(header, value) + } + + return request, nil +} + +func NewIntrospectClient(endpointURL string) *Introspect { + return &Introspect{ + endpointURL: endpointURL, + client: &http.Client{}, + } +} diff --git a/fosite/integration/clients/jwt_bearer.go b/fosite/integration/clients/jwt_bearer.go new file mode 100644 index 00000000000..4436136b9bc --- /dev/null +++ b/fosite/integration/clients/jwt_bearer.go @@ -0,0 +1,134 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package clients + +import ( + "context" + "crypto/rsa" + "encoding/json" + "io" + "net/http" + "net/url" + "strings" + + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/jwt" +) + +// #nosec:gosec G101 - False Positive +const jwtBearerGrantType = "urn:ietf:params:oauth:grant-type:jwt-bearer" + +type JWTBearer struct { + tokenURL string + client *http.Client + + Signer jose.Signer +} + +type Token struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type,omitempty"` + RefreshToken string `json:"refresh_token,omitempty"` + ExpiresIn int64 `json:"expires_in,omitempty"` +} + +type Header struct { + Algorithm string `json:"alg"` + Typ string `json:"typ"` + KeyID string `json:"kid,omitempty"` +} + +type 
JWTBearerPayload struct { + *jwt.Claims + + PrivateClaims map[string]interface{} +} + +func (c *JWTBearer) SetPrivateKey(keyID string, privateKey *rsa.PrivateKey) error { + jwk := jose.JSONWebKey{Key: privateKey, KeyID: keyID, Algorithm: string(jose.RS256)} + signingKey := jose.SigningKey{ + Algorithm: jose.RS256, + Key: jwk, + } + signerOptions := &jose.SignerOptions{} + signerOptions.WithType("JWT") + + sig, err := jose.NewSigner(signingKey, signerOptions) + if err != nil { + return err + } + + c.Signer = sig + + return nil +} + +func (c *JWTBearer) GetToken(ctx context.Context, payloadData *JWTBearerPayload, scope []string) (*Token, error) { + builder := jwt.Signed(c.Signer). + Claims(payloadData.Claims). + Claims(payloadData.PrivateClaims) + + assertion, err := builder.CompactSerialize() + if err != nil { + return nil, err + } + + requestBodyReader, err := c.getRequestBodyReader(assertion, scope) + if err != nil { + return nil, err + } + + request, err := http.NewRequestWithContext(ctx, "POST", c.tokenURL, requestBodyReader) + if err != nil { + return nil, err + } + + request.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + response, err := c.client.Do(request) + if err != nil { + return nil, err + } + + defer func(Body io.ReadCloser) { _ = Body.Close() }(response.Body) + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, err + } + + if c := response.StatusCode; c < 200 || c > 299 { + return nil, &RequestError{ + Response: response, + Body: body, + } + } + + token := &Token{} + + if err := json.Unmarshal(body, token); err != nil { + return nil, err + } + + return token, err +} + +func (c *JWTBearer) getRequestBodyReader(assertion string, scope []string) (io.Reader, error) { + data := url.Values{} + data.Set("grant_type", jwtBearerGrantType) + data.Set("assertion", string(assertion)) + + if len(scope) != 0 { + data.Set("scope", strings.Join(scope, " ")) + } + + return strings.NewReader(data.Encode()), nil +} + +func NewJWTBearer(tokenURL string) *JWTBearer { + return &JWTBearer{ + client: &http.Client{}, + tokenURL: tokenURL, + } +} diff --git a/fosite/integration/helper_endpoints_test.go b/fosite/integration/helper_endpoints_test.go new file mode 100644 index 00000000000..6961d200708 --- /dev/null +++ b/fosite/integration/helper_endpoints_test.go @@ -0,0 +1,217 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "net/http" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +func tokenRevocationHandler(t *testing.T, oauth2 fosite.OAuth2Provider, session fosite.Session) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + ctx := fosite.NewContext() + err := oauth2.NewRevocationRequest(ctx, req) + if err != nil { + t.Logf("Revoke request failed because %+v", err) + } + oauth2.WriteRevocationResponse(req.Context(), rw, err) + } +} + +func tokenIntrospectionHandler(t *testing.T, oauth2 fosite.OAuth2Provider, session fosite.Session) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + ctx := fosite.NewContext() + ar, err := oauth2.NewIntrospectionRequest(ctx, req, session) + if err != nil { + t.Logf("Introspection request failed 
because: %+v", err) + oauth2.WriteIntrospectionError(req.Context(), rw, err) + return + } + + oauth2.WriteIntrospectionResponse(req.Context(), rw, ar) + } +} + +func tokenInfoHandler(t *testing.T, oauth2 fosite.OAuth2Provider, session fosite.Session) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + ctx := fosite.NewContext() + _, resp, err := oauth2.IntrospectToken(ctx, fosite.AccessTokenFromRequest(req), fosite.AccessToken, session) + if err != nil { + t.Logf("Info request failed because: %+v", err) + var e *fosite.RFC6749Error + require.True(t, errors.As(err, &e)) + http.Error(rw, e.DescriptionField, e.CodeField) + return + } + + t.Logf("Introspecting caused: %+v", resp) + + if err := json.NewEncoder(rw).Encode(resp); err != nil { + panic(err) + } + } +} + +func authEndpointHandler(t *testing.T, oauth2 fosite.OAuth2Provider, session fosite.Session) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + ctx := fosite.NewContext() + + ar, err := oauth2.NewAuthorizeRequest(ctx, req) + if err != nil { + t.Logf("Access request failed because: %+v", err) + t.Logf("Request: %+v", ar) + oauth2.WriteAuthorizeError(req.Context(), rw, ar, err) + return + } + + if ar.GetRequestedScopes().Has("fosite") { + ar.GrantScope("fosite") + } + + if ar.GetRequestedScopes().Has("offline") { + ar.GrantScope("offline") + } + + if ar.GetRequestedScopes().Has("openid") { + ar.GrantScope("openid") + } + + for _, a := range ar.GetRequestedAudience() { + ar.GrantAudience(a) + } + + // Normally, this would be the place where you would check if the user is logged in and gives his consent. + // For this test, let's assume that the user exists, is logged in, and gives his consent... 
+ + response, err := oauth2.NewAuthorizeResponse(ctx, ar, session) + if err != nil { + t.Logf("Access request failed because: %+v", err) + t.Logf("Request: %+v", ar) + oauth2.WriteAuthorizeError(req.Context(), rw, ar, err) + return + } + + oauth2.WriteAuthorizeResponse(req.Context(), rw, ar, response) + } +} + +func authCallbackHandler(t *testing.T) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + q := req.URL.Query() + if q.Get("code") == "" && q.Get("error") == "" { + assert.NotEmpty(t, q.Get("code")) + assert.NotEmpty(t, q.Get("error")) + } + + if q.Get("code") != "" { + _, _ = rw.Write([]byte("code: ok")) + } + if q.Get("error") != "" { + rw.WriteHeader(http.StatusNotAcceptable) + _, _ = rw.Write([]byte("error: " + q.Get("error"))) + } + + } +} + +func tokenEndpointHandler(t *testing.T, provider fosite.OAuth2Provider) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + _ = req.ParseMultipartForm(1 << 20) + ctx := fosite.NewContext() + + accessRequest, err := provider.NewAccessRequest(ctx, req, &oauth2.JWTSession{}) + if err != nil { + t.Logf("Access request failed because: %+v", err) + t.Logf("Request: %+v", accessRequest) + provider.WriteAccessError(req.Context(), rw, accessRequest, err) + return + } + + if accessRequest.GetRequestedScopes().Has("fosite") { + accessRequest.GrantScope("fosite") + } + + response, err := provider.NewAccessResponse(ctx, accessRequest) + if err != nil { + t.Logf("Access request failed because: %+v", err) + t.Logf("Request: %+v", accessRequest) + provider.WriteAccessError(req.Context(), rw, accessRequest, err) + return + } + + provider.WriteAccessResponse(req.Context(), rw, accessRequest, response) + } +} + +func pushedAuthorizeRequestHandler(t *testing.T, oauth2 fosite.OAuth2Provider, session fosite.Session) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + ctx := fosite.NewContext() + + ar, err := oauth2.NewPushedAuthorizeRequest(ctx, req) + if err != nil { + t.Logf("PAR request failed because: %+v", err) + t.Logf("Request: %+v", ar) + oauth2.WritePushedAuthorizeError(ctx, rw, ar, err) + return + } + + response, err := oauth2.NewPushedAuthorizeResponse(ctx, ar, session) + if err != nil { + t.Logf("PAR response failed because: %+v", err) + t.Logf("Request: %+v", ar) + oauth2.WritePushedAuthorizeError(ctx, rw, ar, err) + return + } + + oauth2.WritePushedAuthorizeResponse(ctx, rw, ar, response) + } +} + +func deviceAuthorizationEndpointHandler(t *testing.T, oauth2 fosite.OAuth2Provider, session fosite.Session) func(rw http.ResponseWriter, req *http.Request) { + return func(rw http.ResponseWriter, req *http.Request) { + ctx := fosite.NewContext() + + r, err := oauth2.NewDeviceRequest(ctx, req) + if err != nil { + t.Logf("Device auth request failed because: %+v", err) + t.Logf("Request: %+v", r) + oauth2.WriteAccessError(ctx, rw, r, err) + return + } + + if r.GetRequestedScopes().Has("fosite") { + r.GrantScope("fosite") + } + + if r.GetRequestedScopes().Has("offline") { + r.GrantScope("offline") + } + + if r.GetRequestedScopes().Has("openid") { + r.GrantScope("openid") + } + + for _, a := range r.GetRequestedAudience() { + r.GrantAudience(a) + } + + response, err := oauth2.NewDeviceResponse(ctx, r, session) + if err != nil { + t.Logf("Device auth response failed because: %+v", err) + t.Logf("Request: %+v", r) + oauth2.WriteAccessError(ctx, rw, r, err) + return + } + + 
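+ // On success, write the RFC 8628 device authorization response back to the client.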
oauth2.WriteDeviceResponse(ctx, rw, r, response) + } +} diff --git a/fosite/integration/helper_setup_test.go b/fosite/integration/helper_setup_test.go new file mode 100644 index 00000000000..0820be8fec9 --- /dev/null +++ b/fosite/integration/helper_setup_test.go @@ -0,0 +1,218 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "crypto" + "crypto/rand" + "crypto/rsa" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + goauth "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/integration/clients" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/internal/gen" + "github.com/ory/hydra/v2/fosite/storage" + "github.com/ory/hydra/v2/fosite/token/hmac" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +const ( + firstKeyID = "123" + secondKeyID = "321" + + firstJWTBearerIssuer = "first@example.com" + secondJWTBearerIssuer = "second@example.com" + + firstJWTBearerSubject = "first-service-client" + secondJWTBearerSubject = "second-service-client" + + tokenURL = "https://www.ory.sh/api" + tokenRelativePath = "/token" + + deviceAuthRelativePath = "/device/auth" +) + +var ( + firstPrivateKey, _ = rsa.GenerateKey(rand.Reader, 2048) + secondPrivateKey, _ = rsa.GenerateKey(rand.Reader, 2048) +) + +var fositeStore = &storage.MemoryStore{ + Clients: map[string]fosite.Client{ + "my-client": &fosite.DefaultClient{ + ID: "my-client", + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token", "token code", "id_token code", "token id_token", "token code id_token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials", "urn:ietf:params:oauth:grant-type:device_code"}, + Scopes: []string{"fosite", "offline", "openid"}, + Audience: []string{tokenURL}, + }, + "custom-lifespan-client": &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + ID: "custom-lifespan-client", + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + RotatedSecrets: [][]byte{[]byte(`$2y$10$X51gLxUQJ.hGw1epgHTE5u0bt64xM0COU7K9iAp.OFg8p2pUd.1zC `)}, // = "foobaz", + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token", "id_token token", "code id_token", "code token", "code id_token token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scopes: []string{"fosite", "openid", "photos", "offline"}, + }, + TokenLifespans: &internal.TestLifespans, + }, + "public-client": &fosite.DefaultClient{ + ID: "public-client", + Secret: []byte{}, + Public: true, + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "code id_token"}, + GrantTypes: []string{"refresh_token", "authorization_code"}, + Scopes: []string{"fosite", "offline", "openid"}, + Audience: []string{tokenURL}, + }, + 
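+ // Public client dedicated to the device authorization grant tests; it is limited to the
+ // device_code and refresh_token grant types.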
"device-client": &fosite.DefaultClient{ + ID: "device-client", + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code", "refresh_token"}, + Scopes: []string{"fosite", "offline", "openid"}, + Audience: []string{tokenURL}, + Public: true, + }, + }, + Users: map[string]storage.MemoryUserRelation{ + "peter": { + Username: "peter", + Password: "secret", + }, + }, + IssuerPublicKeys: map[string]storage.IssuerPublicKeys{ + firstJWTBearerIssuer: createIssuerPublicKey( + firstJWTBearerIssuer, + firstJWTBearerSubject, + firstKeyID, + firstPrivateKey.Public(), + []string{"fosite", "gitlab", "example.com", "docker"}, + ), + secondJWTBearerIssuer: createIssuerPublicKey( + secondJWTBearerIssuer, + secondJWTBearerSubject, + secondKeyID, + secondPrivateKey.Public(), + []string{"fosite"}, + ), + }, + BlacklistedJTIs: map[string]time.Time{}, + AuthorizeCodes: map[string]storage.StoreAuthorizeCode{}, + PKCES: map[string]fosite.Requester{}, + AccessTokens: map[string]fosite.Requester{}, + RefreshTokens: map[string]storage.StoreRefreshToken{}, + IDSessions: map[string]fosite.Requester{}, + AccessTokenRequestIDs: map[string]string{}, + RefreshTokenRequestIDs: map[string]string{}, + PARSessions: map[string]fosite.AuthorizeRequester{}, + DeviceAuths: map[string]fosite.DeviceRequester{}, + DeviceCodesRequestIDs: map[string]storage.DeviceAuthPair{}, + UserCodesRequestIDs: map[string]string{}, +} + +type defaultSession struct { + *openid.DefaultSession +} + +var accessTokenLifespan = time.Hour + +var authCodeLifespan = time.Minute + +func createIssuerPublicKey(issuer, subject, keyID string, key crypto.PublicKey, scopes []string) storage.IssuerPublicKeys { + return storage.IssuerPublicKeys{ + Issuer: issuer, + KeysBySub: map[string]storage.SubjectPublicKeys{ + subject: { + Subject: subject, + Keys: map[string]storage.PublicKeyScopes{ + keyID: { + Key: &jose.JSONWebKey{ + Key: key, + Algorithm: string(jose.RS256), + Use: "sig", + KeyID: keyID, + }, + Scopes: scopes, + }, + }, + }, + }, + } +} + +func newOAuth2Client(ts *httptest.Server) *goauth.Config { + return &goauth.Config{ + ClientID: "my-client", + ClientSecret: "foobar", + RedirectURL: ts.URL + "/callback", + Scopes: []string{"fosite"}, + Endpoint: goauth.Endpoint{ + TokenURL: ts.URL + tokenRelativePath, + AuthURL: ts.URL + "/auth", + AuthStyle: goauth.AuthStyleInHeader, + }, + } +} + +func newOAuth2AppClient(ts *httptest.Server) *clientcredentials.Config { + return &clientcredentials.Config{ + ClientID: "my-client", + ClientSecret: "foobar", + Scopes: []string{"fosite"}, + TokenURL: ts.URL + tokenRelativePath, + } +} + +func newJWTBearerAppClient(ts *httptest.Server) *clients.JWTBearer { + return clients.NewJWTBearer(ts.URL + tokenRelativePath) +} + +var ( + defaultConfig = &fosite.Config{AccessTokenLifespan: accessTokenLifespan, AuthorizeCodeLifespan: authCodeLifespan} + defaultRSAKey = gen.MustRSAKey() + defaultSigner = &jwt.DefaultSigner{GetPrivateKey: func(ctx context.Context) (interface{}, error) { return defaultRSAKey, nil }} + hmacStrategy = oauth2.NewHMACSHAStrategy( + &hmac.HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("some-super-cool-secret-that-nobody-knows")}}, + defaultConfig, + ) + hmacStrategyProvider = &compose.CommonStrategyProvider{CoreStrategy: hmacStrategy, Signer: defaultSigner} + jwtStrategy = &oauth2.DefaultJWTStrategy{ + Signer: defaultSigner, + Config: defaultConfig, + } + jwtStrategyProvider = 
&compose.CommonStrategyProvider{CoreStrategy: hmacStrategy, AccessTokenStrat: jwtStrategy, Signer: defaultSigner} +) + +func mockServer(t *testing.T, f fosite.OAuth2Provider, session fosite.Session) *httptest.Server { + router := http.NewServeMux() + router.HandleFunc("/auth", authEndpointHandler(t, f, session)) + router.HandleFunc(tokenRelativePath, tokenEndpointHandler(t, f)) + router.HandleFunc("/callback", authCallbackHandler(t)) + router.HandleFunc("/info", tokenInfoHandler(t, f, session)) + router.HandleFunc("/introspect", tokenIntrospectionHandler(t, f, session)) + router.HandleFunc("/revoke", tokenRevocationHandler(t, f, session)) + router.HandleFunc("/par", pushedAuthorizeRequestHandler(t, f, session)) + router.HandleFunc(deviceAuthRelativePath, deviceAuthorizationEndpointHandler(t, f, session)) + + ts := httptest.NewServer(router) + t.Cleanup(ts.Close) + return ts +} diff --git a/fosite/integration/introspect_jwt_bearer_token_test.go b/fosite/integration/introspect_jwt_bearer_token_test.go new file mode 100644 index 00000000000..6c16fe6cb8d --- /dev/null +++ b/fosite/integration/introspect_jwt_bearer_token_test.go @@ -0,0 +1,273 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "net/http" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/go-jose/go-jose/v3/jwt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/integration/clients" +) + +type introspectJWTBearerTokenSuite struct { + suite.Suite + + clientJWT *clients.JWTBearer + clientIntrospect *clients.Introspect + clientTokenPayload *clients.JWTBearerPayload + appTokenPayload *clients.JWTBearerPayload + + authorizationHeader string + scopes []string + audience []string +} + +func (s *introspectJWTBearerTokenSuite) SetupTest() { + s.scopes = []string{"fosite"} + s.audience = []string{tokenURL, "https://example.com"} + + s.clientTokenPayload = &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: firstJWTBearerIssuer, + Subject: firstJWTBearerSubject, + Audience: s.audience, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, + } + + s.appTokenPayload = &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: secondJWTBearerIssuer, + Subject: secondJWTBearerSubject, + Audience: s.audience, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, + } +} + +func (s *introspectJWTBearerTokenSuite) TestSuccessResponseWithMultipleScopesToken() { + ctx := context.Background() + + scopes := []string{"fosite", "docker"} + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, scopes) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: nil, + }, + map[string]string{"Authorization": s.authorizationHeader}, + ) + + s.assertSuccessResponse(s.T(), response, err, firstJWTBearerSubject) + assert.Equal(s.T(), strings.Split(response.Scope, " "), scopes) +} + +func (s *introspectJWTBearerTokenSuite) TestUnActiveResponseWithInvalidScopes() { + ctx := context.Background() + + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, s.scopes) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + 
clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: []string{"invalid"}, + }, + map[string]string{"Authorization": s.authorizationHeader}, + ) + + require.NoError(s.T(), err) + assert.NotNil(s.T(), response) + assert.False(s.T(), response.Active) +} + +func (s *introspectJWTBearerTokenSuite) TestSuccessResponseWithoutScopesForIntrospection() { + ctx := context.Background() + + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, s.scopes) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: nil, + }, + map[string]string{"Authorization": s.authorizationHeader}, + ) + + s.assertSuccessResponse(s.T(), response, err, firstJWTBearerSubject) +} + +func (s *introspectJWTBearerTokenSuite) TestSuccessResponseWithoutScopes() { + ctx := context.Background() + + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, nil) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: nil, + }, + map[string]string{"Authorization": s.authorizationHeader}, + ) + + s.assertSuccessResponse(s.T(), response, err, firstJWTBearerSubject) +} + +func (s *introspectJWTBearerTokenSuite) TestSubjectHasAccessToScopeButNotInited() { + ctx := context.Background() + + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, nil) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: s.scopes, + }, + map[string]string{"Authorization": s.authorizationHeader}, + ) + + require.NoError(s.T(), err) + assert.NotNil(s.T(), response) + assert.False(s.T(), response.Active) +} + +func (s *introspectJWTBearerTokenSuite) TestTheSameTokenInRequestAndHeader() { + ctx := context.Background() + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, s.scopes) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: nil, + }, + map[string]string{"Authorization": "bearer " + token.AccessToken}, + ) + + s.assertUnauthorizedResponse(s.T(), response, err) +} + +func (s *introspectJWTBearerTokenSuite) TestUnauthorizedResponseForRequestWithoutAuthorization() { + ctx := context.Background() + token, err := s.getJWTClient().GetToken(ctx, s.clientTokenPayload, s.scopes) + require.NoError(s.T(), err) + + response, err := s.clientIntrospect.IntrospectToken( + ctx, + clients.IntrospectForm{ + Token: token.AccessToken, + Scopes: nil, + }, + nil, + ) + + s.assertUnauthorizedResponse(s.T(), response, err) +} + +func (s *introspectJWTBearerTokenSuite) getJWTClient() *clients.JWTBearer { + client := *s.clientJWT + + return &client +} + +func (s *introspectJWTBearerTokenSuite) assertSuccessResponse( + t *testing.T, + response *clients.IntrospectResponse, + err error, + subject string, +) { + assert.Nil(t, err) + assert.NotNil(t, response) + + assert.True(t, response.Active) + assert.Equal(t, response.Subject, subject) + assert.NotEmpty(t, response.ExpiresAt) + assert.NotEmpty(t, response.IssuedAt) + assert.Equal(t, response.Audience, s.audience) + + tokenDuration := time.Unix(response.ExpiresAt, 0).Sub(time.Unix(response.IssuedAt, 0)) + assert.Less(t, int64(tokenDuration), int64(time.Hour+time.Minute)) + assert.Greater(t, int64(tokenDuration), int64(time.Hour-time.Minute)) +} + +func (s 
*introspectJWTBearerTokenSuite) assertUnauthorizedResponse( + t *testing.T, + response *clients.IntrospectResponse, + err error, +) { + assert.Nil(t, response) + assert.NotNil(t, err) + + retrieveError, ok := err.(*clients.RequestError) + assert.True(t, ok) + assert.Equal(t, retrieveError.Response.StatusCode, http.StatusUnauthorized) +} + +func TestIntrospectJWTBearerTokenSuite(t *testing.T) { + provider := compose.Compose( + &fosite.Config{ + GrantTypeJWTBearerCanSkipClientAuth: true, + GrantTypeJWTBearerIDOptional: true, + GrantTypeJWTBearerIssuedDateOptional: true, + AccessTokenLifespan: time.Hour, + TokenURL: tokenURL, + }, + fositeStore, + jwtStrategyProvider, + compose.OAuth2ClientCredentialsGrantFactory, + compose.RFC7523AssertionGrantFactory, + compose.OAuth2TokenIntrospectionFactory, + ) + testServer := mockServer(t, provider, &fosite.DefaultSession{}) + defer testServer.Close() + + client := newJWTBearerAppClient(testServer) + if err := client.SetPrivateKey(secondKeyID, secondPrivateKey); err != nil { + assert.Nil(t, err) + } + + token, err := client.GetToken(context.Background(), &clients.JWTBearerPayload{ + Claims: &jwt.Claims{ + Issuer: secondJWTBearerIssuer, + Subject: secondJWTBearerSubject, + Audience: []string{tokenURL}, + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, + }, []string{"fosite"}) + if err != nil { + assert.Nil(t, err) + } + + if err := client.SetPrivateKey(firstKeyID, firstPrivateKey); err != nil { + assert.Nil(t, err) + } + + suite.Run(t, &introspectJWTBearerTokenSuite{ + clientJWT: client, + clientIntrospect: clients.NewIntrospectClient(testServer.URL + "/introspect"), + authorizationHeader: "bearer " + token.AccessToken, + }) +} diff --git a/fosite/integration/introspect_token_test.go b/fosite/integration/introspect_token_test.go new file mode 100644 index 00000000000..db39fa94551 --- /dev/null +++ b/fosite/integration/introspect_token_test.go @@ -0,0 +1,129 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/parnurzeal/gorequest" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +func TestIntrospectToken(t *testing.T) { + for _, c := range []struct { + description string + strategy oauth2.CoreStrategyProvider + factory compose.Factory + }{ + { + description: "HMAC strategy with OAuth2TokenIntrospectionFactory", + strategy: hmacStrategyProvider, + factory: compose.OAuth2TokenIntrospectionFactory, + }, + { + description: "JWT strategy with OAuth2TokenIntrospectionFactory", + strategy: jwtStrategyProvider, + factory: compose.OAuth2TokenIntrospectionFactory, + }, + { + description: "JWT strategy with OAuth2StatelessJWTIntrospectionFactory", + strategy: jwtStrategyProvider, + factory: compose.OAuth2StatelessJWTIntrospectionFactory, + }, + } { + t.Run(c.description, func(t *testing.T) { + t.Parallel() + runIntrospectTokenTest(t, c.strategy, c.factory) + }) + } +} + +func runIntrospectTokenTest(t *testing.T, strategy oauth2.CoreStrategyProvider, introspectionFactory compose.Factory) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2ClientCredentialsGrantFactory, introspectionFactory) + ts := mockServer(t, f, 
&fosite.DefaultSession{}) + defer ts.Close() + + oauthClient := newOAuth2AppClient(ts) + a, err := oauthClient.Token(goauth.NoContext) + require.NoError(t, err) + b, err := oauthClient.Token(goauth.NoContext) + require.NoError(t, err) + + for k, c := range []struct { + prepare func(*gorequest.SuperAgent) *gorequest.SuperAgent + isActive bool + scopes string + }{ + { + prepare: func(s *gorequest.SuperAgent) *gorequest.SuperAgent { + return s.SetBasicAuth(oauthClient.ClientID, oauthClient.ClientSecret) + }, + isActive: true, + scopes: "", + }, + { + prepare: func(s *gorequest.SuperAgent) *gorequest.SuperAgent { + return s.Set("Authorization", "bearer "+a.AccessToken) + }, + isActive: true, + scopes: "fosite", + }, + { + prepare: func(s *gorequest.SuperAgent) *gorequest.SuperAgent { + return s.Set("Authorization", "bearer "+a.AccessToken) + }, + isActive: true, + scopes: "", + }, + { + prepare: func(s *gorequest.SuperAgent) *gorequest.SuperAgent { + return s.Set("Authorization", "bearer "+a.AccessToken) + }, + isActive: false, + scopes: "foo", + }, + { + prepare: func(s *gorequest.SuperAgent) *gorequest.SuperAgent { + return s.Set("Authorization", "bearer "+b.AccessToken) + }, + isActive: false, + scopes: "", + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + res := struct { + Active bool `json:"active"` + ClientId string `json:"client_id"` + Scope string `json:"scope"` + ExpiresAt float64 `json:"exp"` + IssuedAt float64 `json:"iat"` + }{} + s := gorequest.New() + s = s.Post(ts.URL + "/introspect"). + Type("form"). + SendStruct(map[string]string{"token": b.AccessToken, "scope": c.scopes}) + _, bytes, errs := c.prepare(s).End() + + assert.Nil(t, json.Unmarshal([]byte(bytes), &res)) + t.Logf("Got answer: %s", bytes) + + assert.Len(t, errs, 0) + assert.Equal(t, c.isActive, res.Active) + if c.isActive { + assert.Equal(t, "fosite", res.Scope) + assert.True(t, res.ExpiresAt > 0) + assert.True(t, res.IssuedAt > 0) + assert.True(t, res.IssuedAt < res.ExpiresAt) + } + }) + } +} diff --git a/fosite/integration/oidc_explicit_test.go b/fosite/integration/oidc_explicit_test.go new file mode 100644 index 00000000000..7355e4d0fd3 --- /dev/null +++ b/fosite/integration/oidc_explicit_test.go @@ -0,0 +1,213 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "fmt" + "io" + "net/http" + "strings" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func newIDSession(j *jwt.IDTokenClaims) *defaultSession { + return &defaultSession{ + DefaultSession: &openid.DefaultSession{ + Claims: j, + Headers: &jwt.Headers{}, + Subject: j.Subject, + }, + } +} + +func TestOpenIDConnectExplicitFlow(t *testing.T) { + f := compose.ComposeAllEnabled(&fosite.Config{ + GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-")}, fositeStore, gen.MustRSAKey()) + + for k, c := range []struct { + description string + setup func(oauthClient *oauth2.Config) string + authStatusCode int + authCodeURL string + session *defaultSession + expectAuthErr string + expectTokenErr string + }{ + { + session: newIDSession(&jwt.IDTokenClaims{Subject: 
"peter"}), + description: "should pass", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=11234123" + }, + authStatusCode: http.StatusOK, + }, + { + session: newIDSession(&jwt.IDTokenClaims{Subject: "peter"}), + description: "should fail registered single redirect uri but no redirect uri in request", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + oauthClient.RedirectURL = "" + + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=11234123" + }, + authStatusCode: http.StatusBadRequest, + expectAuthErr: `{"error":"invalid_request","error_description":"The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. The 'redirect_uri' parameter is required when using OpenID Connect 1.0."}`, + }, + { + session: newIDSession(&jwt.IDTokenClaims{Subject: "peter"}), + description: "should fail registered single redirect uri but no redirect uri in request", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + oauthClient.RedirectURL = "" + + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=11234123" + }, + authStatusCode: http.StatusBadRequest, + expectAuthErr: `{"error":"invalid_request","error_description":"The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. The 'redirect_uri' parameter is required when using OpenID Connect 1.0."}`, + }, + { + session: newIDSession(&jwt.IDTokenClaims{Subject: "peter"}), + description: "should fail because nonce is not long enough", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=1" + }, + authStatusCode: http.StatusOK, + expectTokenErr: "insufficient_entropy", + }, + { + session: newIDSession(&jwt.IDTokenClaims{ + Subject: "peter", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().Add(time.Second).UTC(), + }), + description: "should not pass missing redirect uri", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.RedirectURL = "" + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=1234567890&prompt=login" + }, + expectAuthErr: `{"error":"invalid_request","error_description":"The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. 
The 'redirect_uri' parameter is required when using OpenID Connect 1.0."}`, + authStatusCode: http.StatusBadRequest, + }, + { + session: newIDSession(&jwt.IDTokenClaims{Subject: "peter"}), + description: "should fail because state is not long enough", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("123") + "&nonce=1234567890" + }, + expectAuthErr: "invalid_state", + authStatusCode: http.StatusNotAcceptable, // code from internal test callback handler when error occurs + }, + { + session: newIDSession(&jwt.IDTokenClaims{ + Subject: "peter", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().Add(time.Second).UTC(), + }), + description: "should pass", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=1234567890&prompt=login" + }, + authStatusCode: http.StatusOK, + }, + { + session: newIDSession(&jwt.IDTokenClaims{ + Subject: "peter", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().Add(time.Second).UTC(), + }), + description: "should not pass missing redirect uri", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.RedirectURL = "" + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=1234567890&prompt=login" + }, + expectAuthErr: `{"error":"invalid_request","error_description":"The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. The 'redirect_uri' parameter is required when using OpenID Connect 1.0."}`, + authStatusCode: http.StatusBadRequest, + }, + { + session: newIDSession(&jwt.IDTokenClaims{ + Subject: "peter", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().Add(-time.Minute).UTC(), + }), + description: "should fail because authentication was in the past", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=1234567890&prompt=login" + }, + authStatusCode: http.StatusNotAcceptable, // code from internal test callback handler when error occurs + expectAuthErr: "login_required", + }, + { + session: newIDSession(&jwt.IDTokenClaims{ + Subject: "peter", + RequestedAt: time.Now().UTC(), + AuthTime: time.Now().Add(-time.Minute).UTC(), + }), + description: "should pass because authorization was in the past and no login was required", + setup: func(oauthClient *oauth2.Config) string { + oauthClient.Scopes = []string{"openid"} + return oauthClient.AuthCodeURL("12345678901234567890") + "&nonce=1234567890&prompt=none" + }, + authStatusCode: http.StatusOK, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + ts := mockServer(t, f, c.session) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs = []string{ts.URL + "/callback"} + + resp, err := http.Get(c.setup(oauthClient)) + require.NoError(t, err) + defer func(body io.ReadCloser) { + require.NoError(t, body.Close()) + }(resp.Body) + + body, _ := io.ReadAll(resp.Body) + require.Equal(t, c.authStatusCode, resp.StatusCode, "Got response: %s", body) + if resp.StatusCode >= 400 { + assert.Equal(t, c.expectAuthErr, strings.Replace(string(body), "error: ", "", 1)) + } + + if c.expectAuthErr != "" { + assert.Empty(t, resp.Request.URL.Query().Get("code")) + } + + if 
resp.StatusCode == http.StatusOK { + time.Sleep(time.Second) + + token, err := oauthClient.Exchange(context.Background(), resp.Request.URL.Query().Get("code")) + if c.expectTokenErr != "" { + require.Error(t, err) + assert.True(t, strings.Contains(err.Error(), c.expectTokenErr), err.Error()) + } else { + require.NoError(t, err) + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.Extra("id_token")) + } + } + }) + } +} diff --git a/fosite/integration/oidc_implicit_hybrid_public_client_pkce_test.go b/fosite/integration/oidc_implicit_hybrid_public_client_pkce_test.go new file mode 100644 index 00000000000..e16763323be --- /dev/null +++ b/fosite/integration/oidc_implicit_hybrid_public_client_pkce_test.go @@ -0,0 +1,118 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "testing" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestOIDCImplicitFlowPublicClientPKCE(t *testing.T) { + session := &defaultSession{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }, + } + f := compose.ComposeAllEnabled(&fosite.Config{ + GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-"), + }, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + + oauthClient.ClientSecret = "" + oauthClient.ClientID = "public-client" + oauthClient.Scopes = []string{"openid"} + + fositeStore.Clients["public-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + var state = "12345678901234567890" + for k, c := range []struct { + responseType string + description string + nonce string + setup func() + codeVerifier string + codeChallenge string + }{ + { + + responseType: "id_token%20code", + nonce: "1111111111111111", + description: "should pass id token (id_token code) with PKCE applied.", + setup: func() {}, + codeVerifier: "e7343b9bee0847e3b589ccb60d124ff81adcba6067b84f79b092f86249111fdc", + codeChallenge: "J11vOtKUitab04a_N0Ogm0dQBytTgl0fgHzYk4xUryo", + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + var callbackURL *url.URL + authURL := strings.Replace(oauthClient.AuthCodeURL(state), "response_type=code", "response_type="+c.responseType, -1) + + "&nonce=" + c.nonce + "&code_challenge_method=S256&code_challenge=" + c.codeChallenge + client := &http.Client{ + CheckRedirect: func(req *http.Request, via []*http.Request) error { + callbackURL = req.URL + return errors.New("Dont follow redirects") + }, + } + _, err := client.Get(authURL) + require.Error(t, err) + + t.Logf("Response (%d): %s", k, callbackURL.String()) + fragment, err := url.ParseQuery(callbackURL.Fragment) + require.NoError(t, err) + + code := fragment.Get("code") + assert.NotEmpty(t, code) + + assert.NotEmpty(t, fragment.Get("id_token")) + + resp, err := http.PostForm(oauthClient.Endpoint.TokenURL, url.Values{ + "code": {code}, + "grant_type": 
{"authorization_code"}, + "client_id": {"public-client"}, + "redirect_uri": {ts.URL + "/callback"}, + "code_verifier": {c.codeVerifier}, + }) + require.NoError(t, err) + defer func(Body io.ReadCloser) { + _ = Body.Close() + }(resp.Body) + + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + + assert.Equal(t, resp.StatusCode, http.StatusOK) + token := goauth.Token{} + require.NoError(t, json.Unmarshal(body, &token)) + + require.NotEmpty(t, token.AccessToken, "Got body: %s", string(body)) + + t.Logf("Passed test case (%d) %s", k, c.description) + }) + } +} diff --git a/fosite/integration/oidc_implicit_hybrid_test.go b/fosite/integration/oidc_implicit_hybrid_test.go new file mode 100644 index 00000000000..427f46a0ff4 --- /dev/null +++ b/fosite/integration/oidc_implicit_hybrid_test.go @@ -0,0 +1,162 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strconv" + "strings" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestOIDCImplicitFlow(t *testing.T) { + session := &defaultSession{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "peter", + }, + Headers: &jwt.Headers{}, + }, + } + f := compose.ComposeAllEnabled(&fosite.Config{ + GlobalSecret: []byte("some-secret-thats-random-some-secret-thats-random-"), + }, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + var state = "12345678901234567890" + for k, c := range []struct { + responseType string + description string + nonce string + setup func() + hasToken bool + hasIdToken bool + hasCode bool + }{ + { + description: "should pass without id token", + responseType: "token", + setup: func() { + oauthClient.Scopes = []string{"fosite"} + }, + hasToken: true, + }, + { + + responseType: "id_token%20token", + nonce: "1111111111111111", + description: "should pass id token (id_token token)", + setup: func() { + oauthClient.Scopes = []string{"fosite", "openid"} + }, + hasToken: true, + hasIdToken: true, + }, + { + + responseType: "token%20id_token%20code", + nonce: "1111111111111111", + description: "should pass id token (code id_token token)", + setup: func() {}, + hasToken: true, + hasCode: true, + hasIdToken: true, + }, + { + + responseType: "token%20code", + nonce: "1111111111111111", + description: "should pass id token (code token)", + setup: func() {}, + hasToken: true, + hasCode: true, + }, + { + + responseType: "id_token%20code", + nonce: "1111111111111111", + description: "should pass id token (id_token code)", + setup: func() {}, + hasCode: true, + hasIdToken: true, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + var callbackURL *url.URL + authURL := strings.Replace(oauthClient.AuthCodeURL(state), "response_type=code", "response_type="+c.responseType, -1) + "&nonce=" + c.nonce + client := &http.Client{ + 
CheckRedirect: func(req *http.Request, via []*http.Request) error { + callbackURL = req.URL + return errors.New("Dont follow redirects") + }, + } + resp, err := client.Get(authURL) + require.Error(t, err) + + t.Logf("Response (%d): %s", k, callbackURL.String()) + fragment, err := url.ParseQuery(callbackURL.Fragment) + require.NoError(t, err) + + if c.hasToken { + assert.NotEmpty(t, fragment.Get("access_token")) + } else { + assert.Empty(t, fragment.Get("access_token")) + } + + if c.hasCode { + assert.NotEmpty(t, fragment.Get("code")) + } else { + assert.Empty(t, fragment.Get("code")) + } + + if c.hasIdToken { + assert.NotEmpty(t, fragment.Get("id_token")) + } else { + assert.Empty(t, fragment.Get("id_token")) + } + + if !c.hasToken { + return + } + + expires, err := strconv.Atoi(fragment.Get("expires_in")) + require.NoError(t, err) + + token := &oauth2.Token{ + AccessToken: fragment.Get("access_token"), + TokenType: fragment.Get("token_type"), + RefreshToken: fragment.Get("refresh_token"), + Expiry: time.Now().UTC().Add(time.Duration(expires) * time.Second), + } + + httpClient := oauthClient.Client(context.Background(), token) + resp, err = httpClient.Get(ts.URL + "/info") + require.NoError(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + t.Logf("Passed test case (%d) %s", k, c.description) + }) + } +} diff --git a/fosite/integration/placeholder.go b/fosite/integration/placeholder.go new file mode 100644 index 00000000000..eabb14bb766 --- /dev/null +++ b/fosite/integration/placeholder.go @@ -0,0 +1,4 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration diff --git a/fosite/integration/pushed_authorize_code_grant_test.go b/fosite/integration/pushed_authorize_code_grant_test.go new file mode 100644 index 00000000000..04747ca2114 --- /dev/null +++ b/fosite/integration/pushed_authorize_code_grant_test.go @@ -0,0 +1,190 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +func TestPushedAuthorizeCodeFlow(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runPushedAuthorizeCodeGrantTest(t, strategy) + } +} + +func runPushedAuthorizeCodeGrantTest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2AuthorizeExplicitFactory, compose.OAuth2TokenIntrospectionFactory, compose.PushedAuthorizeHandlerFactory) + ts := mockServer(t, f, &fosite.DefaultSession{Subject: "foo-sub"}) + defer ts.Close() + + oauthClient := newOAuth2Client(ts) + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + var state string + for k, c := range []struct { + description string + setup func() + check func(t *testing.T, r *http.Response) + params map[string]string + authStatusCode int + parStatusCode int + }{ + { + description: "should fail because of audience", + params: map[string]string{"audience": "https://www.ory.sh/not-api"}, + setup: func() { + oauthClient = newOAuth2Client(ts) + state = "12345678901234567890" + }, + parStatusCode: http.StatusBadRequest, + 
authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should fail because of scope", + params: nil, + setup: func() { + oauthClient = newOAuth2Client(ts) + oauthClient.Scopes = []string{"not-exist"} + state = "12345678901234567890" + }, + parStatusCode: http.StatusBadRequest, + authStatusCode: http.StatusNotAcceptable, + }, + { + description: "should pass with proper audience", + params: map[string]string{"audience": "https://www.ory.sh/api"}, + setup: func() { + oauthClient = newOAuth2Client(ts) + state = "12345678901234567890" + }, + check: func(t *testing.T, r *http.Response) { + var b fosite.AccessRequest + b.Client = new(fosite.DefaultClient) + b.Session = new(defaultSession) + require.NoError(t, json.NewDecoder(r.Body).Decode(&b)) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.RequestedAudience) + assert.EqualValues(t, fosite.Arguments{"https://www.ory.sh/api"}, b.GrantedAudience) + assert.EqualValues(t, "foo-sub", b.Session.(*defaultSession).Subject) + }, + parStatusCode: http.StatusCreated, + authStatusCode: http.StatusOK, + }, + { + description: "should pass", + setup: func() { + oauthClient = newOAuth2Client(ts) + state = "12345678901234567890" + }, + parStatusCode: http.StatusCreated, + authStatusCode: http.StatusOK, + }, + } { + t.Run(fmt.Sprintf("case=%d/description=%s", k, c.description), func(t *testing.T) { + c.setup() + + // build request from the OAuth client + data := url.Values{} + data.Set("client_id", oauthClient.ClientID) + data.Set("client_secret", oauthClient.ClientSecret) + data.Set("response_type", "code") + data.Set("state", state) + data.Set("scope", strings.Join(oauthClient.Scopes, " ")) + data.Set("redirect_uri", oauthClient.RedirectURL) + for k, v := range c.params { + data.Set(k, v) + } + + req, err := http.NewRequest("POST", ts.URL+"/par", strings.NewReader(data.Encode())) + require.NoError(t, err) + + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + resp, err := http.DefaultClient.Do(req) + + require.NoError(t, err) + + body, err := checkStatusAndGetBody(t, resp, c.parStatusCode) + require.NoError(t, err, "Unable to get body after PAR. 
Err=%v", err) + + if resp.StatusCode != http.StatusCreated { + return + } + + m := map[string]interface{}{} + err = json.Unmarshal(body, &m) + + assert.NoError(t, err, "Error occurred when unamrshaling the body: %v", err) + + // validate request_uri + requestURI, _ := m["request_uri"].(string) + assert.NotEmpty(t, requestURI, "request_uri is empty") + assert.Condition(t, func() bool { + return strings.HasPrefix(requestURI, "urn:ietf:params:oauth:request_uri:") + }, "PAR Prefix is incorrect: %s", requestURI) + + // validate expires_in + assert.EqualValues(t, 300, int(m["expires_in"].(float64)), "Invalid expires_in value=%v", m["expires_in"]) + + // call authorize + data = url.Values{} + data.Set("client_id", oauthClient.ClientID) + data.Set("request_uri", m["request_uri"].(string)) + req, err = http.NewRequest("POST", ts.URL+"/auth", strings.NewReader(data.Encode())) + require.NoError(t, err) + + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, c.authStatusCode, resp.StatusCode) + if resp.StatusCode != http.StatusOK { + return + } + + require.NotEmpty(t, resp.Request.URL.Query().Get("code"), "Auth code is empty") + + token, err := oauthClient.Exchange(t.Context(), resp.Request.URL.Query().Get("code")) + require.NoError(t, err) + require.NotEmpty(t, token.AccessToken) + + httpClient := oauthClient.Client(t.Context(), token) + resp, err = httpClient.Get(ts.URL + "/info") + require.NoError(t, err) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + if c.check != nil { + c.check(t, resp) + } + }) + } +} + +func checkStatusAndGetBody(t *testing.T, resp *http.Response, expectedStatusCode int) ([]byte, error) { + defer func(Body io.ReadCloser) { _ = Body.Close() }(resp.Body) + + require.Equal(t, expectedStatusCode, resp.StatusCode) + b, err := io.ReadAll(resp.Body) + if err == nil { + fmt.Printf("PAR response: body=%s\n", string(b)) + } + if expectedStatusCode != resp.StatusCode { + return nil, fmt.Errorf("invalid status code %d", resp.StatusCode) + } + + return b, err +} diff --git a/fosite/integration/refresh_token_grant_test.go b/fosite/integration/refresh_token_grant_test.go new file mode 100644 index 00000000000..39ed9d10d48 --- /dev/null +++ b/fosite/integration/refresh_token_grant_test.go @@ -0,0 +1,268 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/internal/gen" + "github.com/ory/x/uuidx" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" +) + +type introspectionResponse struct { + Active bool `json:"active"` + ClientID string `json:"client_id,omitempty"` + Scope string `json:"scope,omitempty"` + Audience []string `json:"aud,omitempty"` + ExpiresAt int64 `json:"exp,omitempty"` + IssuedAt int64 `json:"iat,omitempty"` + Subject string `json:"sub,omitempty"` + Username string `json:"username,omitempty"` +} + +func TestRefreshTokenFlow(t *testing.T) { + session := &defaultSession{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + 
Subject: "peter", + }, + Headers: &jwt.Headers{}, + Subject: "peter", + Username: "peteru", + }, + } + fc := new(fosite.Config) + fc.RefreshTokenLifespan = -1 + fc.GlobalSecret = []byte("some-secret-thats-random-some-secret-thats-random-") + f := compose.ComposeAllEnabled(fc, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + + refreshCheckClient := &fosite.DefaultClient{ + ID: "refresh-client", + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + RedirectURIs: []string{ts.URL + "/callback"}, + ResponseTypes: []string{"id_token", "code", "token", "token code", "id_token code", "token id_token", "token code id_token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scopes: []string{"fosite", "offline", "openid"}, + Audience: []string{"https://www.ory.sh/api"}, + } + fositeStore.Clients["refresh-client"] = refreshCheckClient + + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + for _, c := range []struct { + description string + setup func(t *testing.T, c *oauth2.Config) + pass bool + params []oauth2.AuthCodeOption + check func(t *testing.T, c *oauth2.Config, original, refreshed *oauth2.Token, or, rr *introspectionResponse) + beforeRefresh func(t *testing.T) + mockServer func(t *testing.T) *httptest.Server + }{ + { + description: "should fail because refresh scope missing", + setup: func(t *testing.T, c *oauth2.Config) { + c.Scopes = []string{"fosite"} + }, + pass: false, + }, + { + description: "should pass but not yield id token", + setup: func(t *testing.T, c *oauth2.Config) { + c.Scopes = []string{"offline"} + }, + pass: true, + check: func(t *testing.T, c *oauth2.Config, original, refreshed *oauth2.Token, or, rr *introspectionResponse) { + assert.NotEqual(t, original.RefreshToken, refreshed.RefreshToken) + assert.NotEqual(t, original.AccessToken, refreshed.AccessToken) + assert.Nil(t, refreshed.Extra("id_token")) + }, + }, + { + description: "should pass and yield id token", + params: []oauth2.AuthCodeOption{oauth2.SetAuthURLParam("audience", "https://www.ory.sh/api")}, + setup: func(t *testing.T, c *oauth2.Config) { + c.Scopes = []string{"fosite", "offline", "openid"} + }, + pass: true, + check: func(t *testing.T, c *oauth2.Config, original, refreshed *oauth2.Token, or, rr *introspectionResponse) { + assert.NotEqual(t, original.RefreshToken, refreshed.RefreshToken) + assert.NotEqual(t, original.AccessToken, refreshed.AccessToken) + assert.NotEqual(t, original.Extra("id_token"), refreshed.Extra("id_token")) + assert.NotNil(t, refreshed.Extra("id_token")) + + assert.NotEmpty(t, or.Audience) + assert.NotEmpty(t, or.ClientID) + assert.NotEmpty(t, or.Scope) + assert.NotEmpty(t, or.ExpiresAt) + assert.NotEmpty(t, or.IssuedAt) + assert.True(t, or.Active) + assert.EqualValues(t, "peter", or.Subject) + assert.EqualValues(t, "peteru", or.Username) + + assert.EqualValues(t, or.Audience, rr.Audience) + assert.EqualValues(t, or.ClientID, rr.ClientID) + assert.EqualValues(t, or.Scope, rr.Scope) + assert.NotEqual(t, or.ExpiresAt, rr.ExpiresAt) + assert.True(t, or.ExpiresAt < rr.ExpiresAt) + assert.NotEqual(t, or.IssuedAt, rr.IssuedAt) + assert.True(t, or.IssuedAt < rr.IssuedAt) + assert.EqualValues(t, or.Active, rr.Active) + assert.EqualValues(t, or.Subject, rr.Subject) + assert.EqualValues(t, or.Username, rr.Username) + }, + }, + { 
+ description: "should fail because scope is no longer allowed", + setup: func(t *testing.T, c *oauth2.Config) { + c.ClientID = refreshCheckClient.ID + c.Scopes = []string{"fosite", "offline", "openid"} + }, + beforeRefresh: func(t *testing.T) { + refreshCheckClient.Scopes = []string{"offline", "openid"} + }, + pass: false, + }, + { + description: "should fail because audience is no longer allowed", + params: []oauth2.AuthCodeOption{oauth2.SetAuthURLParam("audience", "https://www.ory.sh/api")}, + setup: func(t *testing.T, c *oauth2.Config) { + c.ClientID = refreshCheckClient.ID + c.Scopes = []string{"fosite", "offline", "openid"} + refreshCheckClient.Scopes = []string{"fosite", "offline", "openid"} + }, + beforeRefresh: func(t *testing.T) { + refreshCheckClient.Audience = []string{"https://www.not-ory.sh/api"} + }, + pass: false, + }, + { + description: "should fail with expired refresh token", + setup: func(t *testing.T, c *oauth2.Config) { + fc = new(fosite.Config) + fc.RefreshTokenLifespan = time.Nanosecond + fc.GlobalSecret = []byte("some-secret-thats-random-some-secret-thats-random-") + f = compose.ComposeAllEnabled(fc, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + + *c = *newOAuth2Client(ts) + c.Scopes = []string{"fosite", "offline", "openid"} + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + }, + pass: false, + }, + { + description: "should pass with limited but not expired refresh token", + setup: func(t *testing.T, c *oauth2.Config) { + fc = new(fosite.Config) + fc.RefreshTokenLifespan = time.Minute + fc.GlobalSecret = []byte("some-secret-thats-random-some-secret-thats-random-") + f = compose.ComposeAllEnabled(fc, fositeStore, gen.MustRSAKey()) + ts := mockServer(t, f, session) + + *c = *newOAuth2Client(ts) + c.Scopes = []string{"fosite", "offline", "openid"} + fositeStore.Clients["my-client"].(*fosite.DefaultClient).RedirectURIs[0] = ts.URL + "/callback" + }, + beforeRefresh: func(t *testing.T) { + refreshCheckClient.Audience = []string{} + }, + pass: true, + check: func(_ *testing.T, _ *oauth2.Config, _, _ *oauth2.Token, _, _ *introspectionResponse) {}, + }, + { + description: "should deny access if original token was reused", + setup: func(t *testing.T, c *oauth2.Config) { + c.Scopes = []string{"offline"} + }, + pass: true, + check: func(t *testing.T, c *oauth2.Config, original, refreshed *oauth2.Token, or, rr *introspectionResponse) { + tokenSource := c.TokenSource(t.Context(), original) + _, err := tokenSource.Token() + require.Error(t, err) + require.Equal(t, http.StatusBadRequest, err.(*oauth2.RetrieveError).Response.StatusCode) + + refreshed.Expiry = refreshed.Expiry.Add(-time.Hour * 24) + tokenSource = c.TokenSource(t.Context(), refreshed) + _, err = tokenSource.Token() + require.Error(t, err) + require.Equal(t, http.StatusBadRequest, err.(*oauth2.RetrieveError).Response.StatusCode) + }, + }, + } { + t.Run("case="+c.description, func(t *testing.T) { + // t.Parallel() TODO remove side-effects on the fosite store clients to make this parallel + + oauthClient := newOAuth2Client(ts) + c.setup(t, oauthClient) + + var intro = func(token string, p interface{}) { + req, err := http.NewRequest("POST", ts.URL+"/introspect", strings.NewReader(url.Values{"token": {token}}.Encode())) + require.NoError(t, err) + req.SetBasicAuth("refresh-client", "foobar") + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + r, err := http.DefaultClient.Do(req) + require.NoError(t, err) + assert.Equal(t, 
http.StatusOK, r.StatusCode) + + dec := json.NewDecoder(r.Body) + dec.DisallowUnknownFields() + require.NoError(t, dec.Decode(p)) + } + + resp, err := http.Get(oauthClient.AuthCodeURL(uuidx.NewV4().String(), c.params...)) + require.NoError(t, err) + require.Equal(t, http.StatusOK, resp.StatusCode) + + if resp.StatusCode != http.StatusOK { + return + } + + token, err := oauthClient.Exchange(t.Context(), resp.Request.URL.Query().Get("code")) + require.NoError(t, err) + require.NotEmpty(t, token.AccessToken) + + var ob introspectionResponse + intro(token.AccessToken, &ob) + + token.Expiry = token.Expiry.Add(-time.Hour * 24) + + if c.beforeRefresh != nil { + c.beforeRefresh(t) + } + + tokenSource := oauthClient.TokenSource(t.Context(), token) + + // This sleep guarantees time difference in exp/iat + time.Sleep(2 * time.Second) + + refreshed, err := tokenSource.Token() + if c.pass { + require.NoError(t, err) + + var rb introspectionResponse + intro(refreshed.AccessToken, &rb) + c.check(t, oauthClient, token, refreshed, &ob, &rb) + } else { + require.Error(t, err) + } + }) + } +} diff --git a/fosite/integration/resource_owner_password_credentials_grant_test.go b/fosite/integration/resource_owner_password_credentials_grant_test.go new file mode 100644 index 00000000000..2da64af83a0 --- /dev/null +++ b/fosite/integration/resource_owner_password_credentials_grant_test.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "context" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + hst "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestResourceOwnerPasswordCredentialsFlow(t *testing.T) { + for _, strategy := range []hst.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runResourceOwnerPasswordCredentialsGrantTest(t, strategy) + } +} + +func runResourceOwnerPasswordCredentialsGrantTest(t *testing.T, strategy hst.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2ResourceOwnerPasswordCredentialsFactory) + ts := mockServer(t, f, &fosite.DefaultSession{}) + defer ts.Close() + + var username, password string + oauthClient := newOAuth2Client(ts) + for k, c := range []struct { + description string + setup func() + check func(t *testing.T, token *oauth2.Token) + err bool + }{ + { + description: "should fail because invalid password", + setup: func() { + username = "peter" + password = "something-wrong" + }, + err: true, + }, + { + description: "should pass", + setup: func() { + password = "secret" + }, + }, + { + description: "should pass with custom client token lifespans", + setup: func() { + oauthClient = newOAuth2Client(ts) + oauthClient.ClientID = "custom-lifespan-client" + }, + check: func(t *testing.T, token *oauth2.Token) { + s, err := fositeStore.GetAccessTokenSession(context.Background(), strings.Split(token.AccessToken, ".")[1], nil) + require.NoError(t, err) + atExp := s.GetSession().GetExpiresAt(fosite.AccessToken) + internal.RequireEqualTime(t, time.Now().UTC().Add(*internal.TestLifespans.PasswordGrantAccessTokenLifespan), atExp, time.Minute) + atExpIn := time.Duration(token.Extra("expires_in").(float64)) * time.Second + internal.RequireEqualDuration(t, 
*internal.TestLifespans.PasswordGrantAccessTokenLifespan, atExpIn, time.Minute) + rtExp := s.GetSession().GetExpiresAt(fosite.RefreshToken) + internal.RequireEqualTime(t, time.Now().UTC().Add(*internal.TestLifespans.PasswordGrantRefreshTokenLifespan), rtExp, time.Minute) + }, + }, + } { + c.setup() + + token, err := oauthClient.PasswordCredentialsToken(context.Background(), username, password) + require.Equal(t, c.err, err != nil, "(%d) %s\n%s\n%s", k, c.description, c.err, err) + if !c.err { + assert.NotEmpty(t, token.AccessToken, "(%d) %s\n%s", k, c.description, token) + + if c.check != nil { + c.check(t, token) + } + } + + t.Logf("Passed test case %d", k) + } +} diff --git a/fosite/integration/revoke_token_test.go b/fosite/integration/revoke_token_test.go new file mode 100644 index 00000000000..213b6d36b67 --- /dev/null +++ b/fosite/integration/revoke_token_test.go @@ -0,0 +1,56 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "net/http" + "testing" + + "github.com/parnurzeal/gorequest" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +func TestRevokeToken(t *testing.T) { + for _, strategy := range []oauth2.CoreStrategyProvider{ + hmacStrategyProvider, + } { + runRevokeTokenTest(t, strategy) + } +} + +func runRevokeTokenTest(t *testing.T, strategy oauth2.CoreStrategyProvider) { + f := compose.Compose(new(fosite.Config), fositeStore, strategy, compose.OAuth2ClientCredentialsGrantFactory, compose.OAuth2TokenIntrospectionFactory, compose.OAuth2TokenRevocationFactory) + ts := mockServer(t, f, &fosite.DefaultSession{}) + defer ts.Close() + + oauthClient := newOAuth2AppClient(ts) + token, err := oauthClient.Token(goauth.NoContext) + require.NoError(t, err) + + resp, _, errs := gorequest.New().Post(ts.URL+"/revoke"). + SetBasicAuth(oauthClient.ClientID, oauthClient.ClientSecret). + Type("form"). + SendStruct(map[string]string{"token": "asdf"}).End() + require.Len(t, errs, 0) + assert.Equal(t, 200, resp.StatusCode) + + resp, _, errs = gorequest.New().Post(ts.URL+"/revoke"). + SetBasicAuth(oauthClient.ClientID, oauthClient.ClientSecret). + Type("form"). + SendStruct(map[string]string{"token": token.AccessToken}).End() + require.Len(t, errs, 0) + assert.Equal(t, 200, resp.StatusCode) + + hres, _, errs := gorequest.New().Get(ts.URL+"/info"). + Set("Authorization", "bearer "+token.AccessToken). + End() + require.Len(t, errs, 0) + assert.Equal(t, http.StatusUnauthorized, hres.StatusCode) +} diff --git a/fosite/internal/access_request.go b/fosite/internal/access_request.go new file mode 100644 index 00000000000..e9b0f87f5f6 --- /dev/null +++ b/fosite/internal/access_request.go @@ -0,0 +1,297 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: AccessRequester) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/access_request.go github.com/ory/hydra/v2/fosite AccessRequester +// + +// Package internal is a generated GoMock package. 
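// Editorial note, not part of this change: the files from here on are MockGen-generated test doubles
// for fosite interfaces. They are driven with go.uber.org/mock in the usual way. The sketch below is
// illustrative only; the test name, expectations, and return values are assumptions, not code from this diff.

    package internal_test

    import (
        "testing"

        "go.uber.org/mock/gomock"

        "github.com/ory/hydra/v2/fosite"
        "github.com/ory/hydra/v2/fosite/internal"
    )

    func TestWithMockAccessRequester(t *testing.T) {
        // The controller verifies on cleanup that every recorded expectation was met.
        ctrl := gomock.NewController(t)
        defer ctrl.Finish()

        requester := internal.NewMockAccessRequester(ctrl)
        requester.EXPECT().GetClient().Return(&fosite.DefaultClient{ID: "my-client"}).AnyTimes()
        requester.EXPECT().GetGrantedScopes().Return(fosite.Arguments{"openid"}).AnyTimes()

        // Hand the mock to any code under test that accepts a fosite.AccessRequester.
        _ = requester
    }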
+package internal + +import ( + url "net/url" + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAccessRequester is a mock of AccessRequester interface. +type MockAccessRequester struct { + ctrl *gomock.Controller + recorder *MockAccessRequesterMockRecorder + isgomock struct{} +} + +// MockAccessRequesterMockRecorder is the mock recorder for MockAccessRequester. +type MockAccessRequesterMockRecorder struct { + mock *MockAccessRequester +} + +// NewMockAccessRequester creates a new mock instance. +func NewMockAccessRequester(ctrl *gomock.Controller) *MockAccessRequester { + mock := &MockAccessRequester{ctrl: ctrl} + mock.recorder = &MockAccessRequesterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessRequester) EXPECT() *MockAccessRequesterMockRecorder { + return m.recorder +} + +// AppendRequestedScope mocks base method. +func (m *MockAccessRequester) AppendRequestedScope(scope string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "AppendRequestedScope", scope) +} + +// AppendRequestedScope indicates an expected call of AppendRequestedScope. +func (mr *MockAccessRequesterMockRecorder) AppendRequestedScope(scope any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AppendRequestedScope", reflect.TypeOf((*MockAccessRequester)(nil).AppendRequestedScope), scope) +} + +// GetClient mocks base method. +func (m *MockAccessRequester) GetClient() fosite.Client { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetClient") + ret0, _ := ret[0].(fosite.Client) + return ret0 +} + +// GetClient indicates an expected call of GetClient. +func (mr *MockAccessRequesterMockRecorder) GetClient() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClient", reflect.TypeOf((*MockAccessRequester)(nil).GetClient)) +} + +// GetGrantTypes mocks base method. +func (m *MockAccessRequester) GetGrantTypes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantTypes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantTypes indicates an expected call of GetGrantTypes. +func (mr *MockAccessRequesterMockRecorder) GetGrantTypes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantTypes", reflect.TypeOf((*MockAccessRequester)(nil).GetGrantTypes)) +} + +// GetGrantedAudience mocks base method. +func (m *MockAccessRequester) GetGrantedAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantedAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantedAudience indicates an expected call of GetGrantedAudience. +func (mr *MockAccessRequesterMockRecorder) GetGrantedAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantedAudience", reflect.TypeOf((*MockAccessRequester)(nil).GetGrantedAudience)) +} + +// GetGrantedScopes mocks base method. +func (m *MockAccessRequester) GetGrantedScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantedScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantedScopes indicates an expected call of GetGrantedScopes. 
+func (mr *MockAccessRequesterMockRecorder) GetGrantedScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantedScopes", reflect.TypeOf((*MockAccessRequester)(nil).GetGrantedScopes)) +} + +// GetID mocks base method. +func (m *MockAccessRequester) GetID() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetID") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetID indicates an expected call of GetID. +func (mr *MockAccessRequesterMockRecorder) GetID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetID", reflect.TypeOf((*MockAccessRequester)(nil).GetID)) +} + +// GetRequestForm mocks base method. +func (m *MockAccessRequester) GetRequestForm() url.Values { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestForm") + ret0, _ := ret[0].(url.Values) + return ret0 +} + +// GetRequestForm indicates an expected call of GetRequestForm. +func (mr *MockAccessRequesterMockRecorder) GetRequestForm() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestForm", reflect.TypeOf((*MockAccessRequester)(nil).GetRequestForm)) +} + +// GetRequestedAt mocks base method. +func (m *MockAccessRequester) GetRequestedAt() time.Time { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedAt") + ret0, _ := ret[0].(time.Time) + return ret0 +} + +// GetRequestedAt indicates an expected call of GetRequestedAt. +func (mr *MockAccessRequesterMockRecorder) GetRequestedAt() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedAt", reflect.TypeOf((*MockAccessRequester)(nil).GetRequestedAt)) +} + +// GetRequestedAudience mocks base method. +func (m *MockAccessRequester) GetRequestedAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetRequestedAudience indicates an expected call of GetRequestedAudience. +func (mr *MockAccessRequesterMockRecorder) GetRequestedAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedAudience", reflect.TypeOf((*MockAccessRequester)(nil).GetRequestedAudience)) +} + +// GetRequestedScopes mocks base method. +func (m *MockAccessRequester) GetRequestedScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetRequestedScopes indicates an expected call of GetRequestedScopes. +func (mr *MockAccessRequesterMockRecorder) GetRequestedScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedScopes", reflect.TypeOf((*MockAccessRequester)(nil).GetRequestedScopes)) +} + +// GetSession mocks base method. +func (m *MockAccessRequester) GetSession() fosite.Session { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSession") + ret0, _ := ret[0].(fosite.Session) + return ret0 +} + +// GetSession indicates an expected call of GetSession. +func (mr *MockAccessRequesterMockRecorder) GetSession() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSession", reflect.TypeOf((*MockAccessRequester)(nil).GetSession)) +} + +// GrantAudience mocks base method. 
+func (m *MockAccessRequester) GrantAudience(audience string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "GrantAudience", audience) +} + +// GrantAudience indicates an expected call of GrantAudience. +func (mr *MockAccessRequesterMockRecorder) GrantAudience(audience any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GrantAudience", reflect.TypeOf((*MockAccessRequester)(nil).GrantAudience), audience) +} + +// GrantScope mocks base method. +func (m *MockAccessRequester) GrantScope(scope string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "GrantScope", scope) +} + +// GrantScope indicates an expected call of GrantScope. +func (mr *MockAccessRequesterMockRecorder) GrantScope(scope any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GrantScope", reflect.TypeOf((*MockAccessRequester)(nil).GrantScope), scope) +} + +// Merge mocks base method. +func (m *MockAccessRequester) Merge(requester fosite.Requester) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "Merge", requester) +} + +// Merge indicates an expected call of Merge. +func (mr *MockAccessRequesterMockRecorder) Merge(requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Merge", reflect.TypeOf((*MockAccessRequester)(nil).Merge), requester) +} + +// Sanitize mocks base method. +func (m *MockAccessRequester) Sanitize(allowedParameters []string) fosite.Requester { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Sanitize", allowedParameters) + ret0, _ := ret[0].(fosite.Requester) + return ret0 +} + +// Sanitize indicates an expected call of Sanitize. +func (mr *MockAccessRequesterMockRecorder) Sanitize(allowedParameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Sanitize", reflect.TypeOf((*MockAccessRequester)(nil).Sanitize), allowedParameters) +} + +// SetID mocks base method. +func (m *MockAccessRequester) SetID(id string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetID", id) +} + +// SetID indicates an expected call of SetID. +func (mr *MockAccessRequesterMockRecorder) SetID(id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetID", reflect.TypeOf((*MockAccessRequester)(nil).SetID), id) +} + +// SetRequestedAudience mocks base method. +func (m *MockAccessRequester) SetRequestedAudience(audience fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetRequestedAudience", audience) +} + +// SetRequestedAudience indicates an expected call of SetRequestedAudience. +func (mr *MockAccessRequesterMockRecorder) SetRequestedAudience(audience any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetRequestedAudience", reflect.TypeOf((*MockAccessRequester)(nil).SetRequestedAudience), audience) +} + +// SetRequestedScopes mocks base method. +func (m *MockAccessRequester) SetRequestedScopes(scopes fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetRequestedScopes", scopes) +} + +// SetRequestedScopes indicates an expected call of SetRequestedScopes. +func (mr *MockAccessRequesterMockRecorder) SetRequestedScopes(scopes any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetRequestedScopes", reflect.TypeOf((*MockAccessRequester)(nil).SetRequestedScopes), scopes) +} + +// SetSession mocks base method. 
+func (m *MockAccessRequester) SetSession(session fosite.Session) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetSession", session) +} + +// SetSession indicates an expected call of SetSession. +func (mr *MockAccessRequesterMockRecorder) SetSession(session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetSession", reflect.TypeOf((*MockAccessRequester)(nil).SetSession), session) +} diff --git a/fosite/internal/access_response.go b/fosite/internal/access_response.go new file mode 100644 index 00000000000..6efcfe2f997 --- /dev/null +++ b/fosite/internal/access_response.go @@ -0,0 +1,162 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: AccessResponder) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/access_response.go github.com/ory/hydra/v2/fosite AccessResponder +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAccessResponder is a mock of AccessResponder interface. +type MockAccessResponder struct { + ctrl *gomock.Controller + recorder *MockAccessResponderMockRecorder + isgomock struct{} +} + +// MockAccessResponderMockRecorder is the mock recorder for MockAccessResponder. +type MockAccessResponderMockRecorder struct { + mock *MockAccessResponder +} + +// NewMockAccessResponder creates a new mock instance. +func NewMockAccessResponder(ctrl *gomock.Controller) *MockAccessResponder { + mock := &MockAccessResponder{ctrl: ctrl} + mock.recorder = &MockAccessResponderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessResponder) EXPECT() *MockAccessResponderMockRecorder { + return m.recorder +} + +// GetAccessToken mocks base method. +func (m *MockAccessResponder) GetAccessToken() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccessToken") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetAccessToken indicates an expected call of GetAccessToken. +func (mr *MockAccessResponderMockRecorder) GetAccessToken() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccessToken", reflect.TypeOf((*MockAccessResponder)(nil).GetAccessToken)) +} + +// GetExtra mocks base method. +func (m *MockAccessResponder) GetExtra(key string) any { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetExtra", key) + ret0, _ := ret[0].(any) + return ret0 +} + +// GetExtra indicates an expected call of GetExtra. +func (mr *MockAccessResponderMockRecorder) GetExtra(key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetExtra", reflect.TypeOf((*MockAccessResponder)(nil).GetExtra), key) +} + +// GetTokenType mocks base method. +func (m *MockAccessResponder) GetTokenType() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTokenType") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetTokenType indicates an expected call of GetTokenType. 
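// Editorial note, not part of this change: fosite's AccessResponder models the token endpoint
// response; ToMap typically carries the wire-format fields such as access_token, token_type and
// expires_in, with extras like id_token attached via SetExtra by the OpenID Connect handlers.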
+func (mr *MockAccessResponderMockRecorder) GetTokenType() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenType", reflect.TypeOf((*MockAccessResponder)(nil).GetTokenType)) +} + +// SetAccessToken mocks base method. +func (m *MockAccessResponder) SetAccessToken(token string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetAccessToken", token) +} + +// SetAccessToken indicates an expected call of SetAccessToken. +func (mr *MockAccessResponderMockRecorder) SetAccessToken(token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetAccessToken", reflect.TypeOf((*MockAccessResponder)(nil).SetAccessToken), token) +} + +// SetExpiresIn mocks base method. +func (m *MockAccessResponder) SetExpiresIn(arg0 time.Duration) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetExpiresIn", arg0) +} + +// SetExpiresIn indicates an expected call of SetExpiresIn. +func (mr *MockAccessResponderMockRecorder) SetExpiresIn(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetExpiresIn", reflect.TypeOf((*MockAccessResponder)(nil).SetExpiresIn), arg0) +} + +// SetExtra mocks base method. +func (m *MockAccessResponder) SetExtra(key string, value any) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetExtra", key, value) +} + +// SetExtra indicates an expected call of SetExtra. +func (mr *MockAccessResponderMockRecorder) SetExtra(key, value any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetExtra", reflect.TypeOf((*MockAccessResponder)(nil).SetExtra), key, value) +} + +// SetScopes mocks base method. +func (m *MockAccessResponder) SetScopes(scopes fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetScopes", scopes) +} + +// SetScopes indicates an expected call of SetScopes. +func (mr *MockAccessResponderMockRecorder) SetScopes(scopes any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetScopes", reflect.TypeOf((*MockAccessResponder)(nil).SetScopes), scopes) +} + +// SetTokenType mocks base method. +func (m *MockAccessResponder) SetTokenType(tokenType string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetTokenType", tokenType) +} + +// SetTokenType indicates an expected call of SetTokenType. +func (mr *MockAccessResponderMockRecorder) SetTokenType(tokenType any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetTokenType", reflect.TypeOf((*MockAccessResponder)(nil).SetTokenType), tokenType) +} + +// ToMap mocks base method. +func (m *MockAccessResponder) ToMap() map[string]any { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ToMap") + ret0, _ := ret[0].(map[string]any) + return ret0 +} + +// ToMap indicates an expected call of ToMap. +func (mr *MockAccessResponderMockRecorder) ToMap() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ToMap", reflect.TypeOf((*MockAccessResponder)(nil).ToMap)) +} diff --git a/fosite/internal/access_token_storage.go b/fosite/internal/access_token_storage.go new file mode 100644 index 00000000000..262cc7197aa --- /dev/null +++ b/fosite/internal/access_token_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AccessTokenStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/access_token_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAccessTokenStorage is a mock of AccessTokenStorage interface. +type MockAccessTokenStorage struct { + ctrl *gomock.Controller + recorder *MockAccessTokenStorageMockRecorder + isgomock struct{} +} + +// MockAccessTokenStorageMockRecorder is the mock recorder for MockAccessTokenStorage. +type MockAccessTokenStorageMockRecorder struct { + mock *MockAccessTokenStorage +} + +// NewMockAccessTokenStorage creates a new mock instance. +func NewMockAccessTokenStorage(ctrl *gomock.Controller) *MockAccessTokenStorage { + mock := &MockAccessTokenStorage{ctrl: ctrl} + mock.recorder = &MockAccessTokenStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessTokenStorage) EXPECT() *MockAccessTokenStorageMockRecorder { + return m.recorder +} + +// CreateAccessTokenSession mocks base method. +func (m *MockAccessTokenStorage) CreateAccessTokenSession(ctx context.Context, signature string, request fosite.Requester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateAccessTokenSession", ctx, signature, request) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateAccessTokenSession indicates an expected call of CreateAccessTokenSession. +func (mr *MockAccessTokenStorageMockRecorder) CreateAccessTokenSession(ctx, signature, request any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAccessTokenSession", reflect.TypeOf((*MockAccessTokenStorage)(nil).CreateAccessTokenSession), ctx, signature, request) +} + +// DeleteAccessTokenSession mocks base method. +func (m *MockAccessTokenStorage) DeleteAccessTokenSession(ctx context.Context, signature string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccessTokenSession", ctx, signature) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccessTokenSession indicates an expected call of DeleteAccessTokenSession. +func (mr *MockAccessTokenStorageMockRecorder) DeleteAccessTokenSession(ctx, signature any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccessTokenSession", reflect.TypeOf((*MockAccessTokenStorage)(nil).DeleteAccessTokenSession), ctx, signature) +} + +// GetAccessTokenSession mocks base method. +func (m *MockAccessTokenStorage) GetAccessTokenSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccessTokenSession", ctx, signature, session) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccessTokenSession indicates an expected call of GetAccessTokenSession. 
+func (mr *MockAccessTokenStorageMockRecorder) GetAccessTokenSession(ctx, signature, session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccessTokenSession", reflect.TypeOf((*MockAccessTokenStorage)(nil).GetAccessTokenSession), ctx, signature, session) +} diff --git a/fosite/internal/access_token_storage_provider.go b/fosite/internal/access_token_storage_provider.go new file mode 100644 index 00000000000..2c248497b67 --- /dev/null +++ b/fosite/internal/access_token_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AccessTokenStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/access_token_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockAccessTokenStorageProvider is a mock of AccessTokenStorageProvider interface. +type MockAccessTokenStorageProvider struct { + ctrl *gomock.Controller + recorder *MockAccessTokenStorageProviderMockRecorder + isgomock struct{} +} + +// MockAccessTokenStorageProviderMockRecorder is the mock recorder for MockAccessTokenStorageProvider. +type MockAccessTokenStorageProviderMockRecorder struct { + mock *MockAccessTokenStorageProvider +} + +// NewMockAccessTokenStorageProvider creates a new mock instance. +func NewMockAccessTokenStorageProvider(ctrl *gomock.Controller) *MockAccessTokenStorageProvider { + mock := &MockAccessTokenStorageProvider{ctrl: ctrl} + mock.recorder = &MockAccessTokenStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessTokenStorageProvider) EXPECT() *MockAccessTokenStorageProviderMockRecorder { + return m.recorder +} + +// AccessTokenStorage mocks base method. +func (m *MockAccessTokenStorageProvider) AccessTokenStorage() oauth2.AccessTokenStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AccessTokenStorage") + ret0, _ := ret[0].(oauth2.AccessTokenStorage) + return ret0 +} + +// AccessTokenStorage indicates an expected call of AccessTokenStorage. +func (mr *MockAccessTokenStorageProviderMockRecorder) AccessTokenStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AccessTokenStorage", reflect.TypeOf((*MockAccessTokenStorageProvider)(nil).AccessTokenStorage)) +} diff --git a/fosite/internal/access_token_strategy.go b/fosite/internal/access_token_strategy.go new file mode 100644 index 00000000000..9b2665541dc --- /dev/null +++ b/fosite/internal/access_token_strategy.go @@ -0,0 +1,90 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AccessTokenStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/access_token_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStrategy +// + +// Package internal is a generated GoMock package. 
+package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAccessTokenStrategy is a mock of AccessTokenStrategy interface. +type MockAccessTokenStrategy struct { + ctrl *gomock.Controller + recorder *MockAccessTokenStrategyMockRecorder + isgomock struct{} +} + +// MockAccessTokenStrategyMockRecorder is the mock recorder for MockAccessTokenStrategy. +type MockAccessTokenStrategyMockRecorder struct { + mock *MockAccessTokenStrategy +} + +// NewMockAccessTokenStrategy creates a new mock instance. +func NewMockAccessTokenStrategy(ctrl *gomock.Controller) *MockAccessTokenStrategy { + mock := &MockAccessTokenStrategy{ctrl: ctrl} + mock.recorder = &MockAccessTokenStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessTokenStrategy) EXPECT() *MockAccessTokenStrategyMockRecorder { + return m.recorder +} + +// AccessTokenSignature mocks base method. +func (m *MockAccessTokenStrategy) AccessTokenSignature(ctx context.Context, token string) string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AccessTokenSignature", ctx, token) + ret0, _ := ret[0].(string) + return ret0 +} + +// AccessTokenSignature indicates an expected call of AccessTokenSignature. +func (mr *MockAccessTokenStrategyMockRecorder) AccessTokenSignature(ctx, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AccessTokenSignature", reflect.TypeOf((*MockAccessTokenStrategy)(nil).AccessTokenSignature), ctx, token) +} + +// GenerateAccessToken mocks base method. +func (m *MockAccessTokenStrategy) GenerateAccessToken(ctx context.Context, requester fosite.Requester) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateAccessToken", ctx, requester) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateAccessToken indicates an expected call of GenerateAccessToken. +func (mr *MockAccessTokenStrategyMockRecorder) GenerateAccessToken(ctx, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateAccessToken", reflect.TypeOf((*MockAccessTokenStrategy)(nil).GenerateAccessToken), ctx, requester) +} + +// ValidateAccessToken mocks base method. +func (m *MockAccessTokenStrategy) ValidateAccessToken(ctx context.Context, requester fosite.Requester, token string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateAccessToken", ctx, requester, token) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateAccessToken indicates an expected call of ValidateAccessToken. +func (mr *MockAccessTokenStrategyMockRecorder) ValidateAccessToken(ctx, requester, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateAccessToken", reflect.TypeOf((*MockAccessTokenStrategy)(nil).ValidateAccessToken), ctx, requester, token) +} diff --git a/fosite/internal/access_token_strategy_provider.go b/fosite/internal/access_token_strategy_provider.go new file mode 100644 index 00000000000..bce49e53256 --- /dev/null +++ b/fosite/internal/access_token_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AccessTokenStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/access_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AccessTokenStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockAccessTokenStrategyProvider is a mock of AccessTokenStrategyProvider interface. +type MockAccessTokenStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockAccessTokenStrategyProviderMockRecorder + isgomock struct{} +} + +// MockAccessTokenStrategyProviderMockRecorder is the mock recorder for MockAccessTokenStrategyProvider. +type MockAccessTokenStrategyProviderMockRecorder struct { + mock *MockAccessTokenStrategyProvider +} + +// NewMockAccessTokenStrategyProvider creates a new mock instance. +func NewMockAccessTokenStrategyProvider(ctrl *gomock.Controller) *MockAccessTokenStrategyProvider { + mock := &MockAccessTokenStrategyProvider{ctrl: ctrl} + mock.recorder = &MockAccessTokenStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAccessTokenStrategyProvider) EXPECT() *MockAccessTokenStrategyProviderMockRecorder { + return m.recorder +} + +// AccessTokenStrategy mocks base method. +func (m *MockAccessTokenStrategyProvider) AccessTokenStrategy() oauth2.AccessTokenStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AccessTokenStrategy") + ret0, _ := ret[0].(oauth2.AccessTokenStrategy) + return ret0 +} + +// AccessTokenStrategy indicates an expected call of AccessTokenStrategy. +func (mr *MockAccessTokenStrategyProviderMockRecorder) AccessTokenStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AccessTokenStrategy", reflect.TypeOf((*MockAccessTokenStrategyProvider)(nil).AccessTokenStrategy)) +} diff --git a/fosite/internal/authorize_code_storage.go b/fosite/internal/authorize_code_storage.go new file mode 100644 index 00000000000..4ee8f0e4d87 --- /dev/null +++ b/fosite/internal/authorize_code_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AuthorizeCodeStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_code_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAuthorizeCodeStorage is a mock of AuthorizeCodeStorage interface. +type MockAuthorizeCodeStorage struct { + ctrl *gomock.Controller + recorder *MockAuthorizeCodeStorageMockRecorder + isgomock struct{} +} + +// MockAuthorizeCodeStorageMockRecorder is the mock recorder for MockAuthorizeCodeStorage. +type MockAuthorizeCodeStorageMockRecorder struct { + mock *MockAuthorizeCodeStorage +} + +// NewMockAuthorizeCodeStorage creates a new mock instance. 
+func NewMockAuthorizeCodeStorage(ctrl *gomock.Controller) *MockAuthorizeCodeStorage { + mock := &MockAuthorizeCodeStorage{ctrl: ctrl} + mock.recorder = &MockAuthorizeCodeStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeCodeStorage) EXPECT() *MockAuthorizeCodeStorageMockRecorder { + return m.recorder +} + +// CreateAuthorizeCodeSession mocks base method. +func (m *MockAuthorizeCodeStorage) CreateAuthorizeCodeSession(ctx context.Context, code string, request fosite.Requester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateAuthorizeCodeSession", ctx, code, request) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateAuthorizeCodeSession indicates an expected call of CreateAuthorizeCodeSession. +func (mr *MockAuthorizeCodeStorageMockRecorder) CreateAuthorizeCodeSession(ctx, code, request any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAuthorizeCodeSession", reflect.TypeOf((*MockAuthorizeCodeStorage)(nil).CreateAuthorizeCodeSession), ctx, code, request) +} + +// GetAuthorizeCodeSession mocks base method. +func (m *MockAuthorizeCodeStorage) GetAuthorizeCodeSession(ctx context.Context, code string, session fosite.Session) (fosite.Requester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAuthorizeCodeSession", ctx, code, session) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAuthorizeCodeSession indicates an expected call of GetAuthorizeCodeSession. +func (mr *MockAuthorizeCodeStorageMockRecorder) GetAuthorizeCodeSession(ctx, code, session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAuthorizeCodeSession", reflect.TypeOf((*MockAuthorizeCodeStorage)(nil).GetAuthorizeCodeSession), ctx, code, session) +} + +// InvalidateAuthorizeCodeSession mocks base method. +func (m *MockAuthorizeCodeStorage) InvalidateAuthorizeCodeSession(ctx context.Context, code string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InvalidateAuthorizeCodeSession", ctx, code) + ret0, _ := ret[0].(error) + return ret0 +} + +// InvalidateAuthorizeCodeSession indicates an expected call of InvalidateAuthorizeCodeSession. +func (mr *MockAuthorizeCodeStorageMockRecorder) InvalidateAuthorizeCodeSession(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InvalidateAuthorizeCodeSession", reflect.TypeOf((*MockAuthorizeCodeStorage)(nil).InvalidateAuthorizeCodeSession), ctx, code) +} diff --git a/fosite/internal/authorize_code_storage_provider.go b/fosite/internal/authorize_code_storage_provider.go new file mode 100644 index 00000000000..febeafd4cf4 --- /dev/null +++ b/fosite/internal/authorize_code_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AuthorizeCodeStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_code_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStorageProvider +// + +// Package internal is a generated GoMock package. 
+package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockAuthorizeCodeStorageProvider is a mock of AuthorizeCodeStorageProvider interface. +type MockAuthorizeCodeStorageProvider struct { + ctrl *gomock.Controller + recorder *MockAuthorizeCodeStorageProviderMockRecorder + isgomock struct{} +} + +// MockAuthorizeCodeStorageProviderMockRecorder is the mock recorder for MockAuthorizeCodeStorageProvider. +type MockAuthorizeCodeStorageProviderMockRecorder struct { + mock *MockAuthorizeCodeStorageProvider +} + +// NewMockAuthorizeCodeStorageProvider creates a new mock instance. +func NewMockAuthorizeCodeStorageProvider(ctrl *gomock.Controller) *MockAuthorizeCodeStorageProvider { + mock := &MockAuthorizeCodeStorageProvider{ctrl: ctrl} + mock.recorder = &MockAuthorizeCodeStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeCodeStorageProvider) EXPECT() *MockAuthorizeCodeStorageProviderMockRecorder { + return m.recorder +} + +// AuthorizeCodeStorage mocks base method. +func (m *MockAuthorizeCodeStorageProvider) AuthorizeCodeStorage() oauth2.AuthorizeCodeStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AuthorizeCodeStorage") + ret0, _ := ret[0].(oauth2.AuthorizeCodeStorage) + return ret0 +} + +// AuthorizeCodeStorage indicates an expected call of AuthorizeCodeStorage. +func (mr *MockAuthorizeCodeStorageProviderMockRecorder) AuthorizeCodeStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AuthorizeCodeStorage", reflect.TypeOf((*MockAuthorizeCodeStorageProvider)(nil).AuthorizeCodeStorage)) +} diff --git a/fosite/internal/authorize_code_strategy.go b/fosite/internal/authorize_code_strategy.go new file mode 100644 index 00000000000..a29012e0fda --- /dev/null +++ b/fosite/internal/authorize_code_strategy.go @@ -0,0 +1,90 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AuthorizeCodeStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_code_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAuthorizeCodeStrategy is a mock of AuthorizeCodeStrategy interface. +type MockAuthorizeCodeStrategy struct { + ctrl *gomock.Controller + recorder *MockAuthorizeCodeStrategyMockRecorder + isgomock struct{} +} + +// MockAuthorizeCodeStrategyMockRecorder is the mock recorder for MockAuthorizeCodeStrategy. +type MockAuthorizeCodeStrategyMockRecorder struct { + mock *MockAuthorizeCodeStrategy +} + +// NewMockAuthorizeCodeStrategy creates a new mock instance. +func NewMockAuthorizeCodeStrategy(ctrl *gomock.Controller) *MockAuthorizeCodeStrategy { + mock := &MockAuthorizeCodeStrategy{ctrl: ctrl} + mock.recorder = &MockAuthorizeCodeStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockAuthorizeCodeStrategy) EXPECT() *MockAuthorizeCodeStrategyMockRecorder { + return m.recorder +} + +// AuthorizeCodeSignature mocks base method. +func (m *MockAuthorizeCodeStrategy) AuthorizeCodeSignature(ctx context.Context, token string) string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AuthorizeCodeSignature", ctx, token) + ret0, _ := ret[0].(string) + return ret0 +} + +// AuthorizeCodeSignature indicates an expected call of AuthorizeCodeSignature. +func (mr *MockAuthorizeCodeStrategyMockRecorder) AuthorizeCodeSignature(ctx, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AuthorizeCodeSignature", reflect.TypeOf((*MockAuthorizeCodeStrategy)(nil).AuthorizeCodeSignature), ctx, token) +} + +// GenerateAuthorizeCode mocks base method. +func (m *MockAuthorizeCodeStrategy) GenerateAuthorizeCode(ctx context.Context, requester fosite.Requester) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateAuthorizeCode", ctx, requester) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateAuthorizeCode indicates an expected call of GenerateAuthorizeCode. +func (mr *MockAuthorizeCodeStrategyMockRecorder) GenerateAuthorizeCode(ctx, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateAuthorizeCode", reflect.TypeOf((*MockAuthorizeCodeStrategy)(nil).GenerateAuthorizeCode), ctx, requester) +} + +// ValidateAuthorizeCode mocks base method. +func (m *MockAuthorizeCodeStrategy) ValidateAuthorizeCode(ctx context.Context, requester fosite.Requester, token string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateAuthorizeCode", ctx, requester, token) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateAuthorizeCode indicates an expected call of ValidateAuthorizeCode. +func (mr *MockAuthorizeCodeStrategyMockRecorder) ValidateAuthorizeCode(ctx, requester, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateAuthorizeCode", reflect.TypeOf((*MockAuthorizeCodeStrategy)(nil).ValidateAuthorizeCode), ctx, requester, token) +} diff --git a/fosite/internal/authorize_code_strategy_provider.go b/fosite/internal/authorize_code_strategy_provider.go new file mode 100644 index 00000000000..697701a9a59 --- /dev/null +++ b/fosite/internal/authorize_code_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AuthorizeCodeStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 AuthorizeCodeStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockAuthorizeCodeStrategyProvider is a mock of AuthorizeCodeStrategyProvider interface. +type MockAuthorizeCodeStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockAuthorizeCodeStrategyProviderMockRecorder + isgomock struct{} +} + +// MockAuthorizeCodeStrategyProviderMockRecorder is the mock recorder for MockAuthorizeCodeStrategyProvider. 
+type MockAuthorizeCodeStrategyProviderMockRecorder struct { + mock *MockAuthorizeCodeStrategyProvider +} + +// NewMockAuthorizeCodeStrategyProvider creates a new mock instance. +func NewMockAuthorizeCodeStrategyProvider(ctrl *gomock.Controller) *MockAuthorizeCodeStrategyProvider { + mock := &MockAuthorizeCodeStrategyProvider{ctrl: ctrl} + mock.recorder = &MockAuthorizeCodeStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeCodeStrategyProvider) EXPECT() *MockAuthorizeCodeStrategyProviderMockRecorder { + return m.recorder +} + +// AuthorizeCodeStrategy mocks base method. +func (m *MockAuthorizeCodeStrategyProvider) AuthorizeCodeStrategy() oauth2.AuthorizeCodeStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AuthorizeCodeStrategy") + ret0, _ := ret[0].(oauth2.AuthorizeCodeStrategy) + return ret0 +} + +// AuthorizeCodeStrategy indicates an expected call of AuthorizeCodeStrategy. +func (mr *MockAuthorizeCodeStrategyProviderMockRecorder) AuthorizeCodeStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AuthorizeCodeStrategy", reflect.TypeOf((*MockAuthorizeCodeStrategyProvider)(nil).AuthorizeCodeStrategy)) +} diff --git a/fosite/internal/authorize_endpoint_handler.go b/fosite/internal/authorize_endpoint_handler.go new file mode 100644 index 00000000000..b3d6dbaa300 --- /dev/null +++ b/fosite/internal/authorize_endpoint_handler.go @@ -0,0 +1,60 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: AuthorizeEndpointHandler) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_endpoint_handler.go github.com/ory/hydra/v2/fosite AuthorizeEndpointHandler +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAuthorizeEndpointHandler is a mock of AuthorizeEndpointHandler interface. +type MockAuthorizeEndpointHandler struct { + ctrl *gomock.Controller + recorder *MockAuthorizeEndpointHandlerMockRecorder + isgomock struct{} +} + +// MockAuthorizeEndpointHandlerMockRecorder is the mock recorder for MockAuthorizeEndpointHandler. +type MockAuthorizeEndpointHandlerMockRecorder struct { + mock *MockAuthorizeEndpointHandler +} + +// NewMockAuthorizeEndpointHandler creates a new mock instance. +func NewMockAuthorizeEndpointHandler(ctrl *gomock.Controller) *MockAuthorizeEndpointHandler { + mock := &MockAuthorizeEndpointHandler{ctrl: ctrl} + mock.recorder = &MockAuthorizeEndpointHandlerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeEndpointHandler) EXPECT() *MockAuthorizeEndpointHandlerMockRecorder { + return m.recorder +} + +// HandleAuthorizeEndpointRequest mocks base method. 
+func (m *MockAuthorizeEndpointHandler) HandleAuthorizeEndpointRequest(ctx context.Context, requester fosite.AuthorizeRequester, responder fosite.AuthorizeResponder) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "HandleAuthorizeEndpointRequest", ctx, requester, responder) + ret0, _ := ret[0].(error) + return ret0 +} + +// HandleAuthorizeEndpointRequest indicates an expected call of HandleAuthorizeEndpointRequest. +func (mr *MockAuthorizeEndpointHandlerMockRecorder) HandleAuthorizeEndpointRequest(ctx, requester, responder any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HandleAuthorizeEndpointRequest", reflect.TypeOf((*MockAuthorizeEndpointHandler)(nil).HandleAuthorizeEndpointRequest), ctx, requester, responder) +} diff --git a/fosite/internal/authorize_endpoint_handlers_provider.go b/fosite/internal/authorize_endpoint_handlers_provider.go new file mode 100644 index 00000000000..dda50604fa3 --- /dev/null +++ b/fosite/internal/authorize_endpoint_handlers_provider.go @@ -0,0 +1,60 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: AuthorizeEndpointHandlersProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_endpoint_handlers_provider.go github.com/ory/hydra/v2/fosite AuthorizeEndpointHandlersProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAuthorizeEndpointHandlersProvider is a mock of AuthorizeEndpointHandlersProvider interface. +type MockAuthorizeEndpointHandlersProvider struct { + ctrl *gomock.Controller + recorder *MockAuthorizeEndpointHandlersProviderMockRecorder + isgomock struct{} +} + +// MockAuthorizeEndpointHandlersProviderMockRecorder is the mock recorder for MockAuthorizeEndpointHandlersProvider. +type MockAuthorizeEndpointHandlersProviderMockRecorder struct { + mock *MockAuthorizeEndpointHandlersProvider +} + +// NewMockAuthorizeEndpointHandlersProvider creates a new mock instance. +func NewMockAuthorizeEndpointHandlersProvider(ctrl *gomock.Controller) *MockAuthorizeEndpointHandlersProvider { + mock := &MockAuthorizeEndpointHandlersProvider{ctrl: ctrl} + mock.recorder = &MockAuthorizeEndpointHandlersProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeEndpointHandlersProvider) EXPECT() *MockAuthorizeEndpointHandlersProviderMockRecorder { + return m.recorder +} + +// GetAuthorizeEndpointHandlers mocks base method. +func (m *MockAuthorizeEndpointHandlersProvider) GetAuthorizeEndpointHandlers(ctx context.Context) fosite.AuthorizeEndpointHandlers { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAuthorizeEndpointHandlers", ctx) + ret0, _ := ret[0].(fosite.AuthorizeEndpointHandlers) + return ret0 +} + +// GetAuthorizeEndpointHandlers indicates an expected call of GetAuthorizeEndpointHandlers. 
+func (mr *MockAuthorizeEndpointHandlersProviderMockRecorder) GetAuthorizeEndpointHandlers(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAuthorizeEndpointHandlers", reflect.TypeOf((*MockAuthorizeEndpointHandlersProvider)(nil).GetAuthorizeEndpointHandlers), ctx) +} diff --git a/fosite/internal/authorize_request.go b/fosite/internal/authorize_request.go new file mode 100644 index 00000000000..f0548d81ca7 --- /dev/null +++ b/fosite/internal/authorize_request.go @@ -0,0 +1,405 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: AuthorizeRequester) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_request.go github.com/ory/hydra/v2/fosite AuthorizeRequester +// + +// Package internal is a generated GoMock package. +package internal + +import ( + url "net/url" + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockAuthorizeRequester is a mock of AuthorizeRequester interface. +type MockAuthorizeRequester struct { + ctrl *gomock.Controller + recorder *MockAuthorizeRequesterMockRecorder + isgomock struct{} +} + +// MockAuthorizeRequesterMockRecorder is the mock recorder for MockAuthorizeRequester. +type MockAuthorizeRequesterMockRecorder struct { + mock *MockAuthorizeRequester +} + +// NewMockAuthorizeRequester creates a new mock instance. +func NewMockAuthorizeRequester(ctrl *gomock.Controller) *MockAuthorizeRequester { + mock := &MockAuthorizeRequester{ctrl: ctrl} + mock.recorder = &MockAuthorizeRequesterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeRequester) EXPECT() *MockAuthorizeRequesterMockRecorder { + return m.recorder +} + +// AppendRequestedScope mocks base method. +func (m *MockAuthorizeRequester) AppendRequestedScope(scope string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "AppendRequestedScope", scope) +} + +// AppendRequestedScope indicates an expected call of AppendRequestedScope. +func (mr *MockAuthorizeRequesterMockRecorder) AppendRequestedScope(scope any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AppendRequestedScope", reflect.TypeOf((*MockAuthorizeRequester)(nil).AppendRequestedScope), scope) +} + +// DidHandleAllResponseTypes mocks base method. +func (m *MockAuthorizeRequester) DidHandleAllResponseTypes() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DidHandleAllResponseTypes") + ret0, _ := ret[0].(bool) + return ret0 +} + +// DidHandleAllResponseTypes indicates an expected call of DidHandleAllResponseTypes. +func (mr *MockAuthorizeRequesterMockRecorder) DidHandleAllResponseTypes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DidHandleAllResponseTypes", reflect.TypeOf((*MockAuthorizeRequester)(nil).DidHandleAllResponseTypes)) +} + +// GetClient mocks base method. +func (m *MockAuthorizeRequester) GetClient() fosite.Client { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetClient") + ret0, _ := ret[0].(fosite.Client) + return ret0 +} + +// GetClient indicates an expected call of GetClient. 
+func (mr *MockAuthorizeRequesterMockRecorder) GetClient() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClient", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetClient)) +} + +// GetDefaultResponseMode mocks base method. +func (m *MockAuthorizeRequester) GetDefaultResponseMode() fosite.ResponseModeType { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetDefaultResponseMode") + ret0, _ := ret[0].(fosite.ResponseModeType) + return ret0 +} + +// GetDefaultResponseMode indicates an expected call of GetDefaultResponseMode. +func (mr *MockAuthorizeRequesterMockRecorder) GetDefaultResponseMode() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDefaultResponseMode", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetDefaultResponseMode)) +} + +// GetGrantedAudience mocks base method. +func (m *MockAuthorizeRequester) GetGrantedAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantedAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantedAudience indicates an expected call of GetGrantedAudience. +func (mr *MockAuthorizeRequesterMockRecorder) GetGrantedAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantedAudience", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetGrantedAudience)) +} + +// GetGrantedScopes mocks base method. +func (m *MockAuthorizeRequester) GetGrantedScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantedScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantedScopes indicates an expected call of GetGrantedScopes. +func (mr *MockAuthorizeRequesterMockRecorder) GetGrantedScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantedScopes", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetGrantedScopes)) +} + +// GetID mocks base method. +func (m *MockAuthorizeRequester) GetID() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetID") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetID indicates an expected call of GetID. +func (mr *MockAuthorizeRequesterMockRecorder) GetID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetID", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetID)) +} + +// GetRedirectURI mocks base method. +func (m *MockAuthorizeRequester) GetRedirectURI() *url.URL { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRedirectURI") + ret0, _ := ret[0].(*url.URL) + return ret0 +} + +// GetRedirectURI indicates an expected call of GetRedirectURI. +func (mr *MockAuthorizeRequesterMockRecorder) GetRedirectURI() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRedirectURI", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetRedirectURI)) +} + +// GetRequestForm mocks base method. +func (m *MockAuthorizeRequester) GetRequestForm() url.Values { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestForm") + ret0, _ := ret[0].(url.Values) + return ret0 +} + +// GetRequestForm indicates an expected call of GetRequestForm. +func (mr *MockAuthorizeRequesterMockRecorder) GetRequestForm() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestForm", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetRequestForm)) +} + +// GetRequestedAt mocks base method. 
+func (m *MockAuthorizeRequester) GetRequestedAt() time.Time { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedAt") + ret0, _ := ret[0].(time.Time) + return ret0 +} + +// GetRequestedAt indicates an expected call of GetRequestedAt. +func (mr *MockAuthorizeRequesterMockRecorder) GetRequestedAt() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedAt", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetRequestedAt)) +} + +// GetRequestedAudience mocks base method. +func (m *MockAuthorizeRequester) GetRequestedAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetRequestedAudience indicates an expected call of GetRequestedAudience. +func (mr *MockAuthorizeRequesterMockRecorder) GetRequestedAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedAudience", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetRequestedAudience)) +} + +// GetRequestedScopes mocks base method. +func (m *MockAuthorizeRequester) GetRequestedScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetRequestedScopes indicates an expected call of GetRequestedScopes. +func (mr *MockAuthorizeRequesterMockRecorder) GetRequestedScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedScopes", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetRequestedScopes)) +} + +// GetResponseMode mocks base method. +func (m *MockAuthorizeRequester) GetResponseMode() fosite.ResponseModeType { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetResponseMode") + ret0, _ := ret[0].(fosite.ResponseModeType) + return ret0 +} + +// GetResponseMode indicates an expected call of GetResponseMode. +func (mr *MockAuthorizeRequesterMockRecorder) GetResponseMode() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetResponseMode", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetResponseMode)) +} + +// GetResponseTypes mocks base method. +func (m *MockAuthorizeRequester) GetResponseTypes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetResponseTypes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetResponseTypes indicates an expected call of GetResponseTypes. +func (mr *MockAuthorizeRequesterMockRecorder) GetResponseTypes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetResponseTypes", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetResponseTypes)) +} + +// GetSession mocks base method. +func (m *MockAuthorizeRequester) GetSession() fosite.Session { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSession") + ret0, _ := ret[0].(fosite.Session) + return ret0 +} + +// GetSession indicates an expected call of GetSession. +func (mr *MockAuthorizeRequesterMockRecorder) GetSession() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSession", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetSession)) +} + +// GetState mocks base method. +func (m *MockAuthorizeRequester) GetState() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetState") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetState indicates an expected call of GetState. 
+func (mr *MockAuthorizeRequesterMockRecorder) GetState() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetState", reflect.TypeOf((*MockAuthorizeRequester)(nil).GetState)) +} + +// GrantAudience mocks base method. +func (m *MockAuthorizeRequester) GrantAudience(audience string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "GrantAudience", audience) +} + +// GrantAudience indicates an expected call of GrantAudience. +func (mr *MockAuthorizeRequesterMockRecorder) GrantAudience(audience any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GrantAudience", reflect.TypeOf((*MockAuthorizeRequester)(nil).GrantAudience), audience) +} + +// GrantScope mocks base method. +func (m *MockAuthorizeRequester) GrantScope(scope string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "GrantScope", scope) +} + +// GrantScope indicates an expected call of GrantScope. +func (mr *MockAuthorizeRequesterMockRecorder) GrantScope(scope any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GrantScope", reflect.TypeOf((*MockAuthorizeRequester)(nil).GrantScope), scope) +} + +// IsRedirectURIValid mocks base method. +func (m *MockAuthorizeRequester) IsRedirectURIValid() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsRedirectURIValid") + ret0, _ := ret[0].(bool) + return ret0 +} + +// IsRedirectURIValid indicates an expected call of IsRedirectURIValid. +func (mr *MockAuthorizeRequesterMockRecorder) IsRedirectURIValid() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsRedirectURIValid", reflect.TypeOf((*MockAuthorizeRequester)(nil).IsRedirectURIValid)) +} + +// Merge mocks base method. +func (m *MockAuthorizeRequester) Merge(requester fosite.Requester) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "Merge", requester) +} + +// Merge indicates an expected call of Merge. +func (mr *MockAuthorizeRequesterMockRecorder) Merge(requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Merge", reflect.TypeOf((*MockAuthorizeRequester)(nil).Merge), requester) +} + +// Sanitize mocks base method. +func (m *MockAuthorizeRequester) Sanitize(allowedParameters []string) fosite.Requester { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Sanitize", allowedParameters) + ret0, _ := ret[0].(fosite.Requester) + return ret0 +} + +// Sanitize indicates an expected call of Sanitize. +func (mr *MockAuthorizeRequesterMockRecorder) Sanitize(allowedParameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Sanitize", reflect.TypeOf((*MockAuthorizeRequester)(nil).Sanitize), allowedParameters) +} + +// SetDefaultResponseMode mocks base method. +func (m *MockAuthorizeRequester) SetDefaultResponseMode(responseMode fosite.ResponseModeType) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetDefaultResponseMode", responseMode) +} + +// SetDefaultResponseMode indicates an expected call of SetDefaultResponseMode. +func (mr *MockAuthorizeRequesterMockRecorder) SetDefaultResponseMode(responseMode any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetDefaultResponseMode", reflect.TypeOf((*MockAuthorizeRequester)(nil).SetDefaultResponseMode), responseMode) +} + +// SetID mocks base method. 
+func (m *MockAuthorizeRequester) SetID(id string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetID", id) +} + +// SetID indicates an expected call of SetID. +func (mr *MockAuthorizeRequesterMockRecorder) SetID(id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetID", reflect.TypeOf((*MockAuthorizeRequester)(nil).SetID), id) +} + +// SetRequestedAudience mocks base method. +func (m *MockAuthorizeRequester) SetRequestedAudience(audience fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetRequestedAudience", audience) +} + +// SetRequestedAudience indicates an expected call of SetRequestedAudience. +func (mr *MockAuthorizeRequesterMockRecorder) SetRequestedAudience(audience any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetRequestedAudience", reflect.TypeOf((*MockAuthorizeRequester)(nil).SetRequestedAudience), audience) +} + +// SetRequestedScopes mocks base method. +func (m *MockAuthorizeRequester) SetRequestedScopes(scopes fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetRequestedScopes", scopes) +} + +// SetRequestedScopes indicates an expected call of SetRequestedScopes. +func (mr *MockAuthorizeRequesterMockRecorder) SetRequestedScopes(scopes any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetRequestedScopes", reflect.TypeOf((*MockAuthorizeRequester)(nil).SetRequestedScopes), scopes) +} + +// SetResponseTypeHandled mocks base method. +func (m *MockAuthorizeRequester) SetResponseTypeHandled(responseType string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetResponseTypeHandled", responseType) +} + +// SetResponseTypeHandled indicates an expected call of SetResponseTypeHandled. +func (mr *MockAuthorizeRequesterMockRecorder) SetResponseTypeHandled(responseType any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetResponseTypeHandled", reflect.TypeOf((*MockAuthorizeRequester)(nil).SetResponseTypeHandled), responseType) +} + +// SetSession mocks base method. +func (m *MockAuthorizeRequester) SetSession(session fosite.Session) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetSession", session) +} + +// SetSession indicates an expected call of SetSession. +func (mr *MockAuthorizeRequesterMockRecorder) SetSession(session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetSession", reflect.TypeOf((*MockAuthorizeRequester)(nil).SetSession), session) +} diff --git a/fosite/internal/authorize_response.go b/fosite/internal/authorize_response.go new file mode 100644 index 00000000000..a57b1e0b925 --- /dev/null +++ b/fosite/internal/authorize_response.go @@ -0,0 +1,111 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: AuthorizeResponder) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/authorize_response.go github.com/ory/hydra/v2/fosite AuthorizeResponder +// + +// Package internal is a generated GoMock package. +package internal + +import ( + http "net/http" + url "net/url" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockAuthorizeResponder is a mock of AuthorizeResponder interface. 
+type MockAuthorizeResponder struct { + ctrl *gomock.Controller + recorder *MockAuthorizeResponderMockRecorder + isgomock struct{} +} + +// MockAuthorizeResponderMockRecorder is the mock recorder for MockAuthorizeResponder. +type MockAuthorizeResponderMockRecorder struct { + mock *MockAuthorizeResponder +} + +// NewMockAuthorizeResponder creates a new mock instance. +func NewMockAuthorizeResponder(ctrl *gomock.Controller) *MockAuthorizeResponder { + mock := &MockAuthorizeResponder{ctrl: ctrl} + mock.recorder = &MockAuthorizeResponderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAuthorizeResponder) EXPECT() *MockAuthorizeResponderMockRecorder { + return m.recorder +} + +// AddHeader mocks base method. +func (m *MockAuthorizeResponder) AddHeader(key, value string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "AddHeader", key, value) +} + +// AddHeader indicates an expected call of AddHeader. +func (mr *MockAuthorizeResponderMockRecorder) AddHeader(key, value any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AddHeader", reflect.TypeOf((*MockAuthorizeResponder)(nil).AddHeader), key, value) +} + +// AddParameter mocks base method. +func (m *MockAuthorizeResponder) AddParameter(key, value string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "AddParameter", key, value) +} + +// AddParameter indicates an expected call of AddParameter. +func (mr *MockAuthorizeResponderMockRecorder) AddParameter(key, value any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AddParameter", reflect.TypeOf((*MockAuthorizeResponder)(nil).AddParameter), key, value) +} + +// GetCode mocks base method. +func (m *MockAuthorizeResponder) GetCode() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetCode") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetCode indicates an expected call of GetCode. +func (mr *MockAuthorizeResponderMockRecorder) GetCode() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCode", reflect.TypeOf((*MockAuthorizeResponder)(nil).GetCode)) +} + +// GetHeader mocks base method. +func (m *MockAuthorizeResponder) GetHeader() http.Header { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetHeader") + ret0, _ := ret[0].(http.Header) + return ret0 +} + +// GetHeader indicates an expected call of GetHeader. +func (mr *MockAuthorizeResponderMockRecorder) GetHeader() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHeader", reflect.TypeOf((*MockAuthorizeResponder)(nil).GetHeader)) +} + +// GetParameters mocks base method. +func (m *MockAuthorizeResponder) GetParameters() url.Values { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetParameters") + ret0, _ := ret[0].(url.Values) + return ret0 +} + +// GetParameters indicates an expected call of GetParameters. +func (mr *MockAuthorizeResponderMockRecorder) GetParameters() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetParameters", reflect.TypeOf((*MockAuthorizeResponder)(nil).GetParameters)) +} diff --git a/fosite/internal/client.go b/fosite/internal/client.go new file mode 100644 index 00000000000..3d245b8fe4e --- /dev/null +++ b/fosite/internal/client.go @@ -0,0 +1,157 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite (interfaces: Client) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/client.go github.com/ory/hydra/v2/fosite Client +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockClient is a mock of Client interface. +type MockClient struct { + ctrl *gomock.Controller + recorder *MockClientMockRecorder + isgomock struct{} +} + +// MockClientMockRecorder is the mock recorder for MockClient. +type MockClientMockRecorder struct { + mock *MockClient +} + +// NewMockClient creates a new mock instance. +func NewMockClient(ctrl *gomock.Controller) *MockClient { + mock := &MockClient{ctrl: ctrl} + mock.recorder = &MockClientMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockClient) EXPECT() *MockClientMockRecorder { + return m.recorder +} + +// GetAudience mocks base method. +func (m *MockClient) GetAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetAudience indicates an expected call of GetAudience. +func (mr *MockClientMockRecorder) GetAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAudience", reflect.TypeOf((*MockClient)(nil).GetAudience)) +} + +// GetGrantTypes mocks base method. +func (m *MockClient) GetGrantTypes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantTypes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantTypes indicates an expected call of GetGrantTypes. +func (mr *MockClientMockRecorder) GetGrantTypes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantTypes", reflect.TypeOf((*MockClient)(nil).GetGrantTypes)) +} + +// GetHashedSecret mocks base method. +func (m *MockClient) GetHashedSecret() []byte { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetHashedSecret") + ret0, _ := ret[0].([]byte) + return ret0 +} + +// GetHashedSecret indicates an expected call of GetHashedSecret. +func (mr *MockClientMockRecorder) GetHashedSecret() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHashedSecret", reflect.TypeOf((*MockClient)(nil).GetHashedSecret)) +} + +// GetID mocks base method. +func (m *MockClient) GetID() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetID") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetID indicates an expected call of GetID. +func (mr *MockClientMockRecorder) GetID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetID", reflect.TypeOf((*MockClient)(nil).GetID)) +} + +// GetRedirectURIs mocks base method. +func (m *MockClient) GetRedirectURIs() []string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRedirectURIs") + ret0, _ := ret[0].([]string) + return ret0 +} + +// GetRedirectURIs indicates an expected call of GetRedirectURIs. +func (mr *MockClientMockRecorder) GetRedirectURIs() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRedirectURIs", reflect.TypeOf((*MockClient)(nil).GetRedirectURIs)) +} + +// GetResponseTypes mocks base method. 
+func (m *MockClient) GetResponseTypes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetResponseTypes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetResponseTypes indicates an expected call of GetResponseTypes. +func (mr *MockClientMockRecorder) GetResponseTypes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetResponseTypes", reflect.TypeOf((*MockClient)(nil).GetResponseTypes)) +} + +// GetScopes mocks base method. +func (m *MockClient) GetScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetScopes indicates an expected call of GetScopes. +func (mr *MockClientMockRecorder) GetScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetScopes", reflect.TypeOf((*MockClient)(nil).GetScopes)) +} + +// IsPublic mocks base method. +func (m *MockClient) IsPublic() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsPublic") + ret0, _ := ret[0].(bool) + return ret0 +} + +// IsPublic indicates an expected call of IsPublic. +func (mr *MockClientMockRecorder) IsPublic() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsPublic", reflect.TypeOf((*MockClient)(nil).IsPublic)) +} diff --git a/fosite/internal/client_manager.go b/fosite/internal/client_manager.go new file mode 100644 index 00000000000..dc05c4fabf7 --- /dev/null +++ b/fosite/internal/client_manager.go @@ -0,0 +1,90 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: ClientManager) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/client_manager.go github.com/ory/hydra/v2/fosite ClientManager +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockClientManager is a mock of ClientManager interface. +type MockClientManager struct { + ctrl *gomock.Controller + recorder *MockClientManagerMockRecorder + isgomock struct{} +} + +// MockClientManagerMockRecorder is the mock recorder for MockClientManager. +type MockClientManagerMockRecorder struct { + mock *MockClientManager +} + +// NewMockClientManager creates a new mock instance. +func NewMockClientManager(ctrl *gomock.Controller) *MockClientManager { + mock := &MockClientManager{ctrl: ctrl} + mock.recorder = &MockClientManagerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockClientManager) EXPECT() *MockClientManagerMockRecorder { + return m.recorder +} + +// ClientAssertionJWTValid mocks base method. +func (m *MockClientManager) ClientAssertionJWTValid(ctx context.Context, jti string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ClientAssertionJWTValid", ctx, jti) + ret0, _ := ret[0].(error) + return ret0 +} + +// ClientAssertionJWTValid indicates an expected call of ClientAssertionJWTValid. 
+func (mr *MockClientManagerMockRecorder) ClientAssertionJWTValid(ctx, jti any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ClientAssertionJWTValid", reflect.TypeOf((*MockClientManager)(nil).ClientAssertionJWTValid), ctx, jti) +} + +// GetClient mocks base method. +func (m *MockClientManager) GetClient(ctx context.Context, id string) (fosite.Client, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetClient", ctx, id) + ret0, _ := ret[0].(fosite.Client) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetClient indicates an expected call of GetClient. +func (mr *MockClientManagerMockRecorder) GetClient(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClient", reflect.TypeOf((*MockClientManager)(nil).GetClient), ctx, id) +} + +// SetClientAssertionJWT mocks base method. +func (m *MockClientManager) SetClientAssertionJWT(ctx context.Context, jti string, exp time.Time) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SetClientAssertionJWT", ctx, jti, exp) + ret0, _ := ret[0].(error) + return ret0 +} + +// SetClientAssertionJWT indicates an expected call of SetClientAssertionJWT. +func (mr *MockClientManagerMockRecorder) SetClientAssertionJWT(ctx, jti, exp any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetClientAssertionJWT", reflect.TypeOf((*MockClientManager)(nil).SetClientAssertionJWT), ctx, jti, exp) +} diff --git a/fosite/internal/device_auth_storage.go b/fosite/internal/device_auth_storage.go new file mode 100644 index 00000000000..631f873ce76 --- /dev/null +++ b/fosite/internal/device_auth_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: DeviceAuthStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/device_auth_storage.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceAuthStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockDeviceAuthStorage is a mock of DeviceAuthStorage interface. +type MockDeviceAuthStorage struct { + ctrl *gomock.Controller + recorder *MockDeviceAuthStorageMockRecorder + isgomock struct{} +} + +// MockDeviceAuthStorageMockRecorder is the mock recorder for MockDeviceAuthStorage. +type MockDeviceAuthStorageMockRecorder struct { + mock *MockDeviceAuthStorage +} + +// NewMockDeviceAuthStorage creates a new mock instance. +func NewMockDeviceAuthStorage(ctrl *gomock.Controller) *MockDeviceAuthStorage { + mock := &MockDeviceAuthStorage{ctrl: ctrl} + mock.recorder = &MockDeviceAuthStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDeviceAuthStorage) EXPECT() *MockDeviceAuthStorageMockRecorder { + return m.recorder +} + +// CreateDeviceAuthSession mocks base method. 
+func (m *MockDeviceAuthStorage) CreateDeviceAuthSession(ctx context.Context, deviceCodeSignature, userCodeSignature string, request fosite.DeviceRequester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateDeviceAuthSession", ctx, deviceCodeSignature, userCodeSignature, request) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateDeviceAuthSession indicates an expected call of CreateDeviceAuthSession. +func (mr *MockDeviceAuthStorageMockRecorder) CreateDeviceAuthSession(ctx, deviceCodeSignature, userCodeSignature, request any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateDeviceAuthSession", reflect.TypeOf((*MockDeviceAuthStorage)(nil).CreateDeviceAuthSession), ctx, deviceCodeSignature, userCodeSignature, request) +} + +// GetDeviceCodeSession mocks base method. +func (m *MockDeviceAuthStorage) GetDeviceCodeSession(ctx context.Context, signature string, session fosite.Session) (fosite.DeviceRequester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetDeviceCodeSession", ctx, signature, session) + ret0, _ := ret[0].(fosite.DeviceRequester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetDeviceCodeSession indicates an expected call of GetDeviceCodeSession. +func (mr *MockDeviceAuthStorageMockRecorder) GetDeviceCodeSession(ctx, signature, session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDeviceCodeSession", reflect.TypeOf((*MockDeviceAuthStorage)(nil).GetDeviceCodeSession), ctx, signature, session) +} + +// InvalidateDeviceCodeSession mocks base method. +func (m *MockDeviceAuthStorage) InvalidateDeviceCodeSession(ctx context.Context, signature string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InvalidateDeviceCodeSession", ctx, signature) + ret0, _ := ret[0].(error) + return ret0 +} + +// InvalidateDeviceCodeSession indicates an expected call of InvalidateDeviceCodeSession. +func (mr *MockDeviceAuthStorageMockRecorder) InvalidateDeviceCodeSession(ctx, signature any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InvalidateDeviceCodeSession", reflect.TypeOf((*MockDeviceAuthStorage)(nil).InvalidateDeviceCodeSession), ctx, signature) +} diff --git a/fosite/internal/device_auth_storage_provider.go b/fosite/internal/device_auth_storage_provider.go new file mode 100644 index 00000000000..aab1eebb5b1 --- /dev/null +++ b/fosite/internal/device_auth_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: DeviceAuthStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/device_auth_storage_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceAuthStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + rfc8628 "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// MockDeviceAuthStorageProvider is a mock of DeviceAuthStorageProvider interface. +type MockDeviceAuthStorageProvider struct { + ctrl *gomock.Controller + recorder *MockDeviceAuthStorageProviderMockRecorder + isgomock struct{} +} + +// MockDeviceAuthStorageProviderMockRecorder is the mock recorder for MockDeviceAuthStorageProvider. 
+type MockDeviceAuthStorageProviderMockRecorder struct { + mock *MockDeviceAuthStorageProvider +} + +// NewMockDeviceAuthStorageProvider creates a new mock instance. +func NewMockDeviceAuthStorageProvider(ctrl *gomock.Controller) *MockDeviceAuthStorageProvider { + mock := &MockDeviceAuthStorageProvider{ctrl: ctrl} + mock.recorder = &MockDeviceAuthStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDeviceAuthStorageProvider) EXPECT() *MockDeviceAuthStorageProviderMockRecorder { + return m.recorder +} + +// DeviceAuthStorage mocks base method. +func (m *MockDeviceAuthStorageProvider) DeviceAuthStorage() rfc8628.DeviceAuthStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeviceAuthStorage") + ret0, _ := ret[0].(rfc8628.DeviceAuthStorage) + return ret0 +} + +// DeviceAuthStorage indicates an expected call of DeviceAuthStorage. +func (mr *MockDeviceAuthStorageProviderMockRecorder) DeviceAuthStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeviceAuthStorage", reflect.TypeOf((*MockDeviceAuthStorageProvider)(nil).DeviceAuthStorage)) +} diff --git a/fosite/internal/device_code_strategy.go b/fosite/internal/device_code_strategy.go new file mode 100644 index 00000000000..89c999c405c --- /dev/null +++ b/fosite/internal/device_code_strategy.go @@ -0,0 +1,91 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: DeviceCodeStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/device_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceCodeStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockDeviceCodeStrategy is a mock of DeviceCodeStrategy interface. +type MockDeviceCodeStrategy struct { + ctrl *gomock.Controller + recorder *MockDeviceCodeStrategyMockRecorder + isgomock struct{} +} + +// MockDeviceCodeStrategyMockRecorder is the mock recorder for MockDeviceCodeStrategy. +type MockDeviceCodeStrategyMockRecorder struct { + mock *MockDeviceCodeStrategy +} + +// NewMockDeviceCodeStrategy creates a new mock instance. +func NewMockDeviceCodeStrategy(ctrl *gomock.Controller) *MockDeviceCodeStrategy { + mock := &MockDeviceCodeStrategy{ctrl: ctrl} + mock.recorder = &MockDeviceCodeStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDeviceCodeStrategy) EXPECT() *MockDeviceCodeStrategyMockRecorder { + return m.recorder +} + +// DeviceCodeSignature mocks base method. +func (m *MockDeviceCodeStrategy) DeviceCodeSignature(ctx context.Context, code string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeviceCodeSignature", ctx, code) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeviceCodeSignature indicates an expected call of DeviceCodeSignature. 
+func (mr *MockDeviceCodeStrategyMockRecorder) DeviceCodeSignature(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeviceCodeSignature", reflect.TypeOf((*MockDeviceCodeStrategy)(nil).DeviceCodeSignature), ctx, code) +} + +// GenerateDeviceCode mocks base method. +func (m *MockDeviceCodeStrategy) GenerateDeviceCode(ctx context.Context) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateDeviceCode", ctx) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateDeviceCode indicates an expected call of GenerateDeviceCode. +func (mr *MockDeviceCodeStrategyMockRecorder) GenerateDeviceCode(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateDeviceCode", reflect.TypeOf((*MockDeviceCodeStrategy)(nil).GenerateDeviceCode), ctx) +} + +// ValidateDeviceCode mocks base method. +func (m *MockDeviceCodeStrategy) ValidateDeviceCode(ctx context.Context, r fosite.DeviceRequester, code string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateDeviceCode", ctx, r, code) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateDeviceCode indicates an expected call of ValidateDeviceCode. +func (mr *MockDeviceCodeStrategyMockRecorder) ValidateDeviceCode(ctx, r, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateDeviceCode", reflect.TypeOf((*MockDeviceCodeStrategy)(nil).ValidateDeviceCode), ctx, r, code) +} diff --git a/fosite/internal/device_code_strategy_provider.go b/fosite/internal/device_code_strategy_provider.go new file mode 100644 index 00000000000..47389dd230e --- /dev/null +++ b/fosite/internal/device_code_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: DeviceCodeStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/device_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceCodeStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + rfc8628 "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// MockDeviceCodeStrategyProvider is a mock of DeviceCodeStrategyProvider interface. +type MockDeviceCodeStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockDeviceCodeStrategyProviderMockRecorder + isgomock struct{} +} + +// MockDeviceCodeStrategyProviderMockRecorder is the mock recorder for MockDeviceCodeStrategyProvider. +type MockDeviceCodeStrategyProviderMockRecorder struct { + mock *MockDeviceCodeStrategyProvider +} + +// NewMockDeviceCodeStrategyProvider creates a new mock instance. +func NewMockDeviceCodeStrategyProvider(ctrl *gomock.Controller) *MockDeviceCodeStrategyProvider { + mock := &MockDeviceCodeStrategyProvider{ctrl: ctrl} + mock.recorder = &MockDeviceCodeStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDeviceCodeStrategyProvider) EXPECT() *MockDeviceCodeStrategyProviderMockRecorder { + return m.recorder +} + +// DeviceCodeStrategy mocks base method. 
+func (m *MockDeviceCodeStrategyProvider) DeviceCodeStrategy() rfc8628.DeviceCodeStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeviceCodeStrategy") + ret0, _ := ret[0].(rfc8628.DeviceCodeStrategy) + return ret0 +} + +// DeviceCodeStrategy indicates an expected call of DeviceCodeStrategy. +func (mr *MockDeviceCodeStrategyProviderMockRecorder) DeviceCodeStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeviceCodeStrategy", reflect.TypeOf((*MockDeviceCodeStrategyProvider)(nil).DeviceCodeStrategy)) +} diff --git a/fosite/internal/device_rate_limit_strategy.go b/fosite/internal/device_rate_limit_strategy.go new file mode 100644 index 00000000000..46d90e3f419 --- /dev/null +++ b/fosite/internal/device_rate_limit_strategy.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: DeviceRateLimitStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/device_rate_limit_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceRateLimitStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockDeviceRateLimitStrategy is a mock of DeviceRateLimitStrategy interface. +type MockDeviceRateLimitStrategy struct { + ctrl *gomock.Controller + recorder *MockDeviceRateLimitStrategyMockRecorder + isgomock struct{} +} + +// MockDeviceRateLimitStrategyMockRecorder is the mock recorder for MockDeviceRateLimitStrategy. +type MockDeviceRateLimitStrategyMockRecorder struct { + mock *MockDeviceRateLimitStrategy +} + +// NewMockDeviceRateLimitStrategy creates a new mock instance. +func NewMockDeviceRateLimitStrategy(ctrl *gomock.Controller) *MockDeviceRateLimitStrategy { + mock := &MockDeviceRateLimitStrategy{ctrl: ctrl} + mock.recorder = &MockDeviceRateLimitStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDeviceRateLimitStrategy) EXPECT() *MockDeviceRateLimitStrategyMockRecorder { + return m.recorder +} + +// ShouldRateLimit mocks base method. +func (m *MockDeviceRateLimitStrategy) ShouldRateLimit(ctx context.Context, code string) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ShouldRateLimit", ctx, code) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ShouldRateLimit indicates an expected call of ShouldRateLimit. +func (mr *MockDeviceRateLimitStrategyMockRecorder) ShouldRateLimit(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ShouldRateLimit", reflect.TypeOf((*MockDeviceRateLimitStrategy)(nil).ShouldRateLimit), ctx, code) +} diff --git a/fosite/internal/device_rate_limit_strategy_provider.go b/fosite/internal/device_rate_limit_strategy_provider.go new file mode 100644 index 00000000000..570da18181c --- /dev/null +++ b/fosite/internal/device_rate_limit_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: DeviceRateLimitStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/device_rate_limit_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 DeviceRateLimitStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + rfc8628 "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// MockDeviceRateLimitStrategyProvider is a mock of DeviceRateLimitStrategyProvider interface. +type MockDeviceRateLimitStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockDeviceRateLimitStrategyProviderMockRecorder + isgomock struct{} +} + +// MockDeviceRateLimitStrategyProviderMockRecorder is the mock recorder for MockDeviceRateLimitStrategyProvider. +type MockDeviceRateLimitStrategyProviderMockRecorder struct { + mock *MockDeviceRateLimitStrategyProvider +} + +// NewMockDeviceRateLimitStrategyProvider creates a new mock instance. +func NewMockDeviceRateLimitStrategyProvider(ctrl *gomock.Controller) *MockDeviceRateLimitStrategyProvider { + mock := &MockDeviceRateLimitStrategyProvider{ctrl: ctrl} + mock.recorder = &MockDeviceRateLimitStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDeviceRateLimitStrategyProvider) EXPECT() *MockDeviceRateLimitStrategyProviderMockRecorder { + return m.recorder +} + +// DeviceRateLimitStrategy mocks base method. +func (m *MockDeviceRateLimitStrategyProvider) DeviceRateLimitStrategy() rfc8628.DeviceRateLimitStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeviceRateLimitStrategy") + ret0, _ := ret[0].(rfc8628.DeviceRateLimitStrategy) + return ret0 +} + +// DeviceRateLimitStrategy indicates an expected call of DeviceRateLimitStrategy. +func (mr *MockDeviceRateLimitStrategyProviderMockRecorder) DeviceRateLimitStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeviceRateLimitStrategy", reflect.TypeOf((*MockDeviceRateLimitStrategyProvider)(nil).DeviceRateLimitStrategy)) +} diff --git a/fosite/internal/gen/key.go b/fosite/internal/gen/key.go new file mode 100644 index 00000000000..35edfa61e92 --- /dev/null +++ b/fosite/internal/gen/key.go @@ -0,0 +1,36 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package gen + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/rsa" +) + +func MustRSAKey() *rsa.PrivateKey { + // #nosec + key, err := rsa.GenerateKey(rand.Reader, 1024) + if err != nil { + panic(err) + } + return key +} + +func MustES256Key() *ecdsa.PrivateKey { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + panic(err) + } + return key +} + +func MustES521Key() *ecdsa.PrivateKey { + key, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + if err != nil { + panic(err) + } + return key +} diff --git a/fosite/internal/hash.go b/fosite/internal/hash.go new file mode 100644 index 00000000000..bf5be35ef47 --- /dev/null +++ b/fosite/internal/hash.go @@ -0,0 +1,73 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite (interfaces: Hasher) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/hash.go github.com/ory/hydra/v2/fosite Hasher +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockHasher is a mock of Hasher interface. +type MockHasher struct { + ctrl *gomock.Controller + recorder *MockHasherMockRecorder + isgomock struct{} +} + +// MockHasherMockRecorder is the mock recorder for MockHasher. +type MockHasherMockRecorder struct { + mock *MockHasher +} + +// NewMockHasher creates a new mock instance. +func NewMockHasher(ctrl *gomock.Controller) *MockHasher { + mock := &MockHasher{ctrl: ctrl} + mock.recorder = &MockHasherMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockHasher) EXPECT() *MockHasherMockRecorder { + return m.recorder +} + +// Compare mocks base method. +func (m *MockHasher) Compare(ctx context.Context, hash, data []byte) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Compare", ctx, hash, data) + ret0, _ := ret[0].(error) + return ret0 +} + +// Compare indicates an expected call of Compare. +func (mr *MockHasherMockRecorder) Compare(ctx, hash, data any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Compare", reflect.TypeOf((*MockHasher)(nil).Compare), ctx, hash, data) +} + +// Hash mocks base method. +func (m *MockHasher) Hash(ctx context.Context, data []byte) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Hash", ctx, data) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Hash indicates an expected call of Hash. +func (mr *MockHasherMockRecorder) Hash(ctx, data any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Hash", reflect.TypeOf((*MockHasher)(nil).Hash), ctx, data) +} diff --git a/fosite/internal/oauth2_client_storage.go b/fosite/internal/oauth2_client_storage.go new file mode 100644 index 00000000000..cf64ddfe796 --- /dev/null +++ b/fosite/internal/oauth2_client_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: ClientCredentialsGrantStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/oauth2_client_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 ClientCredentialsGrantStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockClientCredentialsGrantStorage is a mock of ClientCredentialsGrantStorage interface. +type MockClientCredentialsGrantStorage struct { + ctrl *gomock.Controller + recorder *MockClientCredentialsGrantStorageMockRecorder + isgomock struct{} +} + +// MockClientCredentialsGrantStorageMockRecorder is the mock recorder for MockClientCredentialsGrantStorage. +type MockClientCredentialsGrantStorageMockRecorder struct { + mock *MockClientCredentialsGrantStorage +} + +// NewMockClientCredentialsGrantStorage creates a new mock instance. 
+func NewMockClientCredentialsGrantStorage(ctrl *gomock.Controller) *MockClientCredentialsGrantStorage { + mock := &MockClientCredentialsGrantStorage{ctrl: ctrl} + mock.recorder = &MockClientCredentialsGrantStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockClientCredentialsGrantStorage) EXPECT() *MockClientCredentialsGrantStorageMockRecorder { + return m.recorder +} + +// CreateAccessTokenSession mocks base method. +func (m *MockClientCredentialsGrantStorage) CreateAccessTokenSession(ctx context.Context, signature string, request fosite.Requester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateAccessTokenSession", ctx, signature, request) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateAccessTokenSession indicates an expected call of CreateAccessTokenSession. +func (mr *MockClientCredentialsGrantStorageMockRecorder) CreateAccessTokenSession(ctx, signature, request any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAccessTokenSession", reflect.TypeOf((*MockClientCredentialsGrantStorage)(nil).CreateAccessTokenSession), ctx, signature, request) +} + +// DeleteAccessTokenSession mocks base method. +func (m *MockClientCredentialsGrantStorage) DeleteAccessTokenSession(ctx context.Context, signature string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccessTokenSession", ctx, signature) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccessTokenSession indicates an expected call of DeleteAccessTokenSession. +func (mr *MockClientCredentialsGrantStorageMockRecorder) DeleteAccessTokenSession(ctx, signature any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccessTokenSession", reflect.TypeOf((*MockClientCredentialsGrantStorage)(nil).DeleteAccessTokenSession), ctx, signature) +} + +// GetAccessTokenSession mocks base method. +func (m *MockClientCredentialsGrantStorage) GetAccessTokenSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccessTokenSession", ctx, signature, session) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccessTokenSession indicates an expected call of GetAccessTokenSession. +func (mr *MockClientCredentialsGrantStorageMockRecorder) GetAccessTokenSession(ctx, signature, session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccessTokenSession", reflect.TypeOf((*MockClientCredentialsGrantStorage)(nil).GetAccessTokenSession), ctx, signature, session) +} diff --git a/fosite/internal/oauth2_explicit_storage.go b/fosite/internal/oauth2_explicit_storage.go new file mode 100644 index 00000000000..2f63b9e09b1 --- /dev/null +++ b/fosite/internal/oauth2_explicit_storage.go @@ -0,0 +1,77 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Automatically generated by MockGen. DO NOT EDIT! 
+// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: AuthorizeCodeGrantStorage) + +package internal + +import ( + "context" + + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" +) + +// Mock of AuthorizeCodeGrantStorage interface +type MockAuthorizeCodeGrantStorage struct { + ctrl *gomock.Controller + recorder *_MockAuthorizeCodeGrantStorageRecorder +} + +// Recorder for MockAuthorizeCodeGrantStorage (not exported) +type _MockAuthorizeCodeGrantStorageRecorder struct { + mock *MockAuthorizeCodeGrantStorage +} + +func NewMockAuthorizeCodeGrantStorage(ctrl *gomock.Controller) *MockAuthorizeCodeGrantStorage { + mock := &MockAuthorizeCodeGrantStorage{ctrl: ctrl} + mock.recorder = &_MockAuthorizeCodeGrantStorageRecorder{mock} + return mock +} + +func (_m *MockAuthorizeCodeGrantStorage) EXPECT() *_MockAuthorizeCodeGrantStorageRecorder { + return _m.recorder +} + +func (_m *MockAuthorizeCodeGrantStorage) CreateAuthorizeCodeSession(_param0 context.Context, _param1 string, _param2 fosite.Requester) error { + ret := _m.ctrl.Call(_m, "CreateAuthorizeCodeSession", _param0, _param1, _param2) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockAuthorizeCodeGrantStorageRecorder) CreateAuthorizeCodeSession(arg0, arg1, arg2 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "CreateAuthorizeCodeSession", arg0, arg1, arg2) +} + +func (_m *MockAuthorizeCodeGrantStorage) DeleteAuthorizeCodeSession(_param0 context.Context, _param1 string) error { + ret := _m.ctrl.Call(_m, "DeleteAuthorizeCodeSession", _param0, _param1) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockAuthorizeCodeGrantStorageRecorder) DeleteAuthorizeCodeSession(arg0, arg1 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "DeleteAuthorizeCodeSession", arg0, arg1) +} + +func (_m *MockAuthorizeCodeGrantStorage) GetAuthorizeCodeSession(_param0 context.Context, _param1 string, _param2 fosite.Session) (fosite.Requester, error) { + ret := _m.ctrl.Call(_m, "GetAuthorizeCodeSession", _param0, _param1, _param2) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockAuthorizeCodeGrantStorageRecorder) GetAuthorizeCodeSession(arg0, arg1, arg2 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "GetAuthorizeCodeSession", arg0, arg1, arg2) +} + +func (_m *MockAuthorizeCodeGrantStorage) PersistAuthorizeCodeGrantSession(_param0 context.Context, _param1 string, _param2 string, _param3 string, _param4 fosite.Requester) error { + ret := _m.ctrl.Call(_m, "PersistAuthorizeCodeGrantSession", _param0, _param1, _param2, _param3, _param4) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockAuthorizeCodeGrantStorageRecorder) PersistAuthorizeCodeGrantSession(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "PersistAuthorizeCodeGrantSession", arg0, arg1, arg2, arg3, arg4) +} diff --git a/fosite/internal/oauth2_refresh_storage.go b/fosite/internal/oauth2_refresh_storage.go new file mode 100644 index 00000000000..6cb57bca7b4 --- /dev/null +++ b/fosite/internal/oauth2_refresh_storage.go @@ -0,0 +1,77 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Automatically generated by MockGen. DO NOT EDIT! 
+// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: RefreshTokenGrantStorage) + +package internal + +import ( + "context" + + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" +) + +// Mock of RefreshTokenGrantStorage interface +type MockRefreshTokenGrantStorage struct { + ctrl *gomock.Controller + recorder *_MockRefreshTokenGrantStorageRecorder +} + +// Recorder for MockRefreshTokenGrantStorage (not exported) +type _MockRefreshTokenGrantStorageRecorder struct { + mock *MockRefreshTokenGrantStorage +} + +func NewMockRefreshTokenGrantStorage(ctrl *gomock.Controller) *MockRefreshTokenGrantStorage { + mock := &MockRefreshTokenGrantStorage{ctrl: ctrl} + mock.recorder = &_MockRefreshTokenGrantStorageRecorder{mock} + return mock +} + +func (_m *MockRefreshTokenGrantStorage) EXPECT() *_MockRefreshTokenGrantStorageRecorder { + return _m.recorder +} + +func (_m *MockRefreshTokenGrantStorage) CreateRefreshTokenSession(_param0 context.Context, _param1 string, _param2 fosite.Requester) error { + ret := _m.ctrl.Call(_m, "CreateRefreshTokenSession", _param0, _param1, _param2) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRefreshTokenGrantStorageRecorder) CreateRefreshTokenSession(arg0, arg1, arg2 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "CreateRefreshTokenSession", arg0, arg1, arg2) +} + +func (_m *MockRefreshTokenGrantStorage) DeleteRefreshTokenSession(_param0 context.Context, _param1 string) error { + ret := _m.ctrl.Call(_m, "DeleteRefreshTokenSession", _param0, _param1) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRefreshTokenGrantStorageRecorder) DeleteRefreshTokenSession(arg0, arg1 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "DeleteRefreshTokenSession", arg0, arg1) +} + +func (_m *MockRefreshTokenGrantStorage) GetRefreshTokenSession(_param0 context.Context, _param1 string, _param2 fosite.Session) (fosite.Requester, error) { + ret := _m.ctrl.Call(_m, "GetRefreshTokenSession", _param0, _param1, _param2) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRefreshTokenGrantStorageRecorder) GetRefreshTokenSession(arg0, arg1, arg2 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "GetRefreshTokenSession", arg0, arg1, arg2) +} + +func (_m *MockRefreshTokenGrantStorage) PersistRefreshTokenGrantSession(_param0 context.Context, _param1 string, _param2 string, _param3 string, _param4 fosite.Requester) error { + ret := _m.ctrl.Call(_m, "PersistRefreshTokenGrantSession", _param0, _param1, _param2, _param3, _param4) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRefreshTokenGrantStorageRecorder) PersistRefreshTokenGrantSession(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "PersistRefreshTokenGrantSession", arg0, arg1, arg2, arg3, arg4) +} diff --git a/fosite/internal/oauth2_storage.go b/fosite/internal/oauth2_storage.go new file mode 100644 index 00000000000..bb097d53ae9 --- /dev/null +++ b/fosite/internal/oauth2_storage.go @@ -0,0 +1,87 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: CoreStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/oauth2_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 CoreStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockCoreStorage is a mock of CoreStorage interface. +type MockCoreStorage struct { + ctrl *gomock.Controller + recorder *MockCoreStorageMockRecorder + isgomock struct{} +} + +// MockCoreStorageMockRecorder is the mock recorder for MockCoreStorage. +type MockCoreStorageMockRecorder struct { + mock *MockCoreStorage +} + +// NewMockCoreStorage creates a new mock instance. +func NewMockCoreStorage(ctrl *gomock.Controller) *MockCoreStorage { + mock := &MockCoreStorage{ctrl: ctrl} + mock.recorder = &MockCoreStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockCoreStorage) EXPECT() *MockCoreStorageMockRecorder { + return m.recorder +} + +// AccessTokenStorage mocks base method. +func (m *MockCoreStorage) AccessTokenStorage() oauth2.AccessTokenStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AccessTokenStorage") + ret0, _ := ret[0].(oauth2.AccessTokenStorage) + return ret0 +} + +// AccessTokenStorage indicates an expected call of AccessTokenStorage. +func (mr *MockCoreStorageMockRecorder) AccessTokenStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AccessTokenStorage", reflect.TypeOf((*MockCoreStorage)(nil).AccessTokenStorage)) +} + +// AuthorizeCodeStorage mocks base method. +func (m *MockCoreStorage) AuthorizeCodeStorage() oauth2.AuthorizeCodeStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AuthorizeCodeStorage") + ret0, _ := ret[0].(oauth2.AuthorizeCodeStorage) + return ret0 +} + +// AuthorizeCodeStorage indicates an expected call of AuthorizeCodeStorage. +func (mr *MockCoreStorageMockRecorder) AuthorizeCodeStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AuthorizeCodeStorage", reflect.TypeOf((*MockCoreStorage)(nil).AuthorizeCodeStorage)) +} + +// RefreshTokenStorage mocks base method. +func (m *MockCoreStorage) RefreshTokenStorage() oauth2.RefreshTokenStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RefreshTokenStorage") + ret0, _ := ret[0].(oauth2.RefreshTokenStorage) + return ret0 +} + +// RefreshTokenStorage indicates an expected call of RefreshTokenStorage. +func (mr *MockCoreStorageMockRecorder) RefreshTokenStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RefreshTokenStorage", reflect.TypeOf((*MockCoreStorage)(nil).RefreshTokenStorage)) +} diff --git a/fosite/internal/oauth2_strategy.go b/fosite/internal/oauth2_strategy.go new file mode 100644 index 00000000000..0b417fc3d57 --- /dev/null +++ b/fosite/internal/oauth2_strategy.go @@ -0,0 +1,87 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: CoreStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/oauth2_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 CoreStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockCoreStrategy is a mock of CoreStrategy interface. +type MockCoreStrategy struct { + ctrl *gomock.Controller + recorder *MockCoreStrategyMockRecorder + isgomock struct{} +} + +// MockCoreStrategyMockRecorder is the mock recorder for MockCoreStrategy. +type MockCoreStrategyMockRecorder struct { + mock *MockCoreStrategy +} + +// NewMockCoreStrategy creates a new mock instance. +func NewMockCoreStrategy(ctrl *gomock.Controller) *MockCoreStrategy { + mock := &MockCoreStrategy{ctrl: ctrl} + mock.recorder = &MockCoreStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockCoreStrategy) EXPECT() *MockCoreStrategyMockRecorder { + return m.recorder +} + +// AccessTokenStrategy mocks base method. +func (m *MockCoreStrategy) AccessTokenStrategy() oauth2.AccessTokenStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AccessTokenStrategy") + ret0, _ := ret[0].(oauth2.AccessTokenStrategy) + return ret0 +} + +// AccessTokenStrategy indicates an expected call of AccessTokenStrategy. +func (mr *MockCoreStrategyMockRecorder) AccessTokenStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AccessTokenStrategy", reflect.TypeOf((*MockCoreStrategy)(nil).AccessTokenStrategy)) +} + +// AuthorizeCodeStrategy mocks base method. +func (m *MockCoreStrategy) AuthorizeCodeStrategy() oauth2.AuthorizeCodeStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AuthorizeCodeStrategy") + ret0, _ := ret[0].(oauth2.AuthorizeCodeStrategy) + return ret0 +} + +// AuthorizeCodeStrategy indicates an expected call of AuthorizeCodeStrategy. +func (mr *MockCoreStrategyMockRecorder) AuthorizeCodeStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AuthorizeCodeStrategy", reflect.TypeOf((*MockCoreStrategy)(nil).AuthorizeCodeStrategy)) +} + +// RefreshTokenStrategy mocks base method. +func (m *MockCoreStrategy) RefreshTokenStrategy() oauth2.RefreshTokenStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RefreshTokenStrategy") + ret0, _ := ret[0].(oauth2.RefreshTokenStrategy) + return ret0 +} + +// RefreshTokenStrategy indicates an expected call of RefreshTokenStrategy. +func (mr *MockCoreStrategyMockRecorder) RefreshTokenStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RefreshTokenStrategy", reflect.TypeOf((*MockCoreStrategy)(nil).RefreshTokenStrategy)) +} diff --git a/fosite/internal/open_id_connect_request_storage.go b/fosite/internal/open_id_connect_request_storage.go new file mode 100644 index 00000000000..5d1d77a0a3a --- /dev/null +++ b/fosite/internal/open_id_connect_request_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/openid (interfaces: OpenIDConnectRequestStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/open_id_connect_request_storage.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectRequestStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockOpenIDConnectRequestStorage is a mock of OpenIDConnectRequestStorage interface. +type MockOpenIDConnectRequestStorage struct { + ctrl *gomock.Controller + recorder *MockOpenIDConnectRequestStorageMockRecorder + isgomock struct{} +} + +// MockOpenIDConnectRequestStorageMockRecorder is the mock recorder for MockOpenIDConnectRequestStorage. +type MockOpenIDConnectRequestStorageMockRecorder struct { + mock *MockOpenIDConnectRequestStorage +} + +// NewMockOpenIDConnectRequestStorage creates a new mock instance. +func NewMockOpenIDConnectRequestStorage(ctrl *gomock.Controller) *MockOpenIDConnectRequestStorage { + mock := &MockOpenIDConnectRequestStorage{ctrl: ctrl} + mock.recorder = &MockOpenIDConnectRequestStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockOpenIDConnectRequestStorage) EXPECT() *MockOpenIDConnectRequestStorageMockRecorder { + return m.recorder +} + +// CreateOpenIDConnectSession mocks base method. +func (m *MockOpenIDConnectRequestStorage) CreateOpenIDConnectSession(ctx context.Context, authorizeCode string, requester fosite.Requester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateOpenIDConnectSession", ctx, authorizeCode, requester) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateOpenIDConnectSession indicates an expected call of CreateOpenIDConnectSession. +func (mr *MockOpenIDConnectRequestStorageMockRecorder) CreateOpenIDConnectSession(ctx, authorizeCode, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateOpenIDConnectSession", reflect.TypeOf((*MockOpenIDConnectRequestStorage)(nil).CreateOpenIDConnectSession), ctx, authorizeCode, requester) +} + +// DeleteOpenIDConnectSession mocks base method. +func (m *MockOpenIDConnectRequestStorage) DeleteOpenIDConnectSession(ctx context.Context, authorizeCode string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteOpenIDConnectSession", ctx, authorizeCode) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteOpenIDConnectSession indicates an expected call of DeleteOpenIDConnectSession. +func (mr *MockOpenIDConnectRequestStorageMockRecorder) DeleteOpenIDConnectSession(ctx, authorizeCode any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOpenIDConnectSession", reflect.TypeOf((*MockOpenIDConnectRequestStorage)(nil).DeleteOpenIDConnectSession), ctx, authorizeCode) +} + +// GetOpenIDConnectSession mocks base method. 
+func (m *MockOpenIDConnectRequestStorage) GetOpenIDConnectSession(ctx context.Context, authorizeCode string, requester fosite.Requester) (fosite.Requester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetOpenIDConnectSession", ctx, authorizeCode, requester) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetOpenIDConnectSession indicates an expected call of GetOpenIDConnectSession. +func (mr *MockOpenIDConnectRequestStorageMockRecorder) GetOpenIDConnectSession(ctx, authorizeCode, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOpenIDConnectSession", reflect.TypeOf((*MockOpenIDConnectRequestStorage)(nil).GetOpenIDConnectSession), ctx, authorizeCode, requester) +} diff --git a/fosite/internal/open_id_connect_request_storage_provider.go b/fosite/internal/open_id_connect_request_storage_provider.go new file mode 100644 index 00000000000..4b41e254008 --- /dev/null +++ b/fosite/internal/open_id_connect_request_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/openid (interfaces: OpenIDConnectRequestStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/open_id_connect_request_storage_provider.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectRequestStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + openid "github.com/ory/hydra/v2/fosite/handler/openid" +) + +// MockOpenIDConnectRequestStorageProvider is a mock of OpenIDConnectRequestStorageProvider interface. +type MockOpenIDConnectRequestStorageProvider struct { + ctrl *gomock.Controller + recorder *MockOpenIDConnectRequestStorageProviderMockRecorder + isgomock struct{} +} + +// MockOpenIDConnectRequestStorageProviderMockRecorder is the mock recorder for MockOpenIDConnectRequestStorageProvider. +type MockOpenIDConnectRequestStorageProviderMockRecorder struct { + mock *MockOpenIDConnectRequestStorageProvider +} + +// NewMockOpenIDConnectRequestStorageProvider creates a new mock instance. +func NewMockOpenIDConnectRequestStorageProvider(ctrl *gomock.Controller) *MockOpenIDConnectRequestStorageProvider { + mock := &MockOpenIDConnectRequestStorageProvider{ctrl: ctrl} + mock.recorder = &MockOpenIDConnectRequestStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockOpenIDConnectRequestStorageProvider) EXPECT() *MockOpenIDConnectRequestStorageProviderMockRecorder { + return m.recorder +} + +// OpenIDConnectRequestStorage mocks base method. +func (m *MockOpenIDConnectRequestStorageProvider) OpenIDConnectRequestStorage() openid.OpenIDConnectRequestStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "OpenIDConnectRequestStorage") + ret0, _ := ret[0].(openid.OpenIDConnectRequestStorage) + return ret0 +} + +// OpenIDConnectRequestStorage indicates an expected call of OpenIDConnectRequestStorage. 
+func (mr *MockOpenIDConnectRequestStorageProviderMockRecorder) OpenIDConnectRequestStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "OpenIDConnectRequestStorage", reflect.TypeOf((*MockOpenIDConnectRequestStorageProvider)(nil).OpenIDConnectRequestStorage)) +} diff --git a/fosite/internal/open_id_connect_token_strategy.go b/fosite/internal/open_id_connect_token_strategy.go new file mode 100644 index 00000000000..f4943ca1c93 --- /dev/null +++ b/fosite/internal/open_id_connect_token_strategy.go @@ -0,0 +1,62 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/openid (interfaces: OpenIDConnectTokenStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/open_id_connect_token_strategy.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectTokenStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockOpenIDConnectTokenStrategy is a mock of OpenIDConnectTokenStrategy interface. +type MockOpenIDConnectTokenStrategy struct { + ctrl *gomock.Controller + recorder *MockOpenIDConnectTokenStrategyMockRecorder + isgomock struct{} +} + +// MockOpenIDConnectTokenStrategyMockRecorder is the mock recorder for MockOpenIDConnectTokenStrategy. +type MockOpenIDConnectTokenStrategyMockRecorder struct { + mock *MockOpenIDConnectTokenStrategy +} + +// NewMockOpenIDConnectTokenStrategy creates a new mock instance. +func NewMockOpenIDConnectTokenStrategy(ctrl *gomock.Controller) *MockOpenIDConnectTokenStrategy { + mock := &MockOpenIDConnectTokenStrategy{ctrl: ctrl} + mock.recorder = &MockOpenIDConnectTokenStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockOpenIDConnectTokenStrategy) EXPECT() *MockOpenIDConnectTokenStrategyMockRecorder { + return m.recorder +} + +// GenerateIDToken mocks base method. +func (m *MockOpenIDConnectTokenStrategy) GenerateIDToken(ctx context.Context, lifespan time.Duration, requester fosite.Requester) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateIDToken", ctx, lifespan, requester) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GenerateIDToken indicates an expected call of GenerateIDToken. +func (mr *MockOpenIDConnectTokenStrategyMockRecorder) GenerateIDToken(ctx, lifespan, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateIDToken", reflect.TypeOf((*MockOpenIDConnectTokenStrategy)(nil).GenerateIDToken), ctx, lifespan, requester) +} diff --git a/fosite/internal/open_id_connect_token_strategy_provider.go b/fosite/internal/open_id_connect_token_strategy_provider.go new file mode 100644 index 00000000000..a034d754e77 --- /dev/null +++ b/fosite/internal/open_id_connect_token_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ory/hydra/v2/fosite/handler/openid (interfaces: OpenIDConnectTokenStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/open_id_connect_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/openid OpenIDConnectTokenStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + openid "github.com/ory/hydra/v2/fosite/handler/openid" +) + +// MockOpenIDConnectTokenStrategyProvider is a mock of OpenIDConnectTokenStrategyProvider interface. +type MockOpenIDConnectTokenStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockOpenIDConnectTokenStrategyProviderMockRecorder + isgomock struct{} +} + +// MockOpenIDConnectTokenStrategyProviderMockRecorder is the mock recorder for MockOpenIDConnectTokenStrategyProvider. +type MockOpenIDConnectTokenStrategyProviderMockRecorder struct { + mock *MockOpenIDConnectTokenStrategyProvider +} + +// NewMockOpenIDConnectTokenStrategyProvider creates a new mock instance. +func NewMockOpenIDConnectTokenStrategyProvider(ctrl *gomock.Controller) *MockOpenIDConnectTokenStrategyProvider { + mock := &MockOpenIDConnectTokenStrategyProvider{ctrl: ctrl} + mock.recorder = &MockOpenIDConnectTokenStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockOpenIDConnectTokenStrategyProvider) EXPECT() *MockOpenIDConnectTokenStrategyProviderMockRecorder { + return m.recorder +} + +// OpenIDConnectTokenStrategy mocks base method. +func (m *MockOpenIDConnectTokenStrategyProvider) OpenIDConnectTokenStrategy() openid.OpenIDConnectTokenStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "OpenIDConnectTokenStrategy") + ret0, _ := ret[0].(openid.OpenIDConnectTokenStrategy) + return ret0 +} + +// OpenIDConnectTokenStrategy indicates an expected call of OpenIDConnectTokenStrategy. +func (mr *MockOpenIDConnectTokenStrategyProviderMockRecorder) OpenIDConnectTokenStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "OpenIDConnectTokenStrategy", reflect.TypeOf((*MockOpenIDConnectTokenStrategyProvider)(nil).OpenIDConnectTokenStrategy)) +} diff --git a/fosite/internal/par_storage.go b/fosite/internal/par_storage.go new file mode 100644 index 00000000000..bbb768a333e --- /dev/null +++ b/fosite/internal/par_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: PARStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/par_storage.go github.com/ory/hydra/v2/fosite PARStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockPARStorage is a mock of PARStorage interface. +type MockPARStorage struct { + ctrl *gomock.Controller + recorder *MockPARStorageMockRecorder + isgomock struct{} +} + +// MockPARStorageMockRecorder is the mock recorder for MockPARStorage. +type MockPARStorageMockRecorder struct { + mock *MockPARStorage +} + +// NewMockPARStorage creates a new mock instance. 
+func NewMockPARStorage(ctrl *gomock.Controller) *MockPARStorage { + mock := &MockPARStorage{ctrl: ctrl} + mock.recorder = &MockPARStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPARStorage) EXPECT() *MockPARStorageMockRecorder { + return m.recorder +} + +// CreatePARSession mocks base method. +func (m *MockPARStorage) CreatePARSession(ctx context.Context, requestURI string, request fosite.AuthorizeRequester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreatePARSession", ctx, requestURI, request) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreatePARSession indicates an expected call of CreatePARSession. +func (mr *MockPARStorageMockRecorder) CreatePARSession(ctx, requestURI, request any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreatePARSession", reflect.TypeOf((*MockPARStorage)(nil).CreatePARSession), ctx, requestURI, request) +} + +// DeletePARSession mocks base method. +func (m *MockPARStorage) DeletePARSession(ctx context.Context, requestURI string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeletePARSession", ctx, requestURI) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeletePARSession indicates an expected call of DeletePARSession. +func (mr *MockPARStorageMockRecorder) DeletePARSession(ctx, requestURI any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeletePARSession", reflect.TypeOf((*MockPARStorage)(nil).DeletePARSession), ctx, requestURI) +} + +// GetPARSession mocks base method. +func (m *MockPARStorage) GetPARSession(ctx context.Context, requestURI string) (fosite.AuthorizeRequester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetPARSession", ctx, requestURI) + ret0, _ := ret[0].(fosite.AuthorizeRequester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetPARSession indicates an expected call of GetPARSession. +func (mr *MockPARStorageMockRecorder) GetPARSession(ctx, requestURI any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPARSession", reflect.TypeOf((*MockPARStorage)(nil).GetPARSession), ctx, requestURI) +} diff --git a/fosite/internal/par_storage_provider.go b/fosite/internal/par_storage_provider.go new file mode 100644 index 00000000000..31809fa178e --- /dev/null +++ b/fosite/internal/par_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: PARStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/par_storage_provider.go github.com/ory/hydra/v2/fosite PARStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockPARStorageProvider is a mock of PARStorageProvider interface. +type MockPARStorageProvider struct { + ctrl *gomock.Controller + recorder *MockPARStorageProviderMockRecorder + isgomock struct{} +} + +// MockPARStorageProviderMockRecorder is the mock recorder for MockPARStorageProvider. +type MockPARStorageProviderMockRecorder struct { + mock *MockPARStorageProvider +} + +// NewMockPARStorageProvider creates a new mock instance. 
+func NewMockPARStorageProvider(ctrl *gomock.Controller) *MockPARStorageProvider { + mock := &MockPARStorageProvider{ctrl: ctrl} + mock.recorder = &MockPARStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPARStorageProvider) EXPECT() *MockPARStorageProviderMockRecorder { + return m.recorder +} + +// PARStorage mocks base method. +func (m *MockPARStorageProvider) PARStorage() fosite.PARStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "PARStorage") + ret0, _ := ret[0].(fosite.PARStorage) + return ret0 +} + +// PARStorage indicates an expected call of PARStorage. +func (mr *MockPARStorageProviderMockRecorder) PARStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PARStorage", reflect.TypeOf((*MockPARStorageProvider)(nil).PARStorage)) +} diff --git a/fosite/internal/pkce_request_storage.go b/fosite/internal/pkce_request_storage.go new file mode 100644 index 00000000000..f6849ce044e --- /dev/null +++ b/fosite/internal/pkce_request_storage.go @@ -0,0 +1,89 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/pkce (interfaces: PKCERequestStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/pkce_request_storage.go github.com/ory/hydra/v2/fosite/handler/pkce PKCERequestStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockPKCERequestStorage is a mock of PKCERequestStorage interface. +type MockPKCERequestStorage struct { + ctrl *gomock.Controller + recorder *MockPKCERequestStorageMockRecorder + isgomock struct{} +} + +// MockPKCERequestStorageMockRecorder is the mock recorder for MockPKCERequestStorage. +type MockPKCERequestStorageMockRecorder struct { + mock *MockPKCERequestStorage +} + +// NewMockPKCERequestStorage creates a new mock instance. +func NewMockPKCERequestStorage(ctrl *gomock.Controller) *MockPKCERequestStorage { + mock := &MockPKCERequestStorage{ctrl: ctrl} + mock.recorder = &MockPKCERequestStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPKCERequestStorage) EXPECT() *MockPKCERequestStorageMockRecorder { + return m.recorder +} + +// CreatePKCERequestSession mocks base method. +func (m *MockPKCERequestStorage) CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreatePKCERequestSession", ctx, signature, requester) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreatePKCERequestSession indicates an expected call of CreatePKCERequestSession. +func (mr *MockPKCERequestStorageMockRecorder) CreatePKCERequestSession(ctx, signature, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreatePKCERequestSession", reflect.TypeOf((*MockPKCERequestStorage)(nil).CreatePKCERequestSession), ctx, signature, requester) +} + +// DeletePKCERequestSession mocks base method. 
+func (m *MockPKCERequestStorage) DeletePKCERequestSession(ctx context.Context, signature string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeletePKCERequestSession", ctx, signature) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeletePKCERequestSession indicates an expected call of DeletePKCERequestSession. +func (mr *MockPKCERequestStorageMockRecorder) DeletePKCERequestSession(ctx, signature any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeletePKCERequestSession", reflect.TypeOf((*MockPKCERequestStorage)(nil).DeletePKCERequestSession), ctx, signature) +} + +// GetPKCERequestSession mocks base method. +func (m *MockPKCERequestStorage) GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetPKCERequestSession", ctx, signature, session) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetPKCERequestSession indicates an expected call of GetPKCERequestSession. +func (mr *MockPKCERequestStorageMockRecorder) GetPKCERequestSession(ctx, signature, session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPKCERequestSession", reflect.TypeOf((*MockPKCERequestStorage)(nil).GetPKCERequestSession), ctx, signature, session) +} diff --git a/fosite/internal/pkce_request_storage_provider.go b/fosite/internal/pkce_request_storage_provider.go new file mode 100644 index 00000000000..b6c7028abfa --- /dev/null +++ b/fosite/internal/pkce_request_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/pkce (interfaces: PKCERequestStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/pkce_request_storage_provider.go github.com/ory/hydra/v2/fosite/handler/pkce PKCERequestStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + pkce "github.com/ory/hydra/v2/fosite/handler/pkce" +) + +// MockPKCERequestStorageProvider is a mock of PKCERequestStorageProvider interface. +type MockPKCERequestStorageProvider struct { + ctrl *gomock.Controller + recorder *MockPKCERequestStorageProviderMockRecorder + isgomock struct{} +} + +// MockPKCERequestStorageProviderMockRecorder is the mock recorder for MockPKCERequestStorageProvider. +type MockPKCERequestStorageProviderMockRecorder struct { + mock *MockPKCERequestStorageProvider +} + +// NewMockPKCERequestStorageProvider creates a new mock instance. +func NewMockPKCERequestStorageProvider(ctrl *gomock.Controller) *MockPKCERequestStorageProvider { + mock := &MockPKCERequestStorageProvider{ctrl: ctrl} + mock.recorder = &MockPKCERequestStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPKCERequestStorageProvider) EXPECT() *MockPKCERequestStorageProviderMockRecorder { + return m.recorder +} + +// PKCERequestStorage mocks base method. 
+func (m *MockPKCERequestStorageProvider) PKCERequestStorage() pkce.PKCERequestStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "PKCERequestStorage") + ret0, _ := ret[0].(pkce.PKCERequestStorage) + return ret0 +} + +// PKCERequestStorage indicates an expected call of PKCERequestStorage. +func (mr *MockPKCERequestStorageProviderMockRecorder) PKCERequestStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PKCERequestStorage", reflect.TypeOf((*MockPKCERequestStorageProvider)(nil).PKCERequestStorage)) +} diff --git a/fosite/internal/pushed_authorize_endpoint_handler.go b/fosite/internal/pushed_authorize_endpoint_handler.go new file mode 100644 index 00000000000..315701cafb4 --- /dev/null +++ b/fosite/internal/pushed_authorize_endpoint_handler.go @@ -0,0 +1,60 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: PushedAuthorizeEndpointHandler) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/pushed_authorize_endpoint_handler.go github.com/ory/hydra/v2/fosite PushedAuthorizeEndpointHandler +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockPushedAuthorizeEndpointHandler is a mock of PushedAuthorizeEndpointHandler interface. +type MockPushedAuthorizeEndpointHandler struct { + ctrl *gomock.Controller + recorder *MockPushedAuthorizeEndpointHandlerMockRecorder + isgomock struct{} +} + +// MockPushedAuthorizeEndpointHandlerMockRecorder is the mock recorder for MockPushedAuthorizeEndpointHandler. +type MockPushedAuthorizeEndpointHandlerMockRecorder struct { + mock *MockPushedAuthorizeEndpointHandler +} + +// NewMockPushedAuthorizeEndpointHandler creates a new mock instance. +func NewMockPushedAuthorizeEndpointHandler(ctrl *gomock.Controller) *MockPushedAuthorizeEndpointHandler { + mock := &MockPushedAuthorizeEndpointHandler{ctrl: ctrl} + mock.recorder = &MockPushedAuthorizeEndpointHandlerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPushedAuthorizeEndpointHandler) EXPECT() *MockPushedAuthorizeEndpointHandlerMockRecorder { + return m.recorder +} + +// HandlePushedAuthorizeEndpointRequest mocks base method. +func (m *MockPushedAuthorizeEndpointHandler) HandlePushedAuthorizeEndpointRequest(ctx context.Context, requester fosite.AuthorizeRequester, responder fosite.PushedAuthorizeResponder) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "HandlePushedAuthorizeEndpointRequest", ctx, requester, responder) + ret0, _ := ret[0].(error) + return ret0 +} + +// HandlePushedAuthorizeEndpointRequest indicates an expected call of HandlePushedAuthorizeEndpointRequest. 
+func (mr *MockPushedAuthorizeEndpointHandlerMockRecorder) HandlePushedAuthorizeEndpointRequest(ctx, requester, responder any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HandlePushedAuthorizeEndpointRequest", reflect.TypeOf((*MockPushedAuthorizeEndpointHandler)(nil).HandlePushedAuthorizeEndpointRequest), ctx, requester, responder) +} diff --git a/fosite/internal/refresh_token_storage.go b/fosite/internal/refresh_token_storage.go new file mode 100644 index 00000000000..483e2e33508 --- /dev/null +++ b/fosite/internal/refresh_token_storage.go @@ -0,0 +1,103 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: RefreshTokenStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/refresh_token_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockRefreshTokenStorage is a mock of RefreshTokenStorage interface. +type MockRefreshTokenStorage struct { + ctrl *gomock.Controller + recorder *MockRefreshTokenStorageMockRecorder + isgomock struct{} +} + +// MockRefreshTokenStorageMockRecorder is the mock recorder for MockRefreshTokenStorage. +type MockRefreshTokenStorageMockRecorder struct { + mock *MockRefreshTokenStorage +} + +// NewMockRefreshTokenStorage creates a new mock instance. +func NewMockRefreshTokenStorage(ctrl *gomock.Controller) *MockRefreshTokenStorage { + mock := &MockRefreshTokenStorage{ctrl: ctrl} + mock.recorder = &MockRefreshTokenStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRefreshTokenStorage) EXPECT() *MockRefreshTokenStorageMockRecorder { + return m.recorder +} + +// CreateRefreshTokenSession mocks base method. +func (m *MockRefreshTokenStorage) CreateRefreshTokenSession(ctx context.Context, signature, accessSignature string, request fosite.Requester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateRefreshTokenSession", ctx, signature, accessSignature, request) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateRefreshTokenSession indicates an expected call of CreateRefreshTokenSession. +func (mr *MockRefreshTokenStorageMockRecorder) CreateRefreshTokenSession(ctx, signature, accessSignature, request any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateRefreshTokenSession", reflect.TypeOf((*MockRefreshTokenStorage)(nil).CreateRefreshTokenSession), ctx, signature, accessSignature, request) +} + +// DeleteRefreshTokenSession mocks base method. +func (m *MockRefreshTokenStorage) DeleteRefreshTokenSession(ctx context.Context, signature string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteRefreshTokenSession", ctx, signature) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteRefreshTokenSession indicates an expected call of DeleteRefreshTokenSession. 
+func (mr *MockRefreshTokenStorageMockRecorder) DeleteRefreshTokenSession(ctx, signature any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteRefreshTokenSession", reflect.TypeOf((*MockRefreshTokenStorage)(nil).DeleteRefreshTokenSession), ctx, signature) +} + +// GetRefreshTokenSession mocks base method. +func (m *MockRefreshTokenStorage) GetRefreshTokenSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRefreshTokenSession", ctx, signature, session) + ret0, _ := ret[0].(fosite.Requester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetRefreshTokenSession indicates an expected call of GetRefreshTokenSession. +func (mr *MockRefreshTokenStorageMockRecorder) GetRefreshTokenSession(ctx, signature, session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRefreshTokenSession", reflect.TypeOf((*MockRefreshTokenStorage)(nil).GetRefreshTokenSession), ctx, signature, session) +} + +// RotateRefreshToken mocks base method. +func (m *MockRefreshTokenStorage) RotateRefreshToken(ctx context.Context, requestID, refreshTokenSignature string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RotateRefreshToken", ctx, requestID, refreshTokenSignature) + ret0, _ := ret[0].(error) + return ret0 +} + +// RotateRefreshToken indicates an expected call of RotateRefreshToken. +func (mr *MockRefreshTokenStorageMockRecorder) RotateRefreshToken(ctx, requestID, refreshTokenSignature any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RotateRefreshToken", reflect.TypeOf((*MockRefreshTokenStorage)(nil).RotateRefreshToken), ctx, requestID, refreshTokenSignature) +} diff --git a/fosite/internal/refresh_token_storage_provider.go b/fosite/internal/refresh_token_storage_provider.go new file mode 100644 index 00000000000..a4db76e9131 --- /dev/null +++ b/fosite/internal/refresh_token_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: RefreshTokenStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/refresh_token_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockRefreshTokenStorageProvider is a mock of RefreshTokenStorageProvider interface. +type MockRefreshTokenStorageProvider struct { + ctrl *gomock.Controller + recorder *MockRefreshTokenStorageProviderMockRecorder + isgomock struct{} +} + +// MockRefreshTokenStorageProviderMockRecorder is the mock recorder for MockRefreshTokenStorageProvider. +type MockRefreshTokenStorageProviderMockRecorder struct { + mock *MockRefreshTokenStorageProvider +} + +// NewMockRefreshTokenStorageProvider creates a new mock instance. 
+func NewMockRefreshTokenStorageProvider(ctrl *gomock.Controller) *MockRefreshTokenStorageProvider { + mock := &MockRefreshTokenStorageProvider{ctrl: ctrl} + mock.recorder = &MockRefreshTokenStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRefreshTokenStorageProvider) EXPECT() *MockRefreshTokenStorageProviderMockRecorder { + return m.recorder +} + +// RefreshTokenStorage mocks base method. +func (m *MockRefreshTokenStorageProvider) RefreshTokenStorage() oauth2.RefreshTokenStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RefreshTokenStorage") + ret0, _ := ret[0].(oauth2.RefreshTokenStorage) + return ret0 +} + +// RefreshTokenStorage indicates an expected call of RefreshTokenStorage. +func (mr *MockRefreshTokenStorageProviderMockRecorder) RefreshTokenStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RefreshTokenStorage", reflect.TypeOf((*MockRefreshTokenStorageProvider)(nil).RefreshTokenStorage)) +} diff --git a/fosite/internal/refresh_token_strategy.go b/fosite/internal/refresh_token_strategy.go new file mode 100644 index 00000000000..f403a5784bc --- /dev/null +++ b/fosite/internal/refresh_token_strategy.go @@ -0,0 +1,90 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: RefreshTokenStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/refresh_token_strategy.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockRefreshTokenStrategy is a mock of RefreshTokenStrategy interface. +type MockRefreshTokenStrategy struct { + ctrl *gomock.Controller + recorder *MockRefreshTokenStrategyMockRecorder + isgomock struct{} +} + +// MockRefreshTokenStrategyMockRecorder is the mock recorder for MockRefreshTokenStrategy. +type MockRefreshTokenStrategyMockRecorder struct { + mock *MockRefreshTokenStrategy +} + +// NewMockRefreshTokenStrategy creates a new mock instance. +func NewMockRefreshTokenStrategy(ctrl *gomock.Controller) *MockRefreshTokenStrategy { + mock := &MockRefreshTokenStrategy{ctrl: ctrl} + mock.recorder = &MockRefreshTokenStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRefreshTokenStrategy) EXPECT() *MockRefreshTokenStrategyMockRecorder { + return m.recorder +} + +// GenerateRefreshToken mocks base method. +func (m *MockRefreshTokenStrategy) GenerateRefreshToken(ctx context.Context, requester fosite.Requester) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateRefreshToken", ctx, requester) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateRefreshToken indicates an expected call of GenerateRefreshToken. 
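+//
+// Illustrative sketch only (not produced by MockGen): stubbing token generation
+// in a test, with placeholder return values and a *gomock.Controller named ctrl
+// assumed from the enclosing test:
+//
+//	strategy := NewMockRefreshTokenStrategy(ctrl)
+//	strategy.EXPECT().
+//		GenerateRefreshToken(gomock.Any(), gomock.Any()).
+//		Return("opaque-refresh-token", "refresh-signature", nil)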
+func (mr *MockRefreshTokenStrategyMockRecorder) GenerateRefreshToken(ctx, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateRefreshToken", reflect.TypeOf((*MockRefreshTokenStrategy)(nil).GenerateRefreshToken), ctx, requester) +} + +// RefreshTokenSignature mocks base method. +func (m *MockRefreshTokenStrategy) RefreshTokenSignature(ctx context.Context, token string) string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RefreshTokenSignature", ctx, token) + ret0, _ := ret[0].(string) + return ret0 +} + +// RefreshTokenSignature indicates an expected call of RefreshTokenSignature. +func (mr *MockRefreshTokenStrategyMockRecorder) RefreshTokenSignature(ctx, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RefreshTokenSignature", reflect.TypeOf((*MockRefreshTokenStrategy)(nil).RefreshTokenSignature), ctx, token) +} + +// ValidateRefreshToken mocks base method. +func (m *MockRefreshTokenStrategy) ValidateRefreshToken(ctx context.Context, requester fosite.Requester, token string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateRefreshToken", ctx, requester, token) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateRefreshToken indicates an expected call of ValidateRefreshToken. +func (mr *MockRefreshTokenStrategyMockRecorder) ValidateRefreshToken(ctx, requester, token any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateRefreshToken", reflect.TypeOf((*MockRefreshTokenStrategy)(nil).ValidateRefreshToken), ctx, requester, token) +} diff --git a/fosite/internal/refresh_token_strategy_provider.go b/fosite/internal/refresh_token_strategy_provider.go new file mode 100644 index 00000000000..8418985a15b --- /dev/null +++ b/fosite/internal/refresh_token_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: RefreshTokenStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/refresh_token_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 RefreshTokenStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockRefreshTokenStrategyProvider is a mock of RefreshTokenStrategyProvider interface. +type MockRefreshTokenStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockRefreshTokenStrategyProviderMockRecorder + isgomock struct{} +} + +// MockRefreshTokenStrategyProviderMockRecorder is the mock recorder for MockRefreshTokenStrategyProvider. +type MockRefreshTokenStrategyProviderMockRecorder struct { + mock *MockRefreshTokenStrategyProvider +} + +// NewMockRefreshTokenStrategyProvider creates a new mock instance. +func NewMockRefreshTokenStrategyProvider(ctrl *gomock.Controller) *MockRefreshTokenStrategyProvider { + mock := &MockRefreshTokenStrategyProvider{ctrl: ctrl} + mock.recorder = &MockRefreshTokenStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockRefreshTokenStrategyProvider) EXPECT() *MockRefreshTokenStrategyProviderMockRecorder { + return m.recorder +} + +// RefreshTokenStrategy mocks base method. +func (m *MockRefreshTokenStrategyProvider) RefreshTokenStrategy() oauth2.RefreshTokenStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RefreshTokenStrategy") + ret0, _ := ret[0].(oauth2.RefreshTokenStrategy) + return ret0 +} + +// RefreshTokenStrategy indicates an expected call of RefreshTokenStrategy. +func (mr *MockRefreshTokenStrategyProviderMockRecorder) RefreshTokenStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RefreshTokenStrategy", reflect.TypeOf((*MockRefreshTokenStrategyProvider)(nil).RefreshTokenStrategy)) +} diff --git a/fosite/internal/request.go b/fosite/internal/request.go new file mode 100644 index 00000000000..16d4ea1f5d8 --- /dev/null +++ b/fosite/internal/request.go @@ -0,0 +1,283 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: Requester) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/request.go github.com/ory/hydra/v2/fosite Requester +// + +// Package internal is a generated GoMock package. +package internal + +import ( + url "net/url" + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockRequester is a mock of Requester interface. +type MockRequester struct { + ctrl *gomock.Controller + recorder *MockRequesterMockRecorder + isgomock struct{} +} + +// MockRequesterMockRecorder is the mock recorder for MockRequester. +type MockRequesterMockRecorder struct { + mock *MockRequester +} + +// NewMockRequester creates a new mock instance. +func NewMockRequester(ctrl *gomock.Controller) *MockRequester { + mock := &MockRequester{ctrl: ctrl} + mock.recorder = &MockRequesterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRequester) EXPECT() *MockRequesterMockRecorder { + return m.recorder +} + +// AppendRequestedScope mocks base method. +func (m *MockRequester) AppendRequestedScope(scope string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "AppendRequestedScope", scope) +} + +// AppendRequestedScope indicates an expected call of AppendRequestedScope. +func (mr *MockRequesterMockRecorder) AppendRequestedScope(scope any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AppendRequestedScope", reflect.TypeOf((*MockRequester)(nil).AppendRequestedScope), scope) +} + +// GetClient mocks base method. +func (m *MockRequester) GetClient() fosite.Client { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetClient") + ret0, _ := ret[0].(fosite.Client) + return ret0 +} + +// GetClient indicates an expected call of GetClient. +func (mr *MockRequesterMockRecorder) GetClient() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClient", reflect.TypeOf((*MockRequester)(nil).GetClient)) +} + +// GetGrantedAudience mocks base method. +func (m *MockRequester) GetGrantedAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantedAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantedAudience indicates an expected call of GetGrantedAudience. 
+func (mr *MockRequesterMockRecorder) GetGrantedAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantedAudience", reflect.TypeOf((*MockRequester)(nil).GetGrantedAudience)) +} + +// GetGrantedScopes mocks base method. +func (m *MockRequester) GetGrantedScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetGrantedScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetGrantedScopes indicates an expected call of GetGrantedScopes. +func (mr *MockRequesterMockRecorder) GetGrantedScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetGrantedScopes", reflect.TypeOf((*MockRequester)(nil).GetGrantedScopes)) +} + +// GetID mocks base method. +func (m *MockRequester) GetID() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetID") + ret0, _ := ret[0].(string) + return ret0 +} + +// GetID indicates an expected call of GetID. +func (mr *MockRequesterMockRecorder) GetID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetID", reflect.TypeOf((*MockRequester)(nil).GetID)) +} + +// GetRequestForm mocks base method. +func (m *MockRequester) GetRequestForm() url.Values { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestForm") + ret0, _ := ret[0].(url.Values) + return ret0 +} + +// GetRequestForm indicates an expected call of GetRequestForm. +func (mr *MockRequesterMockRecorder) GetRequestForm() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestForm", reflect.TypeOf((*MockRequester)(nil).GetRequestForm)) +} + +// GetRequestedAt mocks base method. +func (m *MockRequester) GetRequestedAt() time.Time { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedAt") + ret0, _ := ret[0].(time.Time) + return ret0 +} + +// GetRequestedAt indicates an expected call of GetRequestedAt. +func (mr *MockRequesterMockRecorder) GetRequestedAt() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedAt", reflect.TypeOf((*MockRequester)(nil).GetRequestedAt)) +} + +// GetRequestedAudience mocks base method. +func (m *MockRequester) GetRequestedAudience() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedAudience") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetRequestedAudience indicates an expected call of GetRequestedAudience. +func (mr *MockRequesterMockRecorder) GetRequestedAudience() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedAudience", reflect.TypeOf((*MockRequester)(nil).GetRequestedAudience)) +} + +// GetRequestedScopes mocks base method. +func (m *MockRequester) GetRequestedScopes() fosite.Arguments { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRequestedScopes") + ret0, _ := ret[0].(fosite.Arguments) + return ret0 +} + +// GetRequestedScopes indicates an expected call of GetRequestedScopes. +func (mr *MockRequesterMockRecorder) GetRequestedScopes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRequestedScopes", reflect.TypeOf((*MockRequester)(nil).GetRequestedScopes)) +} + +// GetSession mocks base method. 
+func (m *MockRequester) GetSession() fosite.Session { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSession") + ret0, _ := ret[0].(fosite.Session) + return ret0 +} + +// GetSession indicates an expected call of GetSession. +func (mr *MockRequesterMockRecorder) GetSession() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSession", reflect.TypeOf((*MockRequester)(nil).GetSession)) +} + +// GrantAudience mocks base method. +func (m *MockRequester) GrantAudience(audience string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "GrantAudience", audience) +} + +// GrantAudience indicates an expected call of GrantAudience. +func (mr *MockRequesterMockRecorder) GrantAudience(audience any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GrantAudience", reflect.TypeOf((*MockRequester)(nil).GrantAudience), audience) +} + +// GrantScope mocks base method. +func (m *MockRequester) GrantScope(scope string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "GrantScope", scope) +} + +// GrantScope indicates an expected call of GrantScope. +func (mr *MockRequesterMockRecorder) GrantScope(scope any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GrantScope", reflect.TypeOf((*MockRequester)(nil).GrantScope), scope) +} + +// Merge mocks base method. +func (m *MockRequester) Merge(requester fosite.Requester) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "Merge", requester) +} + +// Merge indicates an expected call of Merge. +func (mr *MockRequesterMockRecorder) Merge(requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Merge", reflect.TypeOf((*MockRequester)(nil).Merge), requester) +} + +// Sanitize mocks base method. +func (m *MockRequester) Sanitize(allowedParameters []string) fosite.Requester { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Sanitize", allowedParameters) + ret0, _ := ret[0].(fosite.Requester) + return ret0 +} + +// Sanitize indicates an expected call of Sanitize. +func (mr *MockRequesterMockRecorder) Sanitize(allowedParameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Sanitize", reflect.TypeOf((*MockRequester)(nil).Sanitize), allowedParameters) +} + +// SetID mocks base method. +func (m *MockRequester) SetID(id string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetID", id) +} + +// SetID indicates an expected call of SetID. +func (mr *MockRequesterMockRecorder) SetID(id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetID", reflect.TypeOf((*MockRequester)(nil).SetID), id) +} + +// SetRequestedAudience mocks base method. +func (m *MockRequester) SetRequestedAudience(audience fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetRequestedAudience", audience) +} + +// SetRequestedAudience indicates an expected call of SetRequestedAudience. +func (mr *MockRequesterMockRecorder) SetRequestedAudience(audience any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetRequestedAudience", reflect.TypeOf((*MockRequester)(nil).SetRequestedAudience), audience) +} + +// SetRequestedScopes mocks base method. +func (m *MockRequester) SetRequestedScopes(scopes fosite.Arguments) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetRequestedScopes", scopes) +} + +// SetRequestedScopes indicates an expected call of SetRequestedScopes. 
+func (mr *MockRequesterMockRecorder) SetRequestedScopes(scopes any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetRequestedScopes", reflect.TypeOf((*MockRequester)(nil).SetRequestedScopes), scopes) +} + +// SetSession mocks base method. +func (m *MockRequester) SetSession(session fosite.Session) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SetSession", session) +} + +// SetSession indicates an expected call of SetSession. +func (mr *MockRequesterMockRecorder) SetSession(session any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetSession", reflect.TypeOf((*MockRequester)(nil).SetSession), session) +} diff --git a/fosite/internal/resource_owner_password_credentials_grant_storage.go b/fosite/internal/resource_owner_password_credentials_grant_storage.go new file mode 100644 index 00000000000..4433de9be86 --- /dev/null +++ b/fosite/internal/resource_owner_password_credentials_grant_storage.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: ResourceOwnerPasswordCredentialsGrantStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/resource_owner_password_credentials_grant_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 ResourceOwnerPasswordCredentialsGrantStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockResourceOwnerPasswordCredentialsGrantStorage is a mock of ResourceOwnerPasswordCredentialsGrantStorage interface. +type MockResourceOwnerPasswordCredentialsGrantStorage struct { + ctrl *gomock.Controller + recorder *MockResourceOwnerPasswordCredentialsGrantStorageMockRecorder + isgomock struct{} +} + +// MockResourceOwnerPasswordCredentialsGrantStorageMockRecorder is the mock recorder for MockResourceOwnerPasswordCredentialsGrantStorage. +type MockResourceOwnerPasswordCredentialsGrantStorageMockRecorder struct { + mock *MockResourceOwnerPasswordCredentialsGrantStorage +} + +// NewMockResourceOwnerPasswordCredentialsGrantStorage creates a new mock instance. +func NewMockResourceOwnerPasswordCredentialsGrantStorage(ctrl *gomock.Controller) *MockResourceOwnerPasswordCredentialsGrantStorage { + mock := &MockResourceOwnerPasswordCredentialsGrantStorage{ctrl: ctrl} + mock.recorder = &MockResourceOwnerPasswordCredentialsGrantStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockResourceOwnerPasswordCredentialsGrantStorage) EXPECT() *MockResourceOwnerPasswordCredentialsGrantStorageMockRecorder { + return m.recorder +} + +// Authenticate mocks base method. +func (m *MockResourceOwnerPasswordCredentialsGrantStorage) Authenticate(ctx context.Context, name, secret string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Authenticate", ctx, name, secret) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Authenticate indicates an expected call of Authenticate. 
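+//
+// Illustrative sketch only (not produced by MockGen): a password-grant test
+// could stub authentication with placeholder credentials, assuming a
+// *gomock.Controller named ctrl:
+//
+//	store := NewMockResourceOwnerPasswordCredentialsGrantStorage(ctrl)
+//	store.EXPECT().
+//		Authenticate(gomock.Any(), "alice", "correct-horse-battery-staple").
+//		Return("alice", nil)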
+func (mr *MockResourceOwnerPasswordCredentialsGrantStorageMockRecorder) Authenticate(ctx, name, secret any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Authenticate", reflect.TypeOf((*MockResourceOwnerPasswordCredentialsGrantStorage)(nil).Authenticate), ctx, name, secret) +} diff --git a/fosite/internal/resource_owner_password_credentials_grant_storage_provider.go b/fosite/internal/resource_owner_password_credentials_grant_storage_provider.go new file mode 100644 index 00000000000..cb6c54d8b9d --- /dev/null +++ b/fosite/internal/resource_owner_password_credentials_grant_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: ResourceOwnerPasswordCredentialsGrantStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/resource_owner_password_credentials_grant_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 ResourceOwnerPasswordCredentialsGrantStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockResourceOwnerPasswordCredentialsGrantStorageProvider is a mock of ResourceOwnerPasswordCredentialsGrantStorageProvider interface. +type MockResourceOwnerPasswordCredentialsGrantStorageProvider struct { + ctrl *gomock.Controller + recorder *MockResourceOwnerPasswordCredentialsGrantStorageProviderMockRecorder + isgomock struct{} +} + +// MockResourceOwnerPasswordCredentialsGrantStorageProviderMockRecorder is the mock recorder for MockResourceOwnerPasswordCredentialsGrantStorageProvider. +type MockResourceOwnerPasswordCredentialsGrantStorageProviderMockRecorder struct { + mock *MockResourceOwnerPasswordCredentialsGrantStorageProvider +} + +// NewMockResourceOwnerPasswordCredentialsGrantStorageProvider creates a new mock instance. +func NewMockResourceOwnerPasswordCredentialsGrantStorageProvider(ctrl *gomock.Controller) *MockResourceOwnerPasswordCredentialsGrantStorageProvider { + mock := &MockResourceOwnerPasswordCredentialsGrantStorageProvider{ctrl: ctrl} + mock.recorder = &MockResourceOwnerPasswordCredentialsGrantStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockResourceOwnerPasswordCredentialsGrantStorageProvider) EXPECT() *MockResourceOwnerPasswordCredentialsGrantStorageProviderMockRecorder { + return m.recorder +} + +// ResourceOwnerPasswordCredentialsGrantStorage mocks base method. +func (m *MockResourceOwnerPasswordCredentialsGrantStorageProvider) ResourceOwnerPasswordCredentialsGrantStorage() oauth2.ResourceOwnerPasswordCredentialsGrantStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ResourceOwnerPasswordCredentialsGrantStorage") + ret0, _ := ret[0].(oauth2.ResourceOwnerPasswordCredentialsGrantStorage) + return ret0 +} + +// ResourceOwnerPasswordCredentialsGrantStorage indicates an expected call of ResourceOwnerPasswordCredentialsGrantStorage. 
+func (mr *MockResourceOwnerPasswordCredentialsGrantStorageProviderMockRecorder) ResourceOwnerPasswordCredentialsGrantStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ResourceOwnerPasswordCredentialsGrantStorage", reflect.TypeOf((*MockResourceOwnerPasswordCredentialsGrantStorageProvider)(nil).ResourceOwnerPasswordCredentialsGrantStorage)) +} diff --git a/fosite/internal/revocation_handler.go b/fosite/internal/revocation_handler.go new file mode 100644 index 00000000000..c640e8590ba --- /dev/null +++ b/fosite/internal/revocation_handler.go @@ -0,0 +1,60 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: RevocationHandler) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/revocation_handler.go github.com/ory/hydra/v2/fosite RevocationHandler +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockRevocationHandler is a mock of RevocationHandler interface. +type MockRevocationHandler struct { + ctrl *gomock.Controller + recorder *MockRevocationHandlerMockRecorder + isgomock struct{} +} + +// MockRevocationHandlerMockRecorder is the mock recorder for MockRevocationHandler. +type MockRevocationHandlerMockRecorder struct { + mock *MockRevocationHandler +} + +// NewMockRevocationHandler creates a new mock instance. +func NewMockRevocationHandler(ctrl *gomock.Controller) *MockRevocationHandler { + mock := &MockRevocationHandler{ctrl: ctrl} + mock.recorder = &MockRevocationHandlerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRevocationHandler) EXPECT() *MockRevocationHandlerMockRecorder { + return m.recorder +} + +// RevokeToken mocks base method. +func (m *MockRevocationHandler) RevokeToken(ctx context.Context, token string, tokenType fosite.TokenType, client fosite.Client) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevokeToken", ctx, token, tokenType, client) + ret0, _ := ret[0].(error) + return ret0 +} + +// RevokeToken indicates an expected call of RevokeToken. +func (mr *MockRevocationHandlerMockRecorder) RevokeToken(ctx, token, tokenType, client any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevokeToken", reflect.TypeOf((*MockRevocationHandler)(nil).RevokeToken), ctx, token, tokenType, client) +} diff --git a/fosite/internal/revocation_handlers_provider.go b/fosite/internal/revocation_handlers_provider.go new file mode 100644 index 00000000000..15dd3619f6a --- /dev/null +++ b/fosite/internal/revocation_handlers_provider.go @@ -0,0 +1,60 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: RevocationHandlersProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/revocation_handlers_provider.go github.com/ory/hydra/v2/fosite RevocationHandlersProvider +// + +// Package internal is a generated GoMock package. 
+package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockRevocationHandlersProvider is a mock of RevocationHandlersProvider interface. +type MockRevocationHandlersProvider struct { + ctrl *gomock.Controller + recorder *MockRevocationHandlersProviderMockRecorder + isgomock struct{} +} + +// MockRevocationHandlersProviderMockRecorder is the mock recorder for MockRevocationHandlersProvider. +type MockRevocationHandlersProviderMockRecorder struct { + mock *MockRevocationHandlersProvider +} + +// NewMockRevocationHandlersProvider creates a new mock instance. +func NewMockRevocationHandlersProvider(ctrl *gomock.Controller) *MockRevocationHandlersProvider { + mock := &MockRevocationHandlersProvider{ctrl: ctrl} + mock.recorder = &MockRevocationHandlersProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRevocationHandlersProvider) EXPECT() *MockRevocationHandlersProviderMockRecorder { + return m.recorder +} + +// GetRevocationHandlers mocks base method. +func (m *MockRevocationHandlersProvider) GetRevocationHandlers(ctx context.Context) fosite.RevocationHandlers { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRevocationHandlers", ctx) + ret0, _ := ret[0].(fosite.RevocationHandlers) + return ret0 +} + +// GetRevocationHandlers indicates an expected call of GetRevocationHandlers. +func (mr *MockRevocationHandlersProviderMockRecorder) GetRevocationHandlers(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRevocationHandlers", reflect.TypeOf((*MockRevocationHandlersProvider)(nil).GetRevocationHandlers), ctx) +} diff --git a/fosite/internal/rfc7523_key_storage.go b/fosite/internal/rfc7523_key_storage.go new file mode 100644 index 00000000000..d24a640fae4 --- /dev/null +++ b/fosite/internal/rfc7523_key_storage.go @@ -0,0 +1,120 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc7523 (interfaces: RFC7523KeyStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/rfc7523_key_storage.go github.com/ory/hydra/v2/fosite/handler/rfc7523 RFC7523KeyStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + time "time" + + jose "github.com/go-jose/go-jose/v3" + gomock "go.uber.org/mock/gomock" +) + +// MockRFC7523KeyStorage is a mock of RFC7523KeyStorage interface. +type MockRFC7523KeyStorage struct { + ctrl *gomock.Controller + recorder *MockRFC7523KeyStorageMockRecorder + isgomock struct{} +} + +// MockRFC7523KeyStorageMockRecorder is the mock recorder for MockRFC7523KeyStorage. +type MockRFC7523KeyStorageMockRecorder struct { + mock *MockRFC7523KeyStorage +} + +// NewMockRFC7523KeyStorage creates a new mock instance. +func NewMockRFC7523KeyStorage(ctrl *gomock.Controller) *MockRFC7523KeyStorage { + mock := &MockRFC7523KeyStorage{ctrl: ctrl} + mock.recorder = &MockRFC7523KeyStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRFC7523KeyStorage) EXPECT() *MockRFC7523KeyStorageMockRecorder { + return m.recorder +} + +// GetPublicKey mocks base method. 
+func (m *MockRFC7523KeyStorage) GetPublicKey(ctx context.Context, issuer, subject, keyId string) (*jose.JSONWebKey, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetPublicKey", ctx, issuer, subject, keyId) + ret0, _ := ret[0].(*jose.JSONWebKey) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetPublicKey indicates an expected call of GetPublicKey. +func (mr *MockRFC7523KeyStorageMockRecorder) GetPublicKey(ctx, issuer, subject, keyId any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPublicKey", reflect.TypeOf((*MockRFC7523KeyStorage)(nil).GetPublicKey), ctx, issuer, subject, keyId) +} + +// GetPublicKeyScopes mocks base method. +func (m *MockRFC7523KeyStorage) GetPublicKeyScopes(ctx context.Context, issuer, subject, keyId string) ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetPublicKeyScopes", ctx, issuer, subject, keyId) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetPublicKeyScopes indicates an expected call of GetPublicKeyScopes. +func (mr *MockRFC7523KeyStorageMockRecorder) GetPublicKeyScopes(ctx, issuer, subject, keyId any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPublicKeyScopes", reflect.TypeOf((*MockRFC7523KeyStorage)(nil).GetPublicKeyScopes), ctx, issuer, subject, keyId) +} + +// GetPublicKeys mocks base method. +func (m *MockRFC7523KeyStorage) GetPublicKeys(ctx context.Context, issuer, subject string) (*jose.JSONWebKeySet, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetPublicKeys", ctx, issuer, subject) + ret0, _ := ret[0].(*jose.JSONWebKeySet) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetPublicKeys indicates an expected call of GetPublicKeys. +func (mr *MockRFC7523KeyStorageMockRecorder) GetPublicKeys(ctx, issuer, subject any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPublicKeys", reflect.TypeOf((*MockRFC7523KeyStorage)(nil).GetPublicKeys), ctx, issuer, subject) +} + +// IsJWTUsed mocks base method. +func (m *MockRFC7523KeyStorage) IsJWTUsed(ctx context.Context, jti string) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsJWTUsed", ctx, jti) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsJWTUsed indicates an expected call of IsJWTUsed. +func (mr *MockRFC7523KeyStorageMockRecorder) IsJWTUsed(ctx, jti any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsJWTUsed", reflect.TypeOf((*MockRFC7523KeyStorage)(nil).IsJWTUsed), ctx, jti) +} + +// MarkJWTUsedForTime mocks base method. +func (m *MockRFC7523KeyStorage) MarkJWTUsedForTime(ctx context.Context, jti string, exp time.Time) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "MarkJWTUsedForTime", ctx, jti, exp) + ret0, _ := ret[0].(error) + return ret0 +} + +// MarkJWTUsedForTime indicates an expected call of MarkJWTUsedForTime. 
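+//
+// Illustrative sketch only (not produced by MockGen): an RFC 7523 (JWT bearer)
+// test could expect the jti to be marked as used, with a placeholder jti and a
+// *gomock.Controller named ctrl assumed:
+//
+//	keys := NewMockRFC7523KeyStorage(ctrl)
+//	keys.EXPECT().
+//		MarkJWTUsedForTime(gomock.Any(), "jti-1234", gomock.Any()).
+//		Return(nil)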
+func (mr *MockRFC7523KeyStorageMockRecorder) MarkJWTUsedForTime(ctx, jti, exp any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MarkJWTUsedForTime", reflect.TypeOf((*MockRFC7523KeyStorage)(nil).MarkJWTUsedForTime), ctx, jti, exp) +} diff --git a/fosite/internal/rfc7523_key_storage_provider.go b/fosite/internal/rfc7523_key_storage_provider.go new file mode 100644 index 00000000000..3d36278f285 --- /dev/null +++ b/fosite/internal/rfc7523_key_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc7523 (interfaces: RFC7523KeyStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/rfc7523_key_storage_provider.go github.com/ory/hydra/v2/fosite/handler/rfc7523 RFC7523KeyStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + rfc7523 "github.com/ory/hydra/v2/fosite/handler/rfc7523" +) + +// MockRFC7523KeyStorageProvider is a mock of RFC7523KeyStorageProvider interface. +type MockRFC7523KeyStorageProvider struct { + ctrl *gomock.Controller + recorder *MockRFC7523KeyStorageProviderMockRecorder + isgomock struct{} +} + +// MockRFC7523KeyStorageProviderMockRecorder is the mock recorder for MockRFC7523KeyStorageProvider. +type MockRFC7523KeyStorageProviderMockRecorder struct { + mock *MockRFC7523KeyStorageProvider +} + +// NewMockRFC7523KeyStorageProvider creates a new mock instance. +func NewMockRFC7523KeyStorageProvider(ctrl *gomock.Controller) *MockRFC7523KeyStorageProvider { + mock := &MockRFC7523KeyStorageProvider{ctrl: ctrl} + mock.recorder = &MockRFC7523KeyStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRFC7523KeyStorageProvider) EXPECT() *MockRFC7523KeyStorageProviderMockRecorder { + return m.recorder +} + +// RFC7523KeyStorage mocks base method. +func (m *MockRFC7523KeyStorageProvider) RFC7523KeyStorage() rfc7523.RFC7523KeyStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RFC7523KeyStorage") + ret0, _ := ret[0].(rfc7523.RFC7523KeyStorage) + return ret0 +} + +// RFC7523KeyStorage indicates an expected call of RFC7523KeyStorage. +func (mr *MockRFC7523KeyStorageProviderMockRecorder) RFC7523KeyStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RFC7523KeyStorage", reflect.TypeOf((*MockRFC7523KeyStorageProvider)(nil).RFC7523KeyStorage)) +} diff --git a/fosite/internal/rfc8628_code_strategy.go b/fosite/internal/rfc8628_code_strategy.go new file mode 100644 index 00000000000..0388b7fe449 --- /dev/null +++ b/fosite/internal/rfc8628_code_strategy.go @@ -0,0 +1,151 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: RFC8628CodeStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/rfc8628_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 RFC8628CodeStrategy +// + +// Package internal is a generated GoMock package. 
+package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockRFC8628CodeStrategy is a mock of RFC8628CodeStrategy interface. +type MockRFC8628CodeStrategy struct { + ctrl *gomock.Controller + recorder *MockRFC8628CodeStrategyMockRecorder + isgomock struct{} +} + +// MockRFC8628CodeStrategyMockRecorder is the mock recorder for MockRFC8628CodeStrategy. +type MockRFC8628CodeStrategyMockRecorder struct { + mock *MockRFC8628CodeStrategy +} + +// NewMockRFC8628CodeStrategy creates a new mock instance. +func NewMockRFC8628CodeStrategy(ctrl *gomock.Controller) *MockRFC8628CodeStrategy { + mock := &MockRFC8628CodeStrategy{ctrl: ctrl} + mock.recorder = &MockRFC8628CodeStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRFC8628CodeStrategy) EXPECT() *MockRFC8628CodeStrategyMockRecorder { + return m.recorder +} + +// DeviceCodeSignature mocks base method. +func (m *MockRFC8628CodeStrategy) DeviceCodeSignature(ctx context.Context, code string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeviceCodeSignature", ctx, code) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeviceCodeSignature indicates an expected call of DeviceCodeSignature. +func (mr *MockRFC8628CodeStrategyMockRecorder) DeviceCodeSignature(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeviceCodeSignature", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).DeviceCodeSignature), ctx, code) +} + +// GenerateDeviceCode mocks base method. +func (m *MockRFC8628CodeStrategy) GenerateDeviceCode(ctx context.Context) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateDeviceCode", ctx) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateDeviceCode indicates an expected call of GenerateDeviceCode. +func (mr *MockRFC8628CodeStrategyMockRecorder) GenerateDeviceCode(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateDeviceCode", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).GenerateDeviceCode), ctx) +} + +// GenerateUserCode mocks base method. +func (m *MockRFC8628CodeStrategy) GenerateUserCode(ctx context.Context) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateUserCode", ctx) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateUserCode indicates an expected call of GenerateUserCode. +func (mr *MockRFC8628CodeStrategyMockRecorder) GenerateUserCode(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateUserCode", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).GenerateUserCode), ctx) +} + +// ShouldRateLimit mocks base method. +func (m *MockRFC8628CodeStrategy) ShouldRateLimit(ctx context.Context, code string) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ShouldRateLimit", ctx, code) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ShouldRateLimit indicates an expected call of ShouldRateLimit. 
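+//
+// Illustrative sketch only (not produced by MockGen): a device-flow (RFC 8628)
+// test could disable rate limiting for every polled device code, assuming a
+// *gomock.Controller named ctrl:
+//
+//	strategy := NewMockRFC8628CodeStrategy(ctrl)
+//	strategy.EXPECT().
+//		ShouldRateLimit(gomock.Any(), gomock.Any()).
+//		Return(false, nil).
+//		AnyTimes()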
+func (mr *MockRFC8628CodeStrategyMockRecorder) ShouldRateLimit(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ShouldRateLimit", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).ShouldRateLimit), ctx, code) +} + +// UserCodeSignature mocks base method. +func (m *MockRFC8628CodeStrategy) UserCodeSignature(ctx context.Context, code string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UserCodeSignature", ctx, code) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UserCodeSignature indicates an expected call of UserCodeSignature. +func (mr *MockRFC8628CodeStrategyMockRecorder) UserCodeSignature(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UserCodeSignature", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).UserCodeSignature), ctx, code) +} + +// ValidateDeviceCode mocks base method. +func (m *MockRFC8628CodeStrategy) ValidateDeviceCode(ctx context.Context, r fosite.DeviceRequester, code string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateDeviceCode", ctx, r, code) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateDeviceCode indicates an expected call of ValidateDeviceCode. +func (mr *MockRFC8628CodeStrategyMockRecorder) ValidateDeviceCode(ctx, r, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateDeviceCode", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).ValidateDeviceCode), ctx, r, code) +} + +// ValidateUserCode mocks base method. +func (m *MockRFC8628CodeStrategy) ValidateUserCode(ctx context.Context, r fosite.DeviceRequester, code string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateUserCode", ctx, r, code) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateUserCode indicates an expected call of ValidateUserCode. +func (mr *MockRFC8628CodeStrategyMockRecorder) ValidateUserCode(ctx, r, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateUserCode", reflect.TypeOf((*MockRFC8628CodeStrategy)(nil).ValidateUserCode), ctx, r, code) +} diff --git a/fosite/internal/rfc8628_core_storage.go b/fosite/internal/rfc8628_core_storage.go new file mode 100644 index 00000000000..ad477f20003 --- /dev/null +++ b/fosite/internal/rfc8628_core_storage.go @@ -0,0 +1,88 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: Storage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/rfc8628_core_storage.go -mock_names Storage=MockRFC8628Storage github.com/ory/hydra/v2/fosite/handler/rfc8628 Storage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" + rfc8628 "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// MockRFC8628Storage is a mock of Storage interface. +type MockRFC8628Storage struct { + ctrl *gomock.Controller + recorder *MockRFC8628StorageMockRecorder + isgomock struct{} +} + +// MockRFC8628StorageMockRecorder is the mock recorder for MockRFC8628Storage. 
+type MockRFC8628StorageMockRecorder struct { + mock *MockRFC8628Storage +} + +// NewMockRFC8628Storage creates a new mock instance. +func NewMockRFC8628Storage(ctrl *gomock.Controller) *MockRFC8628Storage { + mock := &MockRFC8628Storage{ctrl: ctrl} + mock.recorder = &MockRFC8628StorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRFC8628Storage) EXPECT() *MockRFC8628StorageMockRecorder { + return m.recorder +} + +// AccessTokenStorage mocks base method. +func (m *MockRFC8628Storage) AccessTokenStorage() oauth2.AccessTokenStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AccessTokenStorage") + ret0, _ := ret[0].(oauth2.AccessTokenStorage) + return ret0 +} + +// AccessTokenStorage indicates an expected call of AccessTokenStorage. +func (mr *MockRFC8628StorageMockRecorder) AccessTokenStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AccessTokenStorage", reflect.TypeOf((*MockRFC8628Storage)(nil).AccessTokenStorage)) +} + +// DeviceAuthStorage mocks base method. +func (m *MockRFC8628Storage) DeviceAuthStorage() rfc8628.DeviceAuthStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeviceAuthStorage") + ret0, _ := ret[0].(rfc8628.DeviceAuthStorage) + return ret0 +} + +// DeviceAuthStorage indicates an expected call of DeviceAuthStorage. +func (mr *MockRFC8628StorageMockRecorder) DeviceAuthStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeviceAuthStorage", reflect.TypeOf((*MockRFC8628Storage)(nil).DeviceAuthStorage)) +} + +// RefreshTokenStorage mocks base method. +func (m *MockRFC8628Storage) RefreshTokenStorage() oauth2.RefreshTokenStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RefreshTokenStorage") + ret0, _ := ret[0].(oauth2.RefreshTokenStorage) + return ret0 +} + +// RefreshTokenStorage indicates an expected call of RefreshTokenStorage. +func (mr *MockRFC8628StorageMockRecorder) RefreshTokenStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RefreshTokenStorage", reflect.TypeOf((*MockRFC8628Storage)(nil).RefreshTokenStorage)) +} diff --git a/fosite/internal/rw.go b/fosite/internal/rw.go new file mode 100644 index 00000000000..0c9cafc2bf8 --- /dev/null +++ b/fosite/internal/rw.go @@ -0,0 +1,63 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Automatically generated by MockGen. DO NOT EDIT! 
+// Source: rw.go + +package internal + +import ( + "net/http" + + gomock "go.uber.org/mock/gomock" +) + +// Mock of ResponseWriter interface +type MockResponseWriter struct { + ctrl *gomock.Controller + recorder *_MockResponseWriterRecorder +} + +// Recorder for MockResponseWriter (not exported) +type _MockResponseWriterRecorder struct { + mock *MockResponseWriter +} + +func NewMockResponseWriter(ctrl *gomock.Controller) *MockResponseWriter { + mock := &MockResponseWriter{ctrl: ctrl} + mock.recorder = &_MockResponseWriterRecorder{mock} + return mock +} + +func (_m *MockResponseWriter) EXPECT() *_MockResponseWriterRecorder { + return _m.recorder +} + +func (_m *MockResponseWriter) Header() http.Header { + ret := _m.ctrl.Call(_m, "Header") + ret0, _ := ret[0].(http.Header) + return ret0 +} + +func (_mr *_MockResponseWriterRecorder) Header() *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Header") +} + +func (_m *MockResponseWriter) Write(_param0 []byte) (int, error) { + ret := _m.ctrl.Call(_m, "Write", _param0) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockResponseWriterRecorder) Write(arg0 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Write", arg0) +} + +func (_m *MockResponseWriter) WriteHeader(_param0 int) { + _m.ctrl.Call(_m, "WriteHeader", _param0) +} + +func (_mr *_MockResponseWriterRecorder) WriteHeader(arg0 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "WriteHeader", arg0) +} diff --git a/fosite/internal/storage.go b/fosite/internal/storage.go new file mode 100644 index 00000000000..677d7d0e3b7 --- /dev/null +++ b/fosite/internal/storage.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: Storage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/storage.go github.com/ory/hydra/v2/fosite Storage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockStorage is a mock of Storage interface. +type MockStorage struct { + ctrl *gomock.Controller + recorder *MockStorageMockRecorder + isgomock struct{} +} + +// MockStorageMockRecorder is the mock recorder for MockStorage. +type MockStorageMockRecorder struct { + mock *MockStorage +} + +// NewMockStorage creates a new mock instance. +func NewMockStorage(ctrl *gomock.Controller) *MockStorage { + mock := &MockStorage{ctrl: ctrl} + mock.recorder = &MockStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockStorage) EXPECT() *MockStorageMockRecorder { + return m.recorder +} + +// FositeClientManager mocks base method. +func (m *MockStorage) FositeClientManager() fosite.ClientManager { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "FositeClientManager") + ret0, _ := ret[0].(fosite.ClientManager) + return ret0 +} + +// FositeClientManager indicates an expected call of FositeClientManager. 
+func (mr *MockStorageMockRecorder) FositeClientManager() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FositeClientManager", reflect.TypeOf((*MockStorage)(nil).FositeClientManager)) +} diff --git a/fosite/internal/test_helpers.go b/fosite/internal/test_helpers.go new file mode 100644 index 00000000000..be0213c4135 --- /dev/null +++ b/fosite/internal/test_helpers.go @@ -0,0 +1,162 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package internal + +import ( + "encoding/json" + "fmt" + "io" + "net/url" + "strconv" + "testing" + "time" + + cristaljwt "github.com/cristalhq/jwt/v4" + "github.com/stretchr/testify/require" + "golang.org/x/net/html" + goauth "golang.org/x/oauth2" + + "github.com/ory/hydra/v2/fosite" +) + +func ptr(d time.Duration) *time.Duration { + return &d +} + +var TestLifespans fosite.ClientLifespanConfig = fosite.ClientLifespanConfig{ + AuthorizationCodeGrantAccessTokenLifespan: ptr(31 * time.Hour), + AuthorizationCodeGrantIDTokenLifespan: ptr(32 * time.Hour), + AuthorizationCodeGrantRefreshTokenLifespan: ptr(33 * time.Hour), + ClientCredentialsGrantAccessTokenLifespan: ptr(34 * time.Hour), + ImplicitGrantAccessTokenLifespan: ptr(35 * time.Hour), + ImplicitGrantIDTokenLifespan: ptr(36 * time.Hour), + JwtBearerGrantAccessTokenLifespan: ptr(37 * time.Hour), + PasswordGrantAccessTokenLifespan: ptr(38 * time.Hour), + PasswordGrantRefreshTokenLifespan: ptr(39 * time.Hour), + RefreshTokenGrantIDTokenLifespan: ptr(40 * time.Hour), + RefreshTokenGrantAccessTokenLifespan: ptr(41 * time.Hour), + RefreshTokenGrantRefreshTokenLifespan: ptr(42 * time.Hour), +} + +func RequireEqualDuration(t *testing.T, expected time.Duration, actual time.Duration, precision time.Duration) { + delta := expected - actual + if delta < 0 { + delta = -delta + } + require.Less(t, delta, precision, fmt.Sprintf("expected %s; got %s", expected, actual)) +} + +func RequireEqualTime(t *testing.T, expected time.Time, actual time.Time, precision time.Duration) { + delta := expected.Sub(actual) + if delta < 0 { + delta = -delta + } + require.Less(t, delta, precision, fmt.Sprintf( + "expected %s; got %s", + expected.Format(time.RFC3339Nano), + actual.Format(time.RFC3339Nano), + )) +} + +func ExtractJwtExpClaim(t *testing.T, token string) *time.Time { + jwt, err := cristaljwt.ParseNoVerify([]byte(token)) + require.NoError(t, err) + claims := &cristaljwt.RegisteredClaims{} + require.NoError(t, json.Unmarshal(jwt.Claims(), claims)) + if claims.ExpiresAt == nil { + return nil + } + return &claims.ExpiresAt.Time +} + +func ParseFormPostResponse(t *testing.T, redirectURL string, resp io.Reader) (authorizationCode, stateFromServer, iDToken string, token goauth.Token, customParameters url.Values, rFC6749Error map[string]string) { + token = goauth.Token{} + rFC6749Error = map[string]string{} + customParameters = url.Values{} + + doc, err := html.Parse(resp) + require.NoError(t, err) + + //doc>html>body + body := findBody(doc.FirstChild.FirstChild) + require.Equal(t, "body", body.Data) + + htmlEvent := body.Attr[0].Key + require.Equal(t, "onload", htmlEvent) + + onLoadFunc := body.Attr[0].Val + require.Equal(t, "javascript:document.forms[0].submit()", onLoadFunc) + + form := getNextNoneTextNode(body.FirstChild) + require.NotNil(t, form) + require.Equal(t, "form", form.Data) + + for _, attr := range form.Attr { + if attr.Key == "method" { + require.Equal(t, "post", attr.Val) + } else { + require.Equal(t, 
redirectURL, attr.Val) + } + } + + for node := getNextNoneTextNode(form.FirstChild); node != nil; node = getNextNoneTextNode(node.NextSibling) { + var k, v string + for _, attr := range node.Attr { + if attr.Key == "name" { + k = attr.Val + } else if attr.Key == "value" { + v = attr.Val + } + } + + switch k { + case "state": + stateFromServer = v + case "code": + authorizationCode = v + case "expires_in": + expires, err := strconv.Atoi(v) + require.NoError(t, err) + token.Expiry = time.Now().UTC().Add(time.Duration(expires) * time.Second) + case "access_token": + token.AccessToken = v + case "token_type": + token.TokenType = v + case "refresh_token": + token.RefreshToken = v + case "error": + rFC6749Error["ErrorField"] = v + case "error_hint": + rFC6749Error["HintField"] = v + case "error_description": + rFC6749Error["DescriptionField"] = v + case "id_token": + iDToken = v + default: + customParameters.Add(k, v) + } + } + + return +} + +func getNextNoneTextNode(node *html.Node) *html.Node { + nextNode := node.NextSibling + if nextNode != nil && nextNode.Type == html.TextNode { + nextNode = getNextNoneTextNode(node.NextSibling) + } + + return nextNode +} + +func findBody(node *html.Node) *html.Node { + if node != nil { + if node.Data == "body" { + return node + } + return findBody(node.NextSibling) + } + + return nil +} diff --git a/fosite/internal/token_endpoint_handler.go b/fosite/internal/token_endpoint_handler.go new file mode 100644 index 00000000000..57f7d314123 --- /dev/null +++ b/fosite/internal/token_endpoint_handler.go @@ -0,0 +1,102 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: TokenEndpointHandler) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/token_endpoint_handler.go github.com/ory/hydra/v2/fosite TokenEndpointHandler +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockTokenEndpointHandler is a mock of TokenEndpointHandler interface. +type MockTokenEndpointHandler struct { + ctrl *gomock.Controller + recorder *MockTokenEndpointHandlerMockRecorder + isgomock struct{} +} + +// MockTokenEndpointHandlerMockRecorder is the mock recorder for MockTokenEndpointHandler. +type MockTokenEndpointHandlerMockRecorder struct { + mock *MockTokenEndpointHandler +} + +// NewMockTokenEndpointHandler creates a new mock instance. +func NewMockTokenEndpointHandler(ctrl *gomock.Controller) *MockTokenEndpointHandler { + mock := &MockTokenEndpointHandler{ctrl: ctrl} + mock.recorder = &MockTokenEndpointHandlerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTokenEndpointHandler) EXPECT() *MockTokenEndpointHandlerMockRecorder { + return m.recorder +} + +// CanHandleTokenEndpointRequest mocks base method. +func (m *MockTokenEndpointHandler) CanHandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CanHandleTokenEndpointRequest", ctx, requester) + ret0, _ := ret[0].(bool) + return ret0 +} + +// CanHandleTokenEndpointRequest indicates an expected call of CanHandleTokenEndpointRequest. 
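+//
+// Illustrative sketch only (not produced by MockGen): a test could declare this
+// fake handler responsible for every access request, assuming a
+// *gomock.Controller named ctrl:
+//
+//	handler := NewMockTokenEndpointHandler(ctrl)
+//	handler.EXPECT().
+//		CanHandleTokenEndpointRequest(gomock.Any(), gomock.Any()).
+//		Return(true).
+//		AnyTimes()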
+func (mr *MockTokenEndpointHandlerMockRecorder) CanHandleTokenEndpointRequest(ctx, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CanHandleTokenEndpointRequest", reflect.TypeOf((*MockTokenEndpointHandler)(nil).CanHandleTokenEndpointRequest), ctx, requester) +} + +// CanSkipClientAuth mocks base method. +func (m *MockTokenEndpointHandler) CanSkipClientAuth(ctx context.Context, requester fosite.AccessRequester) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CanSkipClientAuth", ctx, requester) + ret0, _ := ret[0].(bool) + return ret0 +} + +// CanSkipClientAuth indicates an expected call of CanSkipClientAuth. +func (mr *MockTokenEndpointHandlerMockRecorder) CanSkipClientAuth(ctx, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CanSkipClientAuth", reflect.TypeOf((*MockTokenEndpointHandler)(nil).CanSkipClientAuth), ctx, requester) +} + +// HandleTokenEndpointRequest mocks base method. +func (m *MockTokenEndpointHandler) HandleTokenEndpointRequest(ctx context.Context, requester fosite.AccessRequester) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "HandleTokenEndpointRequest", ctx, requester) + ret0, _ := ret[0].(error) + return ret0 +} + +// HandleTokenEndpointRequest indicates an expected call of HandleTokenEndpointRequest. +func (mr *MockTokenEndpointHandlerMockRecorder) HandleTokenEndpointRequest(ctx, requester any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HandleTokenEndpointRequest", reflect.TypeOf((*MockTokenEndpointHandler)(nil).HandleTokenEndpointRequest), ctx, requester) +} + +// PopulateTokenEndpointResponse mocks base method. +func (m *MockTokenEndpointHandler) PopulateTokenEndpointResponse(ctx context.Context, requester fosite.AccessRequester, responder fosite.AccessResponder) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "PopulateTokenEndpointResponse", ctx, requester, responder) + ret0, _ := ret[0].(error) + return ret0 +} + +// PopulateTokenEndpointResponse indicates an expected call of PopulateTokenEndpointResponse. +func (mr *MockTokenEndpointHandlerMockRecorder) PopulateTokenEndpointResponse(ctx, requester, responder any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PopulateTokenEndpointResponse", reflect.TypeOf((*MockTokenEndpointHandler)(nil).PopulateTokenEndpointResponse), ctx, requester, responder) +} diff --git a/fosite/internal/token_introspector.go b/fosite/internal/token_introspector.go new file mode 100644 index 00000000000..f62cb3e53e2 --- /dev/null +++ b/fosite/internal/token_introspector.go @@ -0,0 +1,61 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: TokenIntrospector) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/token_introspector.go github.com/ory/hydra/v2/fosite TokenIntrospector +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockTokenIntrospector is a mock of TokenIntrospector interface. 
+type MockTokenIntrospector struct { + ctrl *gomock.Controller + recorder *MockTokenIntrospectorMockRecorder + isgomock struct{} +} + +// MockTokenIntrospectorMockRecorder is the mock recorder for MockTokenIntrospector. +type MockTokenIntrospectorMockRecorder struct { + mock *MockTokenIntrospector +} + +// NewMockTokenIntrospector creates a new mock instance. +func NewMockTokenIntrospector(ctrl *gomock.Controller) *MockTokenIntrospector { + mock := &MockTokenIntrospector{ctrl: ctrl} + mock.recorder = &MockTokenIntrospectorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTokenIntrospector) EXPECT() *MockTokenIntrospectorMockRecorder { + return m.recorder +} + +// IntrospectToken mocks base method. +func (m *MockTokenIntrospector) IntrospectToken(ctx context.Context, token string, tokenUse fosite.TokenType, accessRequest fosite.AccessRequester, scopes []string) (fosite.TokenType, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IntrospectToken", ctx, token, tokenUse, accessRequest, scopes) + ret0, _ := ret[0].(fosite.TokenType) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IntrospectToken indicates an expected call of IntrospectToken. +func (mr *MockTokenIntrospectorMockRecorder) IntrospectToken(ctx, token, tokenUse, accessRequest, scopes any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IntrospectToken", reflect.TypeOf((*MockTokenIntrospector)(nil).IntrospectToken), ctx, token, tokenUse, accessRequest, scopes) +} diff --git a/fosite/internal/token_revocation_storage.go b/fosite/internal/token_revocation_storage.go new file mode 100644 index 00000000000..2de16185e03 --- /dev/null +++ b/fosite/internal/token_revocation_storage.go @@ -0,0 +1,72 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: TokenRevocationStorage) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/token_revocation_storage.go github.com/ory/hydra/v2/fosite/handler/oauth2 TokenRevocationStorage +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockTokenRevocationStorage is a mock of TokenRevocationStorage interface. +type MockTokenRevocationStorage struct { + ctrl *gomock.Controller + recorder *MockTokenRevocationStorageMockRecorder + isgomock struct{} +} + +// MockTokenRevocationStorageMockRecorder is the mock recorder for MockTokenRevocationStorage. +type MockTokenRevocationStorageMockRecorder struct { + mock *MockTokenRevocationStorage +} + +// NewMockTokenRevocationStorage creates a new mock instance. +func NewMockTokenRevocationStorage(ctrl *gomock.Controller) *MockTokenRevocationStorage { + mock := &MockTokenRevocationStorage{ctrl: ctrl} + mock.recorder = &MockTokenRevocationStorageMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTokenRevocationStorage) EXPECT() *MockTokenRevocationStorageMockRecorder { + return m.recorder +} + +// RevokeAccessToken mocks base method. 
+func (m *MockTokenRevocationStorage) RevokeAccessToken(ctx context.Context, requestID string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevokeAccessToken", ctx, requestID) + ret0, _ := ret[0].(error) + return ret0 +} + +// RevokeAccessToken indicates an expected call of RevokeAccessToken. +func (mr *MockTokenRevocationStorageMockRecorder) RevokeAccessToken(ctx, requestID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevokeAccessToken", reflect.TypeOf((*MockTokenRevocationStorage)(nil).RevokeAccessToken), ctx, requestID) +} + +// RevokeRefreshToken mocks base method. +func (m *MockTokenRevocationStorage) RevokeRefreshToken(ctx context.Context, requestID string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevokeRefreshToken", ctx, requestID) + ret0, _ := ret[0].(error) + return ret0 +} + +// RevokeRefreshToken indicates an expected call of RevokeRefreshToken. +func (mr *MockTokenRevocationStorageMockRecorder) RevokeRefreshToken(ctx, requestID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevokeRefreshToken", reflect.TypeOf((*MockTokenRevocationStorage)(nil).RevokeRefreshToken), ctx, requestID) +} diff --git a/fosite/internal/token_revocation_storage_provider.go b/fosite/internal/token_revocation_storage_provider.go new file mode 100644 index 00000000000..96071900a2b --- /dev/null +++ b/fosite/internal/token_revocation_storage_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/oauth2 (interfaces: TokenRevocationStorageProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/token_revocation_storage_provider.go github.com/ory/hydra/v2/fosite/handler/oauth2 TokenRevocationStorageProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + oauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// MockTokenRevocationStorageProvider is a mock of TokenRevocationStorageProvider interface. +type MockTokenRevocationStorageProvider struct { + ctrl *gomock.Controller + recorder *MockTokenRevocationStorageProviderMockRecorder + isgomock struct{} +} + +// MockTokenRevocationStorageProviderMockRecorder is the mock recorder for MockTokenRevocationStorageProvider. +type MockTokenRevocationStorageProviderMockRecorder struct { + mock *MockTokenRevocationStorageProvider +} + +// NewMockTokenRevocationStorageProvider creates a new mock instance. +func NewMockTokenRevocationStorageProvider(ctrl *gomock.Controller) *MockTokenRevocationStorageProvider { + mock := &MockTokenRevocationStorageProvider{ctrl: ctrl} + mock.recorder = &MockTokenRevocationStorageProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTokenRevocationStorageProvider) EXPECT() *MockTokenRevocationStorageProviderMockRecorder { + return m.recorder +} + +// TokenRevocationStorage mocks base method. 
+func (m *MockTokenRevocationStorageProvider) TokenRevocationStorage() oauth2.TokenRevocationStorage { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "TokenRevocationStorage") + ret0, _ := ret[0].(oauth2.TokenRevocationStorage) + return ret0 +} + +// TokenRevocationStorage indicates an expected call of TokenRevocationStorage. +func (mr *MockTokenRevocationStorageProviderMockRecorder) TokenRevocationStorage() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TokenRevocationStorage", reflect.TypeOf((*MockTokenRevocationStorageProvider)(nil).TokenRevocationStorage)) +} diff --git a/fosite/internal/transactional.go b/fosite/internal/transactional.go new file mode 100644 index 00000000000..d15858c6205 --- /dev/null +++ b/fosite/internal/transactional.go @@ -0,0 +1,87 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite (interfaces: Transactional) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/transactional.go github.com/ory/hydra/v2/fosite Transactional +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockTransactional is a mock of Transactional interface. +type MockTransactional struct { + ctrl *gomock.Controller + recorder *MockTransactionalMockRecorder + isgomock struct{} +} + +// MockTransactionalMockRecorder is the mock recorder for MockTransactional. +type MockTransactionalMockRecorder struct { + mock *MockTransactional +} + +// NewMockTransactional creates a new mock instance. +func NewMockTransactional(ctrl *gomock.Controller) *MockTransactional { + mock := &MockTransactional{ctrl: ctrl} + mock.recorder = &MockTransactionalMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTransactional) EXPECT() *MockTransactionalMockRecorder { + return m.recorder +} + +// BeginTX mocks base method. +func (m *MockTransactional) BeginTX(ctx context.Context) (context.Context, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "BeginTX", ctx) + ret0, _ := ret[0].(context.Context) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// BeginTX indicates an expected call of BeginTX. +func (mr *MockTransactionalMockRecorder) BeginTX(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BeginTX", reflect.TypeOf((*MockTransactional)(nil).BeginTX), ctx) +} + +// Commit mocks base method. +func (m *MockTransactional) Commit(ctx context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Commit", ctx) + ret0, _ := ret[0].(error) + return ret0 +} + +// Commit indicates an expected call of Commit. +func (mr *MockTransactionalMockRecorder) Commit(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Commit", reflect.TypeOf((*MockTransactional)(nil).Commit), ctx) +} + +// Rollback mocks base method. +func (m *MockTransactional) Rollback(ctx context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Rollback", ctx) + ret0, _ := ret[0].(error) + return ret0 +} + +// Rollback indicates an expected call of Rollback. 
+func (mr *MockTransactionalMockRecorder) Rollback(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Rollback", reflect.TypeOf((*MockTransactional)(nil).Rollback), ctx) +} diff --git a/fosite/internal/user_code_strategy.go b/fosite/internal/user_code_strategy.go new file mode 100644 index 00000000000..cb0df1371f3 --- /dev/null +++ b/fosite/internal/user_code_strategy.go @@ -0,0 +1,91 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: UserCodeStrategy) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/user_code_strategy.go github.com/ory/hydra/v2/fosite/handler/rfc8628 UserCodeStrategy +// + +// Package internal is a generated GoMock package. +package internal + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + fosite "github.com/ory/hydra/v2/fosite" +) + +// MockUserCodeStrategy is a mock of UserCodeStrategy interface. +type MockUserCodeStrategy struct { + ctrl *gomock.Controller + recorder *MockUserCodeStrategyMockRecorder + isgomock struct{} +} + +// MockUserCodeStrategyMockRecorder is the mock recorder for MockUserCodeStrategy. +type MockUserCodeStrategyMockRecorder struct { + mock *MockUserCodeStrategy +} + +// NewMockUserCodeStrategy creates a new mock instance. +func NewMockUserCodeStrategy(ctrl *gomock.Controller) *MockUserCodeStrategy { + mock := &MockUserCodeStrategy{ctrl: ctrl} + mock.recorder = &MockUserCodeStrategyMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockUserCodeStrategy) EXPECT() *MockUserCodeStrategyMockRecorder { + return m.recorder +} + +// GenerateUserCode mocks base method. +func (m *MockUserCodeStrategy) GenerateUserCode(ctx context.Context) (string, string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GenerateUserCode", ctx) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(string) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GenerateUserCode indicates an expected call of GenerateUserCode. +func (mr *MockUserCodeStrategyMockRecorder) GenerateUserCode(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GenerateUserCode", reflect.TypeOf((*MockUserCodeStrategy)(nil).GenerateUserCode), ctx) +} + +// UserCodeSignature mocks base method. +func (m *MockUserCodeStrategy) UserCodeSignature(ctx context.Context, code string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UserCodeSignature", ctx, code) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UserCodeSignature indicates an expected call of UserCodeSignature. +func (mr *MockUserCodeStrategyMockRecorder) UserCodeSignature(ctx, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UserCodeSignature", reflect.TypeOf((*MockUserCodeStrategy)(nil).UserCodeSignature), ctx, code) +} + +// ValidateUserCode mocks base method. +func (m *MockUserCodeStrategy) ValidateUserCode(ctx context.Context, r fosite.DeviceRequester, code string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateUserCode", ctx, r, code) + ret0, _ := ret[0].(error) + return ret0 +} + +// ValidateUserCode indicates an expected call of ValidateUserCode. 
+func (mr *MockUserCodeStrategyMockRecorder) ValidateUserCode(ctx, r, code any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateUserCode", reflect.TypeOf((*MockUserCodeStrategy)(nil).ValidateUserCode), ctx, r, code) +} diff --git a/fosite/internal/user_code_strategy_provider.go b/fosite/internal/user_code_strategy_provider.go new file mode 100644 index 00000000000..76666b8e875 --- /dev/null +++ b/fosite/internal/user_code_strategy_provider.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ory/hydra/v2/fosite/handler/rfc8628 (interfaces: UserCodeStrategyProvider) +// +// Generated by this command: +// +// mockgen -package internal -destination internal/user_code_strategy_provider.go github.com/ory/hydra/v2/fosite/handler/rfc8628 UserCodeStrategyProvider +// + +// Package internal is a generated GoMock package. +package internal + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" + + rfc8628 "github.com/ory/hydra/v2/fosite/handler/rfc8628" +) + +// MockUserCodeStrategyProvider is a mock of UserCodeStrategyProvider interface. +type MockUserCodeStrategyProvider struct { + ctrl *gomock.Controller + recorder *MockUserCodeStrategyProviderMockRecorder + isgomock struct{} +} + +// MockUserCodeStrategyProviderMockRecorder is the mock recorder for MockUserCodeStrategyProvider. +type MockUserCodeStrategyProviderMockRecorder struct { + mock *MockUserCodeStrategyProvider +} + +// NewMockUserCodeStrategyProvider creates a new mock instance. +func NewMockUserCodeStrategyProvider(ctrl *gomock.Controller) *MockUserCodeStrategyProvider { + mock := &MockUserCodeStrategyProvider{ctrl: ctrl} + mock.recorder = &MockUserCodeStrategyProviderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockUserCodeStrategyProvider) EXPECT() *MockUserCodeStrategyProviderMockRecorder { + return m.recorder +} + +// UserCodeStrategy mocks base method. +func (m *MockUserCodeStrategyProvider) UserCodeStrategy() rfc8628.UserCodeStrategy { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UserCodeStrategy") + ret0, _ := ret[0].(rfc8628.UserCodeStrategy) + return ret0 +} + +// UserCodeStrategy indicates an expected call of UserCodeStrategy. +func (mr *MockUserCodeStrategyProviderMockRecorder) UserCodeStrategy() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UserCodeStrategy", reflect.TypeOf((*MockUserCodeStrategyProvider)(nil).UserCodeStrategy)) +} diff --git a/fosite/introspect.go b/fosite/introspect.go new file mode 100644 index 00000000000..6ca14457c1b --- /dev/null +++ b/fosite/introspect.go @@ -0,0 +1,68 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "strings" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" + + "github.com/pkg/errors" +) + +type TokenIntrospector interface { + IntrospectToken(ctx context.Context, token string, tokenUse TokenUse, accessRequest AccessRequester, scopes []string) (TokenUse, error) +} + +func AccessTokenFromRequest(req *http.Request) string { + // According to https://tools.ietf.org/html/rfc6750 you can pass tokens through: + // - Form-Encoded Body Parameter. 
Recommended, more likely to appear. e.g.: Authorization: Bearer mytoken123 + // - URI Query Parameter e.g. access_token=mytoken123 + + auth := req.Header.Get("Authorization") + split := strings.SplitN(auth, " ", 2) + if len(split) != 2 || !strings.EqualFold(split[0], "bearer") { + // Nothing in Authorization header, try access_token + // Empty string returned if there's no such parameter + if err := req.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { + return "" + } + return req.Form.Get("access_token") + } + + return split[1] +} + +func (f *Fosite) IntrospectToken(ctx context.Context, token string, tokenUse TokenUse, session Session, scopes ...string) (_ TokenUse, _ AccessRequester, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.IntrospectToken") + defer otelx.End(span, &err) + + var found = false + var foundTokenUse TokenUse = "" + + ar := NewAccessRequest(session) + for _, validator := range f.Config.GetTokenIntrospectionHandlers(ctx) { + tu, err := validator.IntrospectToken(ctx, token, tokenUse, ar, scopes) + if err == nil { + found = true + foundTokenUse = tu + } else if errors.Is(err, ErrUnknownRequest) { + // do nothing + } else { + rfcerr := ErrorToRFC6749Error(err) + return "", nil, errorsx.WithStack(rfcerr) + } + } + + if !found { + return "", nil, errorsx.WithStack(ErrRequestUnauthorized.WithHint("Unable to find a suitable validation strategy for the token, thus it is invalid.")) + } + + return foundTokenUse, ar, nil +} diff --git a/fosite/introspect_test.go b/fosite/introspect_test.go new file mode 100644 index 00000000000..ba091920517 --- /dev/null +++ b/fosite/introspect_test.go @@ -0,0 +1,114 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/storage" +) + +func TestAccessTokenFromRequestNoToken(t *testing.T) { + req, _ := http.NewRequest("GET", "http://example.com/test", nil) + + assert.Equal(t, AccessTokenFromRequest(req), "", "No token should produce an empty string") +} + +func TestAccessTokenFromRequestHeader(t *testing.T) { + token := "TokenFromHeader" + + req, _ := http.NewRequest("GET", "http://example.com/test", nil) + req.Header.Add("Authorization", "Bearer "+token) + + assert.Equal(t, AccessTokenFromRequest(req), token, "Token should be obtainable from header") +} + +func TestAccessTokenFromRequestQuery(t *testing.T) { + token := "TokenFromQueryParam" + + req, _ := http.NewRequest("GET", "http://example.com/test?access_token="+token, nil) + + assert.Equal(t, AccessTokenFromRequest(req), token, "Token should be obtainable from access_token query parameter") +} + +func TestIntrospect(t *testing.T) { + ctrl := gomock.NewController(t) + validator := internal.NewMockTokenIntrospector(ctrl) + t.Cleanup(ctrl.Finish) + + config := new(Config) + f := compose.ComposeAllEnabled(config, storage.NewMemoryStore(), nil).(*Fosite) + + req, _ := http.NewRequest("GET", "http://example.com/test", nil) + req.Header.Add("Authorization", "bearer some-token") + + for k, c := range []struct { + description string + scopes []string + setup func() + expectErr error + }{ + { + description: "should fail", + scopes: []string{}, + setup: func() { + }, + expectErr: ErrRequestUnauthorized, + }, + { + description: "should fail", + scopes: []string{"foo"}, + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + validator.EXPECT().IntrospectToken(gomock.Any(), "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), ErrUnknownRequest) + }, + expectErr: ErrRequestUnauthorized, + }, + { + description: "should fail", + scopes: []string{"foo"}, + setup: func() { + validator.EXPECT().IntrospectToken(gomock.Any(), "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), ErrInvalidClient) + }, + expectErr: ErrInvalidClient, + }, + { + description: "should pass", + setup: func() { + validator.EXPECT().IntrospectToken(gomock.Any(), "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Do(func(ctx context.Context, _ string, _ TokenUse, accessRequest AccessRequester, _ []string) { + accessRequest.(*AccessRequest).GrantedScope = []string{"bar"} + }).Return(TokenUse(""), nil) + }, + }, + { + description: "should pass", + scopes: []string{"bar"}, + setup: func() { + validator.EXPECT().IntrospectToken(gomock.Any(), "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Do(func(ctx context.Context, _ string, _ TokenType, accessRequest AccessRequester, _ []string) { + accessRequest.(*AccessRequest).GrantedScope = []string{"bar"} + }).Return(TokenUse(""), nil) + }, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.setup() + _, _, err := f.IntrospectToken(context.Background(), AccessTokenFromRequest(req), AccessToken, nil, c.scopes...) 
+ if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/fosite/introspection_request_handler.go b/fosite/introspection_request_handler.go new file mode 100644 index 00000000000..e0b4c51cbc3 --- /dev/null +++ b/fosite/introspection_request_handler.go @@ -0,0 +1,192 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "net/url" + "strings" + + "go.opentelemetry.io/otel/trace" + "golang.org/x/text/language" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" +) + +// NewIntrospectionRequest initiates token introspection as defined in +// https://tools.ietf.org/html/rfc7662#section-2.1 +// +// The protected resource calls the introspection endpoint using an HTTP +// POST [RFC7231] request with parameters sent as +// "application/x-www-form-urlencoded" data as defined in +// [W3C.REC-html5-20141028]. The protected resource sends a parameter +// representing the token along with optional parameters representing +// additional context that is known by the protected resource to aid the +// authorization server in its response. +// +// * token +// REQUIRED. The string value of the token. For access tokens, this +// is the "access_token" value returned from the token endpoint +// defined in OAuth 2.0 [RFC6749], Section 5.1. For refresh tokens, +// this is the "refresh_token" value returned from the token endpoint +// as defined in OAuth 2.0 [RFC6749], Section 5.1. Other token types +// are outside the scope of this specification. +// +// * token_type_hint +// OPTIONAL. A hint about the type of the token submitted for +// introspection. The protected resource MAY pass this parameter to +// help the authorization server optimize the token lookup. If the +// server is unable to locate the token using the given hint, it MUST +// extend its search across all of its supported token types. An +// authorization server MAY ignore this parameter, particularly if it +// is able to detect the token type automatically. Values for this +// field are defined in the "OAuth Token Type Hints" registry defined +// in OAuth Token Revocation [RFC7009]. +// +// The introspection endpoint MAY accept other OPTIONAL parameters to +// provide further context to the query. For instance, an authorization +// server may desire to know the IP address of the client accessing the +// protected resource to determine if the correct client is likely to be +// presenting the token. The definition of this or any other parameters +// are outside the scope of this specification, to be defined by service +// documentation or extensions to this specification. If the +// authorization server is unable to determine the state of the token +// without additional information, it SHOULD return an introspection +// response indicating the token is not active as described in +// Section 2.2. +// +// To prevent token scanning attacks, the endpoint MUST also require +// some form of authorization to access this endpoint, such as client +// authentication as described in OAuth 2.0 [RFC6749] or a separate +// OAuth 2.0 access token such as the bearer token described in OAuth +// 2.0 Bearer Token Usage [RFC6750]. The methods of managing and +// validating these authentication credentials are out of scope of this +// specification. 
+// +// For example, the following shows a protected resource calling the +// token introspection endpoint to query about an OAuth 2.0 bearer +// token. The protected resource is using a separate OAuth 2.0 bearer +// token to authorize this call. +// +// The following is a non-normative example request: +// +// POST /introspect HTTP/1.1 +// Host: server.example.com +// Accept: application/json +// Content-Type: application/x-www-form-urlencoded +// Authorization: Bearer 23410913-abewfq.123483 +// +// token=2YotnFZFEjr1zCsicMWpAA +// +// In this example, the protected resource uses a client identifier and +// client secret to authenticate itself to the introspection endpoint. +// The protected resource also sends a token type hint indicating that +// it is inquiring about an access token. +// +// The following is a non-normative example request: +// +// POST /introspect HTTP/1.1 +// Host: server.example.com +// Accept: application/json +// Content-Type: application/x-www-form-urlencoded +// Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW +// +// token=mF_9.B5f-4.1JqM&token_type_hint=access_token +func (f *Fosite) NewIntrospectionRequest(ctx context.Context, r *http.Request, session Session) (_ IntrospectionResponder, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewIntrospectionRequest") + defer otelx.End(span, &err) + + ctx = context.WithValue(ctx, RequestContextKey, r) + + if r.Method != "POST" { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrInvalidRequest.WithHintf("HTTP method is '%s' but expected 'POST'.", r.Method)) + } else if err := r.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithWrap(err).WithDebug(err.Error())) + } else if len(r.PostForm) == 0 { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrInvalidRequest.WithHint("The POST body can not be empty.")) + } + + token := r.PostForm.Get("token") + tokenTypeHint := r.PostForm.Get("token_type_hint") + scope := r.PostForm.Get("scope") + if clientToken := AccessTokenFromRequest(r); clientToken != "" { + if token == clientToken { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("Bearer and introspection token are identical.")) + } + + if tu, _, err := f.IntrospectToken(ctx, clientToken, AccessToken, session.Clone()); err != nil { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("HTTP Authorization header missing, malformed, or credentials used are invalid.")) + } else if tu != "" && tu != AccessToken { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHintf("HTTP Authorization header did not provide a token of type 'access_token', got type '%s'.", tu)) + } + } else { + id, secret, ok := r.BasicAuth() + if !ok { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("HTTP Authorization header missing.")) + } + + clientID, err := url.QueryUnescape(id) + if err != nil { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("Unable to decode OAuth 2.0 Client ID from HTTP basic authorization header, make sure it is properly 
encoded.").WithWrap(err).WithDebug(err.Error())) + } + + clientSecret, err := url.QueryUnescape(secret) + if err != nil { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("Unable to decode OAuth 2.0 Client Secret from HTTP basic authorization header, make sure it is properly encoded.").WithWrap(err).WithDebug(err.Error())) + } + + client, err := f.Store.FositeClientManager().GetClient(ctx, clientID) + if err != nil { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("Unable to find OAuth 2.0 Client from HTTP basic authorization header.").WithWrap(err).WithDebug(err.Error())) + } + + // Enforce client authentication + if err := f.checkClientSecret(ctx, client, []byte(clientSecret)); err != nil { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrRequestUnauthorized.WithHint("OAuth 2.0 Client credentials are invalid.")) + } + } + + tu, ar, err := f.IntrospectToken(ctx, token, TokenUse(tokenTypeHint), session, RemoveEmpty(strings.Split(scope, " "))...) + if err != nil { + return &IntrospectionResponse{Active: false}, errorsx.WithStack(ErrInactiveToken.WithHint("An introspection strategy indicated that the token is inactive.").WithWrap(err).WithDebug(err.Error())) + } + accessTokenType := "" + + if tu == AccessToken { + accessTokenType = BearerAccessToken + } + + return &IntrospectionResponse{ + Active: true, + AccessRequester: ar, + TokenUse: tu, + AccessTokenType: accessTokenType, + }, nil +} + +type IntrospectionResponse struct { + Active bool `json:"active"` + AccessRequester AccessRequester `json:"extra"` + TokenUse TokenUse `json:"token_use,omitempty"` + AccessTokenType string `json:"token_type,omitempty"` + Lang language.Tag `json:"-"` +} + +func (r *IntrospectionResponse) IsActive() bool { + return r.Active +} + +func (r *IntrospectionResponse) GetAccessRequester() AccessRequester { + return r.AccessRequester +} + +func (r *IntrospectionResponse) GetTokenUse() TokenUse { + return r.TokenUse +} + +func (r *IntrospectionResponse) GetAccessTokenType() string { + return r.AccessTokenType +} diff --git a/fosite/introspection_request_handler_test.go b/fosite/introspection_request_handler_test.go new file mode 100644 index 00000000000..8b4679ad6e1 --- /dev/null +++ b/fosite/introspection_request_handler_test.go @@ -0,0 +1,222 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "net/http" + "net/url" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + "github.com/ory/hydra/v2/fosite" + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/internal" + "github.com/ory/hydra/v2/fosite/storage" +) + +func TestIntrospectionResponseTokenUse(t *testing.T) { + ctrl := gomock.NewController(t) + validator := internal.NewMockTokenIntrospector(ctrl) + t.Cleanup(ctrl.Finish) + + ctx := gomock.AssignableToTypeOf(context.WithValue(context.TODO(), ContextKey("test"), nil)) + + config := new(Config) + f := compose.ComposeAllEnabled(config, storage.NewExampleStore(), nil).(*Fosite) + httpreq := &http.Request{ + Method: "POST", + Header: http.Header{ + "Authorization": []string{"bearer some-token"}, + }, + PostForm: url.Values{ + "token": []string{"introspect-token"}, + }, + } + for k, c := range []struct { + description string + setup func() + expectedTU TokenUse + expectedATT string + }{ + { + description: "introspecting access token", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + validator.EXPECT().IntrospectToken(ctx, "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(AccessToken, nil) + }, + expectedATT: BearerAccessToken, + expectedTU: AccessToken, + }, + { + description: "introspecting refresh token", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + validator.EXPECT().IntrospectToken(ctx, "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(RefreshToken, nil) + }, + expectedATT: "", + expectedTU: RefreshToken, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.setup() + res, err := f.NewIntrospectionRequest(context.TODO(), httpreq, &DefaultSession{}) + require.NoError(t, err) + assert.Equal(t, c.expectedATT, res.GetAccessTokenType()) + assert.Equal(t, c.expectedTU, res.GetTokenUse()) + }) + } +} + +func TestIntrospectionResponse(t *testing.T) { + r := &fosite.IntrospectionResponse{ + AccessRequester: fosite.NewAccessRequest(nil), + Active: true, + } + + assert.Equal(t, r.AccessRequester, r.GetAccessRequester()) + assert.Equal(t, r.Active, r.IsActive()) +} + +func TestNewIntrospectionRequest(t *testing.T) { + ctrl := gomock.NewController(t) + validator := internal.NewMockTokenIntrospector(ctrl) + t.Cleanup(ctrl.Finish) + + ctx := gomock.AssignableToTypeOf(context.WithValue(context.TODO(), ContextKey("test"), nil)) + + config := new(Config) + f := compose.ComposeAllEnabled(config, storage.NewExampleStore(), nil).(*Fosite) + httpreq := &http.Request{ + Method: "POST", + Header: http.Header{}, + Form: url.Values{}, + } + newErr := errors.New("asdf") + + for k, c := range []struct { + description string + setup func() + expectErr error + isActive bool + }{ + { + description: "should fail", + setup: func() { + }, + expectErr: ErrInvalidRequest, + }, + { + description: "should fail", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + httpreq = &http.Request{ + Method: "POST", + Header: http.Header{ + "Authorization": []string{"bearer some-token"}, + }, + PostForm: url.Values{ + "token": []string{"introspect-token"}, + }, + } + validator.EXPECT().IntrospectToken(ctx, "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), 
nil) + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), newErr) + }, + isActive: false, + expectErr: ErrInactiveToken, + }, + { + description: "should pass", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + httpreq = &http.Request{ + Method: "POST", + Header: http.Header{ + "Authorization": []string{"bearer some-token"}, + }, + PostForm: url.Values{ + "token": []string{"introspect-token"}, + }, + } + validator.EXPECT().IntrospectToken(ctx, "some-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + }, + isActive: true, + }, + { + description: "should pass with basic auth if username and password encoded", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + httpreq = &http.Request{ + Method: "POST", + Header: http.Header{ + // Basic Authorization with username=encoded:client and password=encoded&password + "Authorization": []string{"Basic ZW5jb2RlZCUzQWNsaWVudDplbmNvZGVkJTI2cGFzc3dvcmQ="}, + }, + PostForm: url.Values{ + "token": []string{"introspect-token"}, + }, + } + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + }, + isActive: true, + }, + { + description: "should pass with basic auth if username and password not encoded", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + httpreq = &http.Request{ + Method: "POST", + Header: http.Header{ + // Basic Authorization with username=my-client and password=foobar + "Authorization": []string{"Basic bXktY2xpZW50OmZvb2Jhcg=="}, + }, + PostForm: url.Values{ + "token": []string{"introspect-token"}, + }, + } + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + }, + isActive: true, + }, + { + description: "should pass with basic auth if username and password not encoded", + setup: func() { + config.TokenIntrospectionHandlers = TokenIntrospectionHandlers{validator} + httpreq = &http.Request{ + Method: "POST", + Header: http.Header{ + // Basic Authorization with username=my-client and password=foobaz + "Authorization": []string{"Basic bXktY2xpZW50OmZvb2Jheg=="}, + }, + PostForm: url.Values{ + "token": []string{"introspect-token"}, + }, + } + validator.EXPECT().IntrospectToken(ctx, "introspect-token", gomock.Any(), gomock.Any(), gomock.Any()).Return(TokenUse(""), nil) + }, + isActive: true, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + c.setup() + res, err := f.NewIntrospectionRequest(context.TODO(), httpreq, &DefaultSession{}) + + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + require.NoError(t, err) + assert.Equal(t, c.isActive, res.IsActive()) + } + }) + } +} diff --git a/fosite/introspection_response_writer.go b/fosite/introspection_response_writer.go new file mode 100644 index 00000000000..7e136a7501b --- /dev/null +++ b/fosite/introspection_response_writer.go @@ -0,0 +1,234 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "net/http" + "strings" + + "github.com/pkg/errors" +) + +// WriteIntrospectionError responds with token metadata discovered by token introspection as 
defined in +// https://tools.ietf.org/html/rfc7662#section-2.2 +// +// If the protected resource uses OAuth 2.0 client credentials to +// authenticate to the introspection endpoint and its credentials are +// invalid, the authorization server responds with an HTTP 401 +// (Unauthorized) as described in Section 5.2 of OAuth 2.0 [RFC6749]. +// +// If the protected resource uses an OAuth 2.0 bearer token to authorize +// its call to the introspection endpoint and the token used for +// authorization does not contain sufficient privileges or is otherwise +// invalid for this request, the authorization server responds with an +// HTTP 401 code as described in Section 3 of OAuth 2.0 Bearer Token +// Usage [RFC6750]. +// +// Note that a properly formed and authorized query for an inactive or +// otherwise invalid token (or a token the protected resource is not +// allowed to know about) is not considered an error response by this +// specification. In these cases, the authorization server MUST instead +// respond with an introspection response with the "active" field set to +// "false" as described in Section 2.2. +func (f *Fosite) WriteIntrospectionError(ctx context.Context, rw http.ResponseWriter, err error) { + if err == nil { + return + } + + // Inactive token errors should never written out as an error. + if !errors.Is(err, ErrInactiveToken) && (errors.Is(err, ErrInvalidRequest) || errors.Is(err, ErrRequestUnauthorized)) { + f.writeJsonError(ctx, rw, nil, err) + return + } + + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + _ = json.NewEncoder(rw).Encode(struct { + Active bool `json:"active"` + }{Active: false}) +} + +// WriteIntrospectionResponse responds with an error if token introspection failed as defined in +// https://tools.ietf.org/html/rfc7662#section-2.3 +// +// The server responds with a JSON object [RFC7159] in "application/ +// json" format with the following top-level members. +// +// * active +// REQUIRED. Boolean indicator of whether or not the presented token +// is currently active. The specifics of a token's "active" state +// will vary depending on the implementation of the authorization +// server and the information it keeps about its tokens, but a "true" +// value return for the "active" property will generally indicate +// that a given token has been issued by this authorization server, +// has not been revoked by the resource owner, and is within its +// given time window of validity (e.g., after its issuance time and +// before its expiration time). See Section 4 for information on +// implementation of such checks. +// +// * scope +// OPTIONAL. A JSON string containing a space-separated list of +// scopes associated with this token, in the format described in +// Section 3.3 of OAuth 2.0 [RFC6749]. +// +// * client_id +// OPTIONAL. Client identifier for the OAuth 2.0 client that +// requested this token. +// +// * username +// OPTIONAL. Human-readable identifier for the resource owner who +// authorized this token. +// +// * token_type +// OPTIONAL. Type of the token as defined in Section 5.1 of OAuth +// 2.0 [RFC6749]. +// +// * exp +// OPTIONAL. Integer timestamp, measured in the number of seconds +// since January 1 1970 UTC, indicating when this token will expire, +// as defined in JWT [RFC7519]. +// +// * iat +// OPTIONAL. 
Integer timestamp, measured in the number of seconds +// since January 1 1970 UTC, indicating when this token was +// originally issued, as defined in JWT [RFC7519]. +// +// * nbf +// OPTIONAL. Integer timestamp, measured in the number of seconds +// since January 1 1970 UTC, indicating when this token is not to be +// used before, as defined in JWT [RFC7519]. +// +// * sub +// OPTIONAL. Subject of the token, as defined in JWT [RFC7519]. +// Usually a machine-readable identifier of the resource owner who +// authorized this token. +// +// * aud +// OPTIONAL. Service-specific string identifier or list of string +// identifiers representing the intended audience for this token, as +// defined in JWT [RFC7519]. +// +// * iss +// OPTIONAL. String representing the issuer of this token, as +// defined in JWT [RFC7519]. +// +// * jti +// OPTIONAL. String identifier for the token, as defined in JWT +// [RFC7519]. +// +// Specific implementations MAY extend this structure with their own +// service-specific response names as top-level members of this JSON +// object. Response names intended to be used across domains MUST be +// registered in the "OAuth Token Introspection Response" registry +// defined in Section 3.1. +// +// The authorization server MAY respond differently to different +// protected resources making the same request. For instance, an +// authorization server MAY limit which scopes from a given token are +// returned for each protected resource to prevent a protected resource +// from learning more about the larger network than is necessary for its +// operation. +// +// The response MAY be cached by the protected resource to improve +// performance and reduce load on the introspection endpoint, but at the +// cost of liveness of the information used by the protected resource to +// make authorization decisions. See Section 4 for more information +// regarding the trade off when the response is cached. +// +// For example, the following response contains a set of information +// about an active token: +// +// The following is a non-normative example response: +// +// HTTP/1.1 200 OK +// Content-Type: application/json +// +// { +// "active": true, +// "client_id": "l238j323ds-23ij4", +// "username": "jdoe", +// "scope": "read write dolphin", +// "sub": "Z5O3upPC88QrAjx00dis", +// "aud": "https://protected.example.net/resource", +// "iss": "https://server.example.com/", +// "exp": 1419356238, +// "iat": 1419350238, +// "extension_field": "twenty-seven" +// } +// +// If the introspection call is properly authorized but the token is not +// active, does not exist on this server, or the protected resource is +// not allowed to introspect this particular token, then the +// authorization server MUST return an introspection response with the +// "active" field set to "false". Note that to avoid disclosing too +// much of the authorization server's state to a third party, the +// authorization server SHOULD NOT include any additional information +// about an inactive token, including why the token is inactive. 
+// +// The following is a non-normative example response for a token that +// has been revoked or is otherwise invalid: +// +// HTTP/1.1 200 OK +// Content-Type: application/json +// +// { +// "active": false +// } +func (f *Fosite) WriteIntrospectionResponse(ctx context.Context, rw http.ResponseWriter, r IntrospectionResponder) { + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + + if !r.IsActive() { + _ = json.NewEncoder(rw).Encode(&struct { + Active bool `json:"active"` + }{Active: false}) + return + } + + response := map[string]interface{}{ + "active": true, + } + + extraClaimsSession, ok := r.GetAccessRequester().GetSession().(ExtraClaimsSession) + if ok { + extraClaims := extraClaimsSession.GetExtraClaims() + for name, value := range extraClaims { + switch name { + // We do not allow these to be set through extra claims. + case "exp", "client_id", "scope", "iat", "sub", "aud", "username": + continue + default: + response[name] = value + } + } + } + + if !r.GetAccessRequester().GetSession().GetExpiresAt(AccessToken).IsZero() { + response["exp"] = r.GetAccessRequester().GetSession().GetExpiresAt(AccessToken).Unix() + } + if r.GetAccessRequester().GetClient().GetID() != "" { + response["client_id"] = r.GetAccessRequester().GetClient().GetID() + } + if len(r.GetAccessRequester().GetGrantedScopes()) > 0 { + response["scope"] = strings.Join(r.GetAccessRequester().GetGrantedScopes(), " ") + } + if !r.GetAccessRequester().GetRequestedAt().IsZero() { + response["iat"] = r.GetAccessRequester().GetRequestedAt().Unix() + } + if r.GetAccessRequester().GetSession().GetSubject() != "" { + response["sub"] = r.GetAccessRequester().GetSession().GetSubject() + } + if len(r.GetAccessRequester().GetGrantedAudience()) > 0 { + response["aud"] = r.GetAccessRequester().GetGrantedAudience() + } + if r.GetAccessRequester().GetSession().GetUsername() != "" { + response["username"] = r.GetAccessRequester().GetSession().GetUsername() + } + + _ = json.NewEncoder(rw).Encode(response) +} diff --git a/fosite/introspection_response_writer_test.go b/fosite/introspection_response_writer_test.go new file mode 100644 index 00000000000..39954658d71 --- /dev/null +++ b/fosite/introspection_response_writer_test.go @@ -0,0 +1,172 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestWriteIntrospectionError(t *testing.T) { + f := &Fosite{Config: new(Config)} + c := gomock.NewController(t) + defer c.Finish() + + rw := internal.NewMockResponseWriter(c) + rw.EXPECT().WriteHeader(http.StatusUnauthorized) + rw.EXPECT().Header().AnyTimes().Return(http.Header{}) + rw.EXPECT().Write(gomock.Any()) + f.WriteIntrospectionError(context.Background(), rw, errorsx.WithStack(ErrRequestUnauthorized)) + + rw.EXPECT().WriteHeader(http.StatusBadRequest) + rw.EXPECT().Write(gomock.Any()) + f.WriteIntrospectionError(context.Background(), rw, errorsx.WithStack(ErrInvalidRequest)) + + rw.EXPECT().Write([]byte("{\"active\":false}\n")) + f.WriteIntrospectionError(context.Background(), rw, errors.New("")) + + rw.EXPECT().Write([]byte("{\"active\":false}\n")) + f.WriteIntrospectionError(context.Background(), rw, errorsx.WithStack(ErrInactiveToken.WithWrap(ErrRequestUnauthorized))) + + f.WriteIntrospectionError(context.Background(), rw, nil) +} + +func TestWriteIntrospectionResponse(t *testing.T) { + f := new(Fosite) + c := gomock.NewController(t) + defer c.Finish() + + rw := internal.NewMockResponseWriter(c) + rw.EXPECT().Write(gomock.Any()).AnyTimes() + rw.EXPECT().Header().AnyTimes().Return(http.Header{}) + f.WriteIntrospectionResponse(context.Background(), rw, &IntrospectionResponse{ + AccessRequester: NewAccessRequest(nil), + }) +} + +func TestWriteIntrospectionResponseBody(t *testing.T) { + f := new(Fosite) + ires := &IntrospectionResponse{} + rw := httptest.NewRecorder() + + for _, c := range []struct { + description string + setup func() + active bool + hasExp bool + hasExtra bool + }{ + { + description: "should success for not expired access token", + setup: func() { + ires.Active = true + ires.TokenUse = AccessToken + sess := &DefaultSession{} + sess.SetExpiresAt(ires.TokenUse, time.Now().Add(time.Hour*2)) + ires.AccessRequester = NewAccessRequest(sess) + }, + active: true, + hasExp: true, + hasExtra: false, + }, + { + description: "should success for expired access token", + setup: func() { + ires.Active = false + ires.TokenUse = AccessToken + sess := &DefaultSession{} + sess.SetExpiresAt(ires.TokenUse, time.Now().Add(-time.Hour*2)) + ires.AccessRequester = NewAccessRequest(sess) + }, + active: false, + hasExp: false, + hasExtra: false, + }, + { + description: "should success for ExpiresAt not set access token", + setup: func() { + ires.Active = true + ires.TokenUse = AccessToken + sess := &DefaultSession{} + sess.SetExpiresAt(ires.TokenUse, time.Time{}) + ires.AccessRequester = NewAccessRequest(sess) + }, + active: true, + hasExp: false, + hasExtra: false, + }, + { + description: "should output extra claims", + setup: func() { + ires.Active = true + ires.TokenUse = AccessToken + sess := &DefaultSession{} + sess.GetExtraClaims()["extra"] = "foobar" + // We try to set these, but they should be ignored. 
+ for _, field := range []string{"exp", "client_id", "scope", "iat", "sub", "aud", "username"} { + sess.GetExtraClaims()[field] = "invalid" + } + sess.SetExpiresAt(ires.TokenUse, time.Time{}) + ires.AccessRequester = NewAccessRequest(sess) + }, + active: true, + hasExp: false, + hasExtra: true, + }, + } { + t.Run(c.description, func(t *testing.T) { + c.setup() + f.WriteIntrospectionResponse(context.Background(), rw, ires) + var params struct { + Active bool `json:"active"` + Exp *int64 `json:"exp"` + Iat *int64 `json:"iat"` + Extra string `json:"extra"` + ClientId string `json:"client_id"` + Scope string `json:"scope"` + Subject string `json:"sub"` + Audience string `json:"aud"` + Username string `json:"username"` + } + assert.Equal(t, 200, rw.Code) + err := json.NewDecoder(rw.Body).Decode(¶ms) + require.NoError(t, err) + assert.Equal(t, c.active, params.Active) + if c.active { + assert.NotNil(t, params.Iat) + if c.hasExp { + assert.NotNil(t, params.Exp) + } else { + assert.Nil(t, params.Exp) + } + if c.hasExtra { + assert.Equal(t, params.Extra, "foobar") + } else { + assert.Empty(t, params.Extra) + } + assert.NotEqual(t, "invalid", params.Exp) + assert.NotEqual(t, "invalid", params.ClientId) + assert.NotEqual(t, "invalid", params.Scope) + assert.NotEqual(t, "invalid", params.Iat) + assert.NotEqual(t, "invalid", params.Subject) + assert.NotEqual(t, "invalid", params.Audience) + assert.NotEqual(t, "invalid", params.Username) + } + }) + } +} diff --git a/fosite/oauth2.go b/fosite/oauth2.go new file mode 100644 index 00000000000..ee920a98ab8 --- /dev/null +++ b/fosite/oauth2.go @@ -0,0 +1,447 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" + "net/url" + "time" + + "golang.org/x/text/language" +) + +type TokenUse = TokenType + +type TokenType string + +type GrantType string + +const ( + AccessToken TokenType = "access_token" + RefreshToken TokenType = "refresh_token" + AuthorizeCode TokenType = "authorize_code" + IDToken TokenType = "id_token" + UserCode TokenType = "user_code" + DeviceCode TokenType = "device_code" + // PushedAuthorizeRequestContext represents the PAR context object + PushedAuthorizeRequestContext TokenType = "par_context" + + GrantTypeImplicit GrantType = "implicit" + GrantTypeRefreshToken GrantType = "refresh_token" + GrantTypeAuthorizationCode GrantType = "authorization_code" + GrantTypePassword GrantType = "password" + GrantTypeClientCredentials GrantType = "client_credentials" + GrantTypeJWTBearer GrantType = "urn:ietf:params:oauth:grant-type:jwt-bearer" //nolint:gosec // this is not a hardcoded credential + GrantTypeDeviceCode GrantType = "urn:ietf:params:oauth:grant-type:device_code" //nolint:gosec // this is not a hardcoded credential + + BearerAccessToken string = "bearer" +) + +// OAuth2Provider is an interface that enables you to write OAuth2 handlers with only a few lines of code. +// Check Fosite for an implementation of this interface. +type OAuth2Provider interface { + // NewAuthorizeRequest returns an AuthorizeRequest. + // + // The following specs must be considered in any implementation of this method: + // * https://tools.ietf.org/html/rfc6749#section-3.1 + // Extension response types MAY contain a space-delimited (%x20) list of + // values, where the order of values does not matter (e.g., response + // type "a b" is the same as "b a"). The meaning of such composite + // response types is defined by their respective specifications. 
+ // * https://tools.ietf.org/html/rfc6749#section-3.1.2 + // The redirection endpoint URI MUST be an absolute URI as defined by + // [RFC3986] Section 4.3. The endpoint URI MAY include an + // "application/x-www-form-urlencoded" formatted (per Appendix B) query + // component ([RFC3986] Section 3.4), which MUST be retained when adding + // additional query parameters. The endpoint URI MUST NOT include a + // fragment component. + // * https://tools.ietf.org/html/rfc6749#section-3.1.2.2 (everything MUST be implemented) + NewAuthorizeRequest(ctx context.Context, req *http.Request) (AuthorizeRequester, error) + + // NewAuthorizeResponse iterates through all response type handlers and returns their result or + // ErrUnsupportedResponseType if none of the handlers were able to handle it. + // + // The following specs must be considered in any implementation of this method: + // * https://tools.ietf.org/html/rfc6749#section-3.1.1 + // Extension response types MAY contain a space-delimited (%x20) list of + // values, where the order of values does not matter (e.g., response + // type "a b" is the same as "b a"). The meaning of such composite + // response types is defined by their respective specifications. + // If an authorization request is missing the "response_type" parameter, + // or if the response type is not understood, the authorization server + // MUST return an error response as described in Section 4.1.2.1. + NewAuthorizeResponse(ctx context.Context, requester AuthorizeRequester, session Session) (AuthorizeResponder, error) + + // WriteAuthorizeError returns the error codes to the redirection endpoint or shows the error to the user, if no valid + // redirect uri was given. Implements rfc6749#section-4.1.2.1 + // + // The following specs must be considered in any implementation of this method: + // * https://tools.ietf.org/html/rfc6749#section-3.1.2 + // The redirection endpoint URI MUST be an absolute URI as defined by + // [RFC3986] Section 4.3. The endpoint URI MAY include an + // "application/x-www-form-urlencoded" formatted (per Appendix B) query + // component ([RFC3986] Section 3.4), which MUST be retained when adding + // additional query parameters. The endpoint URI MUST NOT include a + // fragment component. + // * https://tools.ietf.org/html/rfc6749#section-4.1.2.1 (everything) + // * https://tools.ietf.org/html/rfc6749#section-3.1.2.2 (everything MUST be implemented) + WriteAuthorizeError(ctx context.Context, rw http.ResponseWriter, requester AuthorizeRequester, err error) + + // WriteAuthorizeResponse persists the AuthorizeSession in the store and redirects the user agent to the provided + // redirect url or returns an error if storage failed. + // + // The following specs must be considered in any implementation of this method: + // * https://tools.ietf.org/html/rfc6749#rfc6749#section-4.1.2.1 + // After completing its interaction with the resource owner, the + // authorization server directs the resource owner's user-agent back to + // the client. The authorization server redirects the user-agent to the + // client's redirection endpoint previously established with the + // authorization server during the client registration process or when + // making the authorization request. 
+ // * https://tools.ietf.org/html/rfc6749#section-3.1.2.2 (everything MUST be implemented) + WriteAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, requester AuthorizeRequester, responder AuthorizeResponder) + + // NewAccessRequest creates a new access request object and validates + // various parameters. + // + // The following specs must be considered in any implementation of this method: + // * https://tools.ietf.org/html/rfc6749#section-3.2 (everything) + // * https://tools.ietf.org/html/rfc6749#section-3.2.1 (everything) + // + // Furthermore the registered handlers should implement their specs accordingly. + NewAccessRequest(ctx context.Context, req *http.Request, session Session) (AccessRequester, error) + + // NewAccessResponse creates a new access response and validates that access_token and token_type are set. + // + // The following specs must be considered in any implementation of this method: + // https://tools.ietf.org/html/rfc6749#section-5.1 + NewAccessResponse(ctx context.Context, requester AccessRequester) (AccessResponder, error) + + // WriteAccessError writes an access request error response. + // + // The following specs must be considered in any implementation of this method: + // * https://tools.ietf.org/html/rfc6749#section-5.2 (everything) + WriteAccessError(ctx context.Context, rw http.ResponseWriter, requester Requester, err error) + + // WriteAccessResponse writes the access response. + // + // The following specs must be considered in any implementation of this method: + // https://tools.ietf.org/html/rfc6749#section-5.1 + WriteAccessResponse(ctx context.Context, rw http.ResponseWriter, requester AccessRequester, responder AccessResponder) + + // NewDeviceRequest validate the OAuth 2.0 Device Authorization Flow Request + // + // The following specs must be considered in any implementation of this method: + // * https://www.rfc-editor.org/rfc/rfc8628#section-3.1 (everything MUST be implemented) + // Parameters sent without a value MUST be treated as if they were + // omitted from the request. The authorization server MUST ignore + // unrecognized request parameters. Request and response parameters + // MUST NOT be included more than once. + NewDeviceRequest(ctx context.Context, req *http.Request) (DeviceRequester, error) + + // NewDeviceResponse persists the DeviceCodeSession and UserCodeSession in the store + // + // The following specs must be considered in any implementation of this method: + // * https://www.rfc-editor.org/rfc/rfc8628#section-3.2 (everything MUST be implemented) + // In response, the authorization server generates a unique device + // verification code and an end-user code that are valid for a limited + // time + NewDeviceResponse(ctx context.Context, requester DeviceRequester, session Session) (DeviceResponder, error) + + // WriteDeviceResponse return to the user both codes and + // some configuration information in a JSON formatted manner + // + // The following specs must be considered in any implementation of this method: + // * https://www.rfc-editor.org/rfc/rfc8628#section-3.2 (everything MUST be implemented) + // Response is an HTTP response body using the + // "application/json" format [RFC8259] with a 200 (OK) status code. + WriteDeviceResponse(ctx context.Context, rw http.ResponseWriter, requester DeviceRequester, responder DeviceResponder) + + // NewRevocationRequest handles incoming token revocation requests and validates various parameters. 
+ // + // The following specs must be considered in any implementation of this method: + // https://tools.ietf.org/html/rfc7009#section-2.1 + NewRevocationRequest(ctx context.Context, r *http.Request) error + + // WriteRevocationResponse writes the revoke response. + // + // The following specs must be considered in any implementation of this method: + // https://tools.ietf.org/html/rfc7009#section-2.2 + WriteRevocationResponse(ctx context.Context, rw http.ResponseWriter, err error) + + // IntrospectToken returns token metadata, if the token is valid. Tokens generated by the authorization endpoint, + // such as the authorization code, can not be introspected. + IntrospectToken(ctx context.Context, token string, tokenUse TokenUse, session Session, scope ...string) (TokenUse, AccessRequester, error) + + // NewIntrospectionRequest initiates token introspection as defined in + // https://tools.ietf.org/html/rfc7662#section-2.1 + NewIntrospectionRequest(ctx context.Context, r *http.Request, session Session) (IntrospectionResponder, error) + + // WriteIntrospectionError responds with an error if token introspection failed as defined in + // https://tools.ietf.org/html/rfc7662#section-2.3 + WriteIntrospectionError(ctx context.Context, rw http.ResponseWriter, err error) + + // WriteIntrospectionResponse responds with token metadata discovered by token introspection as defined in + // https://tools.ietf.org/html/rfc7662#section-2.2 + WriteIntrospectionResponse(ctx context.Context, rw http.ResponseWriter, r IntrospectionResponder) + + // NewPushedAuthorizeRequest validates the request and produces an AuthorizeRequester object that can be stored + NewPushedAuthorizeRequest(ctx context.Context, r *http.Request) (AuthorizeRequester, error) + + // NewPushedAuthorizeResponse executes the handlers and builds the response + NewPushedAuthorizeResponse(ctx context.Context, ar AuthorizeRequester, session Session) (PushedAuthorizeResponder, error) + + // WritePushedAuthorizeResponse writes the PAR response + WritePushedAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, resp PushedAuthorizeResponder) + + // WritePushedAuthorizeError writes the PAR error + WritePushedAuthorizeError(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, err error) +} + +// IntrospectionResponder is the response object that will be returned when token introspection was successful, +// for example when the client is allowed to perform token introspection. Refer to +// https://tools.ietf.org/html/rfc7662#section-2.2 for more details. +type IntrospectionResponder interface { + // IsActive returns true if the introspected token is active and false otherwise. + IsActive() bool + + // GetAccessRequester returns nil when IsActive() is false and the original access request object otherwise. + GetAccessRequester() AccessRequester + + // GetTokenUse optionally returns the type of the token that was introspected. This could be "access_token", "refresh_token", + // or if the type can not be determined an empty string. + GetTokenUse() TokenUse + + // GetAccessTokenType optionally returns the type of the access token that was introspected. This could be "bearer", "mac", + // or empty string if the type of the token is refresh token. + GetAccessTokenType() string +} + +// Requester is an abstract interface for handling requests in Fosite. +type Requester interface { + // SetID sets the unique identifier. + SetID(id string) + + // GetID returns a unique identifier. 
+ GetID() string + + // GetRequestedAt returns the time the request was created. + GetRequestedAt() (requestedAt time.Time) + + // GetClient returns the request's client. + GetClient() (client Client) + + // GetRequestedScopes returns the request's scopes. + GetRequestedScopes() (scopes Arguments) + + // GetRequestedAudience returns the requested audiences for this request. + GetRequestedAudience() (audience Arguments) + + // SetRequestedScopes sets the request's scopes. + SetRequestedScopes(scopes Arguments) + + // SetRequestedAudience sets the requested audience. + SetRequestedAudience(audience Arguments) + + // AppendRequestedScope appends a scope to the request. + AppendRequestedScope(scope string) + + // GetGrantedScopes returns all granted scopes. + GetGrantedScopes() (grantedScopes Arguments) + + // GetGrantedAudience returns all granted audiences. + GetGrantedAudience() (grantedAudience Arguments) + + // GrantScope marks a request's scope as granted. + GrantScope(scope string) + + // GrantAudience marks a request's audience as granted. + GrantAudience(audience string) + + // GetSession returns a pointer to the request's session or nil if none is set. + GetSession() (session Session) + + // SetSession sets the request's session pointer. + SetSession(session Session) + + // GetRequestForm returns the request's form input. + GetRequestForm() url.Values + + // Merge merges the argument into the method receiver. + Merge(requester Requester) + + // Sanitize returns a sanitized clone of the request which can be used for storage. + Sanitize(allowedParameters []string) Requester +} + +// AccessRequester is a token endpoint's request context. +type AccessRequester interface { + // GetGrantTypes returns the requests grant type. + GetGrantTypes() (grantTypes Arguments) + + Requester +} + +// DeviceRequester is an device endpoint's request context. +type DeviceRequester interface { + // GetUserCodeState returns the state of the user code + GetUserCodeState() UserCodeState + + // SetUserCodeState sets the state of the user code + SetUserCodeState(state UserCodeState) + + Requester +} + +// AuthorizeRequester is an authorize endpoint's request context. +type AuthorizeRequester interface { + // GetResponseTypes returns the requested response types + GetResponseTypes() (responseTypes Arguments) + + // SetResponseTypeHandled marks a response_type (e.g. token or code) as handled indicating that the response type + // is supported. + SetResponseTypeHandled(responseType string) + + // DidHandleAllResponseTypes returns if all requested response types have been handled correctly + DidHandleAllResponseTypes() (didHandle bool) + + // GetRedirectURI returns the requested redirect URI + GetRedirectURI() (redirectURL *url.URL) + + // IsRedirectURIValid returns false if the redirect is not rfc-conform (i.e. missing client, not on white list, + // or malformed) + IsRedirectURIValid() (isValid bool) + + // GetState returns the request's state. + GetState() (state string) + + // GetResponseMode returns response_mode of the authorization request + GetResponseMode() ResponseModeType + + // SetDefaultResponseMode sets default response mode for a response type in a flow + SetDefaultResponseMode(responseMode ResponseModeType) + + // GetDefaultResponseMode gets default response mode for a response type in a flow + GetDefaultResponseMode() ResponseModeType + + Requester +} + +// AccessResponder is a token endpoint's response. +type AccessResponder interface { + // SetExtra sets a key value pair for the access response. 
+ SetExtra(key string, value interface{}) + + // GetExtra returns a key's value. + GetExtra(key string) interface{} + + SetExpiresIn(time.Duration) + + SetScopes(scopes Arguments) + + // SetAccessToken sets the responses mandatory access token. + SetAccessToken(token string) + + // SetTokenType set's the responses mandatory token type + SetTokenType(tokenType string) + + // GetAccessToken returns the responses access token. + GetAccessToken() (token string) + + // GetTokenType returns the responses token type. + GetTokenType() (token string) + + // ToMap converts the response to a map. + ToMap() map[string]interface{} +} + +// AuthorizeResponder is an authorization endpoint's response. +type AuthorizeResponder interface { + // GetCode returns the response's authorize code if set. + GetCode() string + + // GetHeader returns the response's header + GetHeader() (header http.Header) + + // AddHeader adds a header key value pair to the response + AddHeader(key, value string) + + // GetParameters returns the response's parameters + GetParameters() (query url.Values) + + // AddParameter adds key value pair to the response + AddParameter(key, value string) +} + +// PushedAuthorizeResponder is the response object for PAR +type PushedAuthorizeResponder interface { + // GetRequestURI returns the request_uri + GetRequestURI() string + // SetRequestURI sets the request_uri + SetRequestURI(requestURI string) + // GetExpiresIn gets the expires_in + GetExpiresIn() int + // SetExpiresIn sets the expires_in + SetExpiresIn(seconds int) + + // GetHeader returns the response's header + GetHeader() (header http.Header) + + // AddHeader adds a header key value pair to the response + AddHeader(key, value string) + + // SetExtra sets a key value pair for the response. + SetExtra(key string, value interface{}) + + // GetExtra returns a key's value. + GetExtra(key string) interface{} + + // ToMap converts the response to a map. 
+ ToMap() map[string]interface{} +} + +// G11NContext is the globalization context +type G11NContext interface { + // GetLang returns the current language in the context + GetLang() language.Tag +} + +// DeviceResponder is the device authorization endpoint's response +type DeviceResponder interface { + // GetDeviceCode returns the device_code + GetDeviceCode() string + // SetDeviceCode sets the device_code + SetDeviceCode(code string) + + // GetUserCode returns the user_code + GetUserCode() string + // SetUserCode sets the user_code + SetUserCode(code string) + + // GetVerificationURI returns the verification_uri + GetVerificationURI() string + // SetVerificationURI sets the verification_uri + SetVerificationURI(uri string) + + // GetVerificationURIComplete returns the verification_uri_complete + GetVerificationURIComplete() string + // SetVerificationURIComplete sets the verification_uri_complete + SetVerificationURIComplete(uri string) + + // GetExpiresIn returns the expires_in + GetExpiresIn() int64 + // SetExpiresIn sets the expires_in + SetExpiresIn(seconds int64) + + // GetInterval returns the interval + GetInterval() int + // SetInterval sets the interval + SetInterval(seconds int) + + // GetHeader returns the response's header + GetHeader() (header http.Header) + // AddHeader adds a header key value pair to the response + AddHeader(key, value string) +} diff --git a/fosite/package-lock.json b/fosite/package-lock.json new file mode 100644 index 00000000000..784ba9b586a --- /dev/null +++ b/fosite/package-lock.json @@ -0,0 +1,1736 @@ +{ + "name": "fosite", + "version": "0.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "devDependencies": { + "license-checker": "^25.0.1", + "ory-prettier-styles": "1.3.0", + "prettier": "2.7.1", + "prettier-plugin-packagejson": "2.2.18" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@types/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", + "dev": true, + "dependencies": { + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "node_modules/@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + 
"node_modules/@types/node": { + "version": "18.7.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.15.tgz", + "integrity": "sha512-XnjpaI8Bgc3eBag2Aw4t2Uj/49lLBSStHWfqKvIuXD7FIrZyMLWp8KuAFHAqxMZYTF9l08N1ctUn9YNybZJVmQ==", + "dev": true + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/debuglog": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz", + "integrity": "sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/fast-glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + 
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/git-hooks-list": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/git-hooks-list/-/git-hooks-list-1.0.3.tgz", + "integrity": "sha512-Y7wLWcrLUXwk2noSka166byGCvhMtDRpgHdzCno1UQv/n/Hegp++a2xBWJL1lJarnKD3SWaljD+0z1ztqxuKyQ==", + "dev": true, + "funding": { + "url": "https://github.com/fisker/git-hooks-list?sponsor=1" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globby": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.0.tgz", + "integrity": "sha512-3LifW9M4joGZasyYPz2A1U74zbC/45fvpXUvO/9KbSa+VV0aGZarWkfdgKyR9sExNP0t0x0ss/UMJpNpcaTspw==", + "dev": true, + "dependencies": { + "@types/glob": "^7.1.1", + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.0.3", + "glob": "^7.1.3", + "ignore": "^5.1.1", + "merge2": "^1.2.3", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": 
"https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/license-checker": { + "version": "25.0.1", + "resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz", + "integrity": "sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g==", + "dev": true, + "dependencies": { + "chalk": "^2.4.1", + "debug": "^3.1.0", + "mkdirp": "^0.5.1", + "nopt": "^4.0.1", + "read-installed": "~4.0.3", + "semver": "^5.5.0", + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0", + 
"spdx-satisfies": "^4.0.0", + "treeify": "^1.1.0" + }, + "bin": { + "license-checker": "bin/license-checker" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", + "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/nopt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", + "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", + "dev": true, + "dependencies": { + "abbrev": "1", + "osenv": "^0.1.4" + }, + "bin": { + "nopt": "bin/nopt.js" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==", + "dev": true + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/ory-prettier-styles": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/ory-prettier-styles/-/ory-prettier-styles-1.3.0.tgz", + "integrity": "sha512-Vfn0G6CyLaadwcCamwe1SQCf37ZQfBDgMrhRI70dE/2fbE3Q43/xu7K5c32I5FGt/EliroWty5yBjmdkj0eWug==", + "dev": true + }, + "node_modules/os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "dev": true, + "dependencies": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/prettier": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", + "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-plugin-packagejson": { + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.2.18.tgz", + "integrity": "sha512-iBjQ3IY6IayFrQHhXvg+YvKprPUUiIJ04Vr9+EbeQPfwGajznArIqrN33c5bi4JcIvmLHGROIMOm9aYakJj/CA==", + "dev": true, + "dependencies": { + "sort-package-json": "1.57.0" + }, + "peerDependencies": { + "prettier": ">= 1.16.0" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": 
true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/read-installed": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/read-installed/-/read-installed-4.0.3.tgz", + "integrity": "sha512-O03wg/IYuV/VtnK2h/KXEt9VIbMUFbk3ERG0Iu4FhLZw0EP0T9znqrYDGn6ncbEsXUFaUjiVAWXHzxwt3lhRPQ==", + "dev": true, + "dependencies": { + "debuglog": "^1.0.1", + "read-package-json": "^2.0.0", + "readdir-scoped-modules": "^1.0.0", + "semver": "2 || 3 || 4 || 5", + "slide": "~1.1.3", + "util-extend": "^1.0.1" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.2" + } + }, + "node_modules/read-package-json": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.2.tgz", + "integrity": "sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==", + "dev": true, + "dependencies": { + "glob": "^7.1.1", + "json-parse-even-better-errors": "^2.3.0", + "normalize-package-data": "^2.0.0", + "npm-normalize-package-bin": "^1.0.0" + } + }, + "node_modules/readdir-scoped-modules": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", + "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", + "dev": true, + "dependencies": { + "debuglog": "^1.0.1", + "dezalgo": "^1.0.0", + "graceful-fs": "^4.1.2", + "once": "^1.3.0" + } + }, + "node_modules/resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": 
"sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slide": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", + "integrity": "sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/sort-object-keys": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sort-object-keys/-/sort-object-keys-1.1.3.tgz", + "integrity": "sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg==", + "dev": true + }, + "node_modules/sort-package-json": { + "version": "1.57.0", + "resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-1.57.0.tgz", + "integrity": "sha512-FYsjYn2dHTRb41wqnv+uEqCUvBpK3jZcTp9rbz2qDTmel7Pmdtf+i2rLaaPMRZeSVM60V3Se31GyWFpmKs4Q5Q==", + "dev": true, + "dependencies": { + "detect-indent": "^6.0.0", + "detect-newline": "3.1.0", + "git-hooks-list": "1.0.3", + "globby": "10.0.0", + "is-plain-obj": "2.1.0", + "sort-object-keys": "^1.1.3" + }, + "bin": { + "sort-package-json": "cli.js" + } + }, + "node_modules/spdx-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/spdx-compare/-/spdx-compare-1.0.0.tgz", + "integrity": "sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==", + "dev": true, + "dependencies": { + "array-find-index": "^1.0.2", + "spdx-expression-parse": "^3.0.0", + "spdx-ranges": "^2.0.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz", + "integrity": "sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==", + "dev": true + }, + "node_modules/spdx-ranges": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.1.1.tgz", + "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==", + "dev": true + }, + "node_modules/spdx-satisfies": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-4.0.1.tgz", + "integrity": "sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA==", + "dev": true, + "dependencies": { + "spdx-compare": "^1.0.0", + 
"spdx-expression-parse": "^3.0.0", + "spdx-ranges": "^2.0.0" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/treeify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/treeify/-/treeify-1.1.0.tgz", + "integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/util-extend": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz", + "integrity": "sha512-mLs5zAK+ctllYBj+iAQvlDCwoxU/WDOUaJkcFudeiAX6OajC6BKXJUa9a+tbtkC11dz2Ufb7h0lyvIOVn4LADA==", + "dev": true + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + } + }, + "dependencies": { + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@types/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": 
"sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", + "dev": true, + "requires": { + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "@types/node": { + "version": "18.7.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.15.tgz", + "integrity": "sha512-XnjpaI8Bgc3eBag2Aw4t2Uj/49lLBSStHWfqKvIuXD7FIrZyMLWp8KuAFHAqxMZYTF9l08N1ctUn9YNybZJVmQ==", + "dev": true + }, + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==", + "dev": true + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "requires": { + "fill-range": "^7.1.1" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + 
"color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "debuglog": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz", + "integrity": "sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==", + "dev": true + }, + "detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true + }, + "detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true + }, + "dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, + "requires": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "requires": { + "path-type": "^4.0.0" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true + }, + "fast-glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + } + }, + "fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, + "fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": 
"sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "git-hooks-list": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/git-hooks-list/-/git-hooks-list-1.0.3.tgz", + "integrity": "sha512-Y7wLWcrLUXwk2noSka166byGCvhMtDRpgHdzCno1UQv/n/Hegp++a2xBWJL1lJarnKD3SWaljD+0z1ztqxuKyQ==", + "dev": true + }, + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "globby": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.0.tgz", + "integrity": "sha512-3LifW9M4joGZasyYPz2A1U74zbC/45fvpXUvO/9KbSa+VV0aGZarWkfdgKyR9sExNP0t0x0ss/UMJpNpcaTspw==", + "dev": true, + "requires": { + "@types/glob": "^7.1.1", + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.0.3", + "glob": "^7.1.3", + "ignore": "^5.1.1", + "merge2": "^1.2.3", + "slash": "^3.0.0" + } + }, + "graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true + }, + "hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "license-checker": { + "version": "25.0.1", + "resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz", + "integrity": "sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g==", + "dev": true, + "requires": { + "chalk": "^2.4.1", + "debug": "^3.1.0", + "mkdirp": "^0.5.1", + "nopt": "^4.0.1", + "read-installed": "~4.0.3", + "semver": "^5.5.0", + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0", + "spdx-satisfies": "^4.0.0", + "treeify": "^1.1.0" + } + }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true + }, + "micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "requires": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", + "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", + "dev": true + }, + "mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": 
"sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "requires": { + "minimist": "^1.2.6" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "nopt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", + "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", + "dev": true, + "requires": { + "abbrev": "1", + "osenv": "^0.1.4" + } + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "ory-prettier-styles": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ory-prettier-styles/-/ory-prettier-styles-1.3.0.tgz", + "integrity": "sha512-Vfn0G6CyLaadwcCamwe1SQCf37ZQfBDgMrhRI70dE/2fbE3Q43/xu7K5c32I5FGt/EliroWty5yBjmdkj0eWug==", + "dev": true + }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", + "dev": true + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true + }, + "osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "dev": true, + "requires": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + }, + "picomatch": { + 
"version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "prettier": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", + "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "dev": true + }, + "prettier-plugin-packagejson": { + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.2.18.tgz", + "integrity": "sha512-iBjQ3IY6IayFrQHhXvg+YvKprPUUiIJ04Vr9+EbeQPfwGajznArIqrN33c5bi4JcIvmLHGROIMOm9aYakJj/CA==", + "dev": true, + "requires": { + "sort-package-json": "1.57.0" + } + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, + "read-installed": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/read-installed/-/read-installed-4.0.3.tgz", + "integrity": "sha512-O03wg/IYuV/VtnK2h/KXEt9VIbMUFbk3ERG0Iu4FhLZw0EP0T9znqrYDGn6ncbEsXUFaUjiVAWXHzxwt3lhRPQ==", + "dev": true, + "requires": { + "debuglog": "^1.0.1", + "graceful-fs": "^4.1.2", + "read-package-json": "^2.0.0", + "readdir-scoped-modules": "^1.0.0", + "semver": "2 || 3 || 4 || 5", + "slide": "~1.1.3", + "util-extend": "^1.0.1" + } + }, + "read-package-json": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.2.tgz", + "integrity": "sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==", + "dev": true, + "requires": { + "glob": "^7.1.1", + "json-parse-even-better-errors": "^2.3.0", + "normalize-package-data": "^2.0.0", + "npm-normalize-package-bin": "^1.0.0" + } + }, + "readdir-scoped-modules": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", + "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", + "dev": true, + "requires": { + "debuglog": "^1.0.1", + "dezalgo": "^1.0.0", + "graceful-fs": "^4.1.2", + "once": "^1.3.0" + } + }, + "resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dev": true, + "requires": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "slide": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", + "integrity": "sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==", + "dev": true + }, + "sort-object-keys": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sort-object-keys/-/sort-object-keys-1.1.3.tgz", + "integrity": "sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg==", + "dev": true + }, + "sort-package-json": { + "version": "1.57.0", + "resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-1.57.0.tgz", + "integrity": "sha512-FYsjYn2dHTRb41wqnv+uEqCUvBpK3jZcTp9rbz2qDTmel7Pmdtf+i2rLaaPMRZeSVM60V3Se31GyWFpmKs4Q5Q==", + "dev": true, + "requires": { + "detect-indent": "^6.0.0", + "detect-newline": "3.1.0", + "git-hooks-list": "1.0.3", + "globby": "10.0.0", + "is-plain-obj": "2.1.0", + "sort-object-keys": "^1.1.3" + } + }, + "spdx-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/spdx-compare/-/spdx-compare-1.0.0.tgz", + "integrity": "sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==", + "dev": true, + "requires": { + "array-find-index": "^1.0.2", + "spdx-expression-parse": "^3.0.0", + "spdx-ranges": "^2.0.0" + } + }, + "spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz", + "integrity": "sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==", + "dev": true + }, + "spdx-ranges": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.1.1.tgz", + "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==", + "dev": true + }, + "spdx-satisfies": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-4.0.1.tgz", + "integrity": "sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA==", + "dev": true, + "requires": { + "spdx-compare": "^1.0.0", + "spdx-expression-parse": "^3.0.0", + "spdx-ranges": 
"^2.0.0" + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "treeify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/treeify/-/treeify-1.1.0.tgz", + "integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==", + "dev": true + }, + "util-extend": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz", + "integrity": "sha512-mLs5zAK+ctllYBj+iAQvlDCwoxU/WDOUaJkcFudeiAX6OajC6BKXJUa9a+tbtkC11dz2Ufb7h0lyvIOVn4LADA==", + "dev": true + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + } + } +} diff --git a/fosite/package.json b/fosite/package.json new file mode 100644 index 00000000000..38d490046da --- /dev/null +++ b/fosite/package.json @@ -0,0 +1,10 @@ +{ + "private": true, + "prettier": "ory-prettier-styles", + "devDependencies": { + "license-checker": "^25.0.1", + "ory-prettier-styles": "1.3.0", + "prettier": "2.7.1", + "prettier-plugin-packagejson": "2.2.18" + } +} diff --git a/fosite/pushed_authorize_request_handler.go b/fosite/pushed_authorize_request_handler.go new file mode 100644 index 00000000000..aa682b5065d --- /dev/null +++ b/fosite/pushed_authorize_request_handler.go @@ -0,0 +1,79 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "errors" + "net/http" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" +) + +const ( + ErrorPARNotSupported = "The OAuth 2.0 provider does not support Pushed Authorization Requests" + DebugPARStorageInvalid = "'PARStorage' not implemented" + DebugPARConfigMissing = "'PushedAuthorizeRequestConfigProvider' not implemented" + DebugPARRequestsHandlerMissing = "'PushedAuthorizeRequestHandlersProvider' not implemented" +) + +// NewPushedAuthorizeRequest validates the request and produces an AuthorizeRequester object that can be stored +func (f *Fosite) NewPushedAuthorizeRequest(ctx context.Context, r *http.Request) 
(_ AuthorizeRequester, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewPushedAuthorizeRequest") + defer otelx.End(span, &err) + + request := NewAuthorizeRequest() + request.Request.Lang = i18n.GetLangFromRequest(f.Config.GetMessageCatalog(ctx), r) + + if r.Method != "POST" { + return request, errorsx.WithStack(ErrInvalidRequest.WithHintf("HTTP method is '%s', expected 'POST'.", r.Method)) + } + + if err := r.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithWrap(err).WithDebug(err.Error())) + } + request.Form = r.Form + request.State = request.Form.Get("state") + + // Authenticate the client in the same way as at the token endpoint + // (Section 2.3 of [RFC6749]). + client, err := f.AuthenticateClient(ctx, r, r.Form) + if err != nil { + var rfcerr *RFC6749Error + if errors.As(err, &rfcerr) && rfcerr.ErrorField != ErrInvalidClient.ErrorField { + return request, errorsx.WithStack(ErrInvalidClient.WithHint("The requested OAuth 2.0 Client could not be authenticated.").WithWrap(err).WithDebug(err.Error())) + } + + return request, err + } + request.Client = client + + // Reject the request if the "request_uri" authorization request + // parameter is provided. + if r.Form.Get("request_uri") != "" { + return request, errorsx.WithStack(ErrInvalidRequest.WithHint("The request must not contain 'request_uri'.")) + } + + // For private_key_jwt or basic auth client authentication, "client_id" may not be inside the form. + // However, it is required by the NewAuthorizeRequest implementation. + if len(r.Form.Get("client_id")) == 0 { + r.Form.Set("client_id", client.GetID()) + } + + // Validate as if this is a new authorize request + fr, err := f.newAuthorizeRequest(ctx, r, true) + if err != nil { + return fr, err + } + + if fr.GetRequestedScopes().Has("openid") && r.Form.Get("redirect_uri") == "" { + return fr, errorsx.WithStack(ErrInvalidRequest.WithHint("Query parameter 'redirect_uri' is required when performing an OpenID Connect flow.")) + } + + return fr, nil +} diff --git a/fosite/pushed_authorize_request_handler_test.go b/fosite/pushed_authorize_request_handler_test.go new file mode 100644 index 00000000000..cd0db2decab --- /dev/null +++ b/fosite/pushed_authorize_request_handler_test.go @@ -0,0 +1,686 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "fmt" + "net/http" + "net/url" + "runtime/debug" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +// Should pass +// +// - https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#Terminology +// The OAuth 2.0 specification allows for registration of space-separated response_type parameter values. +// If a Response Type contains one or more space characters (%20), it is compared as a space-delimited list of +// values in which the order of values does not matter. 
+func TestNewPushedAuthorizeRequest(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + hasher := internal.NewMockHasher(ctrl) + t.Cleanup(ctrl.Finish) + + config := &Config{ + ScopeStrategy: ExactScopeStrategy, + AudienceMatchingStrategy: DefaultAudienceMatchingStrategy, + ClientSecretsHasher: hasher, + } + + fosite := &Fosite{ + Store: store, + Config: config, + } + + redir, _ := url.Parse("https://foo.bar/cb") + specialCharRedir, _ := url.Parse("web+application://callback") + for _, c := range []struct { + desc string + conf *Fosite + r *http.Request + query url.Values + expectedError error + mock func() + expect *AuthorizeRequest + }{ + /* empty request */ + { + desc: "empty request fails", + conf: fosite, + r: &http.Request{ + Method: "POST", + }, + expectedError: ErrInvalidClient, + mock: func() {}, + }, + /* invalid redirect uri */ + { + desc: "invalid redirect uri fails", + conf: fosite, + query: url.Values{"redirect_uri": []string{"invalid"}}, + expectedError: ErrInvalidClient, + mock: func() {}, + }, + /* invalid client */ + { + desc: "invalid client fails", + conf: fosite, + query: url.Values{"redirect_uri": []string{"https://foo.bar/cb"}}, + expectedError: ErrInvalidClient, + mock: func() {}, + }, + /* redirect client mismatch */ + { + desc: "client and request redirects mismatch", + conf: fosite, + query: url.Values{ + "client_id": []string{"1234"}, + "client_secret": []string{"1234"}, + }, + expectedError: ErrInvalidRequest, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"invalid"}, Scopes: []string{}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + }, + /* redirect client mismatch */ + { + desc: "client and request redirects mismatch", + conf: fosite, + query: url.Values{ + "redirect_uri": []string{""}, + "client_id": []string{"1234"}, + "client_secret": []string{"1234"}, + }, + expectedError: ErrInvalidRequest, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"invalid"}, Scopes: []string{}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + }, + /* redirect client mismatch */ + { + desc: "client and request redirects mismatch", + conf: fosite, + query: url.Values{ + "redirect_uri": []string{"https://foo.bar/cb"}, + "client_id": []string{"1234"}, + "client_secret": []string{"1234"}, + }, + expectedError: ErrInvalidRequest, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"invalid"}, Scopes: []string{}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + }, + /* no state */ + { + desc: "no state", + conf: fosite, + query: url.Values{ + "redirect_uri": []string{"https://foo.bar/cb"}, + "client_id": []string{"1234"}, + "client_secret": []string{"1234"}, + "response_type": []string{"code"}, + }, + expectedError: ErrInvalidState, + mock: func() { + 
store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + }, + /* short state */ + { + desc: "short state", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code"}, + "state": {"short"}, + }, + expectedError: ErrInvalidState, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + }, + /* fails because scope not given */ + { + desc: "should fail because client does not have scope baz", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar baz"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expectedError: ErrInvalidScope, + }, + /* fails because scope not given */ + { + desc: "should fail because client does not have scope baz", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api"}, + Secret: []byte("1234"), + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expectedError: ErrInvalidRequest, + }, + /* success case */ + { + desc: "should pass", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), 
gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* repeated audience parameter */ + { + desc: "repeated audience parameter", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* repeated audience parameter with tricky values */ + { + desc: "repeated audience parameter with tricky values", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"test value", ""}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"test value"}, + Secret: []byte("1234"), + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"test value"}, + Secret: []byte("1234"), + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"test value"}, + }, + }, + }, + /* redirect_uri with special character in protocol*/ + { + desc: "redirect_uri with special character", + conf: fosite, + query: url.Values{ + "redirect_uri": {"web+application://callback"}, + 
"client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"web+application://callback"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: specialCharRedir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"web+application://callback"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* audience with double spaces between values */ + { + desc: "audience with double spaces between values", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{ + ResponseTypes: []string{"code token"}, + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultClient{ + ResponseTypes: []string{"code token"}, RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* fails because unknown response_mode*/ + { + desc: "should fail because unknown response_mode", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"unknown"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, ResponseTypes: []string{"code token"}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), 
gomock.Eq([]byte("1234"))).Return(nil) + }, + expectedError: ErrUnsupportedResponseMode, + }, + /* fails because response_mode is requested but the OAuth 2.0 client doesn't support response mode */ + { + desc: "should fail because response_mode is requested but the OAuth 2.0 client doesn't support response mode", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, ResponseTypes: []string{"code token"}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expectedError: ErrUnsupportedResponseMode, + }, + /* fails because requested response mode is not allowed */ + { + desc: "should fail because requested response mode is not allowed", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Secret: []byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeQuery}, + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expectedError: ErrUnsupportedResponseMode, + }, + /* success with response mode */ + { + desc: "success with response mode", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeFormPost}, + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: 
[]byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeFormPost}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* determine correct response mode if default */ + { + desc: "success with response mode", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeQuery}, + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code"}, + State: "strong-state", + Request: Request{ + Client: &DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeQuery}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* determine correct response mode if default */ + { + desc: "success with response mode", + conf: fosite, + query: url.Values{ + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "audience": {"https://cloud.ory.sh/api https://www.ory.sh/api"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeFragment}, + }, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expect: &AuthorizeRequest{ + RedirectURI: redir, + ResponseTypes: []string{"code", "token"}, + State: "strong-state", + Request: Request{ + Client: &DefaultResponseModeClient{ + DefaultClient: &DefaultClient{ + RedirectURIs: []string{"https://foo.bar/cb"}, + Scopes: []string{"foo", "bar"}, + ResponseTypes: []string{"code token"}, + Audience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + Secret: []byte("1234"), + }, + ResponseModes: []ResponseModeType{ResponseModeFragment}, + }, + RequestedScope: []string{"foo", "bar"}, + RequestedAudience: []string{"https://cloud.ory.sh/api", "https://www.ory.sh/api"}, + }, + }, + }, + /* fails 
because request_uri is included */ + { + desc: "should fail because request_uri is provided in the request", + conf: fosite, + query: url.Values{ + "request_uri": {"https://foo.bar/ru"}, + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"1234"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, ResponseTypes: []string{"code token"}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("1234"))).Return(nil) + }, + expectedError: ErrInvalidRequest.WithHint("The request must not contain 'request_uri'."), + }, + /* fails because of invalid client credentials */ + { + desc: "should fail because of invalid client creds", + conf: fosite, + query: url.Values{ + "request_uri": {"https://foo.bar/ru"}, + "redirect_uri": {"https://foo.bar/cb"}, + "client_id": {"1234"}, + "client_secret": []string{"4321"}, + "response_type": {"code token"}, + "state": {"strong-state"}, + "scope": {"foo bar"}, + "response_mode": {"form_post"}, + }, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).MaxTimes(2) + clientManager.EXPECT().GetClient(gomock.Any(), "1234").Return(&DefaultClient{RedirectURIs: []string{"https://foo.bar/cb"}, Scopes: []string{"foo", "bar"}, ResponseTypes: []string{"code token"}, Secret: []byte("1234")}, nil).MaxTimes(2) + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("1234")), gomock.Eq([]byte("4321"))).Return(fmt.Errorf("invalid hash")) + }, + expectedError: ErrInvalidClient, + }, + } { + t.Run(fmt.Sprintf("case=%s", c.desc), func(t *testing.T) { + ctx := NewContext() + + c.mock() + if c.r == nil { + c.r = &http.Request{ + Header: http.Header{}, + Method: "POST", + } + if c.query != nil { + c.r.URL = &url.URL{RawQuery: c.query.Encode()} + } + } + + ar, err := c.conf.NewPushedAuthorizeRequest(ctx, c.r) + if c.expectedError != nil { + assert.EqualError(t, err, c.expectedError.Error(), "Stack: %s", string(debug.Stack())) + // https://github.com/ory/hydra/issues/1642 + AssertObjectKeysEqual(t, &AuthorizeRequest{State: c.query.Get("state")}, ar, "State") + } else { + require.NoError(t, err) + AssertObjectKeysEqual(t, c.expect, ar, "ResponseTypes", "RequestedAudience", "RequestedScope", "Client", "RedirectURI", "State") + assert.NotNil(t, ar.GetRequestedAt()) + } + }) + } +} diff --git a/fosite/pushed_authorize_response.go b/fosite/pushed_authorize_response.go new file mode 100644 index 00000000000..048788a8fe2 --- /dev/null +++ b/fosite/pushed_authorize_response.go @@ -0,0 +1,61 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "net/http" + +// PushedAuthorizeResponse is the response object for PAR +type PushedAuthorizeResponse struct { + RequestURI string `json:"request_uri"` + ExpiresIn int `json:"expires_in"` + Header http.Header + Extra map[string]interface{} +} + +// GetRequestURI gets +func (a *PushedAuthorizeResponse) GetRequestURI() string { + return a.RequestURI +} + +// SetRequestURI sets +func (a *PushedAuthorizeResponse) SetRequestURI(requestURI string) { + a.RequestURI = requestURI +} + +// GetExpiresIn gets +func (a *PushedAuthorizeResponse) GetExpiresIn() 
int { + return a.ExpiresIn +} + +// SetExpiresIn sets +func (a *PushedAuthorizeResponse) SetExpiresIn(seconds int) { + a.ExpiresIn = seconds +} + +// GetHeader gets +func (a *PushedAuthorizeResponse) GetHeader() http.Header { + return a.Header +} + +// AddHeader adds +func (a *PushedAuthorizeResponse) AddHeader(key, value string) { + a.Header.Add(key, value) +} + +// SetExtra sets +func (a *PushedAuthorizeResponse) SetExtra(key string, value interface{}) { + a.Extra[key] = value +} + +// GetExtra gets +func (a *PushedAuthorizeResponse) GetExtra(key string) interface{} { + return a.Extra[key] +} + +// ToMap converts to a map +func (a *PushedAuthorizeResponse) ToMap() map[string]interface{} { + a.Extra["request_uri"] = a.RequestURI + a.Extra["expires_in"] = a.ExpiresIn + return a.Extra +} diff --git a/fosite/pushed_authorize_response_writer.go b/fosite/pushed_authorize_response_writer.go new file mode 100644 index 00000000000..84d34ee869f --- /dev/null +++ b/fosite/pushed_authorize_response_writer.go @@ -0,0 +1,95 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" +) + +// NewPushedAuthorizeResponse executes the handlers and builds the response +func (f *Fosite) NewPushedAuthorizeResponse(ctx context.Context, ar AuthorizeRequester, session Session) (_ PushedAuthorizeResponder, err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewPushedAuthorizeResponse") + defer otelx.End(span, &err) + + // Get handlers. If no handlers are defined, this is considered a misconfigured Fosite instance. + handlersProvider, ok := f.Config.(PushedAuthorizeRequestHandlersProvider) + if !ok { + return nil, errorsx.WithStack(ErrServerError.WithHint(ErrorPARNotSupported).WithDebug(DebugPARRequestsHandlerMissing)) + } + + var resp = &PushedAuthorizeResponse{ + Header: http.Header{}, + Extra: map[string]interface{}{}, + } + + ctx = context.WithValue(ctx, AuthorizeRequestContextKey, ar) + ctx = context.WithValue(ctx, PushedAuthorizeResponseContextKey, resp) + + ar.SetSession(session) + for _, h := range handlersProvider.GetPushedAuthorizeEndpointHandlers(ctx) { + if err := h.HandlePushedAuthorizeEndpointRequest(ctx, ar, resp); err != nil { + return nil, err + } + } + + return resp, nil +} + +// WritePushedAuthorizeResponse writes the PAR response +func (f *Fosite) WritePushedAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, resp PushedAuthorizeResponder) { + // Set custom headers, e.g. "X-MySuperCoolCustomHeader" or "X-DONT-CACHE-ME"... 
+ wh := rw.Header() + rh := resp.GetHeader() + for k := range rh { + wh.Set(k, rh.Get(k)) + } + + wh.Set("Cache-Control", "no-store") + wh.Set("Pragma", "no-cache") + wh.Set("Content-Type", "application/json;charset=UTF-8") + + js, err := json.Marshal(resp.ToMap()) + if err != nil { + http.Error(rw, err.Error(), http.StatusInternalServerError) + return + } + + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + + rw.WriteHeader(http.StatusCreated) + _, _ = rw.Write(js) +} + +// WritePushedAuthorizeError writes the PAR error +func (f *Fosite) WritePushedAuthorizeError(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, err error) { + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + + sendDebugMessagesToClient := f.Config.GetSendDebugMessagesToClients(ctx) + rfcerr := ErrorToRFC6749Error(err).WithLegacyFormat(f.Config.GetUseLegacyErrorFormat(ctx)). + WithExposeDebug(sendDebugMessagesToClient).WithLocalizer(f.Config.GetMessageCatalog(ctx), getLangFromRequester(ar)) + + js, err := json.Marshal(rfcerr) + if err != nil { + if sendDebugMessagesToClient { + errorMessage := EscapeJSONString(err.Error()) + http.Error(rw, fmt.Sprintf(`{"error":"server_error","error_description":"%s"}`, errorMessage), http.StatusInternalServerError) + } else { + http.Error(rw, `{"error":"server_error"}`, http.StatusInternalServerError) + } + return + } + + rw.WriteHeader(rfcerr.CodeField) + _, _ = rw.Write(js) +} diff --git a/fosite/pushed_authorize_response_writer_test.go b/fosite/pushed_authorize_response_writer_test.go new file mode 100644 index 00000000000..98fcca2e383 --- /dev/null +++ b/fosite/pushed_authorize_response_writer_test.go @@ -0,0 +1,62 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + . "github.com/ory/hydra/v2/fosite" + . 
"github.com/ory/hydra/v2/fosite/internal" +) + +func TestNewPushedAuthorizeResponse(t *testing.T) { + ctrl := gomock.NewController(t) + handlers := []*MockPushedAuthorizeEndpointHandler{NewMockPushedAuthorizeEndpointHandler(ctrl)} + ar := NewMockAuthorizeRequester(ctrl) + t.Cleanup(ctrl.Finish) + + ctx := context.Background() + oauth2 := &Fosite{ + Config: &Config{ + PushedAuthorizeEndpointHandlers: PushedAuthorizeEndpointHandlers{handlers[0]}, + }, + } + ar.EXPECT().SetSession(gomock.Eq(new(DefaultSession))).AnyTimes() + fooErr := errors.New("foo") + for k, c := range []struct { + isErr bool + mock func() + expectErr error + }{ + { + mock: func() { + handlers[0].EXPECT().HandlePushedAuthorizeEndpointRequest(gomock.Any(), gomock.Eq(ar), gomock.Any()).Return(fooErr) + }, + isErr: true, + expectErr: fooErr, + }, + { + mock: func() { + handlers[0].EXPECT().HandlePushedAuthorizeEndpointRequest(gomock.Any(), gomock.Eq(ar), gomock.Any()).Return(nil) + }, + isErr: false, + }, + } { + c.mock() + responder, err := oauth2.NewPushedAuthorizeResponse(ctx, ar, new(DefaultSession)) + assert.Equal(t, c.isErr, err != nil, "%d: %s", k, err) + if err != nil { + assert.Equal(t, c.expectErr, err, "%d: %s", k, err) + assert.Nil(t, responder, "%d", k) + } else { + assert.NotNil(t, responder, "%d", k) + } + t.Logf("Passed test case %d", k) + } +} diff --git a/fosite/request.go b/fosite/request.go new file mode 100644 index 00000000000..42c92abb125 --- /dev/null +++ b/fosite/request.go @@ -0,0 +1,185 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "net/url" + "time" + + "github.com/google/uuid" + "golang.org/x/text/language" +) + +// Request is an implementation of Requester +type Request struct { + ID string `json:"id" gorethink:"id"` + RequestedAt time.Time `json:"requestedAt" gorethink:"requestedAt"` + Client Client `json:"client" gorethink:"client"` + RequestedScope Arguments `json:"scopes" gorethink:"scopes"` + GrantedScope Arguments `json:"grantedScopes" gorethink:"grantedScopes"` + Form url.Values `json:"form" gorethink:"form"` + Session Session `json:"session" gorethink:"session"` + RequestedAudience Arguments `json:"requestedAudience"` + GrantedAudience Arguments `json:"grantedAudience"` + Lang language.Tag `json:"-"` +} + +func NewRequest() *Request { + return &Request{ + Client: &DefaultClient{}, + RequestedScope: Arguments{}, + RequestedAudience: Arguments{}, + GrantedAudience: Arguments{}, + GrantedScope: Arguments{}, + Form: url.Values{}, + RequestedAt: time.Now().UTC(), + } +} + +func (a *Request) GetID() string { + if a.ID == "" { + a.ID = uuid.New().String() + } + return a.ID +} + +func (a *Request) SetID(id string) { + a.ID = id +} + +func (a *Request) GetRequestForm() url.Values { + return a.Form +} + +func (a *Request) GetRequestedAt() time.Time { + return a.RequestedAt +} + +func (a *Request) GetClient() Client { + return a.Client +} + +func (a *Request) GetRequestedScopes() Arguments { + return a.RequestedScope +} + +func (a *Request) SetRequestedScopes(s Arguments) { + a.RequestedScope = nil + for _, scope := range s { + a.AppendRequestedScope(scope) + } +} + +func (a *Request) SetRequestedAudience(s Arguments) { + a.RequestedAudience = nil + for _, scope := range s { + a.AppendRequestedAudience(scope) + } +} + +func (a *Request) AppendRequestedScope(scope string) { + for _, has := range a.RequestedScope { + if scope == has { + return + } + } + a.RequestedScope = append(a.RequestedScope, scope) +} + +func 
(a *Request) AppendRequestedAudience(audience string) { + for _, has := range a.RequestedAudience { + if audience == has { + return + } + } + a.RequestedAudience = append(a.RequestedAudience, audience) +} + +func (a *Request) GetRequestedAudience() (audience Arguments) { + return a.RequestedAudience +} + +func (a *Request) GrantAudience(audience string) { + for _, has := range a.GrantedAudience { + if audience == has { + return + } + } + a.GrantedAudience = append(a.GrantedAudience, audience) +} + +func (a *Request) GetGrantedScopes() Arguments { + return a.GrantedScope +} + +func (a *Request) GetGrantedAudience() Arguments { + return a.GrantedAudience +} + +func (a *Request) GrantScope(scope string) { + for _, has := range a.GrantedScope { + if scope == has { + return + } + } + a.GrantedScope = append(a.GrantedScope, scope) +} + +func (a *Request) SetSession(session Session) { + a.Session = session +} + +func (a *Request) GetSession() Session { + return a.Session +} + +func (a *Request) Merge(request Requester) { + for _, scope := range request.GetRequestedScopes() { + a.AppendRequestedScope(scope) + } + for _, scope := range request.GetGrantedScopes() { + a.GrantScope(scope) + } + + for _, aud := range request.GetRequestedAudience() { + a.AppendRequestedAudience(aud) + } + for _, aud := range request.GetGrantedAudience() { + a.GrantAudience(aud) + } + + a.ID = request.GetID() + a.RequestedAt = request.GetRequestedAt() + a.Client = request.GetClient() + a.Session = request.GetSession() + + for k, v := range request.GetRequestForm() { + a.Form[k] = v + } +} + +var defaultAllowedParameters = []string{"grant_type", "response_type", "scope", "client_id"} + +func (a *Request) Sanitize(allowedParameters []string) Requester { + b := new(Request) + allowed := map[string]bool{} + for _, v := range append(allowedParameters, defaultAllowedParameters...) { + allowed[v] = true + } + + *b = *a + b.ID = a.GetID() + b.Form = url.Values{} + for k := range a.Form { + if allowed[k] { + b.Form[k] = a.Form[k] + } + } + + return b +} + +func (a *Request) GetLang() language.Tag { + return a.Lang +} diff --git a/fosite/request_test.go b/fosite/request_test.go new file mode 100644 index 00000000000..ec61ec27d67 --- /dev/null +++ b/fosite/request_test.go @@ -0,0 +1,118 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "net/url" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + . 
"github.com/ory/hydra/v2/fosite" +) + +func TestRequest(t *testing.T) { + r := &Request{ + RequestedAt: time.Now().UTC(), + Client: &DefaultClient{}, + RequestedScope: Arguments{"scope"}, + GrantedScope: Arguments{"scope"}, + RequestedAudience: Arguments{"scope"}, + GrantedAudience: Arguments{"scope"}, + Form: url.Values{"foo": []string{"bar"}}, + Session: new(DefaultSession), + } + + assert.Equal(t, r.RequestedAt, r.GetRequestedAt()) + assert.Equal(t, r.Client, r.GetClient()) + assert.Equal(t, r.GrantedScope, r.GetGrantedScopes()) + assert.Equal(t, r.RequestedScope, r.GetRequestedScopes()) + assert.Equal(t, r.Form, r.GetRequestForm()) + assert.Equal(t, r.Session, r.GetSession()) +} + +func TestMergeRequest(t *testing.T) { + a := &Request{ + ID: "123", + RequestedAt: time.Now().UTC(), + Client: &DefaultClient{ID: "123"}, + RequestedScope: Arguments{"scope-3", "scope-4"}, + RequestedAudience: Arguments{"aud-3", "aud-4"}, + GrantedScope: []string{"scope-1", "scope-2"}, + GrantedAudience: []string{"aud-1", "aud-2"}, + Form: url.Values{"foo": []string{"fasdf"}}, + Session: new(DefaultSession), + } + b := &Request{ + RequestedAt: time.Now().UTC(), + Client: &DefaultClient{}, + RequestedScope: Arguments{}, + GrantedScope: []string{}, + Form: url.Values{}, + Session: new(DefaultSession), + } + + b.Merge(a) + assert.EqualValues(t, a.RequestedAt, b.RequestedAt) + assert.EqualValues(t, a.Client, b.Client) + assert.EqualValues(t, a.RequestedScope, b.RequestedScope) + assert.EqualValues(t, a.RequestedAudience, b.RequestedAudience) + assert.EqualValues(t, a.GrantedScope, b.GrantedScope) + assert.EqualValues(t, a.GrantedAudience, b.GrantedAudience) + assert.EqualValues(t, a.Form, b.Form) + assert.EqualValues(t, a.Session, b.Session) + assert.EqualValues(t, a.ID, b.ID) +} + +func TestSanitizeRequest(t *testing.T) { + a := &Request{ + RequestedAt: time.Now().UTC(), + Client: &DefaultClient{ID: "123"}, + RequestedScope: Arguments{"asdff"}, + GrantedScope: []string{"asdf"}, + Form: url.Values{ + "foo": []string{"fasdf"}, + "bar": []string{"fasdf", "faaaa"}, + "baz": []string{"fasdf"}, + "grant_type": []string{"code"}, + "response_type": []string{"id_token"}, + "client_id": []string{"1234"}, + "scope": []string{"read"}, + }, + Session: new(DefaultSession), + } + + b := a.Sanitize([]string{"bar", "baz"}) + assert.NotEqual(t, a.Form.Encode(), b.GetRequestForm().Encode()) + + assert.Empty(t, b.GetRequestForm().Get("foo")) + assert.Equal(t, "fasdf", b.GetRequestForm().Get("bar")) + assert.Equal(t, []string{"fasdf", "faaaa"}, b.GetRequestForm()["bar"]) + assert.Equal(t, "fasdf", b.GetRequestForm().Get("baz")) + + assert.Equal(t, "fasdf", a.GetRequestForm().Get("foo")) + assert.Equal(t, "fasdf", a.GetRequestForm().Get("bar")) + assert.Equal(t, []string{"fasdf", "faaaa"}, a.GetRequestForm()["bar"]) + assert.Equal(t, "fasdf", a.GetRequestForm().Get("baz")) + assert.Equal(t, "code", a.GetRequestForm().Get("grant_type")) + assert.Equal(t, "id_token", a.GetRequestForm().Get("response_type")) + assert.Equal(t, "1234", a.GetRequestForm().Get("client_id")) + assert.Equal(t, "read", a.GetRequestForm().Get("scope")) +} + +func TestIdentifyRequest(t *testing.T) { + a := &Request{ + RequestedAt: time.Now().UTC(), + Client: &DefaultClient{}, + RequestedScope: Arguments{}, + GrantedScope: []string{}, + Form: url.Values{"foo": []string{"bar"}}, + Session: new(DefaultSession), + } + + b := a.Sanitize([]string{}) + b.GetID() + assert.Equal(t, a.ID, b.GetID()) +} diff --git a/fosite/response_handler.go 
b/fosite/response_handler.go new file mode 100644 index 00000000000..901185a7ce8 --- /dev/null +++ b/fosite/response_handler.go @@ -0,0 +1,57 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "net/http" +) + +// ResponseModeHandler provides a contract for handling custom response modes +type ResponseModeHandler interface { + // ResponseModes returns a set of supported response modes handled + // by the interface implementation. + // + // In an authorize request with any of the provided response modes, the + // methods `WriteAuthorizeResponse` and `WriteAuthorizeError` will be + // invoked to write the successful or error authorization responses, respectively. + ResponseModes() ResponseModeTypes + + // WriteAuthorizeResponse writes successful responses + // + // The following headers are expected to be set by default: + // header.Set("Cache-Control", "no-store") + // header.Set("Pragma", "no-cache") + WriteAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, resp AuthorizeResponder) + + // WriteAuthorizeError writes error responses + // + // The following headers are expected to be set by default: + // header.Set("Cache-Control", "no-store") + // header.Set("Pragma", "no-cache") + WriteAuthorizeError(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, err error) +} + +type ResponseModeTypes []ResponseModeType + +func (rs ResponseModeTypes) Has(item ResponseModeType) bool { + for _, r := range rs { + if r == item { + return true + } + } + return false +} + +func NewDefaultResponseModeHandler() *DefaultResponseModeHandler { + return new(DefaultResponseModeHandler) +} + +type DefaultResponseModeHandler struct{} + +func (d *DefaultResponseModeHandler) ResponseModes() ResponseModeTypes { return nil } +func (d *DefaultResponseModeHandler) WriteAuthorizeResponse(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, resp AuthorizeResponder) { +} +func (d *DefaultResponseModeHandler) WriteAuthorizeError(ctx context.Context, rw http.ResponseWriter, ar AuthorizeRequester, err error) { +} diff --git a/fosite/revoke_handler.go b/fosite/revoke_handler.go new file mode 100644 index 00000000000..172addda253 --- /dev/null +++ b/fosite/revoke_handler.go @@ -0,0 +1,123 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" + "github.com/ory/x/otelx" + + "github.com/pkg/errors" +) + +// NewRevocationRequest handles incoming token revocation requests and +// validates various parameters as specified in: +// https://tools.ietf.org/html/rfc7009#section-2.1 +// +// The authorization server first validates the client credentials (in +// case of a confidential client) and then verifies whether the token +// was issued to the client making the revocation request. If this +// validation fails, the request is refused and the client is informed +// of the error by the authorization server as described below. +// +// In the next step, the authorization server invalidates the token. +// The invalidation takes place immediately, and the token cannot be +// used again after the revocation. +// +// * https://tools.ietf.org/html/rfc7009#section-2.2 +// An invalid token type hint value is ignored by the authorization +// server and does not influence the revocation response.
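+// +// A minimal usage sketch; the endpoint wiring and the name revocationEndpoint are illustrative only, while NewRevocationRequest and WriteRevocationResponse are defined in this file: +// +//	func revocationEndpoint(oauth2 *Fosite) http.HandlerFunc { +//		return func(rw http.ResponseWriter, r *http.Request) { +//			// Parse and authenticate the revocation request, then let WriteRevocationResponse render the RFC 7009 result. +//			err := oauth2.NewRevocationRequest(r.Context(), r) +//			oauth2.WriteRevocationResponse(r.Context(), rw, err) +//		} +//	}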
+func (f *Fosite) NewRevocationRequest(ctx context.Context, r *http.Request) (err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("github.com/ory/hydra/v2/fosite").Start(ctx, "Fosite.NewRevocationRequest") + defer otelx.End(span, &err) + + ctx = context.WithValue(ctx, RequestContextKey, r) + + if r.Method != "POST" { + return errorsx.WithStack(ErrInvalidRequest.WithHintf("HTTP method is '%s' but expected 'POST'.", r.Method)) + } else if err := r.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { + return errorsx.WithStack(ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithWrap(err).WithDebug(err.Error())) + } else if len(r.PostForm) == 0 { + return errorsx.WithStack(ErrInvalidRequest.WithHint("The POST body can not be empty.")) + } + + client, err := f.AuthenticateClient(ctx, r, r.PostForm) + if err != nil { + return err + } + + token := r.PostForm.Get("token") + tokenTypeHint := TokenType(r.PostForm.Get("token_type_hint")) + + var found = false + for _, loader := range f.Config.GetRevocationHandlers(ctx) { + if err := loader.RevokeToken(ctx, token, tokenTypeHint, client); err == nil { + found = true + } else if errors.Is(err, ErrUnknownRequest) { + // do nothing + } else if err != nil { + return err + } + } + + if !found { + return errorsx.WithStack(ErrInvalidRequest) + } + + return nil +} + +// WriteRevocationResponse writes a token revocation response as specified in: +// https://tools.ietf.org/html/rfc7009#section-2.2 +// +// The authorization server responds with HTTP status code 200 if the +// token has been revoked successfully or if the client submitted an +// invalid token. +// +// Note: invalid tokens do not cause an error response since the client +// cannot handle such an error in a reasonable way. Moreover, the +// purpose of the revocation request, invalidating the particular token, +// is already achieved. +func (f *Fosite) WriteRevocationResponse(ctx context.Context, rw http.ResponseWriter, err error) { + rw.Header().Set("Cache-Control", "no-store") + rw.Header().Set("Pragma", "no-cache") + + if err == nil { + rw.WriteHeader(http.StatusOK) + return + } + + if errors.Is(err, ErrInvalidRequest) { + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + + js, err := json.Marshal(ErrInvalidRequest) + if err != nil { + http.Error(rw, fmt.Sprintf(`{"error": "%s"}`, err.Error()), http.StatusInternalServerError) + return + } + + rw.WriteHeader(ErrInvalidRequest.CodeField) + _, _ = rw.Write(js) + } else if errors.Is(err, ErrInvalidClient) { + rw.Header().Set("Content-Type", "application/json;charset=UTF-8") + + js, err := json.Marshal(ErrInvalidClient) + if err != nil { + http.Error(rw, fmt.Sprintf(`{"error": "%s"}`, err.Error()), http.StatusInternalServerError) + return + } + + rw.WriteHeader(ErrInvalidClient.CodeField) + _, _ = rw.Write(js) + } else { + // 200 OK + rw.WriteHeader(http.StatusOK) + } +} diff --git a/fosite/revoke_handler_test.go b/fosite/revoke_handler_test.go new file mode 100644 index 00000000000..7dda31b8f74 --- /dev/null +++ b/fosite/revoke_handler_test.go @@ -0,0 +1,257 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite_test + +import ( + "context" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + gomock "go.uber.org/mock/gomock" + + . 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/internal" +) + +func TestNewRevocationRequest(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + clientManager := internal.NewMockClientManager(ctrl) + handler := internal.NewMockRevocationHandler(ctrl) + hasher := internal.NewMockHasher(ctrl) + t.Cleanup(ctrl.Finish) + + client := &DefaultClient{} + config := &Config{ClientSecretsHasher: hasher} + fosite := &Fosite{Store: store, Config: config} + for k, c := range []struct { + header http.Header + form url.Values + mock func() + method string + expectErr error + expect *AccessRequest + handlers RevocationHandlers + }{ + { + header: http.Header{}, + expectErr: ErrInvalidRequest, + method: "GET", + mock: func() {}, + }, + { + header: http.Header{}, + expectErr: ErrInvalidRequest, + method: "POST", + mock: func() {}, + }, + { + header: http.Header{}, + method: "POST", + form: url.Values{ + "token": {"foo"}, + }, + mock: func() {}, + expectErr: ErrInvalidRequest, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + }, + expectErr: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(nil, errors.New("")) + }, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + }, + expectErr: ErrInvalidClient, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Secret = []byte("foo") + client.Public = false + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(errors.New("")) + }, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + }, + expectErr: nil, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Secret = []byte("foo") + client.Public = false + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + handler.EXPECT().RevokeToken(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: RevocationHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + "token_type_hint": {"access_token"}, + }, + expectErr: nil, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Secret = []byte("foo") + client.Public = false + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + handler.EXPECT().RevokeToken(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: RevocationHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + "token_type_hint": {"refresh_token"}, + }, + expectErr: nil, + mock: func() { + 
store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Public = true + handler.EXPECT().RevokeToken(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: RevocationHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + "token_type_hint": {"refresh_token"}, + }, + expectErr: nil, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Secret = []byte("foo") + client.Public = false + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + handler.EXPECT().RevokeToken(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: RevocationHandlers{handler}, + }, + { + header: http.Header{ + "Authorization": {basicAuth("foo", "bar")}, + }, + method: "POST", + form: url.Values{ + "token": {"foo"}, + "token_type_hint": {"bar"}, + }, + expectErr: nil, + mock: func() { + store.EXPECT().FositeClientManager().Return(clientManager).Times(1) + clientManager.EXPECT().GetClient(gomock.Any(), gomock.Eq("foo")).Return(client, nil) + client.Secret = []byte("foo") + client.Public = false + hasher.EXPECT().Compare(gomock.Any(), gomock.Eq([]byte("foo")), gomock.Eq([]byte("bar"))).Return(nil) + handler.EXPECT().RevokeToken(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) + }, + handlers: RevocationHandlers{handler}, + }, + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + r := &http.Request{ + Header: c.header, + PostForm: c.form, + Form: c.form, + Method: c.method, + } + c.mock() + ctx := NewContext() + config.RevocationHandlers = c.handlers + err := fosite.NewRevocationRequest(ctx, r) + + if c.expectErr != nil { + assert.EqualError(t, err, c.expectErr.Error()) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestWriteRevocationResponse(t *testing.T) { + ctrl := gomock.NewController(t) + store := internal.NewMockStorage(ctrl) + hasher := internal.NewMockHasher(ctrl) + t.Cleanup(ctrl.Finish) + + config := &Config{ClientSecretsHasher: hasher} + fosite := &Fosite{Store: store, Config: config} + + type args struct { + rw *httptest.ResponseRecorder + err error + } + cases := []struct { + input args + expectCode int + }{ + { + input: args{ + rw: httptest.NewRecorder(), + err: ErrInvalidRequest, + }, + expectCode: ErrInvalidRequest.CodeField, + }, + { + input: args{ + rw: httptest.NewRecorder(), + err: ErrInvalidClient, + }, + expectCode: ErrInvalidClient.CodeField, + }, + { + input: args{ + rw: httptest.NewRecorder(), + err: nil, + }, + expectCode: http.StatusOK, + }, + } + + for _, tc := range cases { + fosite.WriteRevocationResponse(context.Background(), tc.input.rw, tc.input.err) + assert.Equal(t, tc.expectCode, tc.input.rw.Code) + } +} diff --git a/fosite/scope_strategy.go b/fosite/scope_strategy.go new file mode 100644 index 00000000000..358f3b7f364 --- /dev/null +++ b/fosite/scope_strategy.go @@ -0,0 +1,84 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "strings" + +// ScopeStrategy is a strategy for matching scopes. 
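+// +// Illustrative results for the three strategies defined in this file (the scope values are examples; behaviour mirrors the accompanying tests): +// +//	ExactScopeStrategy([]string{"foo.bar"}, "foo.bar")   // true: literal match only +//	HierarchicScopeStrategy([]string{"foo"}, "foo.bar")  // true: "foo" also grants its sub-scopes +//	WildcardScopeStrategy([]string{"foo.*"}, "foo.bar")  // true: a trailing "*" matches one or more non-empty segments +//	WildcardScopeStrategy([]string{"foo.*"}, "foo")      // false: the wildcard segment must be present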
+type ScopeStrategy func(haystack []string, needle string) bool + +func HierarchicScopeStrategy(haystack []string, needle string) bool { + for _, this := range haystack { + // foo == foo -> true + if this == needle { + return true + } + + // picture.read > picture -> false (scope picture includes read, write, ...) + if len(this) > len(needle) { + continue + } + + needles := strings.Split(needle, ".") + haystack := strings.Split(this, ".") + haystackLen := len(haystack) - 1 + for k, needle := range needles { + if haystackLen < k { + return true + } + + current := haystack[k] + if current != needle { + break + } + } + } + + return false +} + +func ExactScopeStrategy(haystack []string, needle string) bool { + for _, this := range haystack { + if needle == this { + return true + } + } + + return false +} + +func WildcardScopeStrategy(matchers []string, needle string) bool { + needleParts := strings.Split(needle, ".") + for _, matcher := range matchers { + matcherParts := strings.Split(matcher, ".") + if len(matcherParts) > len(needleParts) { + continue + } + + var noteq bool + for k, c := range matcherParts { + // this is the last item and the lengths are different + if k == len(matcherParts)-1 && len(matcherParts) != len(needleParts) { + if c != "*" { + noteq = true + break + } + } + + if c == "*" && len(needleParts[k]) > 0 { + // pass because this satisfies the requirements + continue + } else if c != needleParts[k] { + noteq = true + break + } + } + + if !noteq { + return true + } + } + + return false +} diff --git a/fosite/scope_strategy_test.go b/fosite/scope_strategy_test.go new file mode 100644 index 00000000000..a30a753b846 --- /dev/null +++ b/fosite/scope_strategy_test.go @@ -0,0 +1,134 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestHierarchicScopeStrategy(t *testing.T) { + var strategy ScopeStrategy = HierarchicScopeStrategy + var scopes = []string{} + + assert.False(t, strategy(scopes, "foo.bar.baz")) + assert.False(t, strategy(scopes, "foo.bar")) + assert.False(t, strategy(scopes, "foo")) + + scopes = []string{"foo.bar", "bar.baz", "baz.baz.1", "baz.baz.2", "baz.baz.3", "baz.baz.baz"} + assert.True(t, strategy(scopes, "foo.bar.baz")) + assert.True(t, strategy(scopes, "baz.baz.baz")) + assert.True(t, strategy(scopes, "foo.bar")) + assert.False(t, strategy(scopes, "foo")) + + assert.True(t, strategy(scopes, "bar.baz")) + assert.True(t, strategy(scopes, "bar.baz.zad")) + assert.False(t, strategy(scopes, "bar")) + assert.False(t, strategy(scopes, "baz")) + + scopes = []string{"fosite.key.create", "fosite.key.get", "fosite.key.delete", "fosite.key.update"} + assert.True(t, strategy(scopes, "fosite.key.delete")) + assert.True(t, strategy(scopes, "fosite.key.get")) + assert.True(t, strategy(scopes, "fosite.key.get")) + assert.True(t, strategy(scopes, "fosite.key.update")) + + scopes = []string{"hydra", "openid", "offline"} + assert.False(t, strategy(scopes, "foo.bar")) + assert.False(t, strategy(scopes, "foo")) + assert.True(t, strategy(scopes, "hydra")) + assert.True(t, strategy(scopes, "hydra.bar")) + assert.True(t, strategy(scopes, "openid")) + assert.True(t, strategy(scopes, "openid.baz.bar")) + assert.True(t, strategy(scopes, "offline")) + assert.True(t, strategy(scopes, "offline.baz.bar.baz")) +} + +func TestWildcardScopeStrategy(t *testing.T) { + var strategy ScopeStrategy = WildcardScopeStrategy + var scopes = []string{} + + 
assert.False(t, strategy(scopes, "foo.bar.baz")) + assert.False(t, strategy(scopes, "foo.bar")) + + scopes = []string{"*"} + assert.False(t, strategy(scopes, "")) + assert.True(t, strategy(scopes, "asdf")) + assert.True(t, strategy(scopes, "asdf.asdf")) + + scopes = []string{"foo"} + assert.False(t, strategy(scopes, "*")) + assert.False(t, strategy(scopes, "foo.*")) + assert.False(t, strategy(scopes, "fo*")) + assert.True(t, strategy(scopes, "foo")) + + scopes = []string{"foo*"} + assert.False(t, strategy(scopes, "foo")) + assert.False(t, strategy(scopes, "fooa")) + assert.False(t, strategy(scopes, "fo")) + assert.True(t, strategy(scopes, "foo*")) + + scopes = []string{"foo.*"} + assert.True(t, strategy(scopes, "foo.bar")) + assert.True(t, strategy(scopes, "foo.baz")) + assert.True(t, strategy(scopes, "foo.bar.baz")) + assert.False(t, strategy(scopes, "foo")) + + scopes = []string{"foo.*.baz"} + assert.True(t, strategy(scopes, "foo.*.baz")) + assert.True(t, strategy(scopes, "foo.bar.baz")) + assert.False(t, strategy(scopes, "foo..baz")) + assert.False(t, strategy(scopes, "foo.baz")) + assert.False(t, strategy(scopes, "foo")) + assert.False(t, strategy(scopes, "foo.bar.bar")) + + scopes = []string{"foo.*.bar.*"} + assert.True(t, strategy(scopes, "foo.baz.bar.baz")) + assert.False(t, strategy(scopes, "foo.baz.baz.bar.baz")) + assert.True(t, strategy(scopes, "foo.baz.bar.bar.bar")) + assert.False(t, strategy(scopes, "foo.baz.bar")) + assert.True(t, strategy(scopes, "foo.*.bar.*.*.*")) + assert.True(t, strategy(scopes, "foo.1.bar.1.2.3.4.5")) + + scopes = []string{"foo.*.bar"} + assert.True(t, strategy(scopes, "foo.bar.bar")) + assert.False(t, strategy(scopes, "foo.bar.bar.bar")) + assert.False(t, strategy(scopes, "foo..bar")) + assert.False(t, strategy(scopes, "foo.bar..bar")) + + scopes = []string{"foo.*.bar.*.baz.*"} + assert.False(t, strategy(scopes, "foo.*.*")) + assert.False(t, strategy(scopes, "foo.*.bar")) + assert.False(t, strategy(scopes, "foo.baz.*")) + assert.False(t, strategy(scopes, "foo.baz.bar")) + assert.False(t, strategy(scopes, "foo.b*.bar")) + assert.True(t, strategy(scopes, "foo.bar.bar.baz.baz.baz")) + assert.True(t, strategy(scopes, "foo.bar.bar.baz.baz.baz.baz")) + assert.False(t, strategy(scopes, "foo.bar.bar.baz.baz")) + assert.False(t, strategy(scopes, "foo.bar.baz.baz.baz.bar")) + + scopes = strings.Fields("hydra.* openid offline hydra") + assert.True(t, strategy(scopes, "hydra.clients")) + assert.True(t, strategy(scopes, "hydra.clients.get")) + assert.True(t, strategy(scopes, "hydra")) + assert.True(t, strategy(scopes, "offline")) + assert.True(t, strategy(scopes, "openid")) +} + +func TestExactScopeStrategy2ScopeStrategy(t *testing.T) { + var strategy ScopeStrategy = ExactScopeStrategy + + scopes := []string{"foo.bar.baz", "foo.bar"} + assert.True(t, strategy(scopes, "foo.bar.baz")) + assert.True(t, strategy(scopes, "foo.bar")) + + assert.False(t, strategy(scopes, "foo.bar.baz.baz")) + assert.False(t, strategy(scopes, "foo.bar.bar")) + + assert.False(t, strategy(scopes, "foo.bar.baz1")) + assert.False(t, strategy(scopes, "foo.bar1")) + + assert.False(t, strategy([]string{}, "foo")) +} diff --git a/fosite/scripts/.gitattributes b/fosite/scripts/.gitattributes new file mode 100644 index 00000000000..dfdb8b771ce --- /dev/null +++ b/fosite/scripts/.gitattributes @@ -0,0 +1 @@ +*.sh text eol=lf diff --git a/fosite/scripts/run-format.sh b/fosite/scripts/run-format.sh new file mode 100755 index 00000000000..28c8a65ab83 --- /dev/null +++ b/fosite/scripts/run-format.sh @@ 
-0,0 +1,8 @@ +#!/bin/bash + +set -euo pipefail + +cd "$( dirname "${BASH_SOURCE[0]}" )/.." + +goimports -w $(go list -f {{.Dir}} ./... | grep -v vendor | grep -v fosite$) +goimports -w *.go diff --git a/fosite/scripts/test-format.sh b/fosite/scripts/test-format.sh new file mode 100755 index 00000000000..0cc6489652a --- /dev/null +++ b/fosite/scripts/test-format.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -euo pipefail + +cd "$( dirname "${BASH_SOURCE[0]}" )/.." + +toformat=$(goimports -l $(go list -f {{.Dir}} ./... | grep -v vendor | grep -v 'fosite$')) +[ -z "$toformat" ] && echo "All files are formatted correctly" +[ -n "$toformat" ] && echo "Please use \`goimports\` to format the following files:" && echo $toformat && exit 1 + +exit 0 diff --git a/fosite/session.go b/fosite/session.go new file mode 100644 index 00000000000..33d31479da3 --- /dev/null +++ b/fosite/session.go @@ -0,0 +1,104 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "time" + + "github.com/mohae/deepcopy" +) + +// Session is an interface that is used to store session data between OAuth2 requests. It can be used to look up +// when a session expires or what the subject's name was. +type Session interface { + // SetExpiresAt sets the expiration time of a token. + // + // session.SetExpiresAt(fosite.AccessToken, time.Now().UTC().Add(time.Hour)) + SetExpiresAt(key TokenType, exp time.Time) + + // GetExpiresAt returns the expiration time of a token if set, or time.IsZero() if not. + // + // session.GetExpiresAt(fosite.AccessToken) + GetExpiresAt(key TokenType) time.Time + + // GetUsername returns the username, if set. This is optional and only used during token introspection. + GetUsername() string + + // GetSubject returns the subject, if set. This is optional and only used during token introspection. + GetSubject() string + + // Clone clones the session. + Clone() Session +} + +// DefaultSession is a default implementation of the session interface. +type DefaultSession struct { + ExpiresAt map[TokenType]time.Time `json:"expires_at"` + Username string `json:"username"` + Subject string `json:"subject"` + Extra map[string]interface{} `json:"extra"` +} + +func (s *DefaultSession) SetExpiresAt(key TokenType, exp time.Time) { + if s.ExpiresAt == nil { + s.ExpiresAt = make(map[TokenType]time.Time) + } + s.ExpiresAt[key] = exp +} + +func (s *DefaultSession) GetExpiresAt(key TokenType) time.Time { + if s.ExpiresAt == nil { + s.ExpiresAt = make(map[TokenType]time.Time) + } + + return s.ExpiresAt[key] +} + +func (s *DefaultSession) GetUsername() string { + if s == nil { + return "" + } + return s.Username +} + +func (s *DefaultSession) SetSubject(subject string) { + s.Subject = subject +} + +func (s *DefaultSession) GetSubject() string { + if s == nil { + return "" + } + + return s.Subject +} + +func (s *DefaultSession) Clone() Session { + if s == nil { + return nil + } + + return deepcopy.Copy(s).(Session) +} + +// ExtraClaimsSession provides an interface for session to store any extra claims. +type ExtraClaimsSession interface { + // GetExtraClaims returns a map to store extra claims. + // The returned value can be modified in-place. + GetExtraClaims() map[string]interface{} +} + +// GetExtraClaims implements ExtraClaimsSession for DefaultSession. +// The returned value can be modified in-place. 
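+// +// For example (the claim name is arbitrary and only used for illustration): +// +//	s := new(DefaultSession) +//	s.GetExtraClaims()["preferred_username"] = "peter" +//	// s.Extra now holds the "preferred_username" claim.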
+func (s *DefaultSession) GetExtraClaims() map[string]interface{} { + if s == nil { + return nil + } + + if s.Extra == nil { + s.Extra = make(map[string]interface{}) + } + + return s.Extra +} diff --git a/fosite/session_test.go b/fosite/session_test.go new file mode 100644 index 00000000000..bb8615d859f --- /dev/null +++ b/fosite/session_test.go @@ -0,0 +1,25 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSession(t *testing.T) { + var s *DefaultSession + assert.Empty(t, s.GetSubject()) + assert.Empty(t, s.GetUsername()) + assert.Nil(t, s.Clone()) +} + +func TestZeroSession(t *testing.T) { + var s *DefaultSession = new(DefaultSession) + assert.Empty(t, s.GetSubject()) + assert.Empty(t, s.GetUsername()) + assert.Empty(t, s.Clone()) + assert.Empty(t, s.GetExpiresAt(AccessToken)) +} diff --git a/fosite/storage.go b/fosite/storage.go new file mode 100644 index 00000000000..979a5fe08c7 --- /dev/null +++ b/fosite/storage.go @@ -0,0 +1,27 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import ( + "context" +) + +// Storage defines fosite's minimal storage interface. +type Storage interface { + FositeClientManager() ClientManager +} + +type PARStorageProvider interface { + PARStorage() PARStorage +} + +// PARStorage holds information needed to store and retrieve PAR context. +type PARStorage interface { + // CreatePARSession stores the pushed authorization request context. The requestURI is used to derive the key. + CreatePARSession(ctx context.Context, requestURI string, request AuthorizeRequester) error + // GetPARSession gets the pushed authorization request context. The caller is expected to merge the AuthorizeRequest. + GetPARSession(ctx context.Context, requestURI string) (AuthorizeRequester, error) + // DeletePARSession deletes the context.
+ DeletePARSession(ctx context.Context, requestURI string) (err error) +} diff --git a/fosite/storage/memory.go b/fosite/storage/memory.go new file mode 100644 index 00000000000..fb2b732f0ca --- /dev/null +++ b/fosite/storage/memory.go @@ -0,0 +1,611 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package storage + +import ( + "context" + "errors" + "sync" + "time" + + "github.com/go-jose/go-jose/v3" + "github.com/google/uuid" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/pkce" + "github.com/ory/hydra/v2/fosite/handler/rfc7523" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/internal" +) + +type MemoryUserRelation struct { + Username string + Password string +} + +type IssuerPublicKeys struct { + Issuer string + KeysBySub map[string]SubjectPublicKeys +} + +type SubjectPublicKeys struct { + Subject string + Keys map[string]PublicKeyScopes +} + +type PublicKeyScopes struct { + Key *jose.JSONWebKey + Scopes []string +} + +type DeviceAuthPair struct { + d string + u string +} + +type MemoryStore struct { + Clients map[string]fosite.Client + AuthorizeCodes map[string]StoreAuthorizeCode + IDSessions map[string]fosite.Requester + AccessTokens map[string]fosite.Requester + RefreshTokens map[string]StoreRefreshToken + DeviceAuths map[string]fosite.DeviceRequester + PKCES map[string]fosite.Requester + Users map[string]MemoryUserRelation + BlacklistedJTIs map[string]time.Time + // In-memory request ID to token signatures + AccessTokenRequestIDs map[string]string + RefreshTokenRequestIDs map[string]string + DeviceCodesRequestIDs map[string]DeviceAuthPair + UserCodesRequestIDs map[string]string + // Public keys to check signature in auth grant jwt assertion. 
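+ // The nesting is issuer -> subject -> key ID; for example (the lookup values are illustrative only): + // + //	IssuerPublicKeys["https://issuer.example"].KeysBySub["some-subject"].Keys["some-kid"].Key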
+ IssuerPublicKeys map[string]IssuerPublicKeys + PARSessions map[string]fosite.AuthorizeRequester + + clientsMutex sync.RWMutex + authorizeCodesMutex sync.RWMutex + idSessionsMutex sync.RWMutex + accessTokensMutex sync.RWMutex + refreshTokensMutex sync.RWMutex + deviceAuthsMutex sync.RWMutex + pkcesMutex sync.RWMutex + usersMutex sync.RWMutex + blacklistedJTIsMutex sync.RWMutex + accessTokenRequestIDsMutex sync.RWMutex + refreshTokenRequestIDsMutex sync.RWMutex + deviceAuthsRequestIDsMutex sync.RWMutex + issuerPublicKeysMutex sync.RWMutex + parSessionsMutex sync.RWMutex +} + +func NewMemoryStore() *MemoryStore { + return &MemoryStore{ + Clients: make(map[string]fosite.Client), + AuthorizeCodes: make(map[string]StoreAuthorizeCode), + IDSessions: make(map[string]fosite.Requester), + AccessTokens: make(map[string]fosite.Requester), + RefreshTokens: make(map[string]StoreRefreshToken), + DeviceAuths: make(map[string]fosite.DeviceRequester), + PKCES: make(map[string]fosite.Requester), + Users: make(map[string]MemoryUserRelation), + AccessTokenRequestIDs: make(map[string]string), + RefreshTokenRequestIDs: make(map[string]string), + DeviceCodesRequestIDs: make(map[string]DeviceAuthPair), + UserCodesRequestIDs: make(map[string]string), + BlacklistedJTIs: make(map[string]time.Time), + IssuerPublicKeys: make(map[string]IssuerPublicKeys), + PARSessions: make(map[string]fosite.AuthorizeRequester), + } +} + +func (s *MemoryStore) FositeClientManager() fosite.ClientManager { + return s +} + +func (s *MemoryStore) AuthorizeCodeStorage() oauth2.AuthorizeCodeStorage { + return s +} + +func (s *MemoryStore) AccessTokenStorage() oauth2.AccessTokenStorage { + return s +} + +func (s *MemoryStore) RefreshTokenStorage() oauth2.RefreshTokenStorage { + return s +} + +func (s *MemoryStore) TokenRevocationStorage() oauth2.TokenRevocationStorage { + return s +} + +func (s *MemoryStore) ResourceOwnerPasswordCredentialsGrantStorage() oauth2.ResourceOwnerPasswordCredentialsGrantStorage { + return s +} + +func (s *MemoryStore) OpenIDConnectRequestStorage() openid.OpenIDConnectRequestStorage { + return s +} + +func (s *MemoryStore) PKCERequestStorage() pkce.PKCERequestStorage { + return s +} + +func (s *MemoryStore) PARStorage() fosite.PARStorage { + return s +} + +func (s *MemoryStore) DeviceAuthStorage() rfc8628.DeviceAuthStorage { + return s +} + +func (s *MemoryStore) RFC7523KeyStorage() rfc7523.RFC7523KeyStorage { + return s +} + +type StoreAuthorizeCode struct { + active bool + fosite.Requester +} + +type StoreRefreshToken struct { + active bool + accessTokenSignature string + fosite.Requester +} + +func NewExampleStore() *MemoryStore { + return &MemoryStore{ + IDSessions: make(map[string]fosite.Requester), + Clients: map[string]fosite.Client{ + "my-client": &fosite.DefaultClient{ + ID: "my-client", + Secret: []byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + RotatedSecrets: [][]byte{[]byte(`$2y$10$X51gLxUQJ.hGw1epgHTE5u0bt64xM0COU7K9iAp.OFg8p2pUd.1zC `)}, // = "foobaz", + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token", "id_token token", "code id_token", "code token", "code id_token token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scopes: []string{"fosite", "openid", "photos", "offline"}, + }, + "custom-lifespan-client": &fosite.DefaultClientWithCustomTokenLifespans{ + DefaultClient: &fosite.DefaultClient{ + ID: "custom-lifespan-client", + Secret: 
[]byte(`$2a$10$IxMdI6d.LIRZPpSfEwNoeu4rY3FhDREsxFJXikcgdRRAStxUlsuEO`), // = "foobar" + RotatedSecrets: [][]byte{[]byte(`$2y$10$X51gLxUQJ.hGw1epgHTE5u0bt64xM0COU7K9iAp.OFg8p2pUd.1zC `)}, // = "foobaz", + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token", "id_token token", "code id_token", "code token", "code id_token token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scopes: []string{"fosite", "openid", "photos", "offline"}, + }, + TokenLifespans: &internal.TestLifespans, + }, + "encoded:client": &fosite.DefaultClient{ + ID: "encoded:client", + Secret: []byte(`$2a$10$A7M8b65dSSKGHF0H2sNkn.9Z0hT8U1Nv6OWPV3teUUaczXkVkxuDS`), // = "encoded&password" + RotatedSecrets: nil, + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token", "id_token token", "code id_token", "code token", "code id_token token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scopes: []string{"fosite", "openid", "photos", "offline"}, + }, + }, + Users: map[string]MemoryUserRelation{ + "peter": { + // This store simply checks for equality, a real storage implementation would obviously use + // a hashing algorithm for encrypting the user password. + Username: "peter", + Password: "secret", + }, + }, + AuthorizeCodes: map[string]StoreAuthorizeCode{}, + AccessTokens: map[string]fosite.Requester{}, + RefreshTokens: map[string]StoreRefreshToken{}, + PKCES: map[string]fosite.Requester{}, + DeviceAuths: make(map[string]fosite.DeviceRequester), + AccessTokenRequestIDs: map[string]string{}, + RefreshTokenRequestIDs: map[string]string{}, + DeviceCodesRequestIDs: make(map[string]DeviceAuthPair), + UserCodesRequestIDs: make(map[string]string), + IssuerPublicKeys: map[string]IssuerPublicKeys{}, + PARSessions: map[string]fosite.AuthorizeRequester{}, + } +} + +func (s *MemoryStore) CreateOpenIDConnectSession(_ context.Context, authorizeCode string, requester fosite.Requester) error { + s.idSessionsMutex.Lock() + defer s.idSessionsMutex.Unlock() + + s.IDSessions[authorizeCode] = requester + return nil +} + +func (s *MemoryStore) GetOpenIDConnectSession(_ context.Context, authorizeCode string, requester fosite.Requester) (fosite.Requester, error) { + s.idSessionsMutex.RLock() + defer s.idSessionsMutex.RUnlock() + + cl, ok := s.IDSessions[authorizeCode] + if !ok { + return nil, fosite.ErrNotFound + } + return cl, nil +} + +func (s *MemoryStore) DeleteOpenIDConnectSession(_ context.Context, authorizeCode string) error { + s.idSessionsMutex.Lock() + defer s.idSessionsMutex.Unlock() + + delete(s.IDSessions, authorizeCode) + return nil +} + +func (s *MemoryStore) GetClient(_ context.Context, id string) (fosite.Client, error) { + s.clientsMutex.RLock() + defer s.clientsMutex.RUnlock() + + cl, ok := s.Clients[id] + if !ok { + return nil, fosite.ErrNotFound + } + return cl, nil +} + +func (s *MemoryStore) SetTokenLifespans(clientID string, lifespans *fosite.ClientLifespanConfig) error { + s.clientsMutex.RLock() + defer s.clientsMutex.RUnlock() + + if client, ok := s.Clients[clientID]; ok { + if clc, ok := client.(*fosite.DefaultClientWithCustomTokenLifespans); ok { + clc.SetTokenLifespans(lifespans) + return nil + } + return fosite.ErrorToRFC6749Error(errors.New("failed to set token lifespans due to failed client type assertion")) + } + return fosite.ErrNotFound +} + +func (s *MemoryStore) 
ClientAssertionJWTValid(_ context.Context, jti string) error { + s.blacklistedJTIsMutex.RLock() + defer s.blacklistedJTIsMutex.RUnlock() + + if exp, exists := s.BlacklistedJTIs[jti]; exists && exp.After(time.Now()) { + return fosite.ErrJTIKnown + } + + return nil +} + +func (s *MemoryStore) SetClientAssertionJWT(_ context.Context, jti string, exp time.Time) error { + s.blacklistedJTIsMutex.Lock() + defer s.blacklistedJTIsMutex.Unlock() + + // delete expired jtis + for j, e := range s.BlacklistedJTIs { + if e.Before(time.Now()) { + delete(s.BlacklistedJTIs, j) + } + } + + if _, exists := s.BlacklistedJTIs[jti]; exists { + return fosite.ErrJTIKnown + } + + s.BlacklistedJTIs[jti] = exp + return nil +} + +func (s *MemoryStore) CreateAuthorizeCodeSession(_ context.Context, code string, req fosite.Requester) error { + s.authorizeCodesMutex.Lock() + defer s.authorizeCodesMutex.Unlock() + + s.AuthorizeCodes[code] = StoreAuthorizeCode{active: true, Requester: req} + return nil +} + +func (s *MemoryStore) GetAuthorizeCodeSession(_ context.Context, code string, _ fosite.Session) (fosite.Requester, error) { + s.authorizeCodesMutex.RLock() + defer s.authorizeCodesMutex.RUnlock() + + rel, ok := s.AuthorizeCodes[code] + if !ok { + return nil, fosite.ErrNotFound + } + if !rel.active { + return rel, fosite.ErrInvalidatedAuthorizeCode + } + + return rel.Requester, nil +} + +func (s *MemoryStore) InvalidateAuthorizeCodeSession(ctx context.Context, code string) error { + s.authorizeCodesMutex.Lock() + defer s.authorizeCodesMutex.Unlock() + + rel, ok := s.AuthorizeCodes[code] + if !ok { + return fosite.ErrNotFound + } + rel.active = false + s.AuthorizeCodes[code] = rel + return nil +} + +func (s *MemoryStore) CreatePKCERequestSession(_ context.Context, code string, req fosite.Requester) error { + s.pkcesMutex.Lock() + defer s.pkcesMutex.Unlock() + + s.PKCES[code] = req + return nil +} + +func (s *MemoryStore) GetPKCERequestSession(_ context.Context, code string, _ fosite.Session) (fosite.Requester, error) { + s.pkcesMutex.RLock() + defer s.pkcesMutex.RUnlock() + + rel, ok := s.PKCES[code] + if !ok { + return nil, fosite.ErrNotFound + } + return rel, nil +} + +func (s *MemoryStore) DeletePKCERequestSession(_ context.Context, code string) error { + s.pkcesMutex.Lock() + defer s.pkcesMutex.Unlock() + + delete(s.PKCES, code) + return nil +} + +func (s *MemoryStore) CreateAccessTokenSession(_ context.Context, signature string, req fosite.Requester) error { + // We first lock accessTokenRequestIDsMutex and then accessTokensMutex because this is the same order + // locking happens in RevokeAccessToken and using the same order prevents deadlocks. 
+ s.accessTokenRequestIDsMutex.Lock() + defer s.accessTokenRequestIDsMutex.Unlock() + s.accessTokensMutex.Lock() + defer s.accessTokensMutex.Unlock() + + s.AccessTokens[signature] = req + s.AccessTokenRequestIDs[req.GetID()] = signature + return nil +} + +func (s *MemoryStore) GetAccessTokenSession(_ context.Context, signature string, _ fosite.Session) (fosite.Requester, error) { + s.accessTokensMutex.RLock() + defer s.accessTokensMutex.RUnlock() + + rel, ok := s.AccessTokens[signature] + if !ok { + return nil, fosite.ErrNotFound + } + return rel, nil +} + +func (s *MemoryStore) DeleteAccessTokenSession(_ context.Context, signature string) error { + s.accessTokensMutex.Lock() + defer s.accessTokensMutex.Unlock() + + delete(s.AccessTokens, signature) + return nil +} + +func (s *MemoryStore) CreateRefreshTokenSession(_ context.Context, signature, accessTokenSignature string, req fosite.Requester) error { + // We first lock refreshTokenRequestIDsMutex and then refreshTokensMutex because this is the same order + // locking happens in RevokeRefreshToken and using the same order prevents deadlocks. + s.refreshTokenRequestIDsMutex.Lock() + defer s.refreshTokenRequestIDsMutex.Unlock() + s.refreshTokensMutex.Lock() + defer s.refreshTokensMutex.Unlock() + + s.RefreshTokens[signature] = StoreRefreshToken{active: true, Requester: req, accessTokenSignature: accessTokenSignature} + s.RefreshTokenRequestIDs[req.GetID()] = signature + return nil +} + +func (s *MemoryStore) GetRefreshTokenSession(_ context.Context, signature string, _ fosite.Session) (fosite.Requester, error) { + s.refreshTokensMutex.RLock() + defer s.refreshTokensMutex.RUnlock() + + rel, ok := s.RefreshTokens[signature] + if !ok { + return nil, fosite.ErrNotFound + } + if !rel.active { + return rel, fosite.ErrInactiveToken + } + return rel, nil +} + +func (s *MemoryStore) DeleteRefreshTokenSession(_ context.Context, signature string) error { + s.refreshTokensMutex.Lock() + defer s.refreshTokensMutex.Unlock() + + delete(s.RefreshTokens, signature) + return nil +} + +func (s *MemoryStore) Authenticate(_ context.Context, name string, secret string) (subject string, err error) { + s.usersMutex.RLock() + defer s.usersMutex.RUnlock() + + rel, ok := s.Users[name] + if !ok { + return "", fosite.ErrNotFound + } + if rel.Password != secret { + return "", fosite.ErrNotFound.WithDebug("Invalid credentials") + } + return uuid.New().String(), nil +} + +func (s *MemoryStore) RevokeRefreshToken(ctx context.Context, requestID string) error { + // We first lock refreshTokenRequestIDsMutex and then refreshTokensMutex because this is the same order + // locking happens in CreateRefreshTokenSession and using the same order prevents deadlocks. 
+ s.refreshTokenRequestIDsMutex.Lock() + defer s.refreshTokenRequestIDsMutex.Unlock() + s.refreshTokensMutex.Lock() + defer s.refreshTokensMutex.Unlock() + + if signature, exists := s.RefreshTokenRequestIDs[requestID]; exists { + rel, ok := s.RefreshTokens[signature] + if !ok { + return fosite.ErrNotFound + } + rel.active = false + s.RefreshTokens[signature] = rel + } + return nil +} + +func (s *MemoryStore) RevokeAccessToken(ctx context.Context, requestID string) error { + s.accessTokenRequestIDsMutex.RLock() + defer s.accessTokenRequestIDsMutex.RUnlock() + + if signature, exists := s.AccessTokenRequestIDs[requestID]; exists { + if err := s.DeleteAccessTokenSession(ctx, signature); err != nil { + return err + } + } + return nil +} + +func (s *MemoryStore) GetPublicKey(ctx context.Context, issuer string, subject string, keyId string) (*jose.JSONWebKey, error) { + s.issuerPublicKeysMutex.RLock() + defer s.issuerPublicKeysMutex.RUnlock() + + if issuerKeys, ok := s.IssuerPublicKeys[issuer]; ok { + if subKeys, ok := issuerKeys.KeysBySub[subject]; ok { + if keyScopes, ok := subKeys.Keys[keyId]; ok { + return keyScopes.Key, nil + } + } + } + + return nil, fosite.ErrNotFound +} + +func (s *MemoryStore) GetPublicKeys(ctx context.Context, issuer string, subject string) (*jose.JSONWebKeySet, error) { + s.issuerPublicKeysMutex.RLock() + defer s.issuerPublicKeysMutex.RUnlock() + + if issuerKeys, ok := s.IssuerPublicKeys[issuer]; ok { + if subKeys, ok := issuerKeys.KeysBySub[subject]; ok { + if len(subKeys.Keys) == 0 { + return nil, fosite.ErrNotFound + } + + keys := make([]jose.JSONWebKey, 0, len(subKeys.Keys)) + for _, keyScopes := range subKeys.Keys { + keys = append(keys, *keyScopes.Key) + } + + return &jose.JSONWebKeySet{Keys: keys}, nil + } + } + + return nil, fosite.ErrNotFound +} + +func (s *MemoryStore) GetPublicKeyScopes(ctx context.Context, issuer string, subject string, keyId string) ([]string, error) { + s.issuerPublicKeysMutex.RLock() + defer s.issuerPublicKeysMutex.RUnlock() + + if issuerKeys, ok := s.IssuerPublicKeys[issuer]; ok { + if subKeys, ok := issuerKeys.KeysBySub[subject]; ok { + if keyScopes, ok := subKeys.Keys[keyId]; ok { + return keyScopes.Scopes, nil + } + } + } + + return nil, fosite.ErrNotFound +} + +func (s *MemoryStore) IsJWTUsed(ctx context.Context, jti string) (bool, error) { + err := s.ClientAssertionJWTValid(ctx, jti) + if err != nil { + return true, nil + } + + return false, nil +} + +func (s *MemoryStore) MarkJWTUsedForTime(ctx context.Context, jti string, exp time.Time) error { + return s.SetClientAssertionJWT(ctx, jti, exp) +} + +// CreatePARSession stores the pushed authorization request context. The requestURI is used to derive the key. +func (s *MemoryStore) CreatePARSession(ctx context.Context, requestURI string, request fosite.AuthorizeRequester) error { + s.parSessionsMutex.Lock() + defer s.parSessionsMutex.Unlock() + + s.PARSessions[requestURI] = request + return nil +} + +// GetPARSession gets the pushed authorization request context previously stored under requestURI. It returns +// fosite.ErrNotFound if no session exists for the given request URI. +func (s *MemoryStore) GetPARSession(ctx context.Context, requestURI string) (fosite.AuthorizeRequester, error) { + s.parSessionsMutex.RLock() + defer s.parSessionsMutex.RUnlock() + + r, ok := s.PARSessions[requestURI] + if !ok { + return nil, fosite.ErrNotFound + } + + return r, nil +} + +// DeletePARSession deletes the context.
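+// +// A short, illustrative PAR round trip against this in-memory store (the variable names and the request_uri value are made up): +// +//	uri := "urn:ietf:params:oauth:request_uri:example" +//	_ = store.CreatePARSession(ctx, uri, ar)  // ar is a fosite.AuthorizeRequester +//	req, _ := store.GetPARSession(ctx, uri)   // returns the stored requester +//	_ = store.DeletePARSession(ctx, uri)      // afterwards GetPARSession returns fosite.ErrNotFound +//	_ = req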
+func (s *MemoryStore) DeletePARSession(ctx context.Context, requestURI string) (err error) { + s.parSessionsMutex.Lock() + defer s.parSessionsMutex.Unlock() + + delete(s.PARSessions, requestURI) + return nil +} + +func (s *MemoryStore) RotateRefreshToken(ctx context.Context, requestID string, refreshTokenSignature string) (err error) { + // Graceful token rotation can be implemented here but it's beyond the scope of this example. Check + // the Ory Hydra implementation for reference. + if err := s.RevokeRefreshToken(ctx, requestID); err != nil { + return err + } + return s.RevokeAccessToken(ctx, requestID) +} + +// CreateDeviceAuthSession stores the device auth session +func (s *MemoryStore) CreateDeviceAuthSession(_ context.Context, deviceCodeSignature, userCodeSignature string, req fosite.DeviceRequester) error { + s.deviceAuthsRequestIDsMutex.Lock() + defer s.deviceAuthsRequestIDsMutex.Unlock() + s.deviceAuthsMutex.Lock() + defer s.deviceAuthsMutex.Unlock() + + s.DeviceAuths[deviceCodeSignature] = req + s.DeviceAuths[userCodeSignature] = req + s.DeviceCodesRequestIDs[req.GetID()] = DeviceAuthPair{d: deviceCodeSignature, u: userCodeSignature} + return nil +} + +// GetDeviceCodeSession gets the device code session +func (s *MemoryStore) GetDeviceCodeSession(_ context.Context, signature string, _ fosite.Session) (fosite.DeviceRequester, error) { + s.deviceAuthsMutex.RLock() + defer s.deviceAuthsMutex.RUnlock() + + rel, ok := s.DeviceAuths[signature] + if !ok { + return nil, fosite.ErrNotFound + } + return rel, nil +} + +// InvalidateDeviceCodeSession invalidates the device code session +func (s *MemoryStore) InvalidateDeviceCodeSession(_ context.Context, code string) error { + s.deviceAuthsMutex.Lock() + defer s.deviceAuthsMutex.Unlock() + + delete(s.DeviceAuths, code) + return nil +} diff --git a/fosite/storage/memory_test.go b/fosite/storage/memory_test.go new file mode 100644 index 00000000000..af480ec0109 --- /dev/null +++ b/fosite/storage/memory_test.go @@ -0,0 +1,59 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package storage + +import ( + "context" + "errors" + "sync" + "testing" + + "github.com/ory/hydra/v2/fosite" +) + +func TestMemoryStore_Authenticate(t *testing.T) { + type fields struct { + Users map[string]MemoryUserRelation + } + type args struct { + in0 context.Context + name string + secret string + } + tests := []struct { + name string + fields fields + args args + wantErr error + }{ + { + name: "invalid_password", + args: args{ + name: "peter", + secret: "invalid", + }, + fields: fields{ + Users: map[string]MemoryUserRelation{ + "peter": { + Username: "peter", + Password: "secret", + }, + }, + }, + // ResourceOwnerPasswordCredentialsGrantHandler expects ErrNotFound + wantErr: fosite.ErrNotFound, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := &MemoryStore{ + Users: tt.fields.Users, + usersMutex: sync.RWMutex{}, + } + if _, err := s.Authenticate(tt.args.in0, tt.args.name, tt.args.secret); err == nil || !errors.Is(err, tt.wantErr) { + t.Errorf("Authenticate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/fosite/token/hmac/bytes.go b/fosite/token/hmac/bytes.go new file mode 100644 index 00000000000..cacdcd60d21 --- /dev/null +++ b/fosite/token/hmac/bytes.go @@ -0,0 +1,20 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package hmac + +import ( + "crypto/rand" + "io" + + "github.com/ory/x/errorsx" +) + +// RandomBytes returns n 
random bytes by reading from crypto/rand.Reader +func RandomBytes(n int) ([]byte, error) { + bytes := make([]byte, n) + if _, err := io.ReadFull(rand.Reader, bytes); err != nil { + return nil, errorsx.WithStack(err) + } + return bytes, nil +} diff --git a/fosite/token/hmac/bytes_test.go b/fosite/token/hmac/bytes_test.go new file mode 100644 index 00000000000..12aa1fc0fab --- /dev/null +++ b/fosite/token/hmac/bytes_test.go @@ -0,0 +1,28 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package hmac + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestRandomBytes(t *testing.T) { + bytes, err := RandomBytes(128) + assert.NoError(t, err) + assert.Len(t, bytes, 128) +} + +func TestPseudoRandomness(t *testing.T) { + runs := 65536 + results := map[string]struct{}{} + for i := 0; i < runs; i++ { + bytes, err := RandomBytes(128) + require.NoError(t, err) + results[string(bytes)] = struct{}{} + } + assert.Len(t, results, runs) +} diff --git a/fosite/token/hmac/hmacsha.go b/fosite/token/hmac/hmacsha.go new file mode 100644 index 00000000000..cad6919e4e5 --- /dev/null +++ b/fosite/token/hmac/hmacsha.go @@ -0,0 +1,201 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package hmac is the default implementation for generating and validating challenges. It uses SHA-512/256 to +// generate and validate challenges. + +package hmac + +import ( + "context" + "crypto/hmac" + "crypto/sha512" + "encoding/base64" + "fmt" + "strings" + "sync" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" +) + +type HMACStrategyConfigurator interface { + fosite.TokenEntropyProvider + fosite.GlobalSecretProvider + fosite.RotatedGlobalSecretsProvider + fosite.HMACHashingProvider +} + +// HMACStrategy is responsible for generating and validating challenges. +type HMACStrategy struct { + sync.Mutex + Config HMACStrategyConfigurator +} + +const ( + minimumEntropy = 32 + minimumSecretLength = 32 +) + +var b64 = base64.URLEncoding.WithPadding(base64.NoPadding) + +// Generate generates a token and a matching signature or returns an error. +// This method implements rfc6819 Section 5.1.4.2.2: Use High Entropy for Secrets. +func (c *HMACStrategy) Generate(ctx context.Context) (string, string, error) { + c.Lock() + defer c.Unlock() + + globalSecret, err := c.Config.GetGlobalSecret(ctx) + if err != nil { + return "", "", err + } + + if len(globalSecret) < minimumSecretLength { + return "", "", errors.Errorf("secret for signing HMAC-SHA512/256 is expected to be 32 byte long, got %d byte", len(globalSecret)) + } + + var signingKey [32]byte + copy(signingKey[:], globalSecret) + + entropy := c.Config.GetTokenEntropy(ctx) + if entropy < minimumEntropy { + entropy = minimumEntropy + } + + // When creating tokens not intended for usage by human users (e.g., + // client secrets or token handles), the authorization server should + // include a reasonable level of entropy in order to mitigate the risk + // of guessing attacks. The token value should be >=128 bits long and + // constructed from a cryptographically strong random or pseudo-random + // number sequence (see [RFC4086] for best current practice) generated + // by the authorization server. 
+ tokenKey, err := RandomBytes(entropy) + if err != nil { + return "", "", errorsx.WithStack(err) + } + + signature := c.generateHMAC(ctx, tokenKey, &signingKey) + + encodedSignature := b64.EncodeToString(signature) + encodedToken := fmt.Sprintf("%s.%s", b64.EncodeToString(tokenKey), encodedSignature) + return encodedToken, encodedSignature, nil +} + +// Validate validates a token and returns its signature or an error if the token is not valid. +func (c *HMACStrategy) Validate(ctx context.Context, token string) (err error) { + var keys [][]byte + + globalSecret, err := c.Config.GetGlobalSecret(ctx) + if err != nil { + return err + } + + if len(globalSecret) > 0 { + keys = append(keys, globalSecret) + } + + rotatedSecrets, err := c.Config.GetRotatedGlobalSecrets(ctx) + if err != nil { + return err + } + + keys = append(keys, rotatedSecrets...) + + if len(keys) == 0 { + return errors.New("a secret for signing HMAC-SHA512/256 is expected to be defined, but none were") + } + + for _, key := range keys { + if err = c.validate(ctx, key, token); err == nil { + return nil + } else if errors.Is(err, fosite.ErrTokenSignatureMismatch) { + // Continue to the next key. The error will be returned if it is the last key. + } else { + return err + } + } + + return err +} + +func (c *HMACStrategy) validate(ctx context.Context, secret []byte, token string) error { + if len(secret) < minimumSecretLength { + return errors.Errorf("secret for signing HMAC-SHA512/256 is expected to be 32 byte long, got %d byte", len(secret)) + } + + var signingKey [32]byte + copy(signingKey[:], secret) + + tokenKey, tokenSignature, ok := strings.Cut(token, ".") + if !ok { + return errorsx.WithStack(fosite.ErrInvalidTokenFormat) + } + + if tokenKey == "" || tokenSignature == "" { + return errorsx.WithStack(fosite.ErrInvalidTokenFormat) + } + + decodedTokenSignature, err := b64.DecodeString(tokenSignature) + if err != nil { + return errorsx.WithStack(err) + } + + decodedTokenKey, err := b64.DecodeString(tokenKey) + if err != nil { + return errorsx.WithStack(err) + } + + expectedMAC := c.generateHMAC(ctx, decodedTokenKey, &signingKey) + if !hmac.Equal(expectedMAC, decodedTokenSignature) { + // Hash is invalid + return errorsx.WithStack(fosite.ErrTokenSignatureMismatch) + } + + return nil +} + +func (*HMACStrategy) Signature(token string) string { + split := strings.Split(token, ".") + if len(split) != 2 { + return "" + } + return split[1] +} + +// GenerateHMACForString returns an HMAC for a string +func (c *HMACStrategy) GenerateHMACForString(ctx context.Context, text string) (string, error) { + var signingKey [32]byte + + secrets, err := c.Config.GetGlobalSecret(ctx) + if err != nil { + return "", err + } + + if len(secrets) < minimumSecretLength { + return "", errors.Errorf("secret for signing HMAC-SHA512/256 is expected to be 32 byte long, got %d byte", len(secrets)) + } + copy(signingKey[:], secrets) + + bytes := []byte(text) + hashBytes := c.generateHMAC(ctx, bytes, &signingKey) + + b64 := base64.RawURLEncoding.EncodeToString(hashBytes) + return b64, nil +} + +func (c *HMACStrategy) generateHMAC(ctx context.Context, data []byte, key *[32]byte) []byte { + hasher := c.Config.GetHMACHasher(ctx) + if hasher == nil { + hasher = sha512.New512_256 + } + h := hmac.New(hasher, key[:]) + // sha512.digest.Write() always returns nil for err, the panic should never happen + _, err := h.Write(data) + if err != nil { + panic(err) + } + return h.Sum(nil) +} diff --git a/fosite/token/hmac/hmacsha_test.go b/fosite/token/hmac/hmacsha_test.go new 
file mode 100644 index 00000000000..642eab97f6f --- /dev/null +++ b/fosite/token/hmac/hmacsha_test.go @@ -0,0 +1,178 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package hmac + +import ( + "context" + "crypto/sha512" + "fmt" + "testing" + + "github.com/ory/hydra/v2/fosite" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGenerateFailsWithShortCredentials(t *testing.T) { + cg := HMACStrategy{Config: &fosite.Config{GlobalSecret: []byte("foo")}} + challenge, signature, err := cg.Generate(context.Background()) + require.Error(t, err) + require.Empty(t, challenge) + require.Empty(t, signature) +} + +func TestGenerate(t *testing.T) { + ctx := context.Background() + config := &fosite.Config{ + GlobalSecret: []byte("1234567890123456789012345678901234567890"), + } + cg := HMACStrategy{Config: config} + + for _, entropy := range []int{32, 64} { + t.Run(fmt.Sprintf("entropy=%d", entropy), func(t *testing.T) { + config.TokenEntropy = entropy + + token, signature, err := cg.Generate(ctx) + require.NoError(t, err) + require.NotEmpty(t, token) + require.NotEmpty(t, signature) + + err = cg.Validate(ctx, token) + require.NoError(t, err) + + actualSignature := cg.Signature(token) + assert.Equal(t, signature, actualSignature) + + config.GlobalSecret = append([]byte("not"), config.GlobalSecret...) + err = cg.Validate(ctx, token) + assert.ErrorIs(t, err, fosite.ErrTokenSignatureMismatch) + }) + } +} + +func TestSignature(t *testing.T) { + cg := HMACStrategy{} + + for token, expected := range map[string]string{ + "": "", + "foo": "", + "foo.bar": "bar", + "foo.bar.baz": "", + ".": "", + } { + assert.Equal(t, expected, cg.Signature(token)) + } +} + +func TestValidateSignatureRejects(t *testing.T) { + cg := HMACStrategy{ + Config: &fosite.Config{GlobalSecret: []byte("1234567890123456789012345678901234567890")}, + } + for k, c := range []string{ + "", + " ", + ".", + "foo.", + ".foo", + } { + t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { + err := cg.Validate(context.Background(), c) + assert.ErrorIs(t, err, fosite.ErrInvalidTokenFormat) + }) + } + + err := cg.Validate(context.Background(), "foo.bar") + assert.ErrorIs(t, err, fosite.ErrTokenSignatureMismatch) +} + +func TestValidateWithRotatedKey(t *testing.T) { + ctx := context.Background() + oldGlobalSecret := []byte("1234567890123456789012345678901234567890") + old := HMACStrategy{Config: &fosite.Config{GlobalSecret: oldGlobalSecret}} + now := HMACStrategy{Config: &fosite.Config{ + GlobalSecret: []byte("0000000090123456789012345678901234567890"), + RotatedGlobalSecrets: [][]byte{ + []byte("abcdefgh90123456789012345678901234567890"), + oldGlobalSecret, + }, + }} + + token, _, err := old.Generate(ctx) + require.NoError(t, err) + + assert.ErrorIs(t, now.Validate(ctx, "thisisatoken.withaninvalidsignature"), fosite.ErrTokenSignatureMismatch) + assert.NoError(t, now.Validate(ctx, token)) +} + +func TestValidateWithRotatedKeyInvalid(t *testing.T) { + ctx := context.Background() + oldGlobalSecret := []byte("1234567890123456789012345678901234567890") + old := HMACStrategy{Config: &fosite.Config{GlobalSecret: oldGlobalSecret}} + now := HMACStrategy{Config: &fosite.Config{ + GlobalSecret: []byte("0000000090123456789012345678901234567890"), + RotatedGlobalSecrets: [][]byte{ + []byte("abcdefgh90123456789012345678901"), + oldGlobalSecret, + }}, + } + + token, _, err := old.Generate(ctx) + require.NoError(t, err) + + require.EqualError(t, 
now.Validate(ctx, token), "secret for signing HMAC-SHA512/256 is expected to be 32 byte long, got 31 byte") + + require.EqualError(t, (&HMACStrategy{Config: &fosite.Config{}}).Validate(ctx, token), "a secret for signing HMAC-SHA512/256 is expected to be defined, but none were") +} + +func TestCustomHMAC(t *testing.T) { + ctx := context.Background() + globalSecret := []byte("1234567890123456789012345678901234567890") + defaultHasher := HMACStrategy{Config: &fosite.Config{ + GlobalSecret: globalSecret, + }} + sha512Hasher := HMACStrategy{Config: &fosite.Config{ + GlobalSecret: globalSecret, + HMACHasher: sha512.New, + }} + + token, _, err := defaultHasher.Generate(ctx) + require.NoError(t, err) + require.ErrorIs(t, sha512Hasher.Validate(ctx, token), fosite.ErrTokenSignatureMismatch) + + token512, _, err := sha512Hasher.Generate(ctx) + require.NoError(t, err) + require.NoError(t, sha512Hasher.Validate(ctx, token512)) + require.ErrorIs(t, defaultHasher.Validate(ctx, token512), fosite.ErrTokenSignatureMismatch) +} + +func TestGenerateFromString(t *testing.T) { + cg := HMACStrategy{Config: &fosite.Config{ + GlobalSecret: []byte("1234567890123456789012345678901234567890")}, + } + for _, c := range []struct { + text string + hash string + }{ + { + text: "", + hash: "-n7EqD-bXkY3yYMH-ctEAGV8XLkU7Y6Bo6pbyT1agGA", + }, + { + text: " ", + hash: "zXJvonHTNSOOGj_QKl4RpIX_zXgD2YfXUfwuDKaTTIg", + }, + { + text: "Test", + hash: "TMeEaHS-cDC2nijiesCNtsOyBqHHtzWqAcWvceQT50g", + }, + { + text: "AnotherTest1234", + hash: "zHYDOZGjzhVjx5r8RlBhpnJemX5JxEEBUjVT01n3IFM", + }, + } { + hash, _ := cg.GenerateHMACForString(context.Background(), c.text) + assert.Equal(t, c.hash, hash) + } +} diff --git a/fosite/token/jwt/claims.go b/fosite/token/jwt/claims.go new file mode 100644 index 00000000000..f60bbcf25e9 --- /dev/null +++ b/fosite/token/jwt/claims.go @@ -0,0 +1,77 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import "time" + +// Mapper is the interface used internally to map key-value pairs +type Mapper interface { + ToMap() map[string]interface{} + Add(key string, value interface{}) + Get(key string) interface{} +} + +// ToString will return a string representation of a map +func ToString(i interface{}) string { + if i == nil { + return "" + } + + if s, ok := i.(string); ok { + return s + } + + if sl, ok := i.([]string); ok { + if len(sl) == 1 { + return sl[0] + } + } + + return "" +} + +// ToTime will try to convert a given input to a time.Time structure +func ToTime(i interface{}) time.Time { + if i == nil { + return time.Time{} + } + + if t, ok := i.(int64); ok { + return time.Unix(t, 0).UTC() + } else if t, ok := i.(float64); ok { + return time.Unix(int64(t), 0).UTC() + } else if t, ok := i.(time.Time); ok { + return t + } + + return time.Time{} +} + +// Filter will filter out elements based on keys in a given input map na key-slice +func Filter(elements map[string]interface{}, keys ...string) map[string]interface{} { + var keyIdx = make(map[string]bool) + var result = make(map[string]interface{}) + + for _, key := range keys { + keyIdx[key] = true + } + + for k, e := range elements { + if _, ok := keyIdx[k]; !ok { + result[k] = e + } + } + + return result +} + +// Copy will copy all elements in a map and return a new representational map +func Copy(elements map[string]interface{}) (result map[string]interface{}) { + result = make(map[string]interface{}, len(elements)) + for k, v := range elements { + result[k] = v + } + + return result +} diff --git 
a/fosite/token/jwt/claims_id_token.go b/fosite/token/jwt/claims_id_token.go new file mode 100644 index 00000000000..225ae1f6e15 --- /dev/null +++ b/fosite/token/jwt/claims_id_token.go @@ -0,0 +1,132 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "time" + + "github.com/google/uuid" +) + +// IDTokenClaims represent the claims used in open id connect requests +type IDTokenClaims struct { + JTI string `json:"jti"` + Issuer string `json:"iss"` + Subject string `json:"sub"` + Audience []string `json:"aud"` + Nonce string `json:"nonce"` + ExpiresAt time.Time `json:"exp"` + IssuedAt time.Time `json:"iat"` + RequestedAt time.Time `json:"rat"` + AuthTime time.Time `json:"auth_time"` + AccessTokenHash string `json:"at_hash"` + AuthenticationContextClassReference string `json:"acr"` + AuthenticationMethodsReferences []string `json:"amr"` + CodeHash string `json:"c_hash"` + Extra map[string]interface{} `json:"ext"` +} + +// ToMap will transform the headers to a map structure +func (c *IDTokenClaims) ToMap() map[string]interface{} { + var ret = Copy(c.Extra) + + if c.Subject != "" { + ret["sub"] = c.Subject + } else { + delete(ret, "sub") + } + + if c.Issuer != "" { + ret["iss"] = c.Issuer + } else { + delete(ret, "iss") + } + + if c.JTI != "" { + ret["jti"] = c.JTI + } else { + ret["jti"] = uuid.New().String() + } + + if len(c.Audience) > 0 { + ret["aud"] = c.Audience + } else { + ret["aud"] = []string{} + } + + if !c.IssuedAt.IsZero() { + ret["iat"] = c.IssuedAt.Unix() + } else { + delete(ret, "iat") + } + + if !c.ExpiresAt.IsZero() { + ret["exp"] = c.ExpiresAt.Unix() + } else { + delete(ret, "exp") + } + + if !c.RequestedAt.IsZero() { + ret["rat"] = c.RequestedAt.Unix() + } else { + delete(ret, "rat") + } + + if len(c.Nonce) > 0 { + ret["nonce"] = c.Nonce + } else { + delete(ret, "nonce") + } + + if len(c.AccessTokenHash) > 0 { + ret["at_hash"] = c.AccessTokenHash + } else { + delete(ret, "at_hash") + } + + if len(c.CodeHash) > 0 { + ret["c_hash"] = c.CodeHash + } else { + delete(ret, "c_hash") + } + + if !c.AuthTime.IsZero() { + ret["auth_time"] = c.AuthTime.Unix() + } else { + delete(ret, "auth_time") + } + + if len(c.AuthenticationContextClassReference) > 0 { + ret["acr"] = c.AuthenticationContextClassReference + } else { + delete(ret, "acr") + } + + if len(c.AuthenticationMethodsReferences) > 0 { + ret["amr"] = c.AuthenticationMethodsReferences + } else { + delete(ret, "amr") + } + + return ret + +} + +// Add will add a key-value pair to the extra field +func (c *IDTokenClaims) Add(key string, value interface{}) { + if c.Extra == nil { + c.Extra = make(map[string]interface{}) + } + c.Extra[key] = value +} + +// Get will get a value from the extra field based on a given key +func (c *IDTokenClaims) Get(key string) interface{} { + return c.ToMap()[key] +} + +// ToMapClaims will return a jwt-go MapClaims representation +func (c IDTokenClaims) ToMapClaims() MapClaims { + return c.ToMap() +} diff --git a/fosite/token/jwt/claims_id_token_test.go b/fosite/token/jwt/claims_id_token_test.go new file mode 100644 index 00000000000..bb0554a7c63 --- /dev/null +++ b/fosite/token/jwt/claims_id_token_test.go @@ -0,0 +1,78 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + . 
"github.com/ory/hydra/v2/fosite/token/jwt" +) + +func TestIDTokenAssert(t *testing.T) { + assert.NoError(t, (&IDTokenClaims{ExpiresAt: time.Now().UTC().Add(time.Hour)}). + ToMapClaims().Valid()) + assert.Error(t, (&IDTokenClaims{ExpiresAt: time.Now().UTC().Add(-time.Hour)}). + ToMapClaims().Valid()) + + assert.NotEmpty(t, (new(IDTokenClaims)).ToMapClaims()["jti"]) +} + +func TestIDTokenClaimsToMap(t *testing.T) { + idTokenClaims := &IDTokenClaims{ + JTI: "foo-id", + Subject: "peter", + IssuedAt: time.Now().UTC().Round(time.Second), + Issuer: "fosite", + Audience: []string{"tests"}, + ExpiresAt: time.Now().UTC().Add(time.Hour).Round(time.Second), + AuthTime: time.Now().UTC(), + RequestedAt: time.Now().UTC(), + AccessTokenHash: "foobar", + CodeHash: "barfoo", + AuthenticationContextClassReference: "acr", + AuthenticationMethodsReferences: []string{"amr"}, + Extra: map[string]interface{}{ + "foo": "bar", + "baz": "bar", + }, + } + assert.Equal(t, map[string]interface{}{ + "jti": idTokenClaims.JTI, + "sub": idTokenClaims.Subject, + "iat": idTokenClaims.IssuedAt.Unix(), + "rat": idTokenClaims.RequestedAt.Unix(), + "iss": idTokenClaims.Issuer, + "aud": idTokenClaims.Audience, + "exp": idTokenClaims.ExpiresAt.Unix(), + "foo": idTokenClaims.Extra["foo"], + "baz": idTokenClaims.Extra["baz"], + "at_hash": idTokenClaims.AccessTokenHash, + "c_hash": idTokenClaims.CodeHash, + "auth_time": idTokenClaims.AuthTime.Unix(), + "acr": idTokenClaims.AuthenticationContextClassReference, + "amr": idTokenClaims.AuthenticationMethodsReferences, + }, idTokenClaims.ToMap()) + + idTokenClaims.Nonce = "foobar" + assert.Equal(t, map[string]interface{}{ + "jti": idTokenClaims.JTI, + "sub": idTokenClaims.Subject, + "iat": idTokenClaims.IssuedAt.Unix(), + "rat": idTokenClaims.RequestedAt.Unix(), + "iss": idTokenClaims.Issuer, + "aud": idTokenClaims.Audience, + "exp": idTokenClaims.ExpiresAt.Unix(), + "foo": idTokenClaims.Extra["foo"], + "baz": idTokenClaims.Extra["baz"], + "at_hash": idTokenClaims.AccessTokenHash, + "c_hash": idTokenClaims.CodeHash, + "auth_time": idTokenClaims.AuthTime.Unix(), + "acr": idTokenClaims.AuthenticationContextClassReference, + "amr": idTokenClaims.AuthenticationMethodsReferences, + "nonce": idTokenClaims.Nonce, + }, idTokenClaims.ToMap()) +} diff --git a/fosite/token/jwt/claims_jwt.go b/fosite/token/jwt/claims_jwt.go new file mode 100644 index 00000000000..7605c38fcc1 --- /dev/null +++ b/fosite/token/jwt/claims_jwt.go @@ -0,0 +1,242 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "strings" + "time" + + "github.com/google/uuid" +) + +// Enum for different types of scope encoding. +type JWTScopeFieldEnum int + +const ( + JWTScopeFieldUnset JWTScopeFieldEnum = iota + JWTScopeFieldList + JWTScopeFieldString + JWTScopeFieldBoth +) + +type JWTClaimsDefaults struct { + ExpiresAt time.Time + IssuedAt time.Time + Issuer string + Scope []string +} + +type JWTClaimsContainer interface { + // With returns a copy of itself with expiresAt, scope, audience set to the given values. + With(expiry time.Time, scope, audience []string) JWTClaimsContainer + + // WithDefaults returns a copy of itself with issuedAt and issuer set to the given default values. If those + // values are already set in the claims, they will not be updated. + WithDefaults(iat time.Time, issuer string) JWTClaimsContainer + + // WithScopeField configures how a scope field should be represented in JWT. 
+ WithScopeField(scopeField JWTScopeFieldEnum) JWTClaimsContainer + + // ToMapClaims returns the claims as a github.com/dgrijalva/jwt-go.MapClaims type. + ToMapClaims() MapClaims +} + +// JWTClaims represent a token's claims. +type JWTClaims struct { + Subject string + Issuer string + Audience []string + JTI string + IssuedAt time.Time + NotBefore time.Time + ExpiresAt time.Time + Scope []string + Extra map[string]interface{} + ScopeField JWTScopeFieldEnum +} + +func (c *JWTClaims) With(expiry time.Time, scope, audience []string) JWTClaimsContainer { + c.ExpiresAt = expiry + c.Scope = scope + c.Audience = audience + return c +} + +func (c *JWTClaims) WithDefaults(iat time.Time, issuer string) JWTClaimsContainer { + if c.IssuedAt.IsZero() { + c.IssuedAt = iat + } + + if c.Issuer == "" { + c.Issuer = issuer + } + return c +} + +func (c *JWTClaims) WithScopeField(scopeField JWTScopeFieldEnum) JWTClaimsContainer { + c.ScopeField = scopeField + return c +} + +// ToMap will transform the headers to a map structure +func (c *JWTClaims) ToMap() map[string]interface{} { + var ret = Copy(c.Extra) + + if c.Subject != "" { + ret["sub"] = c.Subject + } else { + delete(ret, "sub") + } + + if c.Issuer != "" { + ret["iss"] = c.Issuer + } else { + delete(ret, "iss") + } + + if c.JTI != "" { + ret["jti"] = c.JTI + } else { + ret["jti"] = uuid.New().String() + } + + if len(c.Audience) > 0 { + ret["aud"] = c.Audience + } else { + ret["aud"] = []string{} + } + + if !c.IssuedAt.IsZero() { + ret["iat"] = c.IssuedAt.Unix() + } else { + delete(ret, "iat") + } + + if !c.NotBefore.IsZero() { + ret["nbf"] = c.NotBefore.Unix() + } else { + delete(ret, "nbf") + } + + if !c.ExpiresAt.IsZero() { + ret["exp"] = c.ExpiresAt.Unix() + } else { + delete(ret, "exp") + } + + if c.Scope != nil { + // ScopeField default (when value is JWTScopeFieldUnset) is the list for backwards compatibility with old versions of fosite. 
+ if c.ScopeField == JWTScopeFieldUnset || c.ScopeField == JWTScopeFieldList || c.ScopeField == JWTScopeFieldBoth { + ret["scp"] = c.Scope + } + if c.ScopeField == JWTScopeFieldString || c.ScopeField == JWTScopeFieldBoth { + ret["scope"] = strings.Join(c.Scope, " ") + } + } else { + delete(ret, "scp") + delete(ret, "scope") + } + + return ret +} + +// FromMap will set the claims based on a mapping +func (c *JWTClaims) FromMap(m map[string]interface{}) { + c.Extra = make(map[string]interface{}) + for k, v := range m { + switch k { + case "jti": + if s, ok := v.(string); ok { + c.JTI = s + } + case "sub": + if s, ok := v.(string); ok { + c.Subject = s + } + case "iss": + if s, ok := v.(string); ok { + c.Issuer = s + } + case "aud": + if s, ok := v.(string); ok { + c.Audience = []string{s} + } else if s, ok := v.([]string); ok { + c.Audience = s + } + case "iat": + c.IssuedAt = toTime(v, c.IssuedAt) + case "nbf": + c.NotBefore = toTime(v, c.NotBefore) + case "exp": + c.ExpiresAt = toTime(v, c.ExpiresAt) + case "scp": + switch s := v.(type) { + case []string: + c.Scope = s + if c.ScopeField == JWTScopeFieldString { + c.ScopeField = JWTScopeFieldBoth + } else if c.ScopeField == JWTScopeFieldUnset { + c.ScopeField = JWTScopeFieldList + } + case []interface{}: + c.Scope = make([]string, len(s)) + for i, vi := range s { + if s, ok := vi.(string); ok { + c.Scope[i] = s + } + } + if c.ScopeField == JWTScopeFieldString { + c.ScopeField = JWTScopeFieldBoth + } else if c.ScopeField == JWTScopeFieldUnset { + c.ScopeField = JWTScopeFieldList + } + } + case "scope": + if s, ok := v.(string); ok { + c.Scope = strings.Split(s, " ") + if c.ScopeField == JWTScopeFieldList { + c.ScopeField = JWTScopeFieldBoth + } else if c.ScopeField == JWTScopeFieldUnset { + c.ScopeField = JWTScopeFieldString + } + } + default: + c.Extra[k] = v + } + } +} + +func toTime(v interface{}, def time.Time) (t time.Time) { + t = def + switch a := v.(type) { + case float64: + t = time.Unix(int64(a), 0).UTC() + case int64: + t = time.Unix(a, 0).UTC() + } + return +} + +// Add will add a key-value pair to the extra field +func (c *JWTClaims) Add(key string, value interface{}) { + if c.Extra == nil { + c.Extra = make(map[string]interface{}) + } + c.Extra[key] = value +} + +// Get will get a value from the extra field based on a given key +func (c JWTClaims) Get(key string) interface{} { + return c.ToMap()[key] +} + +// ToMapClaims will return a jwt-go MapClaims representation +func (c JWTClaims) ToMapClaims() MapClaims { + return c.ToMap() +} + +// FromMapClaims will populate claims from a jwt-go MapClaims representation +func (c *JWTClaims) FromMapClaims(mc MapClaims) { + c.FromMap(mc) +} diff --git a/fosite/token/jwt/claims_jwt_test.go b/fosite/token/jwt/claims_jwt_test.go new file mode 100644 index 00000000000..5f101e95218 --- /dev/null +++ b/fosite/token/jwt/claims_jwt_test.go @@ -0,0 +1,97 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + + . 
"github.com/ory/hydra/v2/fosite/token/jwt" +) + +var jwtClaims = &JWTClaims{ + Subject: "peter", + IssuedAt: time.Now().UTC().Round(time.Second), + Issuer: "fosite", + NotBefore: time.Now().UTC().Round(time.Second), + Audience: []string{"tests"}, + ExpiresAt: time.Now().UTC().Add(time.Hour).Round(time.Second), + JTI: "abcdef", + Scope: []string{"email", "offline"}, + Extra: map[string]interface{}{ + "foo": "bar", + "baz": "bar", + }, + ScopeField: JWTScopeFieldList, +} + +var jwtClaimsMap = map[string]interface{}{ + "sub": jwtClaims.Subject, + "iat": jwtClaims.IssuedAt.Unix(), + "iss": jwtClaims.Issuer, + "nbf": jwtClaims.NotBefore.Unix(), + "aud": jwtClaims.Audience, + "exp": jwtClaims.ExpiresAt.Unix(), + "jti": jwtClaims.JTI, + "scp": []string{"email", "offline"}, + "foo": jwtClaims.Extra["foo"], + "baz": jwtClaims.Extra["baz"], +} + +func TestClaimAddGetString(t *testing.T) { + jwtClaims.Add("foo", "bar") + assert.Equal(t, "bar", jwtClaims.Get("foo")) +} + +func TestClaimsToMapSetsID(t *testing.T) { + assert.NotEmpty(t, (&JWTClaims{}).ToMap()["jti"]) +} + +func TestAssert(t *testing.T) { + assert.Nil(t, (&JWTClaims{ExpiresAt: time.Now().UTC().Add(time.Hour)}). + ToMapClaims().Valid()) + assert.NotNil(t, (&JWTClaims{ExpiresAt: time.Now().UTC().Add(-2 * time.Hour)}). + ToMapClaims().Valid()) + assert.NotNil(t, (&JWTClaims{NotBefore: time.Now().UTC().Add(time.Hour)}). + ToMapClaims().Valid()) + assert.Nil(t, (&JWTClaims{NotBefore: time.Now().UTC().Add(-time.Hour)}). + ToMapClaims().Valid()) + assert.Nil(t, (&JWTClaims{ExpiresAt: time.Now().UTC().Add(time.Hour), + NotBefore: time.Now().UTC().Add(-time.Hour)}).ToMapClaims().Valid()) +} + +func TestClaimsToMap(t *testing.T) { + assert.Equal(t, jwtClaimsMap, jwtClaims.ToMap()) +} + +func TestClaimsFromMap(t *testing.T) { + var claims JWTClaims + claims.FromMap(jwtClaimsMap) + assert.Equal(t, jwtClaims, &claims) +} + +func TestScopeFieldString(t *testing.T) { + jwtClaimsWithString := jwtClaims.WithScopeField(JWTScopeFieldString) + // Making a copy of jwtClaimsMap. 
+ jwtClaimsMapWithString := jwtClaims.ToMap() + delete(jwtClaimsMapWithString, "scp") + jwtClaimsMapWithString["scope"] = "email offline" + assert.Equal(t, jwtClaimsMapWithString, map[string]interface{}(jwtClaimsWithString.ToMapClaims())) + var claims JWTClaims + claims.FromMap(jwtClaimsMapWithString) + assert.Equal(t, jwtClaimsWithString, &claims) +} + +func TestScopeFieldBoth(t *testing.T) { + jwtClaimsWithBoth := jwtClaims.WithScopeField(JWTScopeFieldBoth) + // Making a copy of jwtClaimsMap + jwtClaimsMapWithBoth := jwtClaims.ToMap() + jwtClaimsMapWithBoth["scope"] = "email offline" + assert.Equal(t, jwtClaimsMapWithBoth, map[string]interface{}(jwtClaimsWithBoth.ToMapClaims())) + var claims JWTClaims + claims.FromMap(jwtClaimsMapWithBoth) + assert.Equal(t, jwtClaimsWithBoth, &claims) +} diff --git a/fosite/token/jwt/claims_test.go b/fosite/token/jwt/claims_test.go new file mode 100644 index 00000000000..739020d671a --- /dev/null +++ b/fosite/token/jwt/claims_test.go @@ -0,0 +1,28 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestToString(t *testing.T) { + assert.Equal(t, "foo", ToString("foo")) + assert.Equal(t, "foo", ToString([]string{"foo"})) + assert.Empty(t, ToString(1234)) + assert.Empty(t, ToString(nil)) +} + +func TestToTime(t *testing.T) { + assert.Equal(t, time.Time{}, ToTime(nil)) + assert.Equal(t, time.Time{}, ToTime("1234")) + + now := time.Now().UTC().Round(time.Second) + assert.Equal(t, now, ToTime(now)) + assert.Equal(t, now, ToTime(now.Unix())) + assert.Equal(t, now, ToTime(float64(now.Unix()))) +} diff --git a/fosite/token/jwt/header.go b/fosite/token/jwt/header.go new file mode 100644 index 00000000000..8fdaa3ddf51 --- /dev/null +++ b/fosite/token/jwt/header.go @@ -0,0 +1,46 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +// Headers is the jwt headers +type Headers struct { + Extra map[string]interface{} `json:"extra"` +} + +func NewHeaders() *Headers { + return &Headers{Extra: map[string]interface{}{}} +} + +// ToMap will transform the headers to a map structure +func (h *Headers) ToMap() map[string]interface{} { + var filter = map[string]bool{"alg": true} + var extra = map[string]interface{}{} + + // filter known values from extra. 
+ for k, v := range h.Extra { + if _, ok := filter[k]; !ok { + extra[k] = v + } + } + + return extra +} + +// Add will add a key-value pair to the extra field +func (h *Headers) Add(key string, value interface{}) { + if h.Extra == nil { + h.Extra = make(map[string]interface{}) + } + h.Extra[key] = value +} + +// Get will get a value from the extra field based on a given key +func (h *Headers) Get(key string) interface{} { + return h.Extra[key] +} + +// ToMapClaims will return a jwt-go MapClaims representation +func (h Headers) ToMapClaims() MapClaims { + return h.ToMap() +} diff --git a/fosite/token/jwt/header_test.go b/fosite/token/jwt/header_test.go new file mode 100644 index 00000000000..3c5a666953c --- /dev/null +++ b/fosite/token/jwt/header_test.go @@ -0,0 +1,19 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestHeaderToMap(t *testing.T) { + header := &Headers{} + header.Add("foo", "bar") + assert.Equal(t, "bar", header.Get("foo")) + assert.Equal(t, map[string]interface{}{ + "foo": "bar", + }, header.ToMap()) +} diff --git a/fosite/token/jwt/jwt.go b/fosite/token/jwt/jwt.go new file mode 100644 index 00000000000..9c5aa57757f --- /dev/null +++ b/fosite/token/jwt/jwt.go @@ -0,0 +1,199 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package jwt is able to generate and validate json web tokens. +// Follows https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32 + +package jwt + +import ( + "context" + "crypto" + "crypto/ecdsa" + "crypto/rsa" + "crypto/sha256" + "strings" + + "github.com/go-jose/go-jose/v3" + + "github.com/ory/x/errorsx" + + "github.com/pkg/errors" +) + +type Signer interface { + Generate(ctx context.Context, claims MapClaims, header Mapper) (string, string, error) + Validate(ctx context.Context, token string) (string, error) + Hash(ctx context.Context, in []byte) ([]byte, error) + Decode(ctx context.Context, token string) (*Token, error) + GetSignature(ctx context.Context, token string) (string, error) + GetSigningMethodLength(ctx context.Context) int +} + +var SHA256HashSize = crypto.SHA256.Size() + +type GetPrivateKeyFunc func(ctx context.Context) (interface{}, error) + +// DefaultSigner is responsible for generating and validating JWT challenges +type DefaultSigner struct { + GetPrivateKey GetPrivateKeyFunc +} + +// Generate generates a new authorize code or returns an error. 
set secret +func (j *DefaultSigner) Generate(ctx context.Context, claims MapClaims, header Mapper) (string, string, error) { + key, err := j.GetPrivateKey(ctx) + if err != nil { + return "", "", err + } + + switch t := key.(type) { + case *jose.JSONWebKey: + return generateToken(claims, header, jose.SignatureAlgorithm(t.Algorithm), t) + case jose.JSONWebKey: + return generateToken(claims, header, jose.SignatureAlgorithm(t.Algorithm), t) + case *rsa.PrivateKey: + return generateToken(claims, header, jose.RS256, t) + case *ecdsa.PrivateKey: + return generateToken(claims, header, jose.ES256, t) + case jose.OpaqueSigner: + switch tt := t.Public().Key.(type) { + case *rsa.PrivateKey: + alg := jose.RS256 + if len(t.Algs()) > 0 { + alg = t.Algs()[0] + } + + return generateToken(claims, header, alg, t) + case *ecdsa.PrivateKey: + alg := jose.ES256 + if len(t.Algs()) > 0 { + alg = t.Algs()[0] + } + + return generateToken(claims, header, alg, t) + default: + return "", "", errors.Errorf("unsupported private / public key pairs: %T, %T", t, tt) + } + default: + return "", "", errors.Errorf("unsupported private key type: %T", t) + } +} + +// Validate validates a token and returns its signature or an error if the token is not valid. +func (j *DefaultSigner) Validate(ctx context.Context, token string) (string, error) { + key, err := j.GetPrivateKey(ctx) + if err != nil { + return "", err + } + + if t, ok := key.(*jose.JSONWebKey); ok { + key = t.Key + } + + switch t := key.(type) { + case *rsa.PrivateKey: + return validateToken(token, t.PublicKey) + case *ecdsa.PrivateKey: + return validateToken(token, t.PublicKey) + case jose.OpaqueSigner: + return validateToken(token, t.Public().Key) + default: + return "", errors.New("Unable to validate token. Invalid PrivateKey type") + } +} + +// Decode will decode a JWT token +func (j *DefaultSigner) Decode(ctx context.Context, token string) (*Token, error) { + key, err := j.GetPrivateKey(ctx) + if err != nil { + return nil, err + } + + if t, ok := key.(*jose.JSONWebKey); ok { + key = t.Key + } + + switch t := key.(type) { + case *rsa.PrivateKey: + return decodeToken(token, t.PublicKey) + case *ecdsa.PrivateKey: + return decodeToken(token, t.PublicKey) + case jose.OpaqueSigner: + return decodeToken(token, t.Public().Key) + default: + return nil, errors.New("Unable to decode token. 
Invalid PrivateKey type") + } +} + +// GetSignature will return the signature of a token +func (j *DefaultSigner) GetSignature(ctx context.Context, token string) (string, error) { + return getTokenSignature(token) +} + +// Hash will return a given hash based on the byte input or an error upon fail +func (j *DefaultSigner) Hash(ctx context.Context, in []byte) ([]byte, error) { + return hashSHA256(in) +} + +// GetSigningMethodLength will return the length of the signing method +func (j *DefaultSigner) GetSigningMethodLength(ctx context.Context) int { + return SHA256HashSize +} + +func generateToken(claims MapClaims, header Mapper, signingMethod jose.SignatureAlgorithm, privateKey interface{}) (rawToken string, sig string, err error) { + if header == nil || claims == nil { + err = errors.New("either claims or header is nil") + return + } + + token := NewWithClaims(signingMethod, claims) + token.Header = assign(token.Header, header.ToMap()) + + rawToken, err = token.SignedString(privateKey) + if err != nil { + return + } + + sig, err = getTokenSignature(rawToken) + return +} + +func decodeToken(token string, verificationKey interface{}) (*Token, error) { + keyFunc := func(*Token) (interface{}, error) { return verificationKey, nil } + return ParseWithClaims(token, MapClaims{}, keyFunc) +} + +func validateToken(tokenStr string, verificationKey interface{}) (string, error) { + _, err := decodeToken(tokenStr, verificationKey) + if err != nil { + return "", err + } + return getTokenSignature(tokenStr) +} + +func getTokenSignature(token string) (string, error) { + split := strings.Split(token, ".") + if len(split) != 3 { + return "", errors.New("header, body and signature must all be set") + } + return split[2], nil +} + +func hashSHA256(in []byte) ([]byte, error) { + hash := sha256.New() + _, err := hash.Write(in) + if err != nil { + return []byte{}, errorsx.WithStack(err) + } + return hash.Sum([]byte{}), nil +} + +func assign(a, b map[string]interface{}) map[string]interface{} { + for k, w := range b { + if _, ok := a[k]; ok { + continue + } + a[k] = w + } + return a +} diff --git a/fosite/token/jwt/jwt_test.go b/fosite/token/jwt/jwt_test.go new file mode 100644 index 00000000000..84deed57bae --- /dev/null +++ b/fosite/token/jwt/jwt_test.go @@ -0,0 +1,219 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "context" + "fmt" + "strings" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var header = &Headers{ + Extra: map[string]interface{}{ + "foo": "bar", + }, +} + +func TestHash(t *testing.T) { + for k, tc := range []struct { + d string + strategy Signer + }{ + { + d: "RS256", + strategy: &DefaultSigner{GetPrivateKey: func(_ context.Context) (interface{}, error) { + return gen.MustRSAKey(), nil + }}, + }, + { + d: "ES256", + strategy: &DefaultSigner{GetPrivateKey: func(_ context.Context) (interface{}, error) { + return gen.MustES256Key(), nil + }}, + }, + } { + t.Run(fmt.Sprintf("case=%d/strategy=%s", k, tc.d), func(t *testing.T) { + in := []byte("foo") + out, err := tc.strategy.Hash(context.TODO(), in) + assert.NoError(t, err) + assert.NotEqual(t, in, out) + }) + } +} + +func TestAssign(t *testing.T) { + for k, c := range [][]map[string]interface{}{ + { + {"foo": "bar"}, + {"baz": "bar"}, + {"foo": "bar", "baz": "bar"}, + }, + { + {"foo": "bar"}, + 
{"foo": "baz"}, + {"foo": "bar"}, + }, + { + {}, + {"foo": "baz"}, + {"foo": "baz"}, + }, + { + {"foo": "bar"}, + {"foo": "baz", "bar": "baz"}, + {"foo": "bar", "bar": "baz"}, + }, + } { + assert.EqualValues(t, c[2], assign(c[0], c[1]), "Case %d", k) + } +} + +func TestGenerateJWT(t *testing.T) { + var key interface{} = gen.MustRSAKey() + for k, tc := range []struct { + d string + strategy Signer + resetKey func(strategy Signer) + }{ + { + d: "DefaultSigner", + strategy: &DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return key, nil + }, + }, + resetKey: func(strategy Signer) { + key = gen.MustRSAKey() + }, + }, + { + d: "ES256JWTStrategy", + strategy: &DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return key, nil + }, + }, + resetKey: func(strategy Signer) { + key = &jose.JSONWebKey{ + KeyID: "test-id", + Key: gen.MustES521Key(), + Algorithm: "ES512", + } + }, + }, + { + d: "ES256JWTStrategy", + strategy: &DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return key, nil + }, + }, + resetKey: func(strategy Signer) { + key = gen.MustES256Key() + }, + }, + } { + t.Run(fmt.Sprintf("case=%d/strategy=%s", k, tc.d), func(t *testing.T) { + claims := &JWTClaims{ + ExpiresAt: time.Now().UTC().Add(time.Hour), + } + + token, sig, err := tc.strategy.Generate(context.TODO(), claims.ToMapClaims(), header) + require.NoError(t, err) + require.NotEmpty(t, token) + require.NotEmpty(t, sig) + + decoded, err := tc.strategy.Decode(context.TODO(), token) + require.NoError(t, err) + require.NotNil(t, decoded) + + if k, ok := key.(*jose.JSONWebKey); ok && k.KeyID != "" { + require.Equal(t, k.KeyID, decoded.Header["kid"]) + } + + _, err = tc.strategy.Validate(context.TODO(), token) + require.NoError(t, err) + + _, err = tc.strategy.Validate(context.TODO(), token+"."+"0123456789") + require.Error(t, err) + + partToken := strings.Split(token, ".")[2] + + _, err = tc.strategy.Validate(context.TODO(), partToken) + require.Error(t, err) + + // Reset private key + tc.resetKey(tc.strategy) + + // Lets validate the exp claim + claims = &JWTClaims{ + ExpiresAt: time.Now().UTC().Add(-time.Hour), + } + token, _, err = tc.strategy.Generate(context.TODO(), claims.ToMapClaims(), header) + require.NoError(t, err) + require.NotNil(t, token) + + _, err = tc.strategy.Validate(context.TODO(), token) + require.Error(t, err) + + // Lets validate the nbf claim + claims = &JWTClaims{ + NotBefore: time.Now().UTC().Add(time.Hour), + } + token, _, err = tc.strategy.Generate(context.TODO(), claims.ToMapClaims(), header) + require.NoError(t, err) + require.NotNil(t, token) + // t.Logf("%s.%s", token, sig) + sig, err = tc.strategy.Validate(context.TODO(), token) + require.Error(t, err) + require.Empty(t, sig, "%s", err) + }) + } +} + +func TestValidateSignatureRejectsJWT(t *testing.T) { + for k, tc := range []struct { + d string + strategy Signer + }{ + { + d: "RS256", + strategy: &DefaultSigner{GetPrivateKey: func(_ context.Context) (interface{}, error) { + return gen.MustRSAKey(), nil + }, + }, + }, + { + d: "ES256", + strategy: &DefaultSigner{ + GetPrivateKey: func(_ context.Context) (interface{}, error) { + return gen.MustES256Key(), nil + }, + }, + }, + } { + t.Run(fmt.Sprintf("case=%d/strategy=%s", k, tc.d), func(t *testing.T) { + for k, c := range []string{ + "", + " ", + "foo.bar", + "foo.", + ".foo", + } { + _, err := tc.strategy.Validate(context.TODO(), c) + assert.Error(t, err) + t.Logf("Passed test case %d", k) + } + }) + } +} 
diff --git a/fosite/token/jwt/map_claims.go b/fosite/token/jwt/map_claims.go new file mode 100644 index 00000000000..d4f75285ce8 --- /dev/null +++ b/fosite/token/jwt/map_claims.go @@ -0,0 +1,195 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "bytes" + "crypto/subtle" + "encoding/json" + "errors" + "time" + + jjson "github.com/go-jose/go-jose/v3/json" + + "github.com/ory/x/errorsx" +) + +var TimeFunc = time.Now + +// MapClaims provides backwards compatible validations not available in `go-jose`. +// It was taken from [here](https://raw.githubusercontent.com/form3tech-oss/jwt-go/master/map_claims.go). +// +// Claims type that uses the map[string]interface{} for JSON decoding +// This is the default claims type if you don't supply one +type MapClaims map[string]interface{} + +// Compares the aud claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyAudience(cmp string, req bool) bool { + var aud []string + switch v := m["aud"].(type) { + case []string: + aud = v + case []interface{}: + for _, a := range v { + vs, ok := a.(string) + if !ok { + return false + } + aud = append(aud, vs) + } + case string: + aud = append(aud, v) + default: + return false + } + return verifyAud(aud, cmp, req) +} + +// Compares the exp claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyExpiresAt(cmp int64, req bool) bool { + if v, ok := m.toInt64("exp"); ok { + return verifyExp(v, cmp, req) + } + return !req +} + +// Compares the iat claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyIssuedAt(cmp int64, req bool) bool { + if v, ok := m.toInt64("iat"); ok { + return verifyIat(v, cmp, req) + } + return !req +} + +// Compares the iss claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyIssuer(cmp string, req bool) bool { + iss, _ := m["iss"].(string) + return verifyIss(iss, cmp, req) +} + +// Compares the nbf claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyNotBefore(cmp int64, req bool) bool { + if v, ok := m.toInt64("nbf"); ok { + return verifyNbf(v, cmp, req) + } + + return !req +} + +func (m MapClaims) toInt64(claim string) (int64, bool) { + switch t := m[claim].(type) { + case float64: + return int64(t), true + case int64: + return t, true + case json.Number: + v, err := t.Int64() + if err == nil { + return v, true + } + vf, err := t.Float64() + if err != nil { + return 0, false + } + + return int64(vf), true + } + return 0, false +} + +// Validates time based claims "exp, iat, nbf". +// There is no accounting for clock skew. +// As well, if any of the above claims are not in the token, it will still +// be considered a valid claim. 
+func (m MapClaims) Valid() error { + vErr := new(ValidationError) + now := TimeFunc().Unix() + + if !m.VerifyExpiresAt(now, false) { + vErr.Inner = errors.New("Token is expired") + vErr.Errors |= ValidationErrorExpired + } + + if !m.VerifyIssuedAt(now, false) { + vErr.Inner = errors.New("Token used before issued") + vErr.Errors |= ValidationErrorIssuedAt + } + + if !m.VerifyNotBefore(now, false) { + vErr.Inner = errors.New("Token is not valid yet") + vErr.Errors |= ValidationErrorNotValidYet + } + + if vErr.valid() { + return nil + } + + return vErr +} + +func (m MapClaims) UnmarshalJSON(b []byte) error { + // This custom unmarshal allows to configure the + // go-jose decoding settings since there is no other way + // see https://github.com/square/go-jose/issues/353. + // If issue is closed with a better solution + // this custom Unmarshal method can be removed + d := jjson.NewDecoder(bytes.NewReader(b)) + mp := map[string]interface{}(m) + d.SetNumberType(jjson.UnmarshalIntOrFloat) + if err := d.Decode(&mp); err != nil { + return errorsx.WithStack(err) + } + + return nil +} + +func verifyAud(aud []string, cmp string, required bool) bool { + if len(aud) == 0 { + return !required + } + + for _, a := range aud { + if subtle.ConstantTimeCompare([]byte(a), []byte(cmp)) != 0 { + return true + } + } + return false +} + +func verifyExp(exp int64, now int64, required bool) bool { + if exp == 0 { + return !required + } + return now <= exp +} + +func verifyIat(iat int64, now int64, required bool) bool { + if iat == 0 { + return !required + } + return now >= iat +} + +func verifyIss(iss string, cmp string, required bool) bool { + if iss == "" { + return !required + } + if subtle.ConstantTimeCompare([]byte(iss), []byte(cmp)) != 0 { + return true + } else { + return false + } +} + +func verifyNbf(nbf int64, now int64, required bool) bool { + if nbf == 0 { + return !required + } + return now >= nbf +} diff --git a/fosite/token/jwt/map_claims_test.go b/fosite/token/jwt/map_claims_test.go new file mode 100644 index 00000000000..cc4f08219e9 --- /dev/null +++ b/fosite/token/jwt/map_claims_test.go @@ -0,0 +1,98 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import "testing" + +// Test taken from taken from [here](https://raw.githubusercontent.com/form3tech-oss/jwt-go/master/map_claims_test.go). 
+func Test_mapClaims_list_aud(t *testing.T) { + mapClaims := MapClaims{ + "aud": []string{"foo"}, + } + want := true + got := mapClaims.VerifyAudience("foo", true) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} + +// This is a custom test to check that an empty +// list with require == false returns valid +func Test_mapClaims_empty_list_aud(t *testing.T) { + mapClaims := MapClaims{ + "aud": []string{}, + } + want := true + got := mapClaims.VerifyAudience("foo", false) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} +func Test_mapClaims_list_interface_aud(t *testing.T) { + mapClaims := MapClaims{ + "aud": []interface{}{"foo"}, + } + want := true + got := mapClaims.VerifyAudience("foo", true) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} +func Test_mapClaims_string_aud(t *testing.T) { + mapClaims := MapClaims{ + "aud": "foo", + } + want := true + got := mapClaims.VerifyAudience("foo", true) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} + +func Test_mapClaims_list_aud_no_match(t *testing.T) { + mapClaims := MapClaims{ + "aud": []string{"bar"}, + } + want := false + got := mapClaims.VerifyAudience("foo", true) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} +func Test_mapClaims_string_aud_fail(t *testing.T) { + mapClaims := MapClaims{ + "aud": "bar", + } + want := false + got := mapClaims.VerifyAudience("foo", true) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} + +func Test_mapClaims_string_aud_no_claim(t *testing.T) { + mapClaims := MapClaims{} + want := false + got := mapClaims.VerifyAudience("foo", true) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} + +func Test_mapClaims_string_aud_no_claim_not_required(t *testing.T) { + mapClaims := MapClaims{} + want := false + got := mapClaims.VerifyAudience("foo", false) + + if want != got { + t.Fatalf("Failed to verify claims, wanted: %v got %v", want, got) + } +} diff --git a/fosite/token/jwt/token.go b/fosite/token/jwt/token.go new file mode 100644 index 00000000000..85acab177f5 --- /dev/null +++ b/fosite/token/jwt/token.go @@ -0,0 +1,244 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "reflect" + + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/jwt" + + "github.com/ory/x/errorsx" +) + +// Token represets a JWT Token +// This token provide an adaptation to +// transit from [jwt-go](https://github.com/dgrijalva/jwt-go) +// to [go-jose](https://github.com/square/go-jose) +// It provides method signatures compatible with jwt-go but implemented +// using go-json +type Token struct { + Header map[string]interface{} // The first segment of the token + Claims MapClaims // The second segment of the token + Method jose.SignatureAlgorithm + valid bool +} + +const ( + SigningMethodNone = jose.SignatureAlgorithm("none") + // This key should be use to correctly sign and verify alg:none JWT tokens + UnsafeAllowNoneSignatureType unsafeNoneMagicConstant = "none signing method allowed" + + JWTHeaderType = jose.HeaderKey("typ") + JWTHeaderTypeValue = "JWT" +) + +type unsafeNoneMagicConstant string + +// Valid informs if the token was verified 
against a given verification key +// and claims are valid +func (t *Token) Valid() bool { + return t.valid +} + +// Claims is a port from https://github.com/dgrijalva/jwt-go/blob/master/claims.go +// including its validation methods, which are not available in go-jose library +// +// > For a type to be a Claims object, it must just have a Valid method that determines +// if the token is invalid for any supported reason +type Claims interface { + Valid() error +} + +// NewWithClaims creates an unverified Token with the given claims and signing method +func NewWithClaims(method jose.SignatureAlgorithm, claims MapClaims) *Token { + return &Token{ + Claims: claims, + Method: method, + Header: map[string]interface{}{}, + } +} + +func (t *Token) toJoseHeader() map[jose.HeaderKey]interface{} { + h := map[jose.HeaderKey]interface{}{ + JWTHeaderType: JWTHeaderTypeValue, + } + for k, v := range t.Header { + h[jose.HeaderKey(k)] = v + } + return h +} + +// SignedString provides a compatible `jwt-go` Token.SignedString method +// +// > Get the complete, signed token +func (t *Token) SignedString(k interface{}) (rawToken string, err error) { + if _, ok := k.(unsafeNoneMagicConstant); ok { + rawToken, err = unsignedToken(t) + return + + } + var signer jose.Signer + key := jose.SigningKey{ + Algorithm: t.Method, + Key: k, + } + opts := &jose.SignerOptions{ExtraHeaders: t.toJoseHeader()} + signer, err = jose.NewSigner(key, opts) + if err != nil { + err = errorsx.WithStack(err) + return + } + + // A explicit conversion from type alias MapClaims + // to map[string]interface{} is required because the + // go-jose CompactSerialize() only support explicit maps + // as claims or structs but not type aliases from maps. + claims := map[string]interface{}(t.Claims) + rawToken, err = jwt.Signed(signer).Claims(claims).CompactSerialize() + if err != nil { + err = &ValidationError{Errors: ValidationErrorClaimsInvalid, Inner: err} + return + } + return +} + +func unsignedToken(t *Token) (string, error) { + t.Header["alg"] = "none" + if _, ok := t.Header[string(JWTHeaderType)]; !ok { + t.Header[string(JWTHeaderType)] = JWTHeaderTypeValue + } + hbytes, err := json.Marshal(&t.Header) + if err != nil { + return "", errorsx.WithStack(err) + } + bbytes, err := json.Marshal(&t.Claims) + if err != nil { + return "", errorsx.WithStack(err) + } + h := base64.RawURLEncoding.EncodeToString(hbytes) + b := base64.RawURLEncoding.EncodeToString(bbytes) + return fmt.Sprintf("%v.%v.", h, b), nil +} + +func newToken(parsedToken *jwt.JSONWebToken, claims MapClaims) (*Token, error) { + token := &Token{Claims: claims} + if len(parsedToken.Headers) != 1 { + return nil, &ValidationError{text: fmt.Sprintf("only one header supported, got %v", len(parsedToken.Headers)), Errors: ValidationErrorMalformed} + } + + // copy headers + h := parsedToken.Headers[0] + token.Header = map[string]interface{}{ + "alg": h.Algorithm, + } + if h.KeyID != "" { + token.Header["kid"] = h.KeyID + } + for k, v := range h.ExtraHeaders { + token.Header[string(k)] = v + } + + token.Method = jose.SignatureAlgorithm(h.Algorithm) + + return token, nil +} + +// Parse methods use this callback function to supply +// the key for verification. The function receives the parsed, +// but unverified Token. This allows you to use properties in the +// Header of the token (such as `kid`) to identify which key to use. 
+type Keyfunc func(*Token) (interface{}, error) + +func Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { + return ParseWithClaims(tokenString, MapClaims{}, keyFunc) +} + +// Parse, validate, and return a token. +// keyFunc will receive the parsed token and should return the key for validating. +// If everything is kosher, err will be nil +func ParseWithClaims(rawToken string, claims MapClaims, keyFunc Keyfunc) (*Token, error) { + // Parse the token. + parsedToken, err := jwt.ParseSigned(rawToken) + if err != nil { + return &Token{}, &ValidationError{Errors: ValidationErrorMalformed, text: err.Error()} + } + + // fill unverified claims + // This conversion is required because go-jose supports + // only marshalling structs or maps but not alias types from maps + // + // The KeyFunc(*Token) function requires the claims to be set into the + // Token, that is an unverified token, therefore an UnsafeClaimsWithoutVerification is done first + // then with the returned key, the claims gets verified. + if err := parsedToken.UnsafeClaimsWithoutVerification(&claims); err != nil { + return nil, &ValidationError{Errors: ValidationErrorClaimsInvalid, text: err.Error()} + } + + // creates an usafe token + token, err := newToken(parsedToken, claims) + if err != nil { + return nil, err + } + + if keyFunc == nil { + // keyFunc was not provided. short circuiting validation + return token, &ValidationError{Errors: ValidationErrorUnverifiable, text: "no Keyfunc was provided."} + } + + // Call keyFunc callback to get verification key + verificationKey, err := keyFunc(token) + if err != nil { + // keyFunc returned an error + if ve, ok := err.(*ValidationError); ok { + return token, ve + } + return token, &ValidationError{Errors: ValidationErrorUnverifiable, Inner: err} + } + if verificationKey == nil { + return token, &ValidationError{Errors: ValidationErrorSignatureInvalid, text: "keyfunc returned a nil verification key"} + } + // To verify signature go-jose requires a pointer to + // public key instead of the public key value. + // The pointer values provides that pointer. + // E.g. 
transform rsa.PublicKey -> *rsa.PublicKey + verificationKey = pointer(verificationKey) + + // verify signature with returned key + _, validNoneKey := verificationKey.(*unsafeNoneMagicConstant) + isSignedToken := !(token.Method == SigningMethodNone && validNoneKey) + if isSignedToken { + if err := parsedToken.Claims(verificationKey, &claims); err != nil { + return token, &ValidationError{Errors: ValidationErrorSignatureInvalid, text: err.Error()} + } + } + + // Validate claims + // This validation is performed to be backwards compatible + // with jwt-go library behavior + if err := claims.Valid(); err != nil { + if e, ok := err.(*ValidationError); !ok { + err = &ValidationError{Inner: e, Errors: ValidationErrorClaimsInvalid} + } + return token, err + } + + // set token as verified and validated + token.valid = true + return token, nil +} + +// if underline value of v is not a pointer +// it creates a pointer of it and returns it +func pointer(v interface{}) interface{} { + if reflect.ValueOf(v).Kind() != reflect.Ptr { + value := reflect.New(reflect.ValueOf(v).Type()) + value.Elem().Set(reflect.ValueOf(v)) + return value.Interface() + } + return v +} diff --git a/fosite/token/jwt/token_test.go b/fosite/token/jwt/token_test.go new file mode 100644 index 00000000000..f9865f3ffc1 --- /dev/null +++ b/fosite/token/jwt/token_test.go @@ -0,0 +1,566 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +import ( + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "fmt" + "strings" + "testing" + "time" + + "github.com/ory/hydra/v2/fosite/internal/gen" + + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/jwt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestUnsignedToken(t *testing.T) { + var testCases = []struct { + name string + jwtHeaders map[string]interface{} + expectedType string + }{ + { + name: "set JWT as 'typ' when the the type is not specified in the headers", + jwtHeaders: map[string]interface{}{}, + expectedType: "JWT", + }, + { + name: "'typ' set explicitly", + jwtHeaders: map[string]interface{}{"typ": "at+jwt"}, + expectedType: "at+jwt", + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + key := UnsafeAllowNoneSignatureType + token := NewWithClaims(SigningMethodNone, MapClaims{ + "aud": "foo", + "exp": time.Now().UTC().Add(time.Hour).Unix(), + "iat": time.Now().UTC().Unix(), + "sub": "nestor", + }) + token.Header = tc.jwtHeaders + rawToken, err := token.SignedString(key) + require.NoError(t, err) + require.NotEmpty(t, rawToken) + parts := strings.Split(rawToken, ".") + require.Len(t, parts, 3) + require.Empty(t, parts[2]) + tk, err := jwt.ParseSigned(rawToken) + require.NoError(t, err) + require.Len(t, tk.Headers, 1) + require.Equal(t, tc.expectedType, tk.Headers[0].ExtraHeaders[jose.HeaderKey("typ")]) + }) + } +} + +func TestJWTHeaders(t *testing.T) { + var testCases = []struct { + name string + jwtHeaders map[string]interface{} + expectedType string + }{ + { + name: "set JWT as 'typ' when the the type is not specified in the headers", + jwtHeaders: map[string]interface{}{}, + expectedType: "JWT", + }, + { + name: "'typ' set explicitly", + jwtHeaders: map[string]interface{}{"typ": "at+jwt"}, + expectedType: "at+jwt", + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + rawToken := makeSampleTokenWithCustomHeaders(nil, jose.RS256, tc.jwtHeaders, 
gen.MustRSAKey()) + tk, err := jwt.ParseSigned(rawToken) + require.NoError(t, err) + require.Len(t, tk.Headers, 1) + require.Equal(t, tk.Headers[0].Algorithm, "RS256") + require.Equal(t, tc.expectedType, tk.Headers[0].ExtraHeaders[jose.HeaderKey("typ")]) + }) + } +} + +var keyFuncError error = fmt.Errorf("error loading key") +var ( + jwtTestDefaultKey *rsa.PublicKey = parseRSAPublicKeyFromPEM(defaultPubKeyPEM) + defaultKeyFunc Keyfunc = func(t *Token) (interface{}, error) { return jwtTestDefaultKey, nil } + emptyKeyFunc Keyfunc = func(t *Token) (interface{}, error) { return nil, nil } + errorKeyFunc Keyfunc = func(t *Token) (interface{}, error) { return nil, keyFuncError } + nilKeyFunc Keyfunc = nil +) + +// Many test cases where taken from https://github.com/dgrijalva/jwt-go/blob/master/parser_test.go +// Test cases related to json.Number where excluded because that is not supported by go-jose, +// it is not used in fosite and therefore not supported. +func TestParser_Parse(t *testing.T) { + var ( + defaultES256PrivateKey = gen.MustES256Key() + defaultSigningKey = parseRSAPrivateKeyFromPEM(defaultPrivateKeyPEM) + publicECDSAKey = func(*Token) (interface{}, error) { return &defaultES256PrivateKey.PublicKey, nil } + noneKey = func(*Token) (interface{}, error) { return UnsafeAllowNoneSignatureType, nil } + randomKey = func(*Token) (interface{}, error) { + k, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + return &k.PublicKey, nil + } + ) + type expected struct { + errors uint32 + keyFunc Keyfunc + valid bool + claims MapClaims + } + type generate struct { + claims MapClaims + signingKey interface{} // defaultSigningKey + method jose.SignatureAlgorithm // default RS256 + } + type given struct { + name string + tokenString string + generate *generate + } + var jwtTestData = []struct { + expected + given + }{ + { + given: given{ + name: "basic", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: true, + errors: 0, + }, + }, + { + given: given{ + name: "basic expired", + generate: &generate{ + claims: MapClaims{"foo": "bar", "exp": time.Now().Unix() - 100}, + }, + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar", "exp": time.Now().Unix() - 100}, + valid: false, + errors: ValidationErrorExpired, + }, + }, + { + given: given{ + name: "basic nbf", + generate: &generate{ + claims: MapClaims{"foo": "bar", "nbf": time.Now().Unix() + 100}, + }, + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar", "nbf": time.Now().Unix() + 100}, + valid: false, + errors: ValidationErrorNotValidYet, + }, + }, + { + given: given{ + name: "expired and nbf", + generate: &generate{ + claims: MapClaims{"foo": "bar", "nbf": time.Now().Unix() + 100, "exp": time.Now().Unix() - 100}, + }, + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar", "nbf": time.Now().Unix() + 100, "exp": time.Now().Unix() - 100}, + valid: false, + errors: ValidationErrorNotValidYet | ValidationErrorExpired, + }, + }, + { + given: given{ + name: "basic 
invalid", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.EhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + { + given: given{ + name: "basic nokeyfunc", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: nilKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorUnverifiable, + }, + }, + { + given: given{ + name: "basic nokey", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: emptyKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + { + given: given{ + name: "basic errorkey", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + generate: &generate{ + claims: MapClaims{"foo": "bar"}, + }, + }, + expected: expected{ + keyFunc: errorKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorUnverifiable, + }, + }, + { + given: given{ + name: "valid signing method", + generate: &generate{ + claims: MapClaims{"foo": "bar"}, + }, + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: true, + errors: 0, + }, + }, + { + given: given{ + name: "invalid", + tokenString: "foo_invalid_token", + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims(nil), + valid: false, + errors: ValidationErrorMalformed, + }, + }, + { + given: given{ + name: "valid format invalid content", + tokenString: "foo.bar.baz", + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims(nil), + valid: false, + errors: ValidationErrorMalformed, + }, + }, + { + given: given{ + name: "wrong key, expected ECDSA got RSA", + tokenString: 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: publicECDSAKey, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + { + given: given{ + name: "should fail, got RSA but found no key", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: emptyKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + { + given: given{ + name: "key does not match", + tokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIifQ.FhkiHkoESI_cG3NPigFrxEk9Z60_oXrOT2vGm9Pn6RDgYNovYORQmmA0zs1AoAOf09ly2Nx2YAg6ABqAYga1AcMFkJljwxTT5fYphTuqpWdy4BELeSYJx5Ty2gmr8e7RonuUztrdD5WfPqLKMm1Ozp_T6zALpRmwTIW0QPnaBXaQD90FplAg46Iy1UlDKr-Eupy0i5SLch5Q-p2ZpaL_5fnTIUDlxC3pWhJTyx_71qDI-mAA_5lE_VdroOeflG56sSmDxopPEG3bFlSu1eowyBfxtu0_CuVd-M42RU75Zc4Gsj6uV77MBtbMrf4_7M_NUTSgoIF3fRqxrj0NzihIBg", + }, + expected: expected{ + keyFunc: randomKey, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + { + given: given{ + name: "used before issued", + generate: &generate{ + claims: MapClaims{"foo": "bar", "iat": time.Now().Unix() + 500}, + }, + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar", "iat": time.Now().Unix() + 500}, + valid: false, + errors: ValidationErrorIssuedAt, + }, + }, + { + given: given{ + name: "valid ECDSA signing method", + generate: &generate{ + claims: MapClaims{"foo": "bar"}, + signingKey: defaultES256PrivateKey, + method: jose.ES256, + }, + }, + expected: expected{ + keyFunc: publicECDSAKey, + claims: MapClaims{"foo": "bar"}, + valid: true, + errors: 0, + }, + }, + { + given: given{ + name: "should pass, valid NONE signing method", + generate: &generate{ + claims: MapClaims{"foo": "bar"}, + signingKey: UnsafeAllowNoneSignatureType, + method: SigningMethodNone, + }, + }, + expected: expected{ + keyFunc: noneKey, + claims: MapClaims{"foo": "bar"}, + valid: true, + errors: 0, + }, + }, + { + given: given{ + name: "should fail, expected RS256 but got NONE", + generate: &generate{ + claims: MapClaims{"foo": "bar"}, + signingKey: UnsafeAllowNoneSignatureType, + method: SigningMethodNone, + }, + }, + expected: expected{ + keyFunc: defaultKeyFunc, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + { + given: given{ + name: "should fail, expected ECDSA but got NONE", + generate: &generate{ + claims: MapClaims{"foo": "bar"}, + signingKey: UnsafeAllowNoneSignatureType, + method: SigningMethodNone, + }, + }, + expected: expected{ + keyFunc: publicECDSAKey, + claims: MapClaims{"foo": "bar"}, + valid: false, + errors: ValidationErrorSignatureInvalid, + }, + }, + } + + // Iterate over test data set 
and run tests + for _, data := range jwtTestData { + t.Run(data.name, func(t *testing.T) { + if data.generate != nil { + signingKey := data.generate.signingKey + method := data.generate.method + if signingKey == nil { + // use test defaults + signingKey = defaultSigningKey + method = jose.RS256 + } + data.tokenString = makeSampleToken(data.generate.claims, method, signingKey) + } + + // Parse the token + var token *Token + var err error + + // Figure out correct claims type + token, err = ParseWithClaims(data.tokenString, MapClaims{}, data.keyFunc) + // Verify result matches expectation + assert.EqualValues(t, data.claims, token.Claims) + if data.valid && err != nil { + t.Errorf("[%v] Error while verifying token: %T:%v", data.name, err, err) + } + + if !data.valid && err == nil { + t.Errorf("[%v] Invalid token passed validation", data.name) + } + + if (err == nil && !token.Valid()) || (err != nil && token.Valid()) { + t.Errorf("[%v] Inconsistent behavior between returned error and token.Valid", data.name) + } + + if data.errors != 0 { + if err == nil { + t.Errorf("[%v] Expecting error. Didn't get one.", data.name) + } else { + + ve := err.(*ValidationError) + // compare the bitfield part of the error + if e := ve.Errors; e != data.errors { + t.Errorf("[%v] Errors don't match expectation. %v != %v", data.name, e, data.errors) + } + + if err.Error() == keyFuncError.Error() && ve.Inner != keyFuncError { + t.Errorf("[%v] Inner error does not match expectation. %v != %v", data.name, ve.Inner, keyFuncError) + } + } + } + }) + } +} + +func makeSampleToken(c MapClaims, m jose.SignatureAlgorithm, key interface{}) string { + token := NewWithClaims(m, c) + s, e := token.SignedString(key) + + if e != nil { + panic(e.Error()) + } + + return s +} + +func makeSampleTokenWithCustomHeaders(c MapClaims, m jose.SignatureAlgorithm, headers map[string]interface{}, key interface{}) string { + token := NewWithClaims(m, c) + token.Header = headers + s, e := token.SignedString(key) + + if e != nil { + panic(e.Error()) + } + + return s +} + +func parseRSAPublicKeyFromPEM(key []byte) *rsa.PublicKey { + var err error + + // Parse PEM block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + panic("not possible to decode") + } + + // Parse the key + var parsedKey interface{} + if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { + if cert, err := x509.ParseCertificate(block.Bytes); err == nil { + parsedKey = cert.PublicKey + } else { + panic(err) + } + } + + var pkey *rsa.PublicKey + var ok bool + if pkey, ok = parsedKey.(*rsa.PublicKey); !ok { + panic("not an *rsa.PublicKey") + } + + return pkey +} + +func parseRSAPrivateKeyFromPEM(key []byte) *rsa.PrivateKey { + var err error + + // Parse PEM block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + panic("unable to decode") + } + + var parsedKey interface{} + if parsedKey, err = x509.ParsePKCS1PrivateKey(block.Bytes); err != nil { + if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { + panic(err) + } + } + + var pkey *rsa.PrivateKey + var ok bool + if pkey, ok = parsedKey.(*rsa.PrivateKey); !ok { + panic("not an rsa private key") + } + + return pkey +} + +var ( + defaultPubKeyPEM = []byte(` +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4f5wg5l2hKsTeNem/V41 +fGnJm6gOdrj8ym3rFkEU/wT8RDtnSgFEZOQpHEgQ7JL38xUfU0Y3g6aYw9QT0hJ7 +mCpz9Er5qLaMXJwZxzHzAahlfA0icqabvJOMvQtzD6uQv6wPEyZtDTWiQi9AXwBp +HssPnpYGIn20ZZuNlX2BrClciHhCPUIIZOQn/MmqTD31jSyjoQoV7MhhMTATKJx2 
+XrHhR+1DcKJzQBSTAGnpYVaqpsARap+nwRipr3nUTuxyGohBTSmjJ2usSeQXHI3b +ODIRe1AuTyHceAbewn8b462yEWKARdpd9AjQW5SIVPfdsz5B6GlYQ5LdYKtznTuy +7wIDAQAB +-----END PUBLIC KEY-----`) + defaultPrivateKeyPEM = []byte(` +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA4f5wg5l2hKsTeNem/V41fGnJm6gOdrj8ym3rFkEU/wT8RDtn +SgFEZOQpHEgQ7JL38xUfU0Y3g6aYw9QT0hJ7mCpz9Er5qLaMXJwZxzHzAahlfA0i +cqabvJOMvQtzD6uQv6wPEyZtDTWiQi9AXwBpHssPnpYGIn20ZZuNlX2BrClciHhC +PUIIZOQn/MmqTD31jSyjoQoV7MhhMTATKJx2XrHhR+1DcKJzQBSTAGnpYVaqpsAR +ap+nwRipr3nUTuxyGohBTSmjJ2usSeQXHI3bODIRe1AuTyHceAbewn8b462yEWKA +Rdpd9AjQW5SIVPfdsz5B6GlYQ5LdYKtznTuy7wIDAQABAoIBAQCwia1k7+2oZ2d3 +n6agCAbqIE1QXfCmh41ZqJHbOY3oRQG3X1wpcGH4Gk+O+zDVTV2JszdcOt7E5dAy +MaomETAhRxB7hlIOnEN7WKm+dGNrKRvV0wDU5ReFMRHg31/Lnu8c+5BvGjZX+ky9 +POIhFFYJqwCRlopGSUIxmVj5rSgtzk3iWOQXr+ah1bjEXvlxDOWkHN6YfpV5ThdE +KdBIPGEVqa63r9n2h+qazKrtiRqJqGnOrHzOECYbRFYhexsNFz7YT02xdfSHn7gM +IvabDDP/Qp0PjE1jdouiMaFHYnLBbgvlnZW9yuVf/rpXTUq/njxIXMmvmEyyvSDn +FcFikB8pAoGBAPF77hK4m3/rdGT7X8a/gwvZ2R121aBcdPwEaUhvj/36dx596zvY +mEOjrWfZhF083/nYWE2kVquj2wjs+otCLfifEEgXcVPTnEOPO9Zg3uNSL0nNQghj +FuD3iGLTUBCtM66oTe0jLSslHe8gLGEQqyMzHOzYxNqibxcOZIe8Qt0NAoGBAO+U +I5+XWjWEgDmvyC3TrOSf/KCGjtu0TSv30ipv27bDLMrpvPmD/5lpptTFwcxvVhCs +2b+chCjlghFSWFbBULBrfci2FtliClOVMYrlNBdUSJhf3aYSG2Doe6Bgt1n2CpNn +/iu37Y3NfemZBJA7hNl4dYe+f+uzM87cdQ214+jrAoGAXA0XxX8ll2+ToOLJsaNT +OvNB9h9Uc5qK5X5w+7G7O998BN2PC/MWp8H+2fVqpXgNENpNXttkRm1hk1dych86 +EunfdPuqsX+as44oCyJGFHVBnWpm33eWQw9YqANRI+pCJzP08I5WK3osnPiwshd+ +hR54yjgfYhBFNI7B95PmEQkCgYBzFSz7h1+s34Ycr8SvxsOBWxymG5zaCsUbPsL0 +4aCgLScCHb9J+E86aVbbVFdglYa5Id7DPTL61ixhl7WZjujspeXZGSbmq0Kcnckb +mDgqkLECiOJW2NHP/j0McAkDLL4tysF8TLDO8gvuvzNC+WQ6drO2ThrypLVZQ+ry +eBIPmwKBgEZxhqa0gVvHQG/7Od69KWj4eJP28kq13RhKay8JOoN0vPmspXJo1HY3 +CKuHRG+AP579dncdUnOMvfXOtkdM4vk0+hWASBQzM9xzVcztCa+koAugjVaLS9A+ +9uQoqEeVNTckxx0S2bYevRy7hGQmUJTyQm3j1zEUR5jpdbL83Fbq +-----END RSA PRIVATE KEY-----`) +) diff --git a/fosite/token/jwt/validation_error.go b/fosite/token/jwt/validation_error.go new file mode 100644 index 00000000000..b39bafe4c95 --- /dev/null +++ b/fosite/token/jwt/validation_error.go @@ -0,0 +1,51 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwt + +// Validation provides a backwards compatible error definition +// from `jwt-go` to `go-jose`. +// The sourcecode was taken from https://github.com/dgrijalva/jwt-go/blob/master/errors.go +// +// > The errors that might occur when parsing and validating a token +const ( + ValidationErrorMalformed uint32 = 1 << iota // Token is malformed + ValidationErrorUnverifiable // Token could not be verified because of signing problems + ValidationErrorSignatureInvalid // Signature validation failed + + // Standard Claim validation errors + ValidationErrorAudience // AUD validation failed + ValidationErrorExpired // EXP validation failed + ValidationErrorIssuedAt // IAT validation failed + ValidationErrorIssuer // ISS validation failed + ValidationErrorNotValidYet // NBF validation failed + ValidationErrorId // JTI validation failed + ValidationErrorClaimsInvalid // Generic claims validation error +) + +// The error from Parse if token is not valid +type ValidationError struct { + Inner error // stores the error returned by external dependencies, i.e.: KeyFunc + Errors uint32 // bitfield. see ValidationError... 
constants + text string // errors that do not have a valid error just have text +} + +// Validation error is an error type +func (e ValidationError) Error() string { + if e.Inner != nil { + return e.Inner.Error() + } else if e.text != "" { + return e.text + } else { + return "token is invalid" + } +} + +// No errors +func (e *ValidationError) valid() bool { + return e.Errors == 0 +} + +func (e *ValidationError) Has(verr uint32) bool { + return (e.Errors & verr) != 0 +} diff --git a/fosite/tools.go b/fosite/tools.go new file mode 100644 index 00000000000..8a9b29c6e10 --- /dev/null +++ b/fosite/tools.go @@ -0,0 +1,13 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build tools + +package fosite + +import ( + _ "github.com/mattn/goveralls" + _ "go.uber.org/mock/mockgen" + + _ "github.com/ory/go-acc" +) diff --git a/fosite/transactional.go b/fosite/transactional.go new file mode 100644 index 00000000000..be374741425 --- /dev/null +++ b/fosite/transactional.go @@ -0,0 +1,58 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fosite + +import "context" + +// A storage provider that has support for transactions should implement this interface to ensure atomicity for certain flows +// that require transactional semantics. Fosite will call these methods (when atomicity is required) if and only if the storage +// provider has implemented `Transactional`. It is expected that the storage provider will examine context for an existing transaction +// each time a database operation is to be performed. +// +// An implementation of `BeginTX` should attempt to initiate a new transaction and store that under a unique key +// in the context that can be accessible by `Commit` and `Rollback`. The "transactional aware" context will then be +// returned for further propagation, eventually to be consumed by `Commit` or `Rollback` to finish the transaction. +// +// Implementations for `Commit` & `Rollback` should look for the transaction object inside the supplied context using the same +// key used by `BeginTX`. If these methods have been called, it is expected that a txn object should be available in the provided +// context. +type Transactional interface { + BeginTX(ctx context.Context) (context.Context, error) + Commit(ctx context.Context) error + Rollback(ctx context.Context) error +} + +// MaybeBeginTx is a helper function that can be used to initiate a transaction if the supplied storage +// implements the `Transactional` interface. +func MaybeBeginTx(ctx context.Context, storage interface{}) (context.Context, error) { + // the type assertion checks whether the dynamic type of `storage` implements `Transactional` + txnStorage, transactional := storage.(Transactional) + if transactional { + return txnStorage.BeginTX(ctx) + } else { + return ctx, nil + } +} + +// MaybeCommitTx is a helper function that can be used to commit a transaction if the supplied storage +// implements the `Transactional` interface. +func MaybeCommitTx(ctx context.Context, storage interface{}) error { + txnStorage, transactional := storage.(Transactional) + if transactional { + return txnStorage.Commit(ctx) + } else { + return nil + } +} + +// MaybeRollbackTx is a helper function that can be used to rollback a transaction if the supplied storage +// implements the `Transactional` interface. 
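These helpers are meant to be composed around a single unit of storage work, together with MaybeRollbackTx whose definition follows. A minimal sketch of that composition; the `createWithTx` and `createSession` names are placeholders for illustration only, not Fosite or Hydra APIs:

// Illustrative use of MaybeBeginTx, MaybeCommitTx, and MaybeRollbackTx (defined below).
// `store` may or may not implement Transactional; the helpers degrade to no-ops if it does not.
func createWithTx(ctx context.Context, store interface{}, createSession func(context.Context, interface{}) error) error {
	txCtx, err := MaybeBeginTx(ctx, store) // returns ctx unchanged if store is not Transactional
	if err != nil {
		return err
	}
	if err := createSession(txCtx, store); err != nil {
		_ = MaybeRollbackTx(txCtx, store) // best-effort rollback; the original error is returned
		return err
	}
	return MaybeCommitTx(txCtx, store)
}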
+func MaybeRollbackTx(ctx context.Context, storage interface{}) error { + txnStorage, transactional := storage.(Transactional) + if transactional { + return txnStorage.Rollback(ctx) + } else { + return nil + } +} diff --git a/fositex/config.go b/fositex/config.go index db512574f94..d0eaa11c0c2 100644 --- a/fositex/config.go +++ b/fositex/config.go @@ -12,65 +12,81 @@ import ( "github.com/hashicorp/go-retryablehttp" - "github.com/ory/fosite" - "github.com/ory/fosite/compose" - "github.com/ory/fosite/i18n" - "github.com/ory/fosite/token/jwt" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/persistence" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/i18n" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/persistence" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/stringslice" "github.com/ory/x/urlx" ) -type configDependencies interface { - config.Provider - persistence.Provider - x.HTTPClientProvider - GetJWKSFetcherStrategy() fosite.JWKSFetcherStrategy - ClientHasher() fosite.Hasher -} - -type factory func(config fosite.Configurator, storage interface{}, strategy interface{}) interface{} - -type Config struct { - deps configDependencies - - authorizeEndpointHandlers fosite.AuthorizeEndpointHandlers - tokenEndpointHandlers fosite.TokenEndpointHandlers - tokenIntrospectionHandlers fosite.TokenIntrospectionHandlers - revocationHandlers fosite.RevocationHandlers - - *config.DefaultProvider -} +type ( + configDependencies interface { + config.Provider + persistence.Provider + x.HTTPClientProvider + ClientHasher() fosite.Hasher + ExtraFositeFactories() []Factory + } + Factory func(config fosite.Configurator, storage fosite.Storage, strategy interface{}) interface{} + Config struct { + deps configDependencies + + authorizeEndpointHandlers fosite.AuthorizeEndpointHandlers + tokenEndpointHandlers fosite.TokenEndpointHandlers + tokenIntrospectionHandlers fosite.TokenIntrospectionHandlers + revocationHandlers fosite.RevocationHandlers + deviceEndpointHandlers fosite.DeviceEndpointHandlers + jwksFetcherStrategy fosite.JWKSFetcherStrategy + + *config.DefaultProvider + } + ConfigProvider interface { + OAuth2Config() *Config + } +) -var defaultResponseModeHandler = fosite.NewDefaultResponseModeHandler() -var defaultFactories = []factory{ - compose.OAuth2AuthorizeExplicitFactory, - compose.OAuth2AuthorizeImplicitFactory, - compose.OAuth2ClientCredentialsGrantFactory, - compose.OAuth2RefreshTokenGrantFactory, - compose.OpenIDConnectExplicitFactory, - compose.OpenIDConnectHybridFactory, - compose.OpenIDConnectImplicitFactory, - compose.OpenIDConnectRefreshFactory, - compose.OAuth2TokenRevocationFactory, - compose.OAuth2TokenIntrospectionFactory, - compose.OAuth2PKCEFactory, - compose.RFC7523AssertionGrantFactory, -} +var ( + defaultResponseModeHandler = fosite.NewDefaultResponseModeHandler() + defaultFactories = []Factory{ + compose.OAuth2AuthorizeExplicitFactory, + compose.OAuth2AuthorizeImplicitFactory, + compose.OAuth2ClientCredentialsGrantFactory, + compose.OAuth2RefreshTokenGrantFactory, + 
compose.OpenIDConnectExplicitFactory, + compose.OpenIDConnectHybridFactory, + compose.OpenIDConnectImplicitFactory, + compose.OpenIDConnectRefreshFactory, + compose.OAuth2TokenRevocationFactory, + compose.OAuth2TokenIntrospectionFactory, + compose.OAuth2PKCEFactory, + compose.RFC7523AssertionGrantFactory, + compose.OIDCUserinfoVerifiableCredentialFactory, + compose.RFC8628DeviceFactory, + compose.RFC8628DeviceAuthorizationTokenFactory, + compose.OpenIDConnectDeviceFactory, + } +) func NewConfig(deps configDependencies) *Config { - c := &Config{ + return &Config{ deps: deps, DefaultProvider: deps.Config(), } - return c } -func (c *Config) LoadDefaultHanlders(strategy interface{}) { - for _, factory := range defaultFactories { - res := factory(c, c.deps.Persister(), strategy) +func (c *Config) Config() *config.DefaultProvider { + return c.deps.Config() +} + +func (c *Config) LoadDefaultHandlers(storage fosite.Storage, strategy interface{}) { + factories := append(defaultFactories, c.deps.ExtraFositeFactories()...) + for _, factory := range factories { + res := factory(c, storage, strategy) if ah, ok := res.(fosite.AuthorizeEndpointHandler); ok { c.authorizeEndpointHandlers.Append(ah) } @@ -83,58 +99,71 @@ func (c *Config) LoadDefaultHanlders(strategy interface{}) { if rh, ok := res.(fosite.RevocationHandler); ok { c.revocationHandlers.Append(rh) } + if dh, ok := res.(fosite.DeviceEndpointHandler); ok { + c.deviceEndpointHandlers.Append(dh) + } } } -func (c *Config) GetJWKSFetcherStrategy(ctx context.Context) fosite.JWKSFetcherStrategy { - return c.deps.GetJWKSFetcherStrategy() +func (c *Config) GetJWKSFetcherStrategy(context.Context) fosite.JWKSFetcherStrategy { + if c.jwksFetcherStrategy == nil { + c.jwksFetcherStrategy = fosite.NewDefaultJWKSFetcherStrategy(fosite.JWKSFetcherWithHTTPClientSource( + func(ctx context.Context) *retryablehttp.Client { return c.deps.HTTPClient(ctx) }, + )) + } + return c.jwksFetcherStrategy } func (c *Config) GetHTTPClient(ctx context.Context) *retryablehttp.Client { return c.deps.HTTPClient(ctx) } -func (c *Config) GetAuthorizeEndpointHandlers(ctx context.Context) fosite.AuthorizeEndpointHandlers { +func (c *Config) GetAuthorizeEndpointHandlers(context.Context) fosite.AuthorizeEndpointHandlers { return c.authorizeEndpointHandlers } -func (c *Config) GetTokenEndpointHandlers(ctx context.Context) fosite.TokenEndpointHandlers { +func (c *Config) GetTokenEndpointHandlers(context.Context) fosite.TokenEndpointHandlers { return c.tokenEndpointHandlers } -func (c *Config) GetTokenIntrospectionHandlers(ctx context.Context) (r fosite.TokenIntrospectionHandlers) { +func (c *Config) GetTokenIntrospectionHandlers(context.Context) (r fosite.TokenIntrospectionHandlers) { return c.tokenIntrospectionHandlers } -func (c *Config) GetRevocationHandlers(ctx context.Context) fosite.RevocationHandlers { +func (c *Config) GetRevocationHandlers(context.Context) fosite.RevocationHandlers { return c.revocationHandlers } -func (c *Config) GetGrantTypeJWTBearerCanSkipClientAuth(ctx context.Context) bool { +// GetDeviceEndpointHandlers returns the deviceEndpointHandlers +func (c *Config) GetDeviceEndpointHandlers(context.Context) fosite.DeviceEndpointHandlers { + return c.deviceEndpointHandlers +} + +func (c *Config) GetGrantTypeJWTBearerCanSkipClientAuth(context.Context) bool { return false } -func (c *Config) GetAudienceStrategy(ctx context.Context) fosite.AudienceMatchingStrategy { +func (c *Config) GetAudienceStrategy(context.Context) fosite.AudienceMatchingStrategy { return 
fosite.DefaultAudienceMatchingStrategy } -func (c *Config) GetOmitRedirectScopeParam(ctx context.Context) bool { +func (c *Config) GetOmitRedirectScopeParam(context.Context) bool { return false } -func (c *Config) GetSanitationWhiteList(ctx context.Context) []string { +func (c *Config) GetSanitationWhiteList(context.Context) []string { return []string{"code", "redirect_uri"} } -func (c *Config) GetEnablePKCEPlainChallengeMethod(ctx context.Context) bool { +func (c *Config) GetEnablePKCEPlainChallengeMethod(context.Context) bool { return false } -func (c *Config) GetDisableRefreshTokenValidation(ctx context.Context) bool { +func (c *Config) GetDisableRefreshTokenValidation(context.Context) bool { return false } -func (c *Config) GetRefreshTokenScopes(ctx context.Context) []string { +func (c *Config) GetRefreshTokenScopes(context.Context) []string { return []string{"offline", "offline_access"} } @@ -142,12 +171,12 @@ func (c *Config) GetMinParameterEntropy(_ context.Context) int { return fosite.MinParameterEntropy } -func (c *Config) GetClientAuthenticationStrategy(ctx context.Context) fosite.ClientAuthenticationStrategy { +func (c *Config) GetClientAuthenticationStrategy(context.Context) fosite.ClientAuthenticationStrategy { // Fosite falls back to the default fosite.Fosite.DefaultClientAuthenticationStrategy when this is nil. return nil } -func (c *Config) GetResponseModeHandlerExtension(ctx context.Context) fosite.ResponseModeHandler { +func (c *Config) GetResponseModeHandlerExtension(context.Context) fosite.ResponseModeHandler { return defaultResponseModeHandler } @@ -155,20 +184,20 @@ func (c *Config) GetSendDebugMessagesToClients(ctx context.Context) bool { return c.deps.Config().GetSendDebugMessagesToClients(ctx) } -func (c *Config) GetMessageCatalog(ctx context.Context) i18n.MessageCatalog { +func (c *Config) GetMessageCatalog(context.Context) i18n.MessageCatalog { // Fosite falls back to the default messages when this is nil. 
return nil } -func (c *Config) GetSecretsHasher(ctx context.Context) fosite.Hasher { +func (c *Config) GetSecretsHasher(context.Context) fosite.Hasher { return c.deps.ClientHasher() } -func (c *Config) GetTokenEntropy(ctx context.Context) int { +func (c *Config) GetTokenEntropy(context.Context) int { return 32 } -func (c *Config) GetHMACHasher(ctx context.Context) func() hash.Hash { +func (c *Config) GetHMACHasher(context.Context) func() hash.Hash { return sha512.New512_256 } @@ -176,11 +205,11 @@ func (c *Config) GetIDTokenIssuer(ctx context.Context) string { return c.deps.Config().IssuerURL(ctx).String() } -func (c *Config) GetAllowedPrompts(ctx context.Context) []string { - return []string{"login", "none", "consent"} +func (c *Config) GetAllowedPrompts(context.Context) []string { + return []string{"login", "none", "consent", "registration"} } -func (c *Config) GetRedirectSecureChecker(ctx context.Context) func(context.Context, *url.URL) bool { +func (c *Config) GetRedirectSecureChecker(context.Context) func(context.Context, *url.URL) bool { return x.IsRedirectURISecure(c.deps.Config()) } @@ -189,13 +218,21 @@ func (c *Config) GetAccessTokenIssuer(ctx context.Context) string { } func (c *Config) GetJWTScopeField(ctx context.Context) jwt.JWTScopeFieldEnum { - return jwt.JWTScopeFieldList + return c.deps.Config().GetJWTScopeField(ctx) } -func (c *Config) GetFormPostHTMLTemplate(ctx context.Context) *template.Template { +func (c *Config) GetFormPostHTMLTemplate(context.Context) *template.Template { return fosite.DefaultFormPostTemplate } -func (c *Config) GetTokenURL(ctx context.Context) string { - return urlx.AppendPaths(c.deps.Config().PublicURL(ctx), oauth2.TokenPath).String() +func (c *Config) GetTokenURLs(ctx context.Context) []string { + return stringslice.Unique([]string{ + c.deps.Config().OAuth2TokenURL(ctx).String(), + urlx.AppendPaths(c.deps.Config().PublicURL(ctx), oauth2.TokenPath).String(), + }) +} + +// GetDeviceVerificationURL returns the device verification url +func (c *Config) GetDeviceVerificationURL(ctx context.Context) string { + return urlx.AppendPaths(c.deps.Config().PublicURL(ctx), oauth2.DeviceVerificationPath).String() } diff --git a/fositex/token_strategy.go b/fositex/token_strategy.go index 865451ac9ce..6eb49dfd41e 100644 --- a/fositex/token_strategy.go +++ b/fositex/token_strategy.go @@ -5,67 +5,87 @@ package fositex import ( "context" + "strings" - "github.com/ory/fosite" - foauth2 "github.com/ory/fosite/handler/oauth2" - "github.com/ory/hydra/driver/config" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + foauth2 "github.com/ory/hydra/v2/fosite/handler/oauth2" ) var _ foauth2.CoreStrategy = (*TokenStrategy)(nil) -// TokenStrategy uses the correct token strategy (jwt, opaque) depending on the configuration. -type TokenStrategy struct { - c *config.DefaultProvider - hmac *foauth2.HMACSHAStrategy - jwt *foauth2.DefaultJWTStrategy -} +type ( + // TokenStrategy uses the correct token strategy (jwt, opaque) depending on the configuration. + TokenStrategy struct { + d tokenStrategyDependencies + } + tokenStrategyDependencies interface { + OAuth2HMACStrategy() foauth2.CoreStrategy + OAuth2JWTStrategy() foauth2.AccessTokenStrategy + config.Provider + } +) // NewTokenStrategy returns a new TokenStrategy. 
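The constructor and methods that follow implement an asymmetric split: access tokens are generated and validated with the strategy resolved from configuration and the requesting client (JWT or opaque), while refresh tokens and authorize codes always go through the opaque HMAC strategy. A rough call-side sketch, with `issueTokens` and its parameters introduced here purely for illustration:

// Illustrative only; shows which strategy each token type ends up using.
func issueTokens(ctx context.Context, s *TokenStrategy, r fosite.Requester) error {
	// Access tokens honor the configured access token strategy, resolved per requesting client.
	if _, _, err := s.GenerateAccessToken(ctx, r); err != nil {
		return err
	}
	// Refresh tokens and authorize codes are always opaque HMAC tokens, regardless of that setting.
	if _, _, err := s.GenerateRefreshToken(ctx, r); err != nil {
		return err
	}
	_, _, err := s.GenerateAuthorizeCode(ctx, r)
	return err
}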
-func NewTokenStrategy(c *config.DefaultProvider, hmac *foauth2.HMACSHAStrategy, jwt *foauth2.DefaultJWTStrategy) *TokenStrategy { - return &TokenStrategy{c: c, hmac: hmac, jwt: jwt} -} +func NewTokenStrategy(d tokenStrategyDependencies) *TokenStrategy { return &TokenStrategy{d: d} } // gs returns the configured strategy. -func (t TokenStrategy) gs(ctx context.Context) foauth2.CoreStrategy { - switch ats := t.c.AccessTokenStrategy(ctx); ats { +func (t TokenStrategy) gs(ctx context.Context, additionalSources ...config.AccessTokenStrategySource) foauth2.AccessTokenStrategy { + switch ats := t.d.Config().AccessTokenStrategy(ctx, additionalSources...); ats { case config.AccessTokenJWTStrategy: - return t.jwt + return t.d.OAuth2JWTStrategy() } - return t.hmac + return t.d.OAuth2HMACStrategy() } -func (t TokenStrategy) AccessTokenSignature(ctx context.Context, token string) string { - return t.gs(ctx).AccessTokenSignature(ctx, token) +func (t TokenStrategy) AccessTokenSignature(_ context.Context, token string) string { + return genericSignature(token) } -func (t TokenStrategy) GenerateAccessToken(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) { - return t.gs(ctx).GenerateAccessToken(ctx, requester) +func (t TokenStrategy) GenerateAccessToken(ctx context.Context, requester fosite.Requester) (token, signature string, err error) { + return t.gs(ctx, withRequester(requester)).GenerateAccessToken(ctx, requester) } func (t TokenStrategy) ValidateAccessToken(ctx context.Context, requester fosite.Requester, token string) (err error) { - return t.gs(ctx).ValidateAccessToken(ctx, requester, token) + return t.gs(ctx, withRequester(requester)).ValidateAccessToken(ctx, requester, token) } func (t TokenStrategy) RefreshTokenSignature(ctx context.Context, token string) string { - return t.gs(ctx).RefreshTokenSignature(ctx, token) + return t.d.OAuth2HMACStrategy().RefreshTokenSignature(ctx, token) } -func (t TokenStrategy) GenerateRefreshToken(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) { - return t.gs(ctx).GenerateRefreshToken(ctx, requester) +func (t TokenStrategy) GenerateRefreshToken(ctx context.Context, requester fosite.Requester) (token, signature string, err error) { + return t.d.OAuth2HMACStrategy().GenerateRefreshToken(ctx, requester) } func (t TokenStrategy) ValidateRefreshToken(ctx context.Context, requester fosite.Requester, token string) (err error) { - return t.gs(ctx).ValidateRefreshToken(ctx, requester, token) + return t.d.OAuth2HMACStrategy().ValidateRefreshToken(ctx, requester, token) } func (t TokenStrategy) AuthorizeCodeSignature(ctx context.Context, token string) string { - return t.gs(ctx).AuthorizeCodeSignature(ctx, token) + return t.d.OAuth2HMACStrategy().AuthorizeCodeSignature(ctx, token) } -func (t TokenStrategy) GenerateAuthorizeCode(ctx context.Context, requester fosite.Requester) (token string, signature string, err error) { - return t.gs(ctx).GenerateAuthorizeCode(ctx, requester) +func (t TokenStrategy) GenerateAuthorizeCode(ctx context.Context, requester fosite.Requester) (token, signature string, err error) { + return t.d.OAuth2HMACStrategy().GenerateAuthorizeCode(ctx, requester) } func (t TokenStrategy) ValidateAuthorizeCode(ctx context.Context, requester fosite.Requester, token string) (err error) { - return t.gs(ctx).ValidateAuthorizeCode(ctx, requester, token) + return t.d.OAuth2HMACStrategy().ValidateAuthorizeCode(ctx, requester, token) +} + +func withRequester(requester 
fosite.Requester) config.AccessTokenStrategySource { + return client.AccessTokenStrategySource(requester.GetClient()) +} + +func genericSignature(token string) string { + switch parts := strings.Split(token, "."); len(parts) { + case 2: + return parts[1] + case 3: + return parts[2] + default: + return "" + } } diff --git a/fositex/token_strategy_test.go b/fositex/token_strategy_test.go new file mode 100644 index 00000000000..846908f21e6 --- /dev/null +++ b/fositex/token_strategy_test.go @@ -0,0 +1,54 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fositex + +import ( + "context" + "testing" + + "github.com/ory/hydra/v2/fosite/token/hmac" + + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/fosite/handler/oauth2" +) + +// Test that the generic signature function implements the same signature as the +// HMAC and JWT strategies. +func TestAccessTokenSignature(t *testing.T) { + ctx := context.Background() + + t.Run("strategy=DefaultJWTStrategy", func(t *testing.T) { + strategy := new(oauth2.DefaultJWTStrategy) + for _, tc := range []struct{ token string }{ + {""}, + {"foo"}, + // tokens with two parts will be handled by the HMAC strategy + {"foo.bar.baz"}, + {"foo.bar.baz.qux"}, + } { + t.Run("case="+tc.token, func(t *testing.T) { + assert.Equal(t, + strategy.AccessTokenSignature(ctx, tc.token), + genericSignature(tc.token)) + }) + } + }) + t.Run("strategy=HMACStrategy", func(t *testing.T) { + strategy := oauth2.NewHMACSHAStrategy(&hmac.HMACStrategy{}, nil) + for _, tc := range []struct{ token string }{ + {""}, + {"foo"}, + {"foo.bar"}, + // tokens with three parts will be handled by the JWT strategy + {"foo.bar.baz.qux"}, + } { + t.Run("case="+tc.token, func(t *testing.T) { + assert.Equal(t, + strategy.AccessTokenSignature(ctx, tc.token), + genericSignature(tc.token)) + }) + } + }) +} diff --git a/go.mod b/go.mod index 17fb0f25096..d58eb5c80a6 100644 --- a/go.mod +++ b/go.mod @@ -1,273 +1,269 @@ -module github.com/ory/hydra +module github.com/ory/hydra/v2 -go 1.19 +go 1.25 replace ( - github.com/bradleyjkemp/cupaloy/v2 => github.com/aeneasr/cupaloy/v2 v2.6.1-0.20210924214125-3dfdd01210a3 - github.com/dgrijalva/jwt-go => github.com/golang-jwt/jwt/v4 v4.0.0 - github.com/gobuffalo/packr => github.com/gobuffalo/packr v1.30.1 - github.com/gogo/protobuf => github.com/gogo/protobuf v1.3.2 - github.com/mattn/go-sqlite3 => github.com/mattn/go-sqlite3 v1.14.13 - github.com/oleiade/reflections => github.com/oleiade/reflections v1.0.1 + github.com/ory/hydra-client-go/v2 => ./internal/httpclient + github.com/ory/x => ./oryx ) -replace github.com/ory/hydra-client-go/v2 => ./internal/httpclient - require ( - github.com/ThalesIgnite/crypto11 v1.2.4 + github.com/ThalesGroup/crypto11 v1.4.1 + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 github.com/bradleyjkemp/cupaloy/v2 v2.8.0 - github.com/bxcodec/faker/v3 v3.7.0 github.com/cenkalti/backoff/v3 v3.2.2 + github.com/cristalhq/jwt/v4 v4.0.2 + github.com/dgraph-io/ristretto v1.0.0 github.com/fatih/structs v1.1.0 - github.com/go-bindata/go-bindata v3.1.2+incompatible - 
github.com/go-swagger/go-swagger v0.30.3 - github.com/gobuffalo/pop/v6 v6.0.8 - github.com/gobuffalo/x v0.0.0-20181007152206-913e47c59ca7 + github.com/go-faker/faker/v4 v4.6.0 + github.com/go-jose/go-jose/v3 v3.0.4 github.com/gobwas/glob v0.2.3 - github.com/gofrs/uuid v4.3.0+incompatible + github.com/gofrs/uuid v4.4.0+incompatible + github.com/golang-jwt/jwt/v5 v5.3.0 github.com/golang/mock v1.6.0 - github.com/google/uuid v1.3.0 - github.com/gorilla/securecookie v1.1.1 - github.com/gorilla/sessions v1.2.1 - github.com/gtank/cryptopasta v0.0.0-20170601214702-1f550f6f2f69 - github.com/hashicorp/go-retryablehttp v0.7.1 - github.com/instana/testify v1.6.2-0.20200721153833-94b1851f4d65 - github.com/jackc/pgx/v4 v4.17.2 - github.com/jmoiron/sqlx v1.3.5 - github.com/julienschmidt/httprouter v1.3.0 - github.com/luna-duclos/instrumentedsql v1.1.3 - github.com/miekg/pkcs11 v1.0.3 - github.com/mikefarah/yq/v4 v4.16.1 + github.com/google/uuid v1.6.0 + github.com/gorilla/securecookie v1.1.2 + github.com/gorilla/sessions v1.4.0 + github.com/hashicorp/go-retryablehttp v0.7.8 + github.com/jackc/pgx/v5 v5.7.5 + github.com/magiconair/properties v1.8.9 + github.com/mattn/goveralls v0.0.12 + github.com/miekg/pkcs11 v1.1.1 github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 - github.com/oleiade/reflections v1.0.1 - github.com/ory/analytics-go/v4 v4.0.3 - github.com/ory/fosite v0.44.0 - github.com/ory/go-acc v0.2.8 - github.com/ory/graceful v0.1.1 - github.com/ory/herodot v0.9.13 - github.com/ory/hydra-client-go/v2 v2.0.1 - github.com/ory/jsonschema/v3 v3.0.7 - github.com/ory/x v0.0.520 + github.com/oleiade/reflections v1.1.0 + github.com/ory/analytics-go/v5 v5.0.1 + github.com/ory/go-acc v0.2.9-0.20230103102148-6b1c9a70dbbe + github.com/ory/go-convenience v0.1.0 + github.com/ory/graceful v0.1.3 + github.com/ory/herodot v0.10.7 + github.com/ory/hydra-client-go/v2 v2.2.1 + github.com/ory/jsonschema/v3 v3.0.9-0.20250317235931-280c5fc7bf0e + github.com/ory/kratos-client-go v1.3.8 + github.com/ory/pop/v6 v6.3.2-0.20251203152233-a32233875f7e + github.com/ory/x v0.0.724 + github.com/parnurzeal/gorequest v0.3.0 github.com/pborman/uuid v1.2.1 github.com/pkg/errors v0.9.1 - github.com/prometheus/client_golang v1.13.0 - github.com/rs/cors v1.8.2 - github.com/sawadashota/encrypta v0.0.2 - github.com/sirupsen/logrus v1.9.0 - github.com/spf13/cobra v1.6.1 - github.com/spf13/pflag v1.0.5 - github.com/stretchr/testify v1.8.1 - github.com/tidwall/gjson v1.14.3 + github.com/prometheus/client_golang v1.23.0 + github.com/rs/cors v1.11.1 + github.com/sawadashota/encrypta v0.0.5 + github.com/sirupsen/logrus v1.9.3 + 
github.com/spf13/cobra v1.10.1 + github.com/spf13/pflag v1.0.10 + github.com/stretchr/testify v1.11.1 + github.com/tidwall/gjson v1.18.0 github.com/tidwall/sjson v1.2.5 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 github.com/toqueteos/webbrowser v1.2.0 - github.com/twmb/murmur3 v1.1.6 + github.com/twmb/murmur3 v1.1.8 github.com/urfave/negroni v1.0.0 - go.opentelemetry.io/otel v1.11.1 - go.step.sm/crypto v0.16.2 - go.uber.org/automaxprocs v1.3.0 - golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783 - golang.org/x/tools v0.2.0 - gopkg.in/DataDog/dd-trace-go.v1 v1.43.0 - gopkg.in/square/go-jose.v2 v2.6.0 + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 + go.opentelemetry.io/otel v1.38.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0 + go.opentelemetry.io/otel/sdk v1.38.0 + go.opentelemetry.io/otel/trace v1.38.0 + go.uber.org/automaxprocs v1.6.0 + go.uber.org/mock v0.5.2 + golang.org/x/crypto v0.45.0 + golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 + golang.org/x/net v0.47.0 + golang.org/x/oauth2 v0.34.0 + golang.org/x/sync v0.18.0 + golang.org/x/text v0.31.0 ) -require github.com/hashicorp/go-cleanhttp v0.5.2 // indirect - require ( - filippo.io/edwards25519 v1.0.0-rc.1 // indirect - github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect - github.com/DataDog/datadog-agent/pkg/obfuscate v0.39.0 // indirect - github.com/DataDog/datadog-go/v5 v5.1.1 // indirect - github.com/DataDog/sketches-go v1.4.1 // indirect + code.dny.dev/ssrf v0.2.0 // indirect + dario.cat/mergo v1.0.2 // indirect + filippo.io/edwards25519 v1.1.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect github.com/Masterminds/goutils v1.1.1 // indirect - github.com/Masterminds/semver/v3 v3.1.1 // indirect - github.com/Masterminds/sprig/v3 v3.2.2 // indirect - github.com/Microsoft/go-winio v0.6.0 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/Masterminds/sprig/v3 v3.3.0 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect - github.com/armon/go-radix v1.0.0 // indirect - github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect - github.com/avast/retry-go/v4 v4.3.0 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 // indirect + github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f // indirect + github.com/ProtonMail/gopenpgp/v2 v2.7.5 // indirect + github.com/XSAM/otelsql v0.39.0 // indirect + github.com/a8m/envsubst v1.4.2 // indirect + github.com/alecthomas/participle/v2 v2.1.1 // indirect + github.com/avast/retry-go/v4 v4.6.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/cenkalti/backoff/v4 v4.1.3 // indirect - github.com/cespare/xxhash/v2 v2.1.2 // indirect - github.com/cockroachdb/cockroach-go/v2 v2.2.16 // 
indirect - github.com/containerd/continuity v0.3.0 // indirect - github.com/cristalhq/jwt/v4 v4.0.2 // indirect - github.com/dave/jennifer v1.4.0 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/dgraph-io/ristretto v0.1.1 // indirect - github.com/docker/cli v20.10.21+incompatible // indirect - github.com/docker/distribution v2.8.1+incompatible // indirect - github.com/docker/docker v20.10.21+incompatible // indirect - github.com/docker/go-connections v0.4.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cenkalti/backoff/v5 v5.0.2 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cloudflare/circl v1.6.1 // indirect + github.com/cockroachdb/cockroach-go/v2 v2.4.1 // indirect + github.com/containerd/continuity v0.4.5 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect + github.com/dimchansky/utfbom v1.1.1 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/cli v28.3.3+incompatible // indirect + github.com/docker/docker v28.3.3+incompatible // indirect + github.com/docker/go-connections v0.6.0 // indirect github.com/docker/go-units v0.5.0 // indirect - github.com/dustin/go-humanize v1.0.0 // indirect - github.com/ecordell/optgen v0.0.6 // indirect - github.com/elastic/go-licenser v0.4.1 // indirect - github.com/elastic/go-sysinfo v1.8.1 // indirect - github.com/elastic/go-windows v1.0.1 // indirect - github.com/elliotchance/orderedmap v1.4.0 // indirect - github.com/evanphx/json-patch v5.6.0+incompatible // indirect - github.com/fatih/color v1.13.0 // indirect - github.com/felixge/fgprof v0.9.3 // indirect - github.com/felixge/httpsnoop v1.0.3 // indirect - github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/go-logr/logr v1.2.3 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/elazarl/goproxy v1.7.2 // indirect + github.com/elliotchance/orderedmap v1.7.1 // indirect + github.com/evanphx/json-patch/v5 v5.9.11 // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/felixge/fgprof v0.9.5 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/go-openapi/analysis v0.21.4 // indirect - github.com/go-openapi/errors v0.20.3 // indirect - github.com/go-openapi/inflect v0.19.0 // indirect - github.com/go-openapi/jsonpointer v0.19.5 // indirect - github.com/go-openapi/jsonreference v0.20.0 // indirect - github.com/go-openapi/loads v0.21.2 // indirect - 
github.com/go-openapi/runtime v0.24.2 // indirect - github.com/go-openapi/spec v0.20.7 // indirect - github.com/go-openapi/strfmt v0.21.3 // indirect - github.com/go-openapi/swag v0.22.3 // indirect - github.com/go-openapi/validate v0.22.0 // indirect - github.com/go-sql-driver/mysql v1.6.0 // indirect + github.com/go-openapi/analysis v0.23.0 // indirect + github.com/go-openapi/errors v0.22.2 // indirect + github.com/go-openapi/inflect v0.21.0 // indirect + github.com/go-openapi/jsonpointer v0.21.2 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/loads v0.22.0 // indirect + github.com/go-openapi/runtime v0.28.0 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/strfmt v0.23.0 // indirect + github.com/go-openapi/swag v0.23.1 // indirect + github.com/go-openapi/validate v0.24.0 // indirect + github.com/go-sql-driver/mysql v1.9.3 // indirect + github.com/go-swagger/go-swagger v0.31.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobuffalo/envy v1.10.2 // indirect github.com/gobuffalo/fizz v1.14.4 // indirect - github.com/gobuffalo/flect v0.3.0 // indirect - github.com/gobuffalo/github_flavored_markdown v1.1.3 // indirect - github.com/gobuffalo/helpers v0.6.7 // indirect + github.com/gobuffalo/flect v1.0.3 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.10 // indirect github.com/gobuffalo/nulls v0.4.2 // indirect - github.com/gobuffalo/plush/v4 v4.1.16 // indirect + github.com/gobuffalo/plush/v4 v4.1.22 // indirect + github.com/gobuffalo/plush/v5 v5.0.7 // indirect github.com/gobuffalo/tags/v3 v3.1.4 // indirect github.com/gobuffalo/validate/v3 v3.3.3 // indirect - github.com/goccy/go-yaml v1.9.6 // indirect - github.com/gofrs/flock v0.8.1 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/goccy/go-yaml v1.18.0 // indirect + github.com/gofrs/flock v0.12.1 // indirect + github.com/gogo/googleapis v1.4.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/glog v1.0.0 // indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/google/pprof v0.0.0-20221010195024-131d412537ea // indirect + github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect - github.com/gorilla/css v1.0.0 // indirect - github.com/gorilla/handlers v1.5.1 // indirect - github.com/gorilla/websocket v1.5.0 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.12.0 // indirect + github.com/gorilla/css v1.0.1 // indirect + github.com/gorilla/handlers v1.5.2 // indirect + github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect + 
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/huandu/xstrings v1.3.2 // indirect - github.com/imdario/mergo v0.3.13 // indirect - github.com/inconshreveable/mousetrap v1.0.1 // indirect + github.com/huandu/xstrings v1.5.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf // indirect - github.com/instana/go-sensor v1.46.0 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect - github.com/jackc/pgconn v1.13.0 // indirect + github.com/jackc/pgconn v1.14.3 // indirect github.com/jackc/pgio v1.0.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgproto3/v2 v2.3.1 // indirect - github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect - github.com/jackc/pgtype v1.12.0 // indirect - github.com/jandelgado/gcov2lcov v1.0.5 // indirect - github.com/jcchavezs/porto v0.4.0 // indirect - github.com/jessevdk/go-flags v1.5.0 // indirect - github.com/jinzhu/copier v0.3.5 // indirect - github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 // indirect - github.com/joho/godotenv v1.4.0 // indirect + github.com/jackc/pgproto3/v2 v2.3.3 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/jaegertracing/jaeger-idl v0.5.0 // indirect + github.com/jessevdk/go-flags v1.6.1 // indirect + github.com/jinzhu/copier v0.4.0 // indirect + github.com/jmoiron/sqlx v1.4.0 // indirect + github.com/joho/godotenv v1.5.1 // indirect github.com/josharian/intern v1.0.0 // indirect + github.com/julienschmidt/httprouter v1.3.1-0.20240130105656-484018016424 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect - github.com/knadh/koanf v1.4.4 // indirect - github.com/kr/pretty v0.3.0 // indirect + github.com/knadh/koanf/maps v0.1.2 // indirect + github.com/knadh/koanf/parsers/json v0.1.0 // indirect + github.com/knadh/koanf/parsers/toml v0.1.0 // indirect + github.com/knadh/koanf/parsers/yaml v0.1.0 // indirect + github.com/knadh/koanf/providers/posflag v0.1.0 // indirect + github.com/knadh/koanf/v2 v2.2.2 // indirect + github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect - github.com/lib/pq v1.10.7 // indirect - github.com/looplab/fsm v0.3.0 // indirect - github.com/magiconair/properties v1.8.6 // indirect - github.com/mailru/easyjson v0.7.7 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.16 // indirect - github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect - github.com/mattn/goveralls v0.0.11 // indirect - 
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect - github.com/microcosm-cc/bluemonday v1.0.21 // indirect + github.com/lib/pq v1.10.9 // indirect + github.com/mailru/easyjson v0.9.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-sqlite3 v1.14.32 // indirect + github.com/microcosm-cc/bluemonday v1.0.27 // indirect + github.com/mikefarah/yq/v4 v4.45.1 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect - github.com/nyaruka/phonenumbers v1.1.1 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/term v0.5.2 // indirect + github.com/moul/http2curl v1.0.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/nyaruka/phonenumbers v1.6.5 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc2 // indirect - github.com/opencontainers/runc v1.1.4 // indirect - github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492 // indirect - github.com/opentracing/opentracing-go v1.2.0 // indirect - github.com/openzipkin-contrib/zipkin-go-opentracing v0.5.0 // indirect - github.com/openzipkin/zipkin-go v0.4.1 // indirect - github.com/ory/dockertest/v3 v3.9.1 // indirect - github.com/ory/go-convenience v0.1.0 // indirect - github.com/ory/viper v1.7.5 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/opencontainers/runc v1.3.3 // indirect + github.com/openzipkin/zipkin-go v0.4.3 // indirect + github.com/ory/dockertest/v3 v3.12.0 // indirect github.com/pelletier/go-toml v1.9.5 // indirect - github.com/pelletier/go-toml/v2 v2.0.1 // indirect - github.com/philhofer/fwd v1.1.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect + github.com/peterhellberg/link v1.2.0 // indirect github.com/pkg/profile v1.7.0 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect - github.com/prometheus/common v0.37.0 // indirect - github.com/prometheus/procfs v0.8.0 // indirect - github.com/rogpeppe/go-internal v1.9.0 // indirect - github.com/santhosh-tekuri/jsonschema v1.2.4 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.65.0 // indirect + github.com/prometheus/procfs v0.17.0 // indirect + 
github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/sagikazarmark/locafero v0.4.0 // indirect + github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761 // indirect - github.com/segmentio/backo-go v1.0.1 // indirect - github.com/sergi/go-diff v1.2.0 // indirect - github.com/shopspring/decimal v1.3.1 // indirect + github.com/segmentio/backo-go v1.1.0 // indirect + github.com/sergi/go-diff v1.4.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect + github.com/smartystreets/goconvey v1.8.1 // indirect github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/conc v0.3.0 // indirect github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect - github.com/spf13/afero v1.9.2 // indirect - github.com/spf13/cast v1.5.0 // indirect - github.com/spf13/jwalterweatherman v1.1.0 // indirect - github.com/spf13/viper v1.12.0 // indirect - github.com/subosito/gotenv v1.4.1 // indirect + github.com/spf13/afero v1.11.0 // indirect + github.com/spf13/cast v1.9.2 // indirect + github.com/spf13/viper v1.18.2 // indirect + github.com/ssoready/hyrumtoken v1.0.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect github.com/thales-e-security/pool v0.0.2 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect - github.com/timtadh/data-structures v0.5.3 // indirect - github.com/timtadh/lexmachine v0.2.2 // indirect - github.com/tinylib/msgp v1.1.6 // indirect - github.com/uber/jaeger-client-go v2.30.0+incompatible // indirect - github.com/uber/jaeger-lib v2.4.1+incompatible // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect - go.elastic.co/apm v1.15.0 // indirect - go.elastic.co/apm/module/apmhttp v1.15.0 // indirect - go.elastic.co/apm/module/apmot v1.15.0 // indirect - go.elastic.co/fastjson v1.1.0 // indirect - go.mongodb.org/mongo-driver v1.10.3 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.36.4 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.36.4 // indirect - go.opentelemetry.io/contrib/propagators/b3 v1.11.1 // indirect - go.opentelemetry.io/contrib/propagators/jaeger v1.11.1 // indirect - go.opentelemetry.io/contrib/samplers/jaegerremote v0.5.2 // indirect - go.opentelemetry.io/otel/bridge/opentracing v1.11.1 // indirect - go.opentelemetry.io/otel/exporters/jaeger v1.11.1 // indirect - go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.11.1 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.9.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.9.0 // indirect - go.opentelemetry.io/otel/exporters/zipkin v1.11.1 // indirect - 
go.opentelemetry.io/otel/metric v0.33.0 // indirect - go.opentelemetry.io/otel/sdk v1.11.1 // indirect - go.opentelemetry.io/otel/trace v1.11.1 // indirect - go.opentelemetry.io/proto/otlp v0.18.0 // indirect - go.uber.org/atomic v1.10.0 // indirect - golang.org/x/crypto v0.1.0 // indirect - golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect - golang.org/x/mod v0.6.0 // indirect - golang.org/x/net v0.1.0 // indirect - golang.org/x/sync v0.1.0 // indirect - golang.org/x/sys v0.1.0 // indirect - golang.org/x/text v0.4.0 // indirect - golang.org/x/time v0.1.0 // indirect - golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect - google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71 // indirect - google.golang.org/grpc v1.50.1 // indirect - google.golang.org/protobuf v1.28.1 // indirect + github.com/yuin/gopher-lua v1.1.1 // indirect + go.mongodb.org/mongo-driver v1.17.4 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0 // indirect + go.opentelemetry.io/contrib/propagators/b3 v1.37.0 // indirect + go.opentelemetry.io/contrib/propagators/jaeger v1.37.0 // indirect + go.opentelemetry.io/contrib/samplers/jaegerremote v0.31.0 // indirect + go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 // indirect + go.opentelemetry.io/otel/exporters/zipkin v1.37.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.opentelemetry.io/proto/otlp v1.7.1 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect + golang.org/x/tools v0.38.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250811230008-5f3141c8851a // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a // indirect + google.golang.org/grpc v1.74.2 // indirect + google.golang.org/protobuf v1.36.9 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - howett.net/plist v1.0.0 // indirect +) + +tool ( + github.com/go-swagger/go-swagger/cmd/swagger + github.com/golang/mock/mockgen + github.com/mikefarah/yq/v4 + golang.org/x/tools/cmd/goimports ) diff --git a/go.sum b/go.sum index db4ff29fbfc..316c6b94896 100644 --- a/go.sum +++ b/go.sum @@ -1,1053 +1,511 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= 
-cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -filippo.io/edwards25519 v1.0.0-rc.1 h1:m0VOOB23frXZvAOK44usCgLWvtsxIoMCTBGJZlpmGfU= -filippo.io/edwards25519 v1.0.0-rc.1/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DataDog/datadog-agent/pkg/obfuscate v0.39.0 h1:kXdXGOYeH2pKa3tdNGdrixT2tnXrJ4xLE9WypL4X7HE= -github.com/DataDog/datadog-agent/pkg/obfuscate v0.39.0/go.mod h1:MxVcCIC42tBIjPm93BHdh9/vw2LivRiptj3HygI+GGQ= 
-github.com/DataDog/datadog-go/v5 v5.1.0/go.mod h1:KhiYb2Badlv9/rofz+OznKoEF5XKTonWyhx5K83AP8E= -github.com/DataDog/datadog-go/v5 v5.1.1 h1:JLZ6s2K1pG2h9GkvEvMdEGqMDyVLEAccdX5TltWcLMU= -github.com/DataDog/datadog-go/v5 v5.1.1/go.mod h1:KhiYb2Badlv9/rofz+OznKoEF5XKTonWyhx5K83AP8E= -github.com/DataDog/sketches-go v1.4.1 h1:j5G6as+9FASM2qC36lvpvQAj9qsv/jUs3FtO8CwZNAY= -github.com/DataDog/sketches-go v1.4.1/go.mod h1:xJIXldczJyyjnbDop7ZZcLxJdV3+7Kra7H1KMgpgkLk= -github.com/HdrHistogram/hdrhistogram-go v1.1.2 h1:5IcZpTvzydCQeHzK4Ef/D5rrSqwxob0t8PQPMybUNFM= +code.dny.dev/ssrf v0.2.0 h1:wCBP990rQQ1CYfRpW+YK1+8xhwUjv189AQ3WMo1jQaI= +code.dny.dev/ssrf v0.2.0/go.mod h1:B+91l25OnyaLIeCx0WRJN5qfJ/4/ZTZxRXgm0lj/2w8= +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/Masterminds/sprig/v3 v3.2.2 h1:17jRggJu518dr3QaafizSXOjKYp94wKfABxUmyxvxX8= -github.com/Masterminds/sprig/v3 v3.2.2/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= -github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= -github.com/Microsoft/go-winio v0.5.1/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= -github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= -github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs= +github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= 
-github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/ThalesIgnite/crypto11 v1.2.4 h1:3MebRK/U0mA2SmSthXAIZAdUA9w8+ZuKem2O6HuR1f8= -github.com/ThalesIgnite/crypto11 v1.2.4/go.mod h1:ILDKtnCKiQ7zRoNxcp36Y1ZR8LBPmR2E23+wTQe/MlE= -github.com/aeneasr/cupaloy/v2 v2.6.1-0.20210924214125-3dfdd01210a3 h1:/SkiUr3JJzun9QN9cpUVCPri2ZwOFJ3ani+F3vdoCiY= -github.com/aeneasr/cupaloy/v2 v2.6.1-0.20210924214125-3dfdd01210a3/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= -github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= -github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ= -github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= -github.com/avast/retry-go/v4 v4.3.0 h1:cqI48aXx0BExKoM7XPklDpoHAg7/srPPLAfWG5z62jo= -github.com/avast/retry-go/v4 v4.3.0/go.mod h1:bqOlT4nxk4phk9buiQFaghzjpqdchOSwPgjdfdQBtdg= -github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4= -github.com/aws/aws-sdk-go-v2/config v1.8.3/go.mod h1:4AEiLtAb8kLs7vgw2ZV3p2VZ1+hBavOc84hqxVNpCyw= -github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.6.0/go.mod h1:gqlclDEZp4aqJOancXK6TN24aKhT0W0Ae9MHk3wzTMM= -github.com/aws/aws-sdk-go-v2/internal/ini v1.2.4/go.mod h1:ZcBrrI3zBKlhGFNYWvju0I3TR93I7YIgAfy82Fh4lcQ= 
-github.com/aws/aws-sdk-go-v2/service/appconfig v1.4.2/go.mod h1:FZ3HkCe+b10uFZZkFdvf98LHW21k49W8o8J366lqVKY= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8= -github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk= -github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g= -github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= +github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs= +github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f h1:tCbYj7/299ekTTXpdwKYF8eBlsYsDVoggDAuAjoK66k= +github.com/ProtonMail/go-mime v0.0.0-20230322103455-7d82a3887f2f/go.mod h1:gcr0kNtGBqin9zDW9GOHcVntrwnjrK+qdJ06mWYBybw= +github.com/ProtonMail/gopenpgp/v2 v2.7.5 h1:STOY3vgES59gNgoOt2w0nyHBjKViB/qSg7NjbQWPJkA= +github.com/ProtonMail/gopenpgp/v2 v2.7.5/go.mod h1:IhkNEDaxec6NyzSI0PlxapinnwPVIESk8/76da3Ct3g= +github.com/ThalesGroup/crypto11 v1.4.1 h1:6YR6aVL8LI8akReXKTEgxf+k0+b8wlV8Ra7tZnCG9y4= +github.com/ThalesGroup/crypto11 v1.4.1/go.mod h1:vggvBwlVrqePDrooq/B32dMXlfEsdsFY+6YlSD7VOy0= +github.com/XSAM/otelsql v0.39.0 h1:4o374mEIMweaeevL7fd8Q3C710Xi2Jh/c8G4Qy9bvCY= +github.com/XSAM/otelsql v0.39.0/go.mod h1:uMOXLUX+wkuAuP0AR3B45NXX7E9lJS2mERa8gqdU8R0= +github.com/a8m/envsubst v1.4.2 h1:4yWIHXOLEJHQEFd4UjrWDrYeYlV7ncFWJOCBRLOZHQg= +github.com/a8m/envsubst v1.4.2/go.mod h1:MVUTQNGQ3tsjOOtKCNd+fl8RzhsXcDvvAEzkhGtlsbY= +github.com/alecthomas/assert/v2 v2.3.0 h1:mAsH2wmvjsuvyBvAmCtm7zFsBlb8mIHx5ySLVdDZXL0= +github.com/alecthomas/assert/v2 v2.3.0/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= +github.com/alecthomas/participle/v2 v2.1.1 h1:hrjKESvSqGHzRb4yW1ciisFJ4p3MGYih6icjJvbsmV8= +github.com/alecthomas/participle/v2 v2.1.1/go.mod h1:Y1+hAs8DHPmc3YUFzqllV+eSQ9ljPTk0ZkPMtEdAx2c= +github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= +github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/avast/retry-go/v4 v4.6.1 h1:VkOLRubHdisGrHnTu89g08aQEWEgRU7LVEop3GbIcMk= +github.com/avast/retry-go/v4 v4.6.1/go.mod h1:V6oF8njAwxJ5gRo1Q7Cxab24xs5NCWZBeaHHBklR8mA= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 
h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= -github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= -github.com/bxcodec/faker/v3 v3.7.0 h1:qWAFFwcyVS0ukF0UoJju1wBLO0cuPQ7JdVBPggM8kNo= -github.com/bxcodec/faker/v3 v3.7.0/go.mod h1:gF31YgnMSMKgkvl+fyEo1xuSMbEuieyqfeslGYFjneM= +github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M= +github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= +github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/cenkalti/backoff/v3 v3.2.2 h1:cfUAAO3yvKMYKPrvhDuHSwQnhZNk/RMHKdZqKTxfm6M= github.com/cenkalti/backoff/v3 v3.2.2/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= -github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= -github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cenkalti/backoff/v5 v5.0.2 h1:rIfFVxEf1QsI7E1ZHfp/B4DF/6QBAUhmgkxc0H7Zss8= +github.com/cenkalti/backoff/v5 v5.0.2/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= +github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= +github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= 
+github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/cilium/ebpf v0.7.0/go.mod h1:/oI2+1shJiTGAMgl6/RgJr36Eo1jzrRcAWbcXO2usCA= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= -github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= -github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c/go.mod h1:XGLbWH/ujMcbPbhZq52Nv6UrCghb1yGn//133kEsvDk= -github.com/cockroachdb/cockroach-go/v2 v2.2.16 h1:t9dmZuC9J2W8IDQDSIGXmP+fBuEJSsrGXxWQz4cYqBY= -github.com/cockroachdb/cockroach-go/v2 v2.2.16/go.mod h1:xZ2VHjUEb/cySv0scXBx7YsBnHtLHkR1+w/w73b5i3M= -github.com/codegangsta/negroni v1.0.0/go.mod h1:v0y3T5G7Y1UlFfyxFn/QLRU4a2EuNau2iZY63YTKWo0= -github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= -github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= -github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= -github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/coreos/pkg 
v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= +github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/cockroachdb/cockroach-go/v2 v2.4.1 h1:ACVT/zXsuK6waRPVYtDQpsM8pPA7IA/3fkgA02RR/Gw= +github.com/cockroachdb/cockroach-go/v2 v2.4.1/go.mod h1:9U179XbCx4qFWtNhc7BiWLPfuyMVQ7qdAhfrwLz1vH0= +github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4= +github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= -github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/cristalhq/jwt/v4 v4.0.2 h1:g/AD3h0VicDamtlM70GWGElp8kssQEv+5wYd7L9WOhU= github.com/cristalhq/jwt/v4 v4.0.2/go.mod h1:HnYraSNKDRag1DZP92rYHyrjyQHnVEHPNqesmzs+miQ= -github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= -github.com/dave/jennifer v1.4.0 h1:tNJFJmLDVTLu+v05mVZ88RINa3vQqnyyWkTKWYz0CwE= -github.com/dave/jennifer v1.4.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 
-github.com/dgraph-io/ristretto v0.0.1/go.mod h1:T40EBc7CJke8TkpiYfGGKAeFjSaxuFXhuXRyumBd6RE= -github.com/dgraph-io/ristretto v0.0.2/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= -github.com/dgraph-io/ristretto v0.0.3/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= -github.com/dgraph-io/ristretto v0.1.0/go.mod h1:fux0lOrBhrVCJd3lcTHsIJhq1T2rokOu6v9Vcb3Q9ug= -github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= -github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/docker/cli v20.10.21+incompatible h1:qVkgyYUnOLQ98LtXBrwd/duVqPT2X4SHndOuGsfwyhU= -github.com/docker/cli v20.10.21+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68= -github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v20.10.21+incompatible h1:UTLdBmHk3bEY+w8qeO5KttOhy6OmXWsl/FEet9Uswog= -github.com/docker/docker v20.10.21+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgraph-io/ristretto v1.0.0 h1:SYG07bONKMlFDUYu5pEu3DGAh8c2OFNzKm6G9J4Si84= +github.com/dgraph-io/ristretto v1.0.0/go.mod h1:jTi2FiYEhQ1NsMmA7DeBykizjOuY88NhKBkepyu1jPc= +github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM= +github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/cli v28.3.3+incompatible h1:fp9ZHAr1WWPGdIWBM1b3zLtgCF+83gRdVMTJsUeiyAo= +github.com/docker/cli 
v28.3.3+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI= +github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/dustin/go-humanize v0.0.0-20180713052910-9f541cc9db5d/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/ecordell/optgen v0.0.6 h1:aSknPe6ZUBrjwHGp2+6XfmfCGYGD6W0ZDfCmmsrS7s4= -github.com/ecordell/optgen v0.0.6/go.mod h1:bAPkLVWcBlTX5EkXW0UTPRj3+yjq2I6VLgH8OasuQEM= -github.com/elastic/go-licenser v0.3.1/go.mod h1:D8eNQk70FOCVBl3smCGQt/lv7meBeQno2eI1S5apiHQ= -github.com/elastic/go-licenser v0.4.1 h1:1xDURsc8pL5zYT9R29425J3vkHdt4RT5TNEMeRN48x4= -github.com/elastic/go-licenser v0.4.1/go.mod h1:V56wHMpmdURfibNBggaSBfqgPxyT1Tldns1i87iTEvU= -github.com/elastic/go-sysinfo v1.1.1/go.mod h1:i1ZYdU10oLNfRzq4vq62BEwD2fH8KaWh6eh0ikPT9F0= -github.com/elastic/go-sysinfo v1.8.1 h1:4Yhj+HdV6WjbCRgGdZpPJ8lZQlXZLKDAeIkmQ/VRvi4= -github.com/elastic/go-sysinfo v1.8.1/go.mod h1:JfllUnzoQV/JRYymbH3dO1yggI3mV2oTKSXsDHM+uIM= -github.com/elastic/go-windows v1.0.0/go.mod h1:TsU0Nrp7/y3+VwE82FoZF8gC/XFg/Elz6CcloAxnPgU= -github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0= -github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss= -github.com/elliotchance/orderedmap v1.4.0 h1:wZtfeEONCbx6in1CZyE6bELEt/vFayMvsxqI5SgsR+A= -github.com/elliotchance/orderedmap v1.4.0/go.mod h1:wsDwEaX5jEoyhbs7x93zk2H/qv0zwuhg4inXhDkYqys= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= -github.com/envoyproxy/protoc-gen-validate 
v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U= -github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= +github.com/elliotchance/orderedmap v1.7.1 h1:8SR2DB391dw0HVI9572ElrY+KU0Q89OCXYwWZx7aAZc= +github.com/elliotchance/orderedmap v1.7.1/go.mod h1:wsDwEaX5jEoyhbs7x93zk2H/qv0zwuhg4inXhDkYqys= +github.com/evanphx/json-patch/v5 v5.9.11 h1:/8HVnzMq13/3x9TPvjG08wUGqBTmZBsCWzjTM0wiaDU= +github.com/evanphx/json-patch/v5 v5.9.11/go.mod h1:3j+LviiESTElxA4p3EMKAB9HXj3/XEtnUf6OZxqIQTM= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= -github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= -github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= -github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY= +github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM= +github.com/felixge/httpsnoop v1.0.4 
h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-bindata/go-bindata v3.1.2+incompatible h1:5vjJMVhowQdPzjE1LdxyFF7YFTXg5IgGVW4gBr5IbvE= -github.com/go-bindata/go-bindata v3.1.2+incompatible/go.mod h1:xK8Dsgwmeed+BBsSy2XTopBn/8uK2HWuGSnA11C3Joo= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= -github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-faker/faker/v4 v4.6.0 h1:6aOPzNptRiDwD14HuAnEtlTa+D1IfFuEHO8+vEFwjTs= +github.com/go-faker/faker/v4 v4.6.0/go.mod h1:ZmrHuVtTTm2Em9e0Du6CJ9CADaLEzGXW62z1YqFH0m0= +github.com/go-jose/go-jose/v3 v3.0.4 h1:Wp5HA7bLQcKnf6YYao/4kpRpVMp/yf6+pJKV8WFSaNY= +github.com/go-jose/go-jose/v3 v3.0.4/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-openapi/analysis v0.21.2/go.mod h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY= 
-github.com/go-openapi/analysis v0.21.4 h1:ZDFLvSNxpDaomuCueM0BlSXxpANBlFYiBvr+GXrvIHc= -github.com/go-openapi/analysis v0.21.4/go.mod h1:4zQ35W4neeZTqh3ol0rv/O8JBbka9QyAgQRPp9y3pfo= -github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= -github.com/go-openapi/errors v0.20.3 h1:rz6kiC84sqNQoqrtulzaL/VERgkoCyB6WdEkc2ujzUc= -github.com/go-openapi/errors v0.20.3/go.mod h1:Z3FlZ4I8jEGxjUK+bugx3on2mIAk4txuAOhlsB1FSgk= -github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= -github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= -github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= -github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= -github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= -github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= -github.com/go-openapi/loads v0.21.1/go.mod h1:/DtAMXXneXFjbQMGEtbamCZb+4x7eGwkvZCvBmwUG+g= -github.com/go-openapi/loads v0.21.2 h1:r2a/xFIYeZ4Qd2TnGpWDIQNcP80dIaZgf704za8enro= -github.com/go-openapi/loads v0.21.2/go.mod h1:Jq58Os6SSGz0rzh62ptiu8Z31I+OTHqmULx5e/gJbNw= -github.com/go-openapi/runtime v0.24.2 h1:yX9HMGQbz32M87ECaAhGpJjBmErO3QLcgdZj9BzGx7c= -github.com/go-openapi/runtime v0.24.2/go.mod h1:AKurw9fNre+h3ELZfk6ILsfvPN+bvvlaU/M9q/r9hpk= -github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= -github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= -github.com/go-openapi/spec v0.20.7 h1:1Rlu/ZrOCCob0n+JKKJAWhNWMPW8bOZRg8FJaY+0SKI= -github.com/go-openapi/spec v0.20.7/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= -github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg= -github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= -github.com/go-openapi/strfmt v0.21.2/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= -github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtKG7o= -github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg= -github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= -github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= -github.com/go-openapi/swag v0.22.3/go.mod 
h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= -github.com/go-openapi/validate v0.21.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg= -github.com/go-openapi/validate v0.22.0 h1:b0QecH6VslW/TxtpKgzpO1SNG7GU2FsaqKdP1E2T50Y= -github.com/go-openapi/validate v0.22.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= -github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= -github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= -github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= -github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= -github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= +github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.22.2 h1:rdxhzcBUazEcGccKqbY1Y7NS8FDcMyIRr0934jrYnZg= +github.com/go-openapi/errors v0.22.2/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= +github.com/go-openapi/inflect v0.21.0 h1:FoBjBTQEcbg2cJUWX6uwL9OyIW8eqc9k4KhN4lfbeYk= +github.com/go-openapi/inflect v0.21.0/go.mod h1:INezMuUu7SJQc2AyR3WO0DqqYUJSj8Kb4hBd7WtjlAw= +github.com/go-openapi/jsonpointer v0.21.2 h1:AqQaNADVwq/VnkCmQg6ogE+M3FOsKTytwges0JdwVuA= +github.com/go-openapi/jsonpointer v0.21.2/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= +github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/runtime v0.28.0 h1:gpPPmWSNGo214l6n8hzdXYhPuJcGtziTOgUpvsFWGIQ= +github.com/go-openapi/runtime v0.28.0/go.mod h1:QN7OzcS+XuYmkQLw05akXk0jRH/eZ3kb18+1KwW9gyc= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU= +github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0= +github.com/go-openapi/validate v0.24.0 
h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= +github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= -github.com/go-swagger/go-swagger v0.30.3 h1:HuzvdMRed/9Q8vmzVcfNBQByZVtT79DNZxZ18OprdoI= -github.com/go-swagger/go-swagger v0.30.3/go.mod h1:neDPes8r8PCz2JPvHRDj8BTULLh4VJUt7n6MpQqxhHM= -github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013 h1:l9rI6sNaZgNC0LnF3MiE+qTmyBA/tZAg1rtyrGbUMK0= -github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= -github.com/gobuffalo/attrs v1.0.3/go.mod h1:KvDJCE0avbufqS0Bw3UV7RQynESY0jjod+572ctX4t8= -github.com/gobuffalo/buffalo v0.12.8-0.20181004233540-fac9bb505aa8/go.mod h1:sLyT7/dceRXJUxSsE813JTQtA3Eb1vjxWfo/N//vXIY= -github.com/gobuffalo/buffalo-plugins v1.0.2/go.mod h1:pOp/uF7X3IShFHyobahTkTLZaeUXwb0GrUTb9ngJWTs= -github.com/gobuffalo/buffalo-plugins v1.0.4/go.mod h1:pWS1vjtQ6uD17MVFWf7i3zfThrEKWlI5+PYLw/NaDB4= -github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= -github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= -github.com/gobuffalo/envy v1.6.4/go.mod h1:Abh+Jfw475/NWtYMEt+hnJWRiC8INKWibIMyNt1w2Mc= -github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= +github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= +github.com/go-swagger/go-swagger v0.31.0 h1:H8eOYQnY2u7vNKWDNykv2xJP3pBhRG/R+SOCAmKrLlc= +github.com/go-swagger/go-swagger v0.31.0/go.mod h1:WSigRRWEig8zV6t6Sm8Y+EmUjlzA/HoaZJ5edupq7po= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobuffalo/envy v1.10.2 h1:EIi03p9c3yeuRCFPOKcSfajzkLb3hrRjEpHGI8I2Wo4= github.com/gobuffalo/envy v1.10.2/go.mod h1:qGAGwdvDsaEtPhfBzb3o0SfDea8ByGn9j8bKmVft9z8= -github.com/gobuffalo/events v1.0.3/go.mod h1:Txo8WmqScapa7zimEQIwgiJBvMECMe9gJjsKNPN3uZw= -github.com/gobuffalo/fizz v1.0.12/go.mod h1:C0sltPxpYK8Ftvf64kbsQa2yiCZY4RZviurNxXdAKwc= github.com/gobuffalo/fizz v1.14.4 h1:8uume7joF6niTNWN582IQ2jhGTUoa9g1fiV/tIoGdBs= github.com/gobuffalo/fizz v1.14.4/go.mod h1:9/2fGNXNeIFOXEEgTPJwiK63e44RjG+Nc4hfMm1ArGM= -github.com/gobuffalo/flect v0.0.0-20180907193754-dc14d8acaf9f/go.mod 
h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA= -github.com/gobuffalo/flect v0.0.0-20181002182613-4571df4b1daf/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA= -github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= -github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= -github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= -github.com/gobuffalo/flect v0.3.0 h1:erfPWM+K1rFNIQeRPdeEXxo8yFr/PO17lhRnS8FUrtk= github.com/gobuffalo/flect v0.3.0/go.mod h1:5pf3aGnsvqvCj50AVni7mJJF8ICxGZ8HomberC3pXLE= -github.com/gobuffalo/genny v0.0.0-20180924032338-7af3a40f2252/go.mod h1:tUTQOogrr7tAQnhajMSH6rv1BVev34H2sa1xNHMy94g= -github.com/gobuffalo/genny v0.0.0-20181003150629-3786a0744c5d/go.mod h1:WAd8HmjMVrnkAZbmfgH5dLBUchsZfqzp/WS5sQz+uTM= -github.com/gobuffalo/genny v0.0.0-20181005145118-318a41a134cc/go.mod h1:WAd8HmjMVrnkAZbmfgH5dLBUchsZfqzp/WS5sQz+uTM= -github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= -github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= -github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= -github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= -github.com/gobuffalo/genny/v2 v2.1.0/go.mod h1:4yoTNk4bYuP3BMM6uQKYPvtP6WsXFGm2w2EFYZdRls8= -github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= -github.com/gobuffalo/github_flavored_markdown v1.0.4/go.mod h1:uRowCdK+q8d/RF0Kt3/DSalaIXbb0De/dmTqMQdkQ4I= -github.com/gobuffalo/github_flavored_markdown v1.0.5/go.mod h1:U0643QShPF+OF2tJvYNiYDLDGDuQmJZXsf/bHOJPsMY= -github.com/gobuffalo/github_flavored_markdown v1.1.3 h1:rSMPtx9ePkFB22vJ+dH+m/EUBS8doQ3S8LeEXcdwZHk= +github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= +github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= github.com/gobuffalo/github_flavored_markdown v1.1.3/go.mod h1:IzgO5xS6hqkDmUh91BW/+Qxo/qYnvfzoz3A7uLkg77I= -github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= -github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= -github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= -github.com/gobuffalo/helpers v0.6.7 h1:C9CedoRSfgWg2ZoIkVXgjI5kgmSpL34Z3qdnzpfNVd8= +github.com/gobuffalo/github_flavored_markdown v1.1.4 h1:WacrEGPXUDX+BpU1GM/Y0ADgMzESKNWls9hOTG1MHVs= +github.com/gobuffalo/github_flavored_markdown v1.1.4/go.mod h1:Vl9686qrVVQou4GrHRK/KOG3jCZOKLUqV8MMOAYtlso= github.com/gobuffalo/helpers v0.6.7/go.mod h1:j0u1iC1VqlCaJEEVkZN8Ia3TEzfj/zoXANqyJExTMTA= -github.com/gobuffalo/here v0.6.7 h1:hpfhh+kt2y9JLDfhYUxxCRxQol540jsVfKUZzjlbp8o= +github.com/gobuffalo/helpers v0.6.10 h1:puKDCOrJ0EIq5ScnTRgKyvEZ05xQa+gwRGCpgoh6Ek8= +github.com/gobuffalo/helpers v0.6.10/go.mod 
h1:r52L6VSnByLJFOmURp1irvzgSakk7RodChi1YbGwk8I= github.com/gobuffalo/httptest v1.5.2 h1:GpGy520SfY1QEmyPvaqmznTpG4gEQqQ82HtHqyNEreM= -github.com/gobuffalo/licenser v0.0.0-20180924033006-eae28e638a42/go.mod h1:Ubo90Np8gpsSZqNScZZkVXXAo5DGhTb+WYFIjlnog8w= -github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= -github.com/gobuffalo/logger v1.0.0/go.mod h1:2zbswyIUa45I+c+FLXuWl9zSWEiVuthsk8ze5s8JvPs= -github.com/gobuffalo/logger v1.0.7/go.mod h1:u40u6Bq3VVvaMcy5sRBclD8SXhBYPS0Qk95ubt+1xJM= -github.com/gobuffalo/makr v1.1.5/go.mod h1:Y+o0btAH1kYAMDJW/TX3+oAXEu0bmSLLoC9mIFxtzOw= -github.com/gobuffalo/mapi v1.0.0/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= -github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= -github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/httptest v1.5.2/go.mod h1:FA23yjsWLGj92mVV74Qtc8eqluc11VqcWr8/C1vxt4g= github.com/gobuffalo/nulls v0.4.2 h1:GAqBR29R3oPY+WCC7JL9KKk9erchaNuV6unsOSZGQkw= github.com/gobuffalo/nulls v0.4.2/go.mod h1:EElw2zmBYafU2R9W4Ii1ByIj177wA/pc0JdjtD0EsH8= -github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= -github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= -github.com/gobuffalo/packd v0.3.0/go.mod h1:zC7QkmNkYVGKPw4tHpBQ+ml7W/3tIebgeo1b36chA3Q= -github.com/gobuffalo/packd v1.0.2/go.mod h1:sUc61tDqGMXON80zpKGp92lDb86Km28jfvX7IAyxFT8= -github.com/gobuffalo/packr v1.30.1/go.mod h1:ljMyFO2EcrnzsHsN99cvbq055Y9OhRrIaviy289eRuk= -github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= -github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= -github.com/gobuffalo/packr/v2 v2.5.1/go.mod h1:8f9c96ITobJlPzI44jj+4tHnEKNt0xXWSVlXRN9X1Iw= -github.com/gobuffalo/plush v3.7.16+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI= -github.com/gobuffalo/plush/v4 v4.1.16 h1:Y6jVVTLdg1BxRXDIbTJz+J8QRzEAtv5ZwYpGdIFR7VU= github.com/gobuffalo/plush/v4 v4.1.16/go.mod h1:6t7swVsarJ8qSLw1qyAH/KbrcSTwdun2ASEQkOznakg= -github.com/gobuffalo/pop v4.8.2+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg= -github.com/gobuffalo/pop/v6 v6.0.8 h1:9+5ShHYh3x9NDFCITfm/gtKDDRSgOwiY7kA0Hf7N9aQ= -github.com/gobuffalo/pop/v6 v6.0.8/go.mod h1:f4JQ4Zvkffcevz+t+XAwBLStD7IQs19DiIGIDFYw1eA= -github.com/gobuffalo/release v1.0.35/go.mod h1:VtHFAKs61vO3wboCec5xr9JPTjYyWYcvaM3lclkc4x4= -github.com/gobuffalo/release v1.0.38/go.mod h1:VtHFAKs61vO3wboCec5xr9JPTjYyWYcvaM3lclkc4x4= -github.com/gobuffalo/shoulders v1.0.1/go.mod h1:V33CcVmaQ4gRUmHKwq1fiTXuf8Gp/qjQBUL5tHPmvbA= -github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= -github.com/gobuffalo/tags v2.0.11+incompatible/go.mod h1:9XmhOkyaB7UzvuY4UoZO4s67q8/xRMVJEaakauVQYeY= +github.com/gobuffalo/plush/v4 v4.1.22 h1:bPQr5PsiTg54UGMsfvnIAvFmUfxzD/ri+wbpu7PlmTM= 
+github.com/gobuffalo/plush/v4 v4.1.22/go.mod h1:WiKHJx3qBvfaDVlrv8zT7NCd3dEMaVR/fVxW4wqV17M= +github.com/gobuffalo/plush/v5 v5.0.7 h1:nI8sIt5tZAN2tCZHeaXkH7HAvxvvk3sJHG2TtrKeSHM= +github.com/gobuffalo/plush/v5 v5.0.7/go.mod h1:C08u/VEqzzPBXFF/yqs40P/5Cvc/zlZsMzhCxXyWJmU= github.com/gobuffalo/tags/v3 v3.1.4 h1:X/ydLLPhgXV4h04Hp2xlbI2oc5MDaa7eub6zw8oHjsM= github.com/gobuffalo/tags/v3 v3.1.4/go.mod h1:ArRNo3ErlHO8BtdA0REaZxijuWnWzF6PUXngmMXd2I0= -github.com/gobuffalo/uuid v2.0.3+incompatible/go.mod h1:ErhIzkRhm0FtRuiE/PeORqcw4cVi1RtSpnwYrxuvkfE= -github.com/gobuffalo/validate v2.0.3+incompatible/go.mod h1:N+EtDe0J8252BgfzQUChBgfd6L93m9weay53EWFVsMM= github.com/gobuffalo/validate/v3 v3.3.3 h1:o7wkIGSvZBYBd6ChQoLxkz2y1pfmhbI4jNJYh6PuNJ4= github.com/gobuffalo/validate/v3 v3.3.3/go.mod h1:YC7FsbJ/9hW/VjQdmXPvFqvRis4vrRYFxr69WiNZw6g= -github.com/gobuffalo/x v0.0.0-20181003152136-452098b06085/go.mod h1:WevpGD+5YOreDJznWevcn8NTmQEW5STSBgIkpkjzqXc= -github.com/gobuffalo/x v0.0.0-20181007152206-913e47c59ca7 h1:N0iqtKwkicU8M2rLirTDJxdwuL8I2/8MjMlEayaNSgE= -github.com/gobuffalo/x v0.0.0-20181007152206-913e47c59ca7/go.mod h1:9rDPXaB3kXdKWzMc4odGQQdG2e2DIEmANy5aSJ9yesY= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/goccy/go-yaml v1.9.6 h1:KhAu1zf9JXnm3vbG49aDE0E5uEBUsM4uwD31/58ZWyI= -github.com/goccy/go-yaml v1.9.6/go.mod h1:JubOolP3gh0HpiBc4BLRD4YmjEjHAmIIB2aaXKkTfoE= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/godbus/dbus/v5 v5.0.6/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= -github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= +github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gofrs/uuid v4.3.0+incompatible h1:CaSVZxm5B+7o45rtab4jC2G37WGYX1zQfuU2i6DSvnc= -github.com/gofrs/uuid v4.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.4.0+incompatible 
h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= +github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ= -github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= -github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod 
h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod 
h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= -github.com/google/pprof v0.0.0-20221010195024-131d412537ea h1:R3VfsTXMMK4JCWZDdxScmnTzu9n9YRsDvguLis0U/b8= -github.com/google/pprof v0.0.0-20221010195024-131d412537ea/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= +github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5 h1:xhMrHhTJ6zxu3gA4enFM9MLn9AY7613teCdFnlUVbSQ= +github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5/go.mod h1:5hDyRhoBCxViHszMt12TnOpEI4VVi+U8Gm9iphldiMA= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/google-cloud-go-testing 
v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= -github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g= +github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= -github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= -github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= -github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1/go.mod h1:YeAe0gNeiNT5hoiZRI4yiOky6jVdNvfO2N6Kav/HmxY= -github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= -github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= -github.com/gorilla/sessions v1.1.2/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= -github.com/gorilla/sessions v1.1.3/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= -github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= -github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= +github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE= +github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w= +github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA= +github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo= +github.com/gorilla/sessions v1.4.0 h1:kpIYOp/oi6MG/p5PgxApU8srsSw9tuFbt46Lt7auzqQ= +github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2emc7lT5ik= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod 
h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.12.0 h1:kr3j8iIMR4ywO/O0rvksXaJvauGGCMg2zAZIiNZ9uIQ= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.12.0/go.mod h1:ummNFgdgLhhX7aIiy35vVmQNS0rWXknfPE0qe6fmFXg= -github.com/gtank/cryptopasta v0.0.0-20170601214702-1f550f6f2f69 h1:7xsUJsB2NrdcttQPa7JLEaGzvdbk7KvfrjgHZXOQRo0= -github.com/gtank/cryptopasta v0.0.0-20170601214702-1f550f6f2f69/go.mod h1:YLEMZOtU+AZ7dhN9T/IpGhXVGly2bvkJQ+zxj3WeVQo= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= -github.com/hashicorp/consul/api v1.13.0/go.mod h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= -github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.2.0 h1:La19f8d7WIlm4ogzNHB0JGqs5AUDAZ2UfCY4sJXcJdM= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY= -github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-retryablehttp v0.7.1 
h1:sUiuQAnLlbvmExtFQs72iFW/HXeUn8Z1aJLQ4LJJbTQ= -github.com/hashicorp/go-retryablehttp v0.7.1/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= -github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48= +github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= -github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q= -github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs= -github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E= -github.com/hpcloud/tail 
v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= -github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= -github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/ianlancetaylor/demangle v0.0.0-20230524184225-eabc099b10ab/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf h1:FtEj8sfIcaaBfAKrE1Cwb61YDtYq9JxChK1c7AKce7s= github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf/go.mod h1:yrqSXGoD/4EKfF26AOGzscPOgTTJcyAwM2rpixWT+t4= -github.com/instana/go-sensor v1.46.0 h1:Qx2VgTPZHzRgKSMAlH0Gm8nb/y7BfsUGuRst9XsM17Y= -github.com/instana/go-sensor v1.46.0/go.mod h1:E42MelHWFz11qqaLwvgt0j98v2s2O/bq22UDkGaG0Gg= -github.com/instana/testify v1.6.2-0.20200721153833-94b1851f4d65 h1:T25FL3WEzgmKB0m6XCJNZ65nw09/QIp3T1yXr487D+A= -github.com/instana/testify v1.6.2-0.20200721153833-94b1851f4d65/go.mod h1:nYhEREG/B7HUY7P+LKOrqy53TpIqmJ9JyUShcaEKtGw= -github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod 
h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ= -github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= -github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= -github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= -github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= -github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= -github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= -github.com/jackc/pgconn v1.12.0/go.mod h1:ZkhRC59Llhrq3oSfrikvwQ5NaxYExr6twkdkMLaKono= -github.com/jackc/pgconn v1.12.1/go.mod h1:ZkhRC59Llhrq3oSfrikvwQ5NaxYExr6twkdkMLaKono= -github.com/jackc/pgconn v1.13.0 h1:3L1XMNV2Zvca/8BYhzcRFS70Lr0WlDg16Di6SFGAbys= -github.com/jackc/pgconn v1.13.0/go.mod h1:AnowpAqO4CMIIJNZl2VJp+KrkAZciAkhEl0W0JIobpI= +github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= +github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= -github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= -github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= -github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.3.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y4Y= -github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= 
-github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= -github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= -github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= -github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= -github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= -github.com/jackc/pgtype v1.11.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgtype v1.12.0 h1:Dlq8Qvcch7kiehm8wPGIW0W3KsCCHJnRacKW0UM8n5w= -github.com/jackc/pgtype v1.12.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgx v3.2.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I= -github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= -github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= -github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= -github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= -github.com/jackc/pgx/v4 v4.16.0/go.mod h1:N0A9sFdWzkw/Jy1lwoiB64F2+ugFZi987zRxcPez/wI= -github.com/jackc/pgx/v4 v4.16.1/go.mod h1:SIhx0D5hoADaiXZVyv+3gSm3LCIIINTVO0PficsvWGQ= -github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E= -github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw= -github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jandelgado/gcov2lcov v1.0.4/go.mod h1:NnSxK6TMlg1oGDBfGelGbjgorT5/L3cchlbtgFYZSss= -github.com/jandelgado/gcov2lcov v1.0.5 h1:rkBt40h0CVK4oCb8Dps950gvfd1rYvQ8+cWa346lVU0= -github.com/jandelgado/gcov2lcov v1.0.5/go.mod h1:NnSxK6TMlg1oGDBfGelGbjgorT5/L3cchlbtgFYZSss= -github.com/jcchavezs/porto v0.1.0/go.mod h1:fESH0gzDHiutHRdX2hv27ojnOVFco37hg1W6E9EZF4A= -github.com/jcchavezs/porto v0.4.0 h1:Zj7RligrxmDdKGo6fBO2xYAHxEgrVBfs1YAja20WbV4= -github.com/jcchavezs/porto v0.4.0/go.mod h1:fESH0gzDHiutHRdX2hv27ojnOVFco37hg1W6E9EZF4A= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= -github.com/jinzhu/copier v0.3.5 
h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg= -github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= -github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= -github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU= -github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= +github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= +github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs= +github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jaegertracing/jaeger-idl v0.5.0 h1:zFXR5NL3Utu7MhPg8ZorxtCBjHrL3ReM1VoB65FOFGE= +github.com/jaegertracing/jaeger-idl v0.5.0/go.mod h1:ON90zFo9eoyXrt9F/KN8YeF3zxcnujaisMweFY/rg5k= +github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= +github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= +github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8= +github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= -github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 h1:rp+c0RAYOWj8l6qbCUTSiRLG/iKnW3K3/QfPPuSsBt4= -github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak= -github.com/joho/godotenv v1.2.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= -github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= +github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= +github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= -github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod 
h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U= -github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= -github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= -github.com/karrick/godirwalk v1.10.12/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/julienschmidt/httprouter v1.3.1-0.20240130105656-484018016424 h1:KsUAkP+Y6n+542zpxWiQDUvOqfh3n429HYleEvq/V7M= +github.com/julienschmidt/httprouter v1.3.1-0.20240130105656-484018016424/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/knadh/koanf v1.4.4 h1:d2jY5nCCeoaiqvEKSBW9rEc93EfNy/XWgWsSB3j7JEA= -github.com/knadh/koanf v1.4.4/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs= -github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod 
h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/knadh/koanf/maps v0.1.2 h1:RBfmAW5CnZT+PJ1CVc1QSJKf4Xu9kxfQgYVQSu8hpbo= +github.com/knadh/koanf/maps v0.1.2/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/parsers/json v0.1.0 h1:dzSZl5pf5bBcW0Acnu20Djleto19T0CfHcvZ14NJ6fU= +github.com/knadh/koanf/parsers/json v0.1.0/go.mod h1:ll2/MlXcZ2BfXD6YJcjVFzhG9P0TdJ207aIBKQhV2hY= +github.com/knadh/koanf/parsers/toml v0.1.0 h1:S2hLqS4TgWZYj4/7mI5m1CQQcWurxUz6ODgOub/6LCI= +github.com/knadh/koanf/parsers/toml v0.1.0/go.mod h1:yUprhq6eo3GbyVXFFMdbfZSo928ksS+uo0FFqNMnO18= +github.com/knadh/koanf/parsers/yaml v0.1.0 h1:ZZ8/iGfRLvKSaMEECEBPM1HQslrZADk8fP1XFUxVI5w= +github.com/knadh/koanf/parsers/yaml v0.1.0/go.mod h1:cvbUDC7AL23pImuQP0oRw/hPuccrNBS2bps8asS0CwY= +github.com/knadh/koanf/providers/posflag v0.1.0 h1:mKJlLrKPcAP7Ootf4pBZWJ6J+4wHYujwipe7Ie3qW6U= +github.com/knadh/koanf/providers/posflag v0.1.0/go.mod h1:SYg03v/t8ISBNrMBRMlojH8OsKowbkXV7giIbBVgbz0= +github.com/knadh/koanf/providers/rawbytes v0.1.0 h1:dpzgu2KO6uf6oCb4aP05KDmKmAmI51k5pe8RYKQ0qME= +github.com/knadh/koanf/providers/rawbytes v0.1.0/go.mod h1:mMTB1/IcJ/yE++A2iEZbY1MLygX7vttU+C+S/YmPu9c= +github.com/knadh/koanf/v2 v2.2.2 h1:ghbduIkpFui3L587wavneC9e3WIliCgiCgdxYO/wd7A= +github.com/knadh/koanf/v2 v2.2.2/go.mod h1:abWQc0cBXLSF/PSOMCB/SK+T13NXDsPvOksbpi5e/9Q= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= -github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= 
+github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/laher/mergefs v0.1.1 h1:nV2bTS57vrmbMxeR6uvJpI8LyGl3QHj4bLBZO3aUV58= +github.com/laher/mergefs v0.1.1/go.mod h1:FSY1hYy94on4Tz60waRMGdO1awwS23BacqJlqf9lJ9Q= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= -github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/looplab/fsm v0.1.0/go.mod h1:m2VaOfDHxqXBBMgc26m6yUOwkFn8H2AlJDE+jd/uafI= -github.com/looplab/fsm v0.3.0 h1:kIgNS3Yyud1tyxhG8kDqh853B7QqwnlWdgL3TD2s3Sw= -github.com/looplab/fsm v0.3.0/go.mod h1:PmD3fFvQEIsjMEfvZdrCDZ6y8VwKTwWNjlpEr6IKPO4= -github.com/luna-duclos/instrumentedsql v1.1.3 h1:t7mvC0z1jUt5A0UQ6I/0H31ryymuQRnJcWCiqV3lSAA= -github.com/luna-duclos/instrumentedsql v1.1.3/go.mod h1:9J1njvFds+zN7y85EDhN9XNQLANWwZt2ULeIC8yMNYs= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.4/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= -github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/magiconair/properties v1.8.9 h1:nWcCbLq1N2v/cpNsy5WvQ37Fb+YElfq20WJ/a8RkpQM= +github.com/magiconair/properties v1.8.9/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/markbates/deplist v1.0.4/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM= -github.com/markbates/deplist v1.0.5/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM= -github.com/markbates/going v1.0.2/go.mod h1:UWCk3zm0UKefHZ7l8BNqi26UyiEMniznk8naLdTcy6c= -github.com/markbates/grift v1.0.4/go.mod h1:wbmtW74veyx+cgfwFhlnnMWqhoz55rnHR47oMXzsyVs= -github.com/markbates/hmax v1.0.0/go.mod h1:cOkR9dktiESxIMu+65oc/r/bdY4bE8zZw3OLhLx0X2c= -github.com/markbates/inflect v1.0.0/go.mod 
h1:oTeZL2KHA7CUX6X+fovmK9OvIOFuqu0TwdQrZjLTh88= -github.com/markbates/inflect v1.0.1/go.mod h1:uv3UVNBe5qBIfCm8O8Q+DW+S1EopeyINj+Ikhc7rnCk= -github.com/markbates/oncer v0.0.0-20180924031910-e862a676800b/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= -github.com/markbates/oncer v0.0.0-20180924034138-723ad0170a46/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= -github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= -github.com/markbates/pkger v0.17.1 h1:/MKEtWqtc0mZvu9OinB9UzVN9iYCwLWuyUv4Bw+PCno= -github.com/markbates/refresh v1.4.10/go.mod h1:NDPHvotuZmTmesXxr95C9bjlw1/0frJwtME2dzcVKhc= -github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/markbates/sigtx v1.0.0/go.mod h1:QF1Hv6Ic6Ca6W+T+DL0Y/ypborFKyvUY9HmuCD4VeTc= -github.com/markbates/willie v1.0.9/go.mod h1:fsrFVWl91+gXpx/6dv715j7i11fYPfZ9ZGfH0DQzY7w= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-sqlite3 v1.14.13 h1:1tj15ngiFfcZzii7yd82foL+ks+ouQcj8j/TPq3fk1I= -github.com/mattn/go-sqlite3 v1.14.13/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/goveralls v0.0.11 h1:eJXea6R6IFlL1QMKNMzDvvHv/hwGrnvyig4N+0+XiMM= -github.com/mattn/goveralls v0.0.11/go.mod h1:gU8SyhNswsJKchEV93xRQxX6X3Ei4PJdQk/6ZHvrvRk= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod 
h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= -github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= +github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= +github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs= +github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/goveralls v0.0.12 h1:PEEeF0k1SsTjOBQ8FOmrOAoCu4ytuMaWCnWe94zxbCg= +github.com/mattn/goveralls v0.0.12/go.mod h1:44ImGEUfmqH8bBtaMrYKsM65LXfNLWmwaxFGjZwgMSQ= github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50= -github.com/microcosm-cc/bluemonday v1.0.21 h1:dNH3e4PSyE4vNX+KlRGHT5KrSvjeUkoNPwEORjffHJg= -github.com/microcosm-cc/bluemonday v1.0.21/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= -github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/pkcs11 v1.0.3-0.20190429190417-a667d056470f/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/miekg/pkcs11 v1.0.3 h1:iMwmD7I5225wv84WxIG/bmxz9AXjWvTWIbM/TYHvWtw= -github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/mikefarah/yq/v4 v4.16.1 h1:Xeu/LdePmqceqcmZoG3caOLA79LJj0RvGpSjUfHaqK8= -github.com/mikefarah/yq/v4 v4.16.1/go.mod h1:mfI3lycn5DjU6N4kfpiR4S7ylu0xZj9XgKSooXYis3g= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/microcosm-cc/bluemonday v1.0.22/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= 
+github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/mikefarah/yq/v4 v4.45.1 h1:EW+HjKEVa55pUYFJseEHEHdQ0+ulunY+q42zF3M7ZaQ= +github.com/mikefarah/yq/v4 v4.45.1/go.mod h1:djgN2vD749hpjVNGYTShr5Kmv5LYljhCG3lUTuEe3LM= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.3.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= -github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= -github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= 
-github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= +github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk= -github.com/nyaruka/phonenumbers v1.0.73/go.mod h1:3aiS+PS3DuYwkbK3xdcmRwMiPNECZ0oENH8qUT1lY7Q= -github.com/nyaruka/phonenumbers v1.1.1 h1:fyoZmpLN2VCmAnc51XcrNOUVP2wT1ZzQl348ggIaXII= -github.com/nyaruka/phonenumbers v1.1.1/go.mod h1:cGaEsOrLjIL0iKGqJR5Rfywy86dSkbApEpXuM9KySNA= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/moul/http2curl v1.0.0 h1:dRMWoAtb+ePxMlLkrCbAqh4TlPHXvoGUSQ323/9Zahs= +github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod 
h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/nyaruka/phonenumbers v1.6.5 h1:aBCaUhfpRA7hU6fsXk+p7KF1aNx4nQlq9hGeo2qdFg8= +github.com/nyaruka/phonenumbers v1.6.5/go.mod h1:7gjs+Lchqm49adhAKB5cdcng5ZXgt6x7Jgvi0ZorUtU= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/oleiade/reflections v1.0.1 h1:D1XO3LVEYroYskEsoSiGItp9RUxG6jWnCVvrqH0HHQM= -github.com/oleiade/reflections v1.0.1/go.mod h1:rdFxbxq4QXVZWj0F+e9jqjDkc7dbp97vkRixKo2JR60= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= -github.com/onsi/gomega v1.4.2/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/oleiade/reflections v1.1.0 h1:D+I/UsXQB4esMathlt0kkZRJZdUDmhv5zGi/HOwYTWo= +github.com/oleiade/reflections v1.1.0/go.mod h1:mCxx0QseeVCHs5Um5HhJeCKVC7AwS8kO67tky4rdisA= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0-rc2 h1:2zx/Stx4Wc5pIPDvIxHXvXtQFW/7XWJGmnM7r3wg034= -github.com/opencontainers/image-spec v1.1.0-rc2/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ= -github.com/opencontainers/runc v1.1.4 h1:nRCz/8sKg6K6jgYAFLDlXzPeITBZJyX28DBVhWD+5dg= -github.com/opencontainers/runc v1.1.4/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= -github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= -github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492 h1:lM6RxxfUMrYL/f8bWEUqdXrANWtrL7Nndbm9iFN0DlU= -github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= -github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= -github.com/openzipkin-contrib/zipkin-go-opentracing v0.5.0 h1:uhcF5Jd7rP9DVEL10Siffyepr6SvlKbUsjH5JpNCRi8= -github.com/openzipkin-contrib/zipkin-go-opentracing v0.5.0/go.mod h1:+oCZ5GXXr7KPI/DNOQORPTq5AWHfALJj9c72b0+YsEY= -github.com/openzipkin/zipkin-go v0.4.1 h1:kNd/ST2yLLWhaWrkgchya40TJabe8Hioj9udfPcEO5A= -github.com/openzipkin/zipkin-go v0.4.1/go.mod h1:qY0VqDSN1pOBN94dBc6w2GJlWLiovAyg7Qt6/I9HecM= -github.com/ory/analytics-go/v4 v4.0.3 h1:2zNBQLlm3UiD8U7DdUGLLUBm62ZA5GtbEJ3S5U+xEOI= -github.com/ory/analytics-go/v4 v4.0.3/go.mod h1:A3Chm/3TmM8jw4nqRss+gFhAYHRI5j/HFYH3C1FRahU= -github.com/ory/dockertest/v3 v3.9.1 h1:v4dkG+dlu76goxMiTT2j8zV7s4oPPEppKT8K8p2f1kY= -github.com/ory/dockertest/v3 v3.9.1/go.mod 
h1:42Ir9hmvaAPm0Mgibk6mBPi7SFvTXxEcnztDYOJ//uM= -github.com/ory/fosite v0.44.0 h1:Z3UjyO11/wlIoa3BotOqcTkfm7kUNA8F7dd8mOMfx0o= -github.com/ory/fosite v0.44.0/go.mod h1:o/G4kAeNn65l6MCod2+KmFfU6JQBSojS7eXys6lKGzM= -github.com/ory/go-acc v0.2.6/go.mod h1:4Kb/UnPcT8qRAk3IAxta+hvVapdxTLWtrr7bFLlEgpw= -github.com/ory/go-acc v0.2.8 h1:rOHHAPQjf0u7eHFGWpiXK+gIu/e0GRSJNr9pDukdNC4= -github.com/ory/go-acc v0.2.8/go.mod h1:iCRZUdGb/7nqvSn8xWZkhfVrtXRZ9Wru2E5rabCjFPI= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opencontainers/runc v1.3.3 h1:qlmBbbhu+yY0QM7jqfuat7M1H3/iXjju3VkP9lkFQr4= +github.com/opencontainers/runc v1.3.3/go.mod h1:D7rL72gfWxVs9cJ2/AayxB0Hlvn9g0gaF1R7uunumSI= +github.com/openzipkin/zipkin-go v0.4.3 h1:9EGwpqkgnwdEIJ+Od7QVSEIH+ocmm5nPat0G7sjsSdg= +github.com/openzipkin/zipkin-go v0.4.3/go.mod h1:M9wCJZFWCo2RiY+o1eBCEMe0Dp2S5LDHcMZmk3RmK7c= +github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= +github.com/ory/analytics-go/v5 v5.0.1 h1:LX8T5B9FN8KZXOtxgN+R3I4THRRVB6+28IKgKBpXmAM= +github.com/ory/analytics-go/v5 v5.0.1/go.mod h1:lWCiCjAaJkKfgR/BN5DCLMol8BjKS1x+4jxBxff/FF0= +github.com/ory/dockertest/v3 v3.12.0 h1:3oV9d0sDzlSQfHtIaB5k6ghUCVMVLpAY8hwrqoCyRCw= +github.com/ory/dockertest/v3 v3.12.0/go.mod h1:aKNDTva3cp8dwOWwb9cWuX84aH5akkxXRvO7KCwWVjE= +github.com/ory/go-acc v0.2.9-0.20230103102148-6b1c9a70dbbe h1:rvu4obdvqR0fkSIJ8IfgzKOWwZ5kOT2UNfLq81Qk7rc= +github.com/ory/go-acc v0.2.9-0.20230103102148-6b1c9a70dbbe/go.mod h1:z4n3u6as84LbV4YmgjHhnwtccQqzf4cZlSk9f1FhygI= github.com/ory/go-convenience v0.1.0 h1:zouLKfF2GoSGnJwGq+PE/nJAE6dj2Zj5QlTgmMTsTS8= github.com/ory/go-convenience v0.1.0/go.mod h1:uEY/a60PL5c12nYz4V5cHY03IBmwIAEm8TWB0yn9KNs= -github.com/ory/graceful v0.1.1 h1:zx+8tDObLPrG+7Tc8jKYlXsqWnLtOQA1IZ/FAAKHMXU= -github.com/ory/graceful v0.1.1/go.mod h1:zqu70l95WrKHF4AZ6tXHvAqAvpY6M7g6ttaAVcMm7KU= -github.com/ory/herodot v0.9.13 h1:cN/Z4eOkErl/9W7hDIDLb79IO/bfsH+8yscBjRpB4IU= -github.com/ory/herodot v0.9.13/go.mod h1:IWDs9kSvFQqw/cQ8zi5ksyYvITiUU4dI7glUrhZcJYo= -github.com/ory/jsonschema/v3 v3.0.7 h1:GQ9qfZDiJqs4l2d3p56dozCChvejQFZyLKGHYzDzOSo= -github.com/ory/jsonschema/v3 v3.0.7/go.mod h1:g8c8YOtN4TrR2wYeMdT02GDmzJDI0fEW2nI26BECafY= -github.com/ory/viper v1.7.5 h1:+xVdq7SU3e1vNaCsk/ixsfxE4zylk1TJUiJrY647jUE= -github.com/ory/viper v1.7.5/go.mod h1:ypOuyJmEUb3oENywQZRgeAMwqgOyDqwboO1tj3DjTaM= -github.com/ory/x v0.0.520 h1:ryfKHQEViUBv/UdlZhePOG52RpPAooZtK/I7+x58lBI= -github.com/ory/x v0.0.520/go.mod h1:7f32P5XMBLCy6aVT+fUYq3WPcMVpzsjC0C+FovlMNqY= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/ory/graceful v0.1.3 
h1:FaeXcHZh168WzS+bqruqWEw/HgXWLdNv2nJ+fbhxbhc= +github.com/ory/graceful v0.1.3/go.mod h1:4zFz687IAF7oNHHiB586U4iL+/4aV09o/PYLE34t2bA= +github.com/ory/herodot v0.10.7 h1:CETBRP4LboLlQCSVTkyQix/a2bVh1rmNhhfxd45khCI= +github.com/ory/herodot v0.10.7/go.mod h1:j6i246U6iX8TStYNKIVQxb2waweQvtOLi+b/9q+OULg= +github.com/ory/jsonschema/v3 v3.0.9-0.20250317235931-280c5fc7bf0e h1:4tUrC7x4YWRVMFp+c64KACNSGchW1zXo4l6Pa9/1hA8= +github.com/ory/jsonschema/v3 v3.0.9-0.20250317235931-280c5fc7bf0e/go.mod h1:XWLxVK4un/iuIcrw+6lCeanbF3NZwO5k6RdLeu/loQk= +github.com/ory/kratos-client-go v1.3.8 h1:S4D5dAURq5C6LbOUU+DgE4ZXxp37IlJG2GngemdF9h0= +github.com/ory/kratos-client-go v1.3.8/go.mod h1:Dc+ANapsPxu+CfdC0yk8TxmvceCmrvNozW+ZGS/xq5o= +github.com/ory/pop/v6 v6.3.2-0.20251203152233-a32233875f7e h1:gsbAteu8HZYnkIF4WVBaxklvF/s5IbcxYcCi6qX93ms= +github.com/ory/pop/v6 v6.3.2-0.20251203152233-a32233875f7e/go.mod h1:PEqjxMcIV87rBhlyDDha76I7/w2W/FHenSq3V3X1A/A= +github.com/parnurzeal/gorequest v0.3.0 h1:SoFyqCDC9COr1xuS6VA8fC8RU7XyrJZN2ona1kEX7FI= +github.com/parnurzeal/gorequest v0.3.0/go.mod h1:3Kh2QUMJoqw3icWAecsyzkpY7UzRfDhbRdTjtNwNiUE= github.com/pborman/uuid v1.2.1 h1:+ZZIw58t/ozdjRaXh/3awHfmWRbzYxJoAdNJxe/3pvw= github.com/pborman/uuid v1.2.1/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= -github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs= -github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml/v2 v2.0.1 h1:8e3L2cCQzLFi2CR4g7vGFuFxX7Jl1kKX8gW+iV0GUKU= -github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= -github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 h1:JhzVVoYvbOACxoUmOs6V/G4D5nPVUW73rKvXxP4XUJc= -github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= -github.com/philhofer/fwd v1.1.1 h1:GdGcTjf5RNAxwS4QLsiMzJYj5KEvPJD3Abr261yRQXQ= -github.com/philhofer/fwd v1.1.1/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= -github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= +github.com/peterhellberg/link v1.2.0 h1:UA5pg3Gp/E0F2WdX7GERiNrPQrM1K6CVJUUWfHa4t6c= +github.com/peterhellberg/link v1.2.0/go.mod h1:gYfAh+oJgQu2SrZHg5hROVRQe1ICoK0/HHJTcE0edxc= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= 
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA= github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU= -github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= -github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= 
-github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= -github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190425082905-87a4384529e0/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= -github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA= -github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= +github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= +github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= +github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0= +github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= 
-github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rs/cors v1.8.2 h1:KCooALfAYGs415Cwu5ABvv9n9509fSiG5SQJn/AQo4U= -github.com/rs/cors v1.8.2/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= -github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= -github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= -github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= +github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= -github.com/santhosh-tekuri/jsonschema v1.2.4 h1:hNhW8e7t+H1vgY+1QeEQpveR6D4+OwKPXCfD2aieJis= -github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4= -github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/sawadashota/encrypta v0.0.2 h1:R46/RxYmYdxI3VOt63B637OVBHzu+fazPyLo5CqK6QE= -github.com/sawadashota/encrypta v0.0.2/go.mod h1:pcPebEvF012kXmZXvfVzwFEr/GUE/ZntaR805jk0nsE= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= +github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= +github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= +github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/sawadashota/encrypta v0.0.5 h1:boMQmISmg62WEgU5AMu3WDLhi5TAP2iidYP+AxgxMJM= +github.com/sawadashota/encrypta v0.0.5/go.mod h1:jeRi1jWo+bQoBVwXLPhuArSZTA6TOqXj/+wqnFiScik= github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761 h1:0b8DF5kR0PhRoRXDiEEdzrgBc8UqVY4JWLkQJCRsLME= github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761/go.mod h1:/THDZYi7F/BsVEcYzYPqdcWFQ+1C2InkawTKfLOAnzg= -github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= 
github.com/segmentio/analytics-go v3.1.0+incompatible/go.mod h1:C7CYBtQWk4vRk2RyLu0qOcbHJ18E3F1HV2C/8JvKN48= github.com/segmentio/backo-go v0.0.0-20200129164019-23eae7c10bd3/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc= -github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N++y4= -github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc= +github.com/segmentio/backo-go v1.1.0 h1:cJIfHQUdmLsd8t9IXqf5J8SdrOMn9vMa7cIvOavHAhc= +github.com/segmentio/backo-go v1.1.0/go.mod h1:ckenwdf+v/qbyhVdNPWHnqh2YdJBED1O9cidYyM5J18= github.com/segmentio/conf v1.2.0/go.mod h1:Y3B9O/PqqWqjyxyWWseyj/quPEtMu1zDp/kVbSWWaB0= github.com/segmentio/go-snakecase v1.1.0/go.mod h1:jk1miR5MS7Na32PZUykG89Arm+1BUSYhuGR6b7+hJto= github.com/segmentio/objconv v1.0.1/go.mod h1:auayaH5k3137Cl4SoXTgrzQcuQDmvuVtZgS0fb1Ahys= -github.com/serenize/snaker v0.0.0-20171204205717-a683aaf2d516/go.mod h1:Yow6lPLSAXx2ifx470yD/nUe22Dv5vBvxK/UK9UUTVs= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= -github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= -github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= -github.com/shurcooL/highlight_diff v0.0.0-20170515013008-09bb4053de1b/go.mod h1:ZpfEhSmds4ytuByIcDnOLkTHGUI6KNqRNPDLHDk+mUU= -github.com/shurcooL/highlight_go v0.0.0-20170515013102-78fb10f4a5f8/go.mod h1:UDKB5a1T23gOMUJrI+uSuH0VRDStOiUVSjBTRDVBVag= -github.com/shurcooL/octicon v0.0.0-20180602230221-c42b0e3b24d9/go.mod h1:eWdoE5JD4R5UVWDucdOPg1g2fqQRq78IQa9zlOV1vpQ= -github.com/shurcooL/sanitized_anchor_name v0.0.0-20170918181015-86672fcb3f95/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= -github.com/sirupsen/logrus v1.1.0/go.mod h1:zrgwTnHtNr00buQ1vSptGe8m1f/BbgsPukg8qsT7A+A= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= 
-github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= -github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262 h1:unQFBIznI+VYD1/1fApl1A+9VcBk+9dcqGfnePY87LY= -github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262/go.mod h1:MyOHs9Po2fbM1LHej6sBUT8ozbxmMOFG+E+rx/GSGuc= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= +github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY= +github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec= +github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sSznIX1xY= +github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60= github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d h1:yKm7XZV6j9Ev6lojP2XaIshpT4ymkqhMeSghO5Ps00E= github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= +github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= +github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e h1:qpG93cPwA5f7s/ZPBJnGOYQNK/vKsaDaseuKT5Asee8= github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/afero v1.5.1/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= -github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= 
-github.com/spf13/cast v1.2.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.2-0.20200723214538-8d17101741c8/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= -github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= -github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo= -github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= -github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= -github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.2.1/go.mod h1:P4AexN0a+C9tGAnUFNwDMYYZv3pjFuvmeiMyKRaNVlI= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= -github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= -github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ= -github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI= +github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= +github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/cast v1.9.2 h1:SsGfm7M8QOFtEzumm7UZrZdLLquNdzFYfIbEXntcFbE= +github.com/spf13/cast v1.9.2/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= +github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod 
h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= +github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/ssoready/hyrumtoken v1.0.0 h1:N/JPJDOuYS7qPSnOvZpPxNVXwtlT3kfzAMEcPrH8ywQ= +github.com/ssoready/hyrumtoken v1.0.0/go.mod h1:h8q768r5Uv6iJKOwsNENIWWUP9kvmLykQox5m3SCpqc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= @@ -1055,55 +513,32 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= -github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= -github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/thales-e-security/pool v0.0.2 h1:RAPs4q2EbWsTit6tpzuvTFlgFRJ3S8Evf5gtvVDbmPg= github.com/thales-e-security/pool v0.0.2/go.mod h1:qtpMm2+thHtqhLzTwgDBj/OuNnMpupY8mv0Phz0gjhU= github.com/tidwall/gjson v1.14.2/go.mod 
h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.14.3 h1:9jvXn7olKEHU1S9vwoMGliaT8jq1vJ7IH/n9zD9Dnlw= -github.com/tidwall/gjson v1.14.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -github.com/timtadh/data-structures v0.5.3 h1:F2tEjoG9qWIyUjbvXVgJqEOGJPMIiYn7U5W5mE+i/vQ= -github.com/timtadh/data-structures v0.5.3/go.mod h1:9R4XODhJ8JdWFEI8P/HJKqxuJctfBQw6fDibMQny2oU= -github.com/timtadh/lexmachine v0.2.2 h1:g55RnjdYazm5wnKv59pwFcBJHOyvTPfDEoz21s4PHmY= -github.com/timtadh/lexmachine v0.2.2/go.mod h1:GBJvD5OAfRn/gnp92zb9KTgHLB7akKyxmVivoYCcjQI= -github.com/tinylib/msgp v1.1.6 h1:i+SbKraHhnrf9M5MYmvQhFnbLhAXSDWF8WWsuyRdocw= -github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw= -github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y= github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE= github.com/toqueteos/webbrowser v1.2.0 h1:tVP/gpK69Fx+qMJKsLE7TD8LuGWPnEV71wBN9rrstGQ= github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM= -github.com/twmb/murmur3 v1.1.6 h1:mqrRot1BRxm+Yct+vavLMou2/iJt0tNVTTC0QoIjaZg= -github.com/twmb/murmur3 v1.1.6/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= -github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaOOb6ThwMmTEbhRwtKR97o= -github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= -github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= -github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/unrolled/secure v0.0.0-20180918153822-f340ee86eb8b/go.mod h1:mnPT77IAdsi/kV7+Es7y+pXALeV3h7G6dQF6mNYjcLA= -github.com/urfave/cli v1.22.1/go.mod 
h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg= +github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= -github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= -github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= -github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -1111,648 +546,191 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g= github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= -go.elastic.co/apm 
v1.15.0 h1:uPk2g/whK7c7XiZyz/YCUnAUBNPiyNeE3ARX3G6Gx7Q= -go.elastic.co/apm v1.15.0/go.mod h1:dylGv2HKR0tiCV+wliJz1KHtDyuD8SPe69oV7VyK6WY= -go.elastic.co/apm/module/apmhttp v1.15.0 h1:Le/DhI0Cqpr9wG/NIGOkbz7+rOMqJrfE4MRG6q/+leU= -go.elastic.co/apm/module/apmhttp v1.15.0/go.mod h1:NruY6Jq8ALLzWUVUQ7t4wIzn+onKoiP5woJJdTV7GMg= -go.elastic.co/apm/module/apmot v1.15.0 h1:yqarZ4HCIb6dLAzEVSWdppAuRhfrCfm2Z6UL+ubai2A= -go.elastic.co/apm/module/apmot v1.15.0/go.mod h1:BjFz2KOlnjXdnSo0p6nhDDaIEYYX8c6uVHwvkZiLqtQ= -go.elastic.co/fastjson v1.1.0 h1:3MrGBWWVIxe/xvsbpghtkFoPciPhOCmjsR/HfwEeQR4= -go.elastic.co/fastjson v1.1.0/go.mod h1:boNGISWMjQsUPy/t6yqt2/1Wx4YNPSe+mZjlyw9vKKI= -go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= -go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= -go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= -go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= -go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= -go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8= -go.mongodb.org/mongo-driver v1.10.3 h1:XDQEvmh6z1EUsXuIkXE9TaVeqHw6SwS1uf93jFs0HBA= -go.mongodb.org/mongo-driver v1.10.3/go.mod h1:z4XpeoU6w+9Vht+jAFyLgVrD+jGSQQe0+CBWFHNiHt8= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.36.4 h1:toN8e0U4RWQL4f8H+1eFtaeWe/IkSM3+81qJEDOgShs= -go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.36.4/go.mod h1:u4OeI4ujQmFbpZOOysLUfYrRWOmEVmvzkM2zExVorXM= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.36.4 h1:aUEBEdCa6iamGzg6fuYxDA8ThxvOG240mAvWDU+XLio= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.36.4/go.mod h1:l2MdsbKTocpPS5nQZscqTR9jd8u96VYZdcpF8Sye7mA= -go.opentelemetry.io/contrib/propagators/b3 v1.11.1 h1:icQ6ttRV+r/2fnU46BIo/g/mPu6Rs5Ug8Rtohe3KqzI= -go.opentelemetry.io/contrib/propagators/b3 v1.11.1/go.mod h1:ECIveyMXgnl4gorxFcA7RYjJY/Ql9n20ubhbfDc3QfA= -go.opentelemetry.io/contrib/propagators/jaeger v1.11.1 h1:Gw+P9NQzw4bjNGZXsoDhwwDWLnk4Y1waF8MQZAq/eYM= -go.opentelemetry.io/contrib/propagators/jaeger v1.11.1/go.mod h1:dP/N3ZFADH8azBcZfGXEFNBXpEmPTXYcNj9rkw1+2Oc= -go.opentelemetry.io/contrib/samplers/jaegerremote v0.5.2 h1:Izp9RqrioK/y7J/RXy2c7zd83iKQ4N3td3AMNKNzHiI= -go.opentelemetry.io/contrib/samplers/jaegerremote v0.5.2/go.mod h1:Z0aRlRERn9v/3J2K+ATa6ffKyb8/i+/My/gTzFr3dII= -go.opentelemetry.io/otel v1.11.1 h1:4WLLAmcfkmDk2ukNXJyq3/kiz/3UzCaYq6PskJsaou4= -go.opentelemetry.io/otel v1.11.1/go.mod h1:1nNhXBbWSD0nsL38H6btgnFN2k4i0sNLHNNMZMSbUGE= -go.opentelemetry.io/otel/bridge/opentracing v1.11.1 h1:/ZBsgjXWUpiZ5M9zm+Ft3kuDUGErIGcEJbKRIsFN6jA= -go.opentelemetry.io/otel/bridge/opentracing v1.11.1/go.mod 
h1:vw9hN4H+G0ek+XQtxP+Mm1McLcmdx2FXHNrWn2bBqxU= -go.opentelemetry.io/otel/exporters/jaeger v1.11.1 h1:F9Io8lqWdGyIbY3/SOGki34LX/l+7OL0gXNxjqwcbuQ= -go.opentelemetry.io/otel/exporters/jaeger v1.11.1/go.mod h1:lRa2w3bQ4R4QN6zYsDgy7tEezgoKEu7Ow2g35Y75+KI= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.11.1 h1:X2GndnMCsUPh6CiY2a+frAbNsXaPLbB0soHRYhAZ5Ig= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.11.1/go.mod h1:i8vjiSzbiUC7wOQplijSXMYUpNM93DtlS5CbUT+C6oQ= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.9.0 h1:NN90Cuna0CnBg8YNu1Q0V35i2E8LDByFOwHRCq/ZP9I= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.9.0/go.mod h1:0EsCXjZAiiZGnLdEUXM9YjCKuuLZMYyglh2QDXcYKVA= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.9.0 h1:FAF9l8Wjxi9Ad2k/vLTfHZyzXYX72C62wBGpV3G6AIo= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.9.0/go.mod h1:smUdtylgc0YQiUr2PuifS4hBXhAS5xtR6WQhxP1wiNA= -go.opentelemetry.io/otel/exporters/zipkin v1.11.1 h1:JlJ3/oQoyqlrPDCfsSVFcHgGeHvZq+hr1VPWtiYCXTo= -go.opentelemetry.io/otel/exporters/zipkin v1.11.1/go.mod h1:T4S6aVwIS1+MHA+dJHCcPROtZe6ORwnv5vMKPRapsFw= -go.opentelemetry.io/otel/metric v0.33.0 h1:xQAyl7uGEYvrLAiV/09iTJlp1pZnQ9Wl793qbVvED1E= -go.opentelemetry.io/otel/metric v0.33.0/go.mod h1:QlTYc+EnYNq/M2mNk1qDDMRLpqCOj2f/r5c7Fd5FYaI= -go.opentelemetry.io/otel/sdk v1.11.1 h1:F7KmQgoHljhUuJyA+9BiU+EkJfyX5nVVF4wyzWZpKxs= -go.opentelemetry.io/otel/sdk v1.11.1/go.mod h1:/l3FE4SupHJ12TduVjUkZtlfFqDCQJlOlithYrdktys= -go.opentelemetry.io/otel/trace v1.11.1 h1:ofxdnzsNrGBYXbP7t7zpUK281+go5rF7dvdIZXF8gdQ= -go.opentelemetry.io/otel/trace v1.11.1/go.mod h1:f/Q9G7vzk5u91PhbmKbg1Qn0rzH1LJ4vbPHFGkTPtOk= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.opentelemetry.io/proto/otlp v0.18.0 h1:W5hyXNComRa23tGpKwG+FRAc4rfF6ZUg1JReK+QHS80= -go.opentelemetry.io/proto/otlp v0.18.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= -go.step.sm/crypto v0.16.2 h1:Pr9aazTwWBBZNogUsOqhOrPSdwAa9pPs+lMB602lnDA= -go.step.sm/crypto v0.16.2/go.mod h1:1WkTOTY+fOX/RY4TnZREp6trQAsBHRQ7nu6QJBiNQF8= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= -go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/automaxprocs v1.3.0 h1:II28aZoGdaglS5vVNnspf28lnZpXScxtIozx1lAjdb0= -go.uber.org/automaxprocs v1.3.0/go.mod h1:9CWT6lKIep8U41DDaPiH6eFscnTyjfTANNQNx6LrIcA= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= -go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.10.0/go.mod 
h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go4.org/intern v0.0.0-20211027215823-ae77deb06f29 h1:UXLjNohABv4S58tHmeuIZDO6e3mHpW2Dx33gaNt03LE= -go4.org/unsafe/assume-no-moving-gc v0.0.0-20220617031537-928513b29760 h1:FyBZqvoA/jbNzuAWLQE2kG820zMAkcilx6BMjGbL/E4= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181001203147-e3636079e1a4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= +github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= +go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0 h1:wCeciVlAfb5DC8MQl/DlmAv/FVPNpQgFvI/71+hatuc= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0/go.mod h1:WfEApdZDMlLUAev/0QQpr8EJ/z0VWDKYZ5tF5RH5T1U= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY= +go.opentelemetry.io/contrib/propagators/b3 v1.37.0 h1:0aGKdIuVhy5l4GClAjl72ntkZJhijf2wg1S7b5oLoYA= +go.opentelemetry.io/contrib/propagators/b3 v1.37.0/go.mod h1:nhyrxEJEOQdwR15zXrCKI6+cJK60PXAkJ/jRyfhr2mg= +go.opentelemetry.io/contrib/propagators/jaeger v1.37.0 h1:pW+qDVo0jB0rLsNeaP85xLuz20cvsECUcN7TE+D8YTM= +go.opentelemetry.io/contrib/propagators/jaeger v1.37.0/go.mod h1:x7bd+t034hxLTve1hF9Yn9qQJlO/pP8H5pWIt7+gsFM= +go.opentelemetry.io/contrib/samplers/jaegerremote v0.31.0 h1:l8XCsDh7L6Z7PB+vlw1s4ufNab+ayT2RMNdvDE/UyPc= +go.opentelemetry.io/contrib/samplers/jaegerremote v0.31.0/go.mod h1:XAOSk4bqj5vtoiY08bexeiafzxdXeLlxKFnwscvn8Fc= +go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 h1:Ahq7pZmv87yiyn3jeFz/LekZmPLLdKejuO3NcK9MssM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0/go.mod h1:MJTqhM0im3mRLw1i8uGHnCvUEeS7VwRyxlLC78PA18M= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0 h1:bDMKF3RUSxshZ5OjOTi8rsHGaPKsAt76FaqgvIUySLc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0/go.mod h1:dDT67G/IkA46Mr2l9Uj7HsQVwsjASyV9SjGofsiUZDA= 
+go.opentelemetry.io/otel/exporters/zipkin v1.37.0 h1:Z2apuaRnHEjzDAkpbWNPiksz1R0/FCIrJSjiMA43zwI= +go.opentelemetry.io/otel/exporters/zipkin v1.37.0/go.mod h1:ofGu/7fG+bpmjZoiPUUmYDJ4vXWxMT57HmGoegx49uw= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4= +go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE= +go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs= +go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko= +go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= -golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= 
-golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.1.0 h1:MDRAIl0xIo9Io2xV565hzXHw3zVseKrJKodhohM5CjU= -golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= 
-golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 h1:SbTAbRFnd5kjQXbczszQ0hdk3ctwYf3qBNH9jIsGclE= +golang.org/x/exp v0.0.0-20250813145105-42675adae3e6/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.6.0 h1:b9gGHsz9/HhJ3HF5DHQytPpuwocVTChQJK3AvoLRD5I= -golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180816102801-aaf60122140d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180921000356-2f5d2388922f/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180926154720-4dfa2610cdf3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net 
v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net 
v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.1.0 h1:hZ/3BUoy5aId7sCpA/Tc5lt8DkFgdVS2onTpJsZ/fl0= -golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783 h1:nt+Q6cXKz4MosCSpnbMtqiQ8Oz0pxTef2B4Vca2lvfk= -golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync 
v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= +golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180816055513-1c9583448a9c/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180906133057-8cf3aee42992/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180921163948-d47a0f339242/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180927150500-dad3d9fb7b6e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181005133103-4497e2df6f9e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys 
v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190515120540-06a5c4944438/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191025021431-6c3a3bfe00ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211102192858-4dd72447c267/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220406163625-3f8b81556e12/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220513210249-45d2b4557a2a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU= +golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA= -golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181003024731-2f84ea8ef872/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181006002542-f60d9635b16a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190624180213-70d37148ca0c/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools 
v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200717024301-6ddee64345a6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools 
v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.2.0 h1:G6AHpWxTMGY1KyEYoAQ5WTtIekUUvDNjan3ugu60JvE= -golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= -golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= 
-google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20211020151524-b7c3a969101a/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71 h1:GEgb2jF5zxsFJpJfg9RoDDWm7tiwc/DDSTE2BtLUkXU= -google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= -google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod 
h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.50.1 h1:DS/BukOZWp8s6p4Dt/tOaJaTQyPyOoCcrjroHuCeLzY= -google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc/examples v0.0.0-20210304020650-930c79186c99 h1:qA8rMbz1wQ4DOFfM2ouD29DG9aHWBm6ZOy9BGxiUMmY= -google.golang.org/grpc/examples v0.0.0-20210304020650-930c79186c99/go.mod h1:Ly7ZA/ARzg8fnPU9TyZIxoz33sEUuWX7txiqs8lPTgE= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/DataDog/dd-trace-go.v1 v1.43.0 h1:UE3SNh7T7ZnCrYsDZuUuwN3LFSc5aphaszUF+wMm4Sk= -gopkg.in/DataDog/dd-trace-go.v1 v1.43.0/go.mod h1:YL9g+nlUY7ByCffD5pDytAqy99GNbytRV0EBpKuldM4= -gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= 
-gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk= -gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw= +google.golang.org/genproto/googleapis/api v0.0.0-20250811230008-5f3141c8851a h1:DMCgtIAIQGZqJXMVzJF4MV8BlWoJh2ZuFiRdAleyr58= +google.golang.org/genproto/googleapis/api v0.0.0-20250811230008-5f3141c8851a/go.mod h1:y2yVLIE/CSMCPXaHnSKXxu1spLPnglFLegmgdY23uuE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a h1:tPE/Kp+x9dMSwUm/uM0JKK0IfdiJkwAbSMSeZBXXJXc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= +google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= gopkg.in/go-playground/mold.v2 v2.2.0/go.mod h1:XMyyRsGtakkDPbxXbrA5VODo6bUXyvoDjLd5l3T0XoA= -gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw= -gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= -gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.57.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/mail.v2 v2.0.0-20180731213649-a0242b2233b4/go.mod h1:htwXN1Qh09vZJ1NVKxQqHPBaCBbzKhp5GzuJEA4VJWw= gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473 h1:6D+BvnJ/j6e222UW8s2qTSe3wGBtvo0MbVQG/c5k8RE= gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473/go.mod h1:N1eN2tsCx0Ydtgjl4cqmbRCsY4/+z4cYDeqwZTk6zog= -gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= -gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI= -gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= -gopkg.in/tomb.v1 
v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/validator.v2 v2.0.0-20180514200540-135c24b11c19/go.mod h1:o4V0GXN9/CAmCsvJ0oXYZvrZOe7syiDZSN1GWGZTGzc= -gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg= -gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/driver/postgres v1.3.5/go.mod h1:EGCWefLFQSVFrHGy4J8EtiHCWX5Q8t0yz2Jt9aKkGzU= -gorm.io/gorm v1.23.4/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= -gorm.io/gorm v1.23.5/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= -gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= -gotest.tools/v3 v3.2.0 h1:I0DwBVMGAx26dttAj1BtJLAkVGncrkkUXfJLC4Flt/I= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0= -howett.net/plist v1.0.0 h1:7CrbWYbPPO/PyNy38b2EB/+gYbjCe2DXBxgtOOZbSQM= -howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= -inet.af/netaddr v0.0.0-20220617031823-097006376321 h1:B4dC8ySKTQXasnjDTMsoCMf1sQG4WsMej0WXaHxunmU= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/yaml v1.2.0/go.mod 
h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= diff --git a/go_mod_indirect_pins.go b/go_mod_indirect_pins.go deleted file mode 100644 index 405389eee81..00000000000 --- a/go_mod_indirect_pins.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -//go:build go_mod_indirect_pins -// +build go_mod_indirect_pins - -package main - -import ( - _ "github.com/go-bindata/go-bindata/go-bindata" - _ "github.com/go-swagger/go-swagger/cmd/swagger" - _ "github.com/golang/mock/mockgen" - _ "github.com/mikefarah/yq/v4" - _ "golang.org/x/tools/cmd/goimports" - _ "golang.org/x/tools/cmd/stringer" - _ "gopkg.in/DataDog/dd-trace-go.v1/ddtrace" - - _ "github.com/ory/go-acc" -) diff --git a/health/doc.go b/health/doc.go index bad9c42139c..a0b2f45cbe8 100644 --- a/health/doc.go +++ b/health/doc.go @@ -24,6 +24,8 @@ package health // Responses: // 200: healthStatus // 500: errorOAuth2 +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions func swaggerPublicIsInstanceAlive() {} // Alive returns an ok status if the instance is ready to handle HTTP requests. @@ -47,6 +49,8 @@ func swaggerPublicIsInstanceAlive() {} // Responses: // 200: healthStatus // 500: errorOAuth2 +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions func swaggerAdminIsInstanceAlive() {} // Ready returns an ok status if the instance is ready to handle HTTP requests and all ReadyCheckers are ok. @@ -70,6 +74,8 @@ func swaggerAdminIsInstanceAlive() {} // Responses: // 200: healthStatus // 503: healthNotReadyStatus +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions func swaggerAdminIsInstanceReady() {} // Ready returns an ok status if the instance is ready to handle HTTP requests and all ReadyCheckers are ok. @@ -93,6 +99,8 @@ func swaggerAdminIsInstanceReady() {} // Responses: // 200: healthStatus // 503: healthNotReadyStatus +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions func swaggerPublicIsInstanceReady() {} // Version returns this service's versions. 
@@ -111,4 +119,6 @@ func swaggerPublicIsInstanceReady() {} // // Responses: // 200: version +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions func swaggerGetVersion() {} diff --git a/health/handler_test.go b/health/handler_test.go index ed0a91b0d37..b5967611e16 100644 --- a/health/handler_test.go +++ b/health/handler_test.go @@ -4,26 +4,23 @@ package health import ( - "context" "net/http" "net/http/httptest" "testing" "github.com/stretchr/testify/assert" - - "github.com/ory/x/contextx" - "github.com/stretchr/testify/require" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/dbal" "github.com/ory/x/healthx" + "github.com/ory/x/prometheusx" ) func TestPublicHealthHandler(t *testing.T) { - ctx := context.Background() - doCORSRequest := func(t *testing.T, endpoint string) *http.Response { req, err := http.NewRequest(http.MethodGet, endpoint, nil) require.NoError(t, err) @@ -55,31 +52,29 @@ func TestPublicHealthHandler(t *testing.T) { { name: "with CORS enabled", config: map[string]interface{}{ - "cors.allowed_origins": []string{"https://example.com"}, - "cors.enabled": true, - "cors.allowed_methods": []string{"GET"}, - "cors.allow_credentials": true, + "serve.public.cors.allowed_origins": []string{"https://example.com"}, + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_methods": []string{"GET"}, + "serve.public.cors.allow_credentials": true, }, verifyResponse: expectCORSHeaders, }, { name: "with CORS disabled", config: map[string]interface{}{ - "cors.enabled": false, + "serve.public.cors.enabled": false, }, verifyResponse: expectNoCORSHeaders, }, } { t.Run(tc.name, func(t *testing.T) { - conf := internal.NewConfigurationWithDefaults() - for k, v := range tc.config { - conf.MustSet(ctx, config.PublicInterface.Key(k), v) - } - - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + // we explicitly don't restore from the backup because that has the wrong migration status + reg := testhelpers.NewRegistrySQLFromURL(t, dbal.NewSQLiteTestDatabase(t), false, false, driver.WithConfigOptions(configx.WithValues(tc.config))) + require.NoError(t, reg.Migrator().MigrateUp(t.Context())) + require.NoError(t, reg.InitNetwork(t.Context())) - public := x.NewRouterPublic() - reg.RegisterRoutes(ctx, x.NewRouterAdmin(conf.AdminURL), public) + public := x.NewRouterPublic(prometheusx.NewMetricsManager("", "", "", "")) + reg.RegisterPublicRoutes(t.Context(), public) ts := httptest.NewServer(public) diff --git a/hsm/crypto11_mock_test.go b/hsm/crypto11_mock_test.go index 72487f64782..3b850a09b11 100644 --- a/hsm/crypto11_mock_test.go +++ b/hsm/crypto11_mock_test.go @@ -2,10 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 //go:build hsm -// +build hsm // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/ThalesIgnite/crypto11 (interfaces: SignerDecrypter) +// Source: github.com/ThalesGroup/crypto11 (interfaces: SignerDecrypter) // Package hsm_test is a generated GoMock package. 
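The `-// +build hsm` deletions in the hsm files are possible because `//go:build` (introduced in Go 1.17) is now the canonical build-constraint syntax; once the module requires a modern toolchain, the legacy comment form is redundant and can simply be removed. A minimal sketch of a file gated this way, with an illustrative doc comment:

```go
//go:build hsm

// Package hsm is compiled only when the "hsm" build tag is set,
// for example: go build -tags hsm ./...
package hsm
```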
package hsm_test diff --git a/hsm/hsm.go b/hsm/hsm.go index 38304962c9d..e40a1267e9a 100644 --- a/hsm/hsm.go +++ b/hsm/hsm.go @@ -2,16 +2,15 @@ // SPDX-License-Identifier: Apache-2.0 //go:build hsm -// +build hsm package hsm import ( "crypto/elliptic" - "github.com/ThalesIgnite/crypto11" + "github.com/ThalesGroup/crypto11" - "github.com/ory/hydra/driver/config" + "github.com/ory/hydra/v2/driver/config" "github.com/ory/x/logrusx" ) diff --git a/hsm/hsm_mock_test.go b/hsm/hsm_mock_test.go index 4a14243b973..1cf508cf2f3 100644 --- a/hsm/hsm_mock_test.go +++ b/hsm/hsm_mock_test.go @@ -2,7 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 //go:build hsm -// +build hsm // Code generated by MockGen. DO NOT EDIT. // Source: hsm/hsm.go @@ -14,7 +13,7 @@ import ( elliptic "crypto/elliptic" reflect "reflect" - crypto11 "github.com/ThalesIgnite/crypto11" + crypto11 "github.com/ThalesGroup/crypto11" gomock "github.com/golang/mock/gomock" ) diff --git a/hsm/manager_hsm.go b/hsm/manager_hsm.go index 61dceb2b086..41162fe6774 100644 --- a/hsm/manager_hsm.go +++ b/hsm/manager_hsm.go @@ -2,7 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 //go:build hsm -// +build hsm package hsm @@ -16,24 +15,21 @@ import ( "net/http" "sync" - "github.com/ory/hydra/driver/config" - "github.com/ory/x/otelx" - - "github.com/pkg/errors" - - "github.com/pborman/uuid" - - "github.com/ory/fosite" - "github.com/ory/hydra/jwk" - + "github.com/ThalesGroup/crypto11" + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/cryptosigner" + "github.com/gofrs/uuid" "github.com/miekg/pkcs11" - - "github.com/ory/hydra/x" - - "github.com/ThalesIgnite/crypto11" + "github.com/pkg/errors" "go.opentelemetry.io/otel" - "gopkg.in/square/go-jose.v2" - "gopkg.in/square/go-jose.v2/cryptosigner" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/otelx" ) const tracingComponent = "github.com/ory/hydra/hsm" @@ -42,7 +38,7 @@ type KeyManager struct { jwk.Manager sync.RWMutex Context - KeySetPrefix string + c config.DefaultProvider } var ErrPreGeneratedKeys = &fosite.RFC6749Error{ @@ -53,34 +49,32 @@ var ErrPreGeneratedKeys = &fosite.RFC6749Error{ func NewKeyManager(hsm Context, config *config.DefaultProvider) *KeyManager { return &KeyManager{ - Context: hsm, - KeySetPrefix: config.HSMKeySetPrefix(), + Context: hsm, + c: *config, } } -func (m *KeyManager) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (*jose.JSONWebKeySet, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - "kid": kid, - "alg": alg, - "use": use, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) 
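The boilerplate removed here (building a string map, calling `span.SetAttributes(otelx.StringAttrs(...))`, and `defer span.End()`) is replaced in the rewritten methods that follow by attributes passed directly to `Start` and a `defer otelx.End(span, &err)` on a named error return, so the error the method ultimately returns is recorded on the span. A minimal sketch of that pattern, assuming only the `otelx.End(span, &err)` helper already used in this diff; the span name and function are illustrative:

```go
package example

import (
	"context"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"

	"github.com/ory/x/otelx"
)

// tracedOp mirrors the tracing pattern of the rewritten KeyManager methods:
// attributes are attached at span start, and the named return value err lets
// the deferred otelx.End record the final error before ending the span.
func tracedOp(ctx context.Context, set string) (err error) {
	ctx, span := otel.GetTracerProvider().Tracer("github.com/ory/hydra/hsm").Start(ctx, "hsm.ExampleOp",
		trace.WithAttributes(attribute.String("set", set)))
	defer otelx.End(span, &err)

	_ = ctx // the real methods pass ctx on to the PKCS#11 and persistence layers
	return nil
}
```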
+func (m *KeyManager) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GenerateAndPersistKeySet", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid), + attribute.String("alg", alg), + attribute.String("use", use))) + defer otelx.End(span, &err) m.Lock() defer m.Unlock() set = m.prefixKeySet(set) - err := m.deleteExistingKeySet(set) + err = m.deleteExistingKeySet(set) if err != nil { return nil, err } - if len(kid) == 0 { - kid = uuid.New() + if kid == "" { + kid = uuid.Must(uuid.NewV4()).String() } privateAttrSet, publicAttrSet, err := getKeyPairAttributes(kid, set, use) @@ -88,20 +82,20 @@ func (m *KeyManager) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg return nil, err } - switch { - case alg == "RS256": + switch alg { + case "RS256": key, err := m.GenerateRSAKeyPairWithAttributes(publicAttrSet, privateAttrSet, 4096) if err != nil { return nil, err } return createKeySet(key, kid, alg, use) - case alg == "ES256": + case "ES256": key, err := m.GenerateECDSAKeyPairWithAttributes(publicAttrSet, privateAttrSet, elliptic.P256()) if err != nil { return nil, err } return createKeySet(key, kid, alg, use) - case alg == "ES512": + case "ES512": key, err := m.GenerateECDSAKeyPairWithAttributes(publicAttrSet, privateAttrSet, elliptic.P521()) if err != nil { return nil, err @@ -119,14 +113,10 @@ func (m *KeyManager) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg } } -func (m *KeyManager) GetKey(ctx context.Context, set, kid string) (*jose.JSONWebKeySet, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GetKey") - defer span.End() - attrs := map[string]string{ - "set": set, - "kid": kid, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m *KeyManager) GetKey(ctx context.Context, set, kid string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GetKey", + trace.WithAttributes(attribute.String("set", set), attribute.String("kid", kid))) + defer otelx.End(span, &err) m.RLock() defer m.RUnlock() @@ -142,7 +132,7 @@ func (m *KeyManager) GetKey(ctx context.Context, set, kid string) (*jose.JSONWeb return nil, errors.WithStack(x.ErrNotFound) } - id, alg, use, err := getKeySetAttributes(m, keyPair, []byte(kid)) + id, alg, use, err := m.getKeySetAttributes(ctx, keyPair, []byte(kid)) if err != nil { return nil, err } @@ -150,13 +140,9 @@ func (m *KeyManager) GetKey(ctx context.Context, set, kid string) (*jose.JSONWeb return createKeySet(keyPair, id, alg, use) } -func (m *KeyManager) GetKeySet(ctx context.Context, set string) (*jose.JSONWebKeySet, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GetKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) 
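One detail worth calling out in the added `GenerateAndPersistKeySet`: key IDs are now generated with `github.com/gofrs/uuid`, whose `NewV4()` returns a value and an error, rather than pborman's `uuid.New()`, which returned a string directly. A minimal sketch of the replacement call; the helper name is illustrative:

```go
package example

import "github.com/gofrs/uuid"

// newKID generates a key ID the way the rewritten manager does:
// gofrs/uuid returns (UUID, error), so Must + String replaces the
// string-returning pborman uuid.New().
func newKID() string {
	return uuid.Must(uuid.NewV4()).String()
}
```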
+func (m *KeyManager) GetKeySet(ctx context.Context, set string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GetKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) m.RLock() defer m.RUnlock() @@ -174,7 +160,7 @@ func (m *KeyManager) GetKeySet(ctx context.Context, set string) (*jose.JSONWebKe var keys []jose.JSONWebKey for _, keyPair := range keyPairs { - kid, alg, use, err := getKeySetAttributes(m, keyPair, nil) + kid, alg, use, err := m.getKeySetAttributes(ctx, keyPair, nil) if err != nil { return nil, err } @@ -186,14 +172,12 @@ func (m *KeyManager) GetKeySet(ctx context.Context, set string) (*jose.JSONWebKe }, nil } -func (m *KeyManager) DeleteKey(ctx context.Context, set, kid string) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GetKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - "kid": kid, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m *KeyManager) DeleteKey(ctx context.Context, set, kid string) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.DeleteKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid))) + defer otelx.End(span, &err) m.Lock() defer m.Unlock() @@ -216,13 +200,9 @@ func (m *KeyManager) DeleteKey(ctx context.Context, set, kid string) error { return nil } -func (m *KeyManager) DeleteKeySet(ctx context.Context, set string) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.GetKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m *KeyManager) DeleteKeySet(ctx context.Context, set string) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hsm.DeleteKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) m.Lock() defer m.Unlock() @@ -263,7 +243,7 @@ func (m *KeyManager) UpdateKeySet(_ context.Context, _ string, _ *jose.JSONWebKe return errors.WithStack(ErrPreGeneratedKeys) } -func getKeySetAttributes(m *KeyManager, key crypto11.Signer, kid []byte) (string, string, string, error) { +func (m *KeyManager) getKeySetAttributes(ctx context.Context, key crypto11.Signer, kid []byte) (string, string, string, error) { if kid == nil { ckaId, err := m.GetAttribute(key, crypto11.CkaId) if err != nil { @@ -276,8 +256,9 @@ func getKeySetAttributes(m *KeyManager, key crypto11.Signer, kid []byte) (string switch k := key.Public().(type) { case *rsa.PublicKey: alg = "RS256" - // TODO Should we validate minimal key length by checking CKA_MODULUS_BITS?
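The TODO removed here (together with the issue reference on the next removed line) is resolved a few lines further down by an explicit bit-length check on RSA public keys, returning `jwk.ErrMinimalRsaKeyLength` unless the instance runs in development mode. A minimal sketch of that check; the sentinel error and helper below are illustrative stand-ins for the ones in the diff:

```go
package example

import (
	"crypto/rsa"
	"errors"
)

// errMinimalRSAKeyLength stands in for jwk.ErrMinimalRsaKeyLength.
var errMinimalRSAKeyLength = errors.New("RSA key is too small: at least 4096 bits are required")

// checkRSAKeyLength sketches the validation added to getKeySetAttributes:
// keys shorter than 4096 bits are rejected outside development mode.
func checkRSAKeyLength(pub *rsa.PublicKey, developmentMode bool) error {
	if pub.N.BitLen() < 4096 && !developmentMode {
		return errMinimalRSAKeyLength
	}
	return nil
}
```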
- // TODO see https://github.com/ory/hydra/issues/2905 + if k.N.BitLen() < 4096 && !m.c.IsDevelopmentMode(ctx) { + return "", "", "", errors.WithStack(jwk.ErrMinimalRsaKeyLength) + } case *ecdsa.PublicKey: if k.Curve == elliptic.P521() { alg = "ES512" @@ -298,8 +279,7 @@ func getKeySetAttributes(m *KeyManager, key crypto11.Signer, kid []byte) (string return string(kid), alg, use, nil } -func getKeyPairAttributes(kid string, set string, use string) (crypto11.AttributeSet, crypto11.AttributeSet, error) { - +func getKeyPairAttributes(kid, set, use string) (crypto11.AttributeSet, crypto11.AttributeSet, error) { privateAttrSet, err := crypto11.NewAttributeSetWithIDAndLabel([]byte(kid), []byte(set)) if err != nil { return nil, nil, err @@ -361,17 +341,9 @@ func createKeys(key crypto11.Signer, kid, alg, use string) []jose.JSONWebKey { Certificates: []*x509.Certificate{}, CertificateThumbprintSHA1: []uint8{}, CertificateThumbprintSHA256: []uint8{}, - }, { - Algorithm: alg, - Use: use, - Key: key.Public(), - KeyID: kid, - Certificates: []*x509.Certificate{}, - CertificateThumbprintSHA1: []uint8{}, - CertificateThumbprintSHA256: []uint8{}, }} } func (m *KeyManager) prefixKeySet(set string) string { - return fmt.Sprintf("%s%s", m.KeySetPrefix, set) + return fmt.Sprintf("%s%s", m.c.HSMKeySetPrefix(), set) } diff --git a/hsm/manager_hsm_test.go b/hsm/manager_hsm_test.go index 006459d9572..0217d930e87 100644 --- a/hsm/manager_hsm_test.go +++ b/hsm/manager_hsm_test.go @@ -2,7 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 //go:build hsm -// +build hsm package hsm_test @@ -17,123 +16,135 @@ import ( "reflect" "testing" - "github.com/ory/hydra/jwk" - "github.com/ory/x/contextx" - - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/persistence/sql" - "github.com/ory/x/configx" - "github.com/ory/x/logrusx" - - "github.com/ThalesIgnite/crypto11" + "github.com/ThalesGroup/crypto11" + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/cryptosigner" "github.com/golang/mock/gomock" "github.com/miekg/pkcs11" "github.com/pborman/uuid" "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "gopkg.in/square/go-jose.v2" - "gopkg.in/square/go-jose.v2/cryptosigner" - "github.com/ory/hydra/hsm" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/hsm" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/logrusx" ) func TestDefaultKeyManager_HSMEnabled(t *testing.T) { ctrl := gomock.NewController(t) mockHsmContext := NewMockContext(ctrl) defer ctrl.Finish() - l := logrusx.New("", "") - c := config.MustNew(context.Background(), l, configx.SkipValidation()) - c.MustSet(context.Background(), config.KeyDSN, "memory") - c.MustSet(context.Background(), config.HSMEnabled, "true") - reg := driver.NewRegistrySQL() - reg.WithLogger(l) - reg.WithConfig(c) - reg.WithHsmContext(mockHsmContext) - err := reg.Init(context.Background(), false, true, &contextx.TestContextualizer{}) - assert.NoError(t, err) + reg, err := 
driver.New(t.Context(), + driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyDSN: "memory", + config.HSMEnabled: true, + })), + driver.WithHSMContext(mockHsmContext), + ) + require.NoError(t, err) assert.IsType(t, &jwk.ManagerStrategy{}, reg.KeyManager()) - assert.IsType(t, &sql.Persister{}, reg.SoftwareKeyManager()) } func TestKeyManager_HsmKeySetPrefix(t *testing.T) { ctrl := gomock.NewController(t) hsmContext := NewMockContext(ctrl) defer ctrl.Finish() + l := logrusx.New("", "") + c := config.MustNew(t, l, configx.SkipValidation()) + keySetPrefix := "application_specific_prefix." + c.MustSet(context.Background(), config.HSMKeySetPrefix, keySetPrefix) + m := hsm.NewKeyManager(hsmContext, c) - rsaKey, err := rsa.GenerateKey(rand.Reader, 512) + rsaKey3072, err := rsa.GenerateKey(rand.Reader, 3072) + require.NoError(t, err) + rsaKey4096, err := rsa.GenerateKey(rand.Reader, 4096) require.NoError(t, err) ecdsaKey, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) require.NoError(t, err) - rsaKeyPair := NewMockSignerDecrypter(ctrl) - rsaKeyPair.EXPECT().Public().Return(&rsaKey.PublicKey).AnyTimes() + rsaKeyPair3072 := NewMockSignerDecrypter(ctrl) + rsaKeyPair3072.EXPECT().Public().Return(&rsaKey3072.PublicKey).AnyTimes() + + rsaKeyPair4096 := NewMockSignerDecrypter(ctrl) + rsaKeyPair4096.EXPECT().Public().Return(&rsaKey4096.PublicKey).AnyTimes() ecdsaKeyPair := NewMockSignerDecrypter(ctrl) ecdsaKeyPair.EXPECT().Public().Return(&ecdsaKey.PublicKey).AnyTimes() var kid = uuid.New() - keySetPrefix := "application_specific_prefix." expectedPrefixedOpenIDConnectKeyName := fmt.Sprintf("%s%s", keySetPrefix, x.OpenIDConnectKeyName) - m := &hsm.KeyManager{ - Context: hsmContext, - KeySetPrefix: keySetPrefix, - } - t.Run("case=GenerateAndPersistKeySet", func(t *testing.T) { privateAttrSet, publicAttrSet := expectedKeyAttributes(t, expectedPrefixedOpenIDConnectKeyName, kid) hsmContext.EXPECT().FindKeyPairs(gomock.Nil(), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return(nil, nil) - hsmContext.EXPECT().GenerateRSAKeyPairWithAttributes(gomock.Eq(publicAttrSet), gomock.Eq(privateAttrSet), gomock.Eq(4096)).Return(rsaKeyPair, nil) + hsmContext.EXPECT().GenerateRSAKeyPairWithAttributes(gomock.Eq(publicAttrSet), gomock.Eq(privateAttrSet), gomock.Eq(4096)).Return(rsaKeyPair4096, nil) got, err := m.GenerateAndPersistKeySet(context.TODO(), x.OpenIDConnectKeyName, kid, "RS256", "sig") assert.NoError(t, err) - expectedKeySet := expectedKeySet(rsaKeyPair, kid, "RS256", "sig") + expectedKeySet := expectedKeySet(rsaKeyPair4096, kid, "RS256", "sig") if !reflect.DeepEqual(got, expectedKeySet) { t.Errorf("GenerateAndPersistKeySet() got = %v, want %v", got, expectedKeySet) } }) t.Run("case=GetKey", func(t *testing.T) { - hsmContext.EXPECT().FindKeyPair(gomock.Eq([]byte(kid)), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return(rsaKeyPair, nil) - hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair), gomock.Eq(crypto11.CkaDecrypt)).Return(nil, nil) + hsmContext.EXPECT().FindKeyPair(gomock.Eq([]byte(kid)), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return(rsaKeyPair4096, nil) + hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair4096), gomock.Eq(crypto11.CkaDecrypt)).Return(nil, nil) got, err := m.GetKey(context.TODO(), x.OpenIDConnectKeyName, kid) assert.NoError(t, err) - expectedKeySet := expectedKeySet(rsaKeyPair, kid, "RS256", "sig") + expectedKeySet := expectedKeySet(rsaKeyPair4096, kid, "RS256", "sig") if !reflect.DeepEqual(got, expectedKeySet) { 
t.Errorf("GetKey() got = %v, want %v", got, expectedKeySet) } }) + t.Run("case=GetKeyMinimalRsaKeyLengthError", func(t *testing.T) { + hsmContext.EXPECT().FindKeyPair(gomock.Eq([]byte(kid)), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return(rsaKeyPair3072, nil) + + _, err := m.GetKey(context.TODO(), x.OpenIDConnectKeyName, kid) + + assert.ErrorIs(t, err, jwk.ErrMinimalRsaKeyLength) + }) t.Run("case=GetKeySet", func(t *testing.T) { - hsmContext.EXPECT().FindKeyPairs(gomock.Nil(), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return([]crypto11.Signer{rsaKeyPair}, nil) - hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair), gomock.Eq(crypto11.CkaId)).Return(pkcs11.NewAttribute(pkcs11.CKA_ID, []byte(kid)), nil) - hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair), gomock.Eq(crypto11.CkaDecrypt)).Return(nil, nil) + hsmContext.EXPECT().FindKeyPairs(gomock.Nil(), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return([]crypto11.Signer{rsaKeyPair4096}, nil) + hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair4096), gomock.Eq(crypto11.CkaId)).Return(pkcs11.NewAttribute(pkcs11.CKA_ID, []byte(kid)), nil) + hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair4096), gomock.Eq(crypto11.CkaDecrypt)).Return(nil, nil) got, err := m.GetKeySet(context.TODO(), x.OpenIDConnectKeyName) assert.NoError(t, err) - expectedKeySet := expectedKeySet(rsaKeyPair, kid, "RS256", "sig") + expectedKeySet := expectedKeySet(rsaKeyPair4096, kid, "RS256", "sig") if !reflect.DeepEqual(got, expectedKeySet) { t.Errorf("GetKey() got = %v, want %v", got, expectedKeySet) } }) + t.Run("case=GetKeySetMinimalRsaKeyLengthError", func(t *testing.T) { + hsmContext.EXPECT().FindKeyPairs(gomock.Nil(), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return([]crypto11.Signer{rsaKeyPair3072}, nil) + hsmContext.EXPECT().GetAttribute(gomock.Eq(rsaKeyPair3072), gomock.Eq(crypto11.CkaId)).Return(pkcs11.NewAttribute(pkcs11.CKA_ID, []byte(kid)), nil) + + _, err := m.GetKeySet(context.TODO(), x.OpenIDConnectKeyName) + + assert.ErrorIs(t, err, jwk.ErrMinimalRsaKeyLength) + }) t.Run("case=DeleteKey", func(t *testing.T) { - hsmContext.EXPECT().FindKeyPair(gomock.Eq([]byte(kid)), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return(rsaKeyPair, nil) - rsaKeyPair.EXPECT().Delete().Return(nil) + hsmContext.EXPECT().FindKeyPair(gomock.Eq([]byte(kid)), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return(rsaKeyPair4096, nil) + rsaKeyPair4096.EXPECT().Delete().Return(nil) err := m.DeleteKey(context.TODO(), x.OpenIDConnectKeyName, kid) assert.NoError(t, err) }) t.Run("case=DeleteKeySet", func(t *testing.T) { - hsmContext.EXPECT().FindKeyPairs(gomock.Nil(), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return([]crypto11.Signer{rsaKeyPair}, nil) - rsaKeyPair.EXPECT().Delete().Return(nil) + hsmContext.EXPECT().FindKeyPairs(gomock.Nil(), gomock.Eq([]byte(expectedPrefixedOpenIDConnectKeyName))).Return([]crypto11.Signer{rsaKeyPair4096}, nil) + rsaKeyPair4096.EXPECT().Delete().Return(nil) err := m.DeleteKeySet(context.TODO(), x.OpenIDConnectKeyName) @@ -145,8 +156,11 @@ func TestKeyManager_GenerateAndPersistKeySet(t *testing.T) { ctrl := gomock.NewController(t) hsmContext := NewMockContext(ctrl) defer ctrl.Finish() + l := logrusx.New("", "") + c := config.MustNew(t, l, configx.SkipValidation()) + m := hsm.NewKeyManager(hsmContext, c) - rsaKey, err := rsa.GenerateKey(rand.Reader, 512) + rsaKey, err := rsa.GenerateKey(rand.Reader, 4096) require.NoError(t, err) ecdsaKey, err := 
ecdsa.GenerateKey(elliptic.P521(), rand.Reader) @@ -303,9 +317,6 @@ func TestKeyManager_GenerateAndPersistKeySet(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setup(t) - m := &hsm.KeyManager{ - Context: hsmContext, - } got, err := m.GenerateAndPersistKeySet(tt.args.ctx, tt.args.set, tt.args.kid, tt.args.alg, tt.args.use) if tt.wantErr != nil { require.Nil(t, got) @@ -326,8 +337,11 @@ func TestKeyManager_GetKey(t *testing.T) { ctrl := gomock.NewController(t) hsmContext := NewMockContext(ctrl) defer ctrl.Finish() + l := logrusx.New("", "") + c := config.MustNew(t, l, configx.SkipValidation()) + m := hsm.NewKeyManager(hsmContext, c) - rsaKey, err := rsa.GenerateKey(rand.Reader, 512) + rsaKey, err := rsa.GenerateKey(rand.Reader, 4096) require.NoError(t, err) rsaKeyPair := NewMockSignerDecrypter(ctrl) rsaKeyPair.EXPECT().Public().Return(&rsaKey.PublicKey).AnyTimes() @@ -493,9 +507,6 @@ func TestKeyManager_GetKey(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setup(t) - m := &hsm.KeyManager{ - Context: hsmContext, - } got, err := m.GetKey(tt.args.ctx, tt.args.set, tt.args.kid) if tt.wantErr != nil { require.Nil(t, got) @@ -516,8 +527,11 @@ func TestKeyManager_GetKeySet(t *testing.T) { ctrl := gomock.NewController(t) hsmContext := NewMockContext(ctrl) defer ctrl.Finish() + l := logrusx.New("", "") + c := config.MustNew(t, l, configx.SkipValidation()) + m := hsm.NewKeyManager(hsmContext, c) - rsaKey, err := rsa.GenerateKey(rand.Reader, 512) + rsaKey, err := rsa.GenerateKey(rand.Reader, 4096) require.NoError(t, err) rsaKid := uuid.New() rsaKeyPair := NewMockSignerDecrypter(ctrl) @@ -641,9 +655,6 @@ func TestKeyManager_GetKeySet(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setup(t) - m := &hsm.KeyManager{ - Context: hsmContext, - } got, err := m.GetKeySet(tt.args.ctx, tt.args.set) if tt.wantErr != nil { require.Nil(t, got) @@ -664,6 +675,9 @@ func TestKeyManager_DeleteKey(t *testing.T) { ctrl := gomock.NewController(t) hsmContext := NewMockContext(ctrl) defer ctrl.Finish() + l := logrusx.New("", "") + c := config.MustNew(t, l, configx.SkipValidation()) + m := hsm.NewKeyManager(hsmContext, c) rsaKeyPair := NewMockSignerDecrypter(ctrl) @@ -733,9 +747,6 @@ func TestKeyManager_DeleteKey(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setup(t) - m := &hsm.KeyManager{ - Context: hsmContext, - } if err := m.DeleteKey(tt.args.ctx, tt.args.set, tt.args.kid); len(tt.wantErrMsg) != 0 { require.EqualError(t, err, tt.wantErrMsg) } @@ -747,6 +758,9 @@ func TestKeyManager_DeleteKeySet(t *testing.T) { ctrl := gomock.NewController(t) hsmContext := NewMockContext(ctrl) defer ctrl.Finish() + l := logrusx.New("", "") + c := config.MustNew(t, l, configx.SkipValidation()) + m := hsm.NewKeyManager(hsmContext, c) rsaKeyPair1 := NewMockSignerDecrypter(ctrl) rsaKeyPair2 := NewMockSignerDecrypter(ctrl) @@ -812,9 +826,6 @@ func TestKeyManager_DeleteKeySet(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setup(t) - m := &hsm.KeyManager{ - Context: hsmContext, - } if err := m.DeleteKeySet(tt.args.ctx, tt.args.set); len(tt.wantErrMsg) != 0 { require.EqualError(t, err, tt.wantErrMsg) } @@ -883,13 +894,5 @@ func createJSONWebKeys(keyPair *MockSignerDecrypter, kid string, alg string, use Certificates: []*x509.Certificate{}, CertificateThumbprintSHA1: []uint8{}, CertificateThumbprintSHA256: []uint8{}, - }, { - Algorithm: alg, - Use: use, - Key: keyPair.Public(), 
- KeyID: kid, - Certificates: []*x509.Certificate{}, - CertificateThumbprintSHA1: []uint8{}, - CertificateThumbprintSHA256: []uint8{}, }} } diff --git a/hsm/manager_nohsm.go b/hsm/manager_nohsm.go index 01e3a98b32a..3839e596f93 100644 --- a/hsm/manager_nohsm.go +++ b/hsm/manager_nohsm.go @@ -2,7 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 //go:build !hsm -// +build !hsm package hsm @@ -10,14 +9,14 @@ import ( "context" "sync" - "github.com/ory/hydra/driver/config" + "github.com/ory/hydra/v2/driver/config" "github.com/ory/x/logrusx" "github.com/pkg/errors" - "github.com/ory/hydra/jwk" + "github.com/ory/hydra/v2/jwk" - "gopkg.in/square/go-jose.v2" + "github.com/go-jose/go-jose/v3" ) type Context interface { diff --git a/install.sh b/install.sh index 9f7a736b14e..fedfffec39a 100644 --- a/install.sh +++ b/install.sh @@ -271,9 +271,9 @@ http_download_curl() { source_url=$2 header=$3 if [ -z "$header" ]; then - code=$(curl -w '%{http_code}' -sL -o "$local_file" "$source_url") + code=$(curl --retry 7 --retry-connrefused -w '%{http_code}' -sL -o "$local_file" "$source_url") else - code=$(curl -w '%{http_code}' -sL -H "$header" -o "$local_file" "$source_url") + code=$(curl --retry 7 --retry-connrefused -w '%{http_code}' -sL -H "$header" -o "$local_file" "$source_url") fi if [ "$code" != "200" ]; then log_debug "http_download_curl received HTTP status $code" diff --git a/internal/.hydra.yaml b/internal/.hydra.yaml index 37c38723d63..009668a9151 100644 --- a/internal/.hydra.yaml +++ b/internal/.hydra.yaml @@ -74,6 +74,7 @@ webfinger: auth_url: https://example.com/auth token_url: https://example.com/token client_registration_url: https://example.com + device_authorization_url: https://example.com/device_authorization supported_claims: - username supported_scope: @@ -100,6 +101,9 @@ urls: consent: https://consent logout: https://logout error: https://error + device: + verification: https://device + success: https://device/callback post_logout_redirect: https://post_logout strategies: @@ -112,12 +116,18 @@ ttl: refresh_token: 2h id_token: 2h auth_code: 2h + device_user_code: 2h + authentication_session: 24h oauth2: expose_internal_errors: true hashers: bcrypt: cost: 20 + device_authorization: + token_polling_interval: 2h + user_code: + entropy_preset: medium pkce: enforced: true enforced_for_public_clients: true @@ -127,6 +137,8 @@ secrets: - some-random-system-secret cookie: - some-random-cookie-secret + pagination: + - some-random-pagination-secret profiling: cpu @@ -136,11 +148,13 @@ tracing: providers: jaeger: local_agent_address: 127.0.0.1:6831 - propagation: jaeger - max_tag_value_length: 1024 sampling: - type: const - value: 1 + trace_id_ratio: 1 server_url: http://sampling zipkin: server_url: http://zipkin/api/v2/spans + otlp: + insecure: true + server_url: localhost:4318 + sampling: + sampling_ratio: 1.0 diff --git a/internal/certification/scripts/install.sh b/internal/certification/scripts/install.sh index 6690b7df6ab..d7f697a9513 100644 --- a/internal/certification/scripts/install.sh +++ b/internal/certification/scripts/install.sh @@ -20,7 +20,7 @@ mkdir -p go/bin source "$HOME"/.profile # go/dep -curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh +curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/golang/dep/master/install.sh | sh # Docker sudo apt-get update @@ -30,20 +30,19 @@ sudo apt-get -y install \ curl \ gnupg2 \ software-properties-common -curl 
-fsSL https://download.docker.com/linux/debian/gpg | sudo apt-key add - +curl --retry 7 --retry-connrefused -fsSL https://download.docker.com/linux/debian/gpg | sudo apt-key add - sudo add-apt-repository \ "deb [arch=amd64] https://download.docker.com/linux/debian \ $(lsb_release -cs) \ stable" sudo apt-get update -sudo apt-get -y install docker-ce python-pip +sudo apt-get -y install docker-ce # Dockerize export DOCKERIZE_VERSION=v0.6.1 wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ && tar -C /usr/local/bin -xzvf dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ && rm dockerize-alpine-linux-amd64-$DOCKERIZE_VERSION.tar.gz -sudo pip install docker-compose git clone https://github.com/ory/hydra-login-consent-node.git diff --git a/internal/certification/scripts/start.sh b/internal/certification/scripts/start.sh index d562ca2e9da..81691e9ede1 100644 --- a/internal/certification/scripts/start.sh +++ b/internal/certification/scripts/start.sh @@ -12,12 +12,13 @@ docker start kong dockerize -wait http://localhost:8001/ -timeout 30s -ip=$(curl ifconfig.co) +ip=$(curl --retry 7 --retry-connrefused ifconfig.co) -curl -i -X DELETE --url http://localhost:8001/apis/hydra-oauth -curl -i -X DELETE --url http://localhost:8001/apis/login-consent +curl --retry 7 --retry-connrefused -i -X DELETE --url http://localhost:8001/apis/hydra-oauth +curl --retry 7 --retry-connrefused -i -X DELETE --url http://localhost:8001/apis/login-consent curl -i -X POST \ + --retry 7 --retry-connrefused \ --url http://localhost:8001/apis/ \ --data 'name=hydra-oauth' \ --data upstream_url=http://"${ip}":9000/ \ @@ -26,6 +27,7 @@ curl -i -X POST \ --data 'preserve_host=true' curl -i -X POST \ + --retry 7 --retry-connrefused \ --url http://localhost:8001/apis/ \ --data 'name=login-consent' \ --data upstream_url=http://"$ip":9001/ \ diff --git a/internal/config/config.yaml b/internal/config/config.yaml index 7ff8bbc37fc..376406d9a66 100644 --- a/internal/config/config.yaml +++ b/internal/config/config.yaml @@ -139,7 +139,7 @@ serve: # Access Log configuration for public server. request_log: - # Disable access log for health endpoints. + # Disable access log for health and metrics endpoints. disable_for_health: false # admin controls the admin daemon serving admin API endpoints like /jwk, /client, ... @@ -365,7 +365,7 @@ strategies: # You may use JSON Web Tokens as access tokens. # # But seriously. Don't do that. It's not a great idea and has a ton of caveats and subtle security implications. Read more: - # -> https://www.ory.sh/docs/hydra/advanced#json-web-tokens + # -> https://www.ory.sh/docs/oauth2-oidc/jwt-access-token # # access_token: jwt @@ -402,6 +402,18 @@ oauth2: session: # store encrypted data in database, default true encrypt_at_rest: true + ## refresh_token_rotation + # By default Refresh Tokens are rotated and invalidated with each use. See https://datatracker.ietf.org/doc/html/draft-ietf-oauth-security-topics#section-4.13.2 for more details + refresh_token_rotation: + # + ## grace_period + # + # Set the grace period for refresh tokens to be reused. Such reused tokens will result in multiple refresh tokens being issued. + # + # Examples: + # - 5s + # - 1m + grace_period: 0s # The secrets section configures secrets used for encryption and signing of several systems. 
All secrets can be rotated, # for more information on this topic navigate to: @@ -443,21 +455,12 @@ tracing: jaeger: # The address of the jaeger-agent where spans should be sent to local_agent_address: 127.0.0.1:6831 - # The tracing header format - propagation: jaeger - # The maximum length of jaeger tag value - max_tag_value_length: 1024 sampling: - # The type of the sampler you want to use. Supports: - # - const - # - probabilistic - # - ratelimiting - type: const # The value passed to the sampler type that has been configured. # Supported values: This is dependant on the sampling strategy used: # - const: 0 or 1 (all or nothing) # - rateLimiting: a constant rate (e.g. setting this to 3 will sample requests with the rate of 3 traces per second) # - probabilistic: a value between 0..1 - value: 1.0 + trace_id_ratio: 1.0 # The address of jaeger-agent's HTTP sampling server server_url: http://localhost:5778/sampling diff --git a/internal/driver.go b/internal/driver.go deleted file mode 100644 index 23dea1b66ca..00000000000 --- a/internal/driver.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package internal - -import ( - "context" - - "sync" - "testing" - - "github.com/ory/x/configx" - - "github.com/stretchr/testify/require" - - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - "github.com/ory/x/sqlcon/dockertest" - - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/jwk" - "github.com/ory/x/logrusx" -) - -func resetConfig(p *config.DefaultProvider) { - p.MustSet(context.Background(), config.KeyBCryptCost, "4") - p.MustSet(context.Background(), config.KeySubjectIdentifierAlgorithmSalt, "00000000") - p.MustSet(context.Background(), config.KeyGetSystemSecret, []string{"000000000000000000000000000000000000000000000000"}) - p.MustSet(context.Background(), config.KeyGetCookieSecrets, []string{"000000000000000000000000000000000000000000000000"}) - p.MustSet(context.Background(), config.KeyLogLevel, "trace") -} - -func NewConfigurationWithDefaults() *config.DefaultProvider { - p := config.MustNew(context.Background(), logrusx.New("", ""), configx.SkipValidation()) - resetConfig(p) - p.MustSet(context.Background(), config.KeyTLSEnabled, false) - return p -} - -func NewConfigurationWithDefaultsAndHTTPS() *config.DefaultProvider { - p := config.MustNew(context.Background(), logrusx.New("", ""), configx.SkipValidation()) - resetConfig(p) - p.MustSet(context.Background(), config.KeyTLSEnabled, true) - return p -} - -func NewRegistryMemory(t *testing.T, c *config.DefaultProvider, ctxer contextx.Contextualizer) driver.Registry { - return newRegistryDefault(t, "memory", c, true, ctxer) -} - -func NewMockedRegistry(t *testing.T, ctxer contextx.Contextualizer) driver.Registry { - return newRegistryDefault(t, "memory", NewConfigurationWithDefaults(), true, ctxer) -} - -func NewRegistrySQLFromURL(t *testing.T, url string, migrate bool, ctxer contextx.Contextualizer) driver.Registry { - return newRegistryDefault(t, url, NewConfigurationWithDefaults(), migrate, ctxer) -} - -func newRegistryDefault(t *testing.T, url string, c *config.DefaultProvider, migrate bool, ctxer contextx.Contextualizer) driver.Registry { - ctx := context.Background() - c.MustSet(ctx, config.KeyLogLevel, "trace") - c.MustSet(ctx, config.KeyDSN, url) - c.MustSet(ctx, "dev", true) - - r, err := 
driver.NewRegistryFromDSN(ctx, c, logrusx.New("test_hydra", "master"), false, migrate, ctxer) - require.NoError(t, err) - - return r -} - -func CleanAndMigrate(reg driver.Registry) func(*testing.T) { - return func(t *testing.T) { - x.CleanSQLPop(t, reg.Persister().Connection(context.Background())) - require.NoError(t, reg.Persister().MigrateUp(context.Background())) - t.Log("clean and migrate done") - } -} - -func ConnectToMySQL(t *testing.T) string { - return dockertest.RunTestMySQLWithVersion(t, "11.8") -} - -func ConnectToPG(t *testing.T) string { - return dockertest.RunTestPostgreSQLWithVersion(t, "11.8") -} - -func ConnectToCRDB(t *testing.T) string { - return dockertest.RunTestCockroachDBWithVersion(t, "v22.1.2") -} - -func ConnectDatabases(t *testing.T, migrate bool, ctxer contextx.Contextualizer) (pg, mysql, crdb driver.Registry, clean func(*testing.T)) { - var pgURL, mysqlURL, crdbURL string - wg := sync.WaitGroup{} - - wg.Add(3) - go func() { - pgURL = ConnectToPG(t) - t.Log("Pg done") - wg.Done() - }() - go func() { - mysqlURL = ConnectToMySQL(t) - t.Log("myssql done") - wg.Done() - }() - go func() { - crdbURL = ConnectToCRDB(t) - t.Log("crdb done") - wg.Done() - }() - t.Log("beginning to wait") - wg.Wait() - t.Log("done waiting") - - pg = NewRegistrySQLFromURL(t, pgURL, migrate, ctxer) - mysql = NewRegistrySQLFromURL(t, mysqlURL, migrate, ctxer) - crdb = NewRegistrySQLFromURL(t, crdbURL, migrate, ctxer) - dbs := []driver.Registry{pg, mysql, crdb} - - clean = func(t *testing.T) { - wg := sync.WaitGroup{} - - wg.Add(len(dbs)) - for _, db := range dbs { - go func(db driver.Registry) { - defer wg.Done() - CleanAndMigrate(db)(t) - }(db) - } - wg.Wait() - } - clean(t) - return -} - -func MustEnsureRegistryKeys(r driver.Registry, key string) { - if err := jwk.EnsureAsymmetricKeypairExists(context.Background(), r, "RS256", key); err != nil { - panic(err) - } -} diff --git a/internal/fosite_store.go b/internal/fosite_store.go index 0960b0423c6..81b02167297 100644 --- a/internal/fosite_store.go +++ b/internal/fosite_store.go @@ -4,34 +4,34 @@ package internal import ( - "context" + "testing" - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" ) -func AddFositeExamples(r driver.Registry) { +func AddFositeExamples(t *testing.T, r *driver.RegistrySQL) { for _, c := range []client.Client{ { - LegacyClientID: "my-client", - Secret: "foobar", - RedirectURIs: []string{"http://localhost:3846/callback"}, - ResponseTypes: []string{"id_token", "code", "token"}, - GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, - Scope: "fosite,openid,photos,offline", + ID: "my-client", + Secret: "foobar", + RedirectURIs: []string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scope: "fosite,openid,photos,offline", }, { - LegacyClientID: "encoded:client", - Secret: "encoded&password", - RedirectURIs: []string{"http://localhost:3846/callback"}, - ResponseTypes: []string{"id_token", "code", "token"}, - GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, - Scope: "fosite,openid,photos,offline", + ID: "encoded:client", + Secret: "encoded&password", + RedirectURIs: 
[]string{"http://localhost:3846/callback"}, + ResponseTypes: []string{"id_token", "code", "token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scope: "fosite,openid,photos,offline", }, } { // #nosec G601 - if err := r.ClientManager().CreateClient(context.Background(), &c); err != nil { - panic(err) - } + require.NoError(t, r.ClientManager().CreateClient(t.Context(), &c)) } } diff --git a/internal/httpclient/.openapi-generator/FILES b/internal/httpclient/.openapi-generator/FILES index c83b13fa739..f33f33d926d 100644 --- a/internal/httpclient/.openapi-generator/FILES +++ b/internal/httpclient/.openapi-generator/FILES @@ -10,10 +10,15 @@ api_oidc.go api_wellknown.go client.go configuration.go +docs/AcceptDeviceUserCodeRequest.md docs/AcceptOAuth2ConsentRequest.md docs/AcceptOAuth2ConsentRequestSession.md docs/AcceptOAuth2LoginRequest.md docs/CreateJsonWebKeySet.md +docs/CreateVerifiableCredentialRequestBody.md +docs/CredentialSupportedDraft00.md +docs/DeviceAuthorization.md +docs/DeviceUserAuthRequest.md docs/ErrorOAuth2.md docs/GenericError.md docs/GetVersion200Response.md @@ -25,24 +30,24 @@ docs/IsReady503Response.md docs/JsonPatch.md docs/JsonWebKey.md docs/JsonWebKeySet.md -docs/JwkApi.md -docs/MetadataApi.md -docs/OAuth2Api.md +docs/JwkAPI.md +docs/KeysetPaginationRequestParameters.md +docs/KeysetPaginationResponseHeaders.md +docs/MetadataAPI.md +docs/OAuth2API.md docs/OAuth2Client.md docs/OAuth2ClientTokenLifespans.md docs/OAuth2ConsentRequest.md docs/OAuth2ConsentRequestOpenIDConnectContext.md docs/OAuth2ConsentSession.md -docs/OAuth2ConsentSessionExpiresAt.md docs/OAuth2LoginRequest.md docs/OAuth2LogoutRequest.md docs/OAuth2RedirectTo.md docs/OAuth2TokenExchange.md -docs/OidcApi.md +docs/OidcAPI.md docs/OidcConfiguration.md docs/OidcUserInfo.md -docs/Pagination.md -docs/PaginationHeaders.md +docs/RFC6749ErrorJson.md docs/RejectOAuth2Request.md docs/TokenPagination.md docs/TokenPaginationHeaders.md @@ -51,15 +56,24 @@ docs/TokenPaginationResponseHeaders.md docs/TrustOAuth2JwtGrantIssuer.md docs/TrustedOAuth2JwtGrantIssuer.md docs/TrustedOAuth2JwtGrantJsonWebKey.md +docs/VerifiableCredentialPrimingResponse.md +docs/VerifiableCredentialProof.md +docs/VerifiableCredentialResponse.md +docs/VerifyUserCodeRequest.md docs/Version.md -docs/WellknownApi.md +docs/WellknownAPI.md git_push.sh go.mod go.sum +model_accept_device_user_code_request.go model_accept_o_auth2_consent_request.go model_accept_o_auth2_consent_request_session.go model_accept_o_auth2_login_request.go model_create_json_web_key_set.go +model_create_verifiable_credential_request_body.go +model_credential_supported_draft00.go +model_device_authorization.go +model_device_user_auth_request.go model_error_o_auth2.go model_generic_error.go model_get_version_200_response.go @@ -71,21 +85,21 @@ model_is_ready_503_response.go model_json_patch.go model_json_web_key.go model_json_web_key_set.go +model_keyset_pagination_request_parameters.go +model_keyset_pagination_response_headers.go model_o_auth2_client.go model_o_auth2_client_token_lifespans.go model_o_auth2_consent_request.go model_o_auth2_consent_request_open_id_connect_context.go model_o_auth2_consent_session.go -model_o_auth2_consent_session_expires_at.go model_o_auth2_login_request.go model_o_auth2_logout_request.go model_o_auth2_redirect_to.go model_o_auth2_token_exchange.go model_oidc_configuration.go model_oidc_user_info.go -model_pagination.go -model_pagination_headers.go model_reject_o_auth2_request.go 
+model_rfc6749_error_json.go model_token_pagination.go model_token_pagination_headers.go model_token_pagination_request_parameters.go @@ -93,6 +107,10 @@ model_token_pagination_response_headers.go model_trust_o_auth2_jwt_grant_issuer.go model_trusted_o_auth2_jwt_grant_issuer.go model_trusted_o_auth2_jwt_grant_json_web_key.go +model_verifiable_credential_priming_response.go +model_verifiable_credential_proof.go +model_verifiable_credential_response.go +model_verify_user_code_request.go model_version.go response.go utils.go diff --git a/internal/httpclient/.openapi-generator/VERSION b/internal/httpclient/.openapi-generator/VERSION index 6d54bbd7751..e465da43155 100644 --- a/internal/httpclient/.openapi-generator/VERSION +++ b/internal/httpclient/.openapi-generator/VERSION @@ -1 +1 @@ -6.0.1 \ No newline at end of file +7.14.0 diff --git a/internal/httpclient/README.md b/internal/httpclient/README.md index 0a0f60380d1..991c8065a8c 100644 --- a/internal/httpclient/README.md +++ b/internal/httpclient/README.md @@ -8,13 +8,14 @@ This API client was generated by the [OpenAPI Generator](https://openapi-generat - API version: - Package version: 1.0.0 +- Generator version: 7.14.0 - Build package: org.openapitools.codegen.languages.GoClientCodegen ## Installation Install the following dependencies: -```shell +```sh go get github.com/stretchr/testify/assert go get golang.org/x/oauth2 go get golang.org/x/net/context @@ -22,13 +23,13 @@ go get golang.org/x/net/context Put the package under your project folder and add the following in import: -```golang -import openapi "github.com/ory/hydra-client-go" +```go +import openapi "github.com/ory/hydra-client-go/v2" ``` To use a proxy, set the environment variable `HTTP_PROXY`: -```golang +```go os.Setenv("HTTP_PROXY", "http://proxy_name:proxy_port") ``` @@ -38,17 +39,17 @@ Default configuration comes with `Servers` field that contains server objects as ### Select Server Configuration -For using other server than the one defined on index 0 set context value `sw.ContextServerIndex` of type `int`. +For using other server than the one defined on index 0 set context value `openapi.ContextServerIndex` of type `int`. -```golang +```go ctx := context.WithValue(context.Background(), openapi.ContextServerIndex, 1) ``` ### Templated Server URL -Templated server URL is formatted using default variables from configuration or from context value `sw.ContextServerVariables` of type `map[string]string`. +Templated server URL is formatted using default variables from configuration or from context value `openapi.ContextServerVariables` of type `map[string]string`. -```golang +```go ctx := context.WithValue(context.Background(), openapi.ContextServerVariables, map[string]string{ "basePath": "v2", }) @@ -60,9 +61,9 @@ Note, enum values are always validated and all unused variables are silently ign Each operation can use different server URL defined using `OperationServers` map in the `Configuration`. An operation is uniquely identified by `"{classname}Service.{nickname}"` string. -Similar rules for overriding default operation server index and variables applies by using `sw.ContextOperationServerIndices` and `sw.ContextOperationServerVariables` context maps. +Similar rules for overriding default operation server index and variables applies by using `openapi.ContextOperationServerIndices` and `openapi.ContextOperationServerVariables` context maps. 
-``` +```go ctx := context.WithValue(context.Background(), openapi.ContextOperationServerIndices, map[string]int{ "{classname}Service.{nickname}": 2, }) @@ -79,60 +80,69 @@ All URIs are relative to *http://localhost* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- -*JwkApi* | [**CreateJsonWebKeySet**](docs/JwkApi.md#createjsonwebkeyset) | **Post** /admin/keys/{set} | Create JSON Web Key -*JwkApi* | [**DeleteJsonWebKey**](docs/JwkApi.md#deletejsonwebkey) | **Delete** /admin/keys/{set}/{kid} | Delete JSON Web Key -*JwkApi* | [**DeleteJsonWebKeySet**](docs/JwkApi.md#deletejsonwebkeyset) | **Delete** /admin/keys/{set} | Delete JSON Web Key Set -*JwkApi* | [**GetJsonWebKey**](docs/JwkApi.md#getjsonwebkey) | **Get** /admin/keys/{set}/{kid} | Get JSON Web Key -*JwkApi* | [**GetJsonWebKeySet**](docs/JwkApi.md#getjsonwebkeyset) | **Get** /admin/keys/{set} | Retrieve a JSON Web Key Set -*JwkApi* | [**SetJsonWebKey**](docs/JwkApi.md#setjsonwebkey) | **Put** /admin/keys/{set}/{kid} | Set JSON Web Key -*JwkApi* | [**SetJsonWebKeySet**](docs/JwkApi.md#setjsonwebkeyset) | **Put** /admin/keys/{set} | Update a JSON Web Key Set -*MetadataApi* | [**GetVersion**](docs/MetadataApi.md#getversion) | **Get** /version | Return Running Software Version. -*MetadataApi* | [**IsAlive**](docs/MetadataApi.md#isalive) | **Get** /health/alive | Check HTTP Server Status -*MetadataApi* | [**IsReady**](docs/MetadataApi.md#isready) | **Get** /health/ready | Check HTTP Server and Database Status -*OAuth2Api* | [**AcceptOAuth2ConsentRequest**](docs/OAuth2Api.md#acceptoauth2consentrequest) | **Put** /admin/oauth2/auth/requests/consent/accept | Accept OAuth 2.0 Consent Request -*OAuth2Api* | [**AcceptOAuth2LoginRequest**](docs/OAuth2Api.md#acceptoauth2loginrequest) | **Put** /admin/oauth2/auth/requests/login/accept | Accept OAuth 2.0 Login Request -*OAuth2Api* | [**AcceptOAuth2LogoutRequest**](docs/OAuth2Api.md#acceptoauth2logoutrequest) | **Put** /admin/oauth2/auth/requests/logout/accept | Accept OAuth 2.0 Session Logout Request -*OAuth2Api* | [**CreateOAuth2Client**](docs/OAuth2Api.md#createoauth2client) | **Post** /admin/clients | Create OAuth 2.0 Client -*OAuth2Api* | [**DeleteOAuth2Client**](docs/OAuth2Api.md#deleteoauth2client) | **Delete** /admin/clients/{id} | Delete OAuth 2.0 Client -*OAuth2Api* | [**DeleteOAuth2Token**](docs/OAuth2Api.md#deleteoauth2token) | **Delete** /admin/oauth2/tokens | Delete OAuth 2.0 Access Tokens from specific OAuth 2.0 Client -*OAuth2Api* | [**DeleteTrustedOAuth2JwtGrantIssuer**](docs/OAuth2Api.md#deletetrustedoauth2jwtgrantissuer) | **Delete** /admin/trust/grants/jwt-bearer/issuers/{id} | Delete Trusted OAuth2 JWT Bearer Grant Type Issuer -*OAuth2Api* | [**GetOAuth2Client**](docs/OAuth2Api.md#getoauth2client) | **Get** /admin/clients/{id} | Get an OAuth 2.0 Client -*OAuth2Api* | [**GetOAuth2ConsentRequest**](docs/OAuth2Api.md#getoauth2consentrequest) | **Get** /admin/oauth2/auth/requests/consent | Get OAuth 2.0 Consent Request -*OAuth2Api* | [**GetOAuth2LoginRequest**](docs/OAuth2Api.md#getoauth2loginrequest) | **Get** /admin/oauth2/auth/requests/login | Get OAuth 2.0 Login Request -*OAuth2Api* | [**GetOAuth2LogoutRequest**](docs/OAuth2Api.md#getoauth2logoutrequest) | **Get** /admin/oauth2/auth/requests/logout | Get OAuth 2.0 Session Logout Request -*OAuth2Api* | [**GetTrustedOAuth2JwtGrantIssuer**](docs/OAuth2Api.md#gettrustedoauth2jwtgrantissuer) | **Get** /admin/trust/grants/jwt-bearer/issuers/{id} | Get Trusted OAuth2 JWT Bearer 
Grant Type Issuer -*OAuth2Api* | [**IntrospectOAuth2Token**](docs/OAuth2Api.md#introspectoauth2token) | **Post** /admin/oauth2/introspect | Introspect OAuth2 Access and Refresh Tokens -*OAuth2Api* | [**ListOAuth2Clients**](docs/OAuth2Api.md#listoauth2clients) | **Get** /admin/clients | List OAuth 2.0 Clients -*OAuth2Api* | [**ListOAuth2ConsentSessions**](docs/OAuth2Api.md#listoauth2consentsessions) | **Get** /admin/oauth2/auth/sessions/consent | List OAuth 2.0 Consent Sessions of a Subject -*OAuth2Api* | [**ListTrustedOAuth2JwtGrantIssuers**](docs/OAuth2Api.md#listtrustedoauth2jwtgrantissuers) | **Get** /admin/trust/grants/jwt-bearer/issuers | List Trusted OAuth2 JWT Bearer Grant Type Issuers -*OAuth2Api* | [**OAuth2Authorize**](docs/OAuth2Api.md#oauth2authorize) | **Get** /oauth2/auth | OAuth 2.0 Authorize Endpoint -*OAuth2Api* | [**Oauth2TokenExchange**](docs/OAuth2Api.md#oauth2tokenexchange) | **Post** /oauth2/token | The OAuth 2.0 Token Endpoint -*OAuth2Api* | [**PatchOAuth2Client**](docs/OAuth2Api.md#patchoauth2client) | **Patch** /admin/clients/{id} | Patch OAuth 2.0 Client -*OAuth2Api* | [**RejectOAuth2ConsentRequest**](docs/OAuth2Api.md#rejectoauth2consentrequest) | **Put** /admin/oauth2/auth/requests/consent/reject | Reject OAuth 2.0 Consent Request -*OAuth2Api* | [**RejectOAuth2LoginRequest**](docs/OAuth2Api.md#rejectoauth2loginrequest) | **Put** /admin/oauth2/auth/requests/login/reject | Reject OAuth 2.0 Login Request -*OAuth2Api* | [**RejectOAuth2LogoutRequest**](docs/OAuth2Api.md#rejectoauth2logoutrequest) | **Put** /admin/oauth2/auth/requests/logout/reject | Reject OAuth 2.0 Session Logout Request -*OAuth2Api* | [**RevokeOAuth2ConsentSessions**](docs/OAuth2Api.md#revokeoauth2consentsessions) | **Delete** /admin/oauth2/auth/sessions/consent | Revoke OAuth 2.0 Consent Sessions of a Subject -*OAuth2Api* | [**RevokeOAuth2LoginSessions**](docs/OAuth2Api.md#revokeoauth2loginsessions) | **Delete** /admin/oauth2/auth/sessions/login | Revokes All OAuth 2.0 Login Sessions of a Subject -*OAuth2Api* | [**RevokeOAuth2Token**](docs/OAuth2Api.md#revokeoauth2token) | **Post** /oauth2/revoke | Revoke OAuth 2.0 Access or Refresh Token -*OAuth2Api* | [**SetOAuth2Client**](docs/OAuth2Api.md#setoauth2client) | **Put** /admin/clients/{id} | Set OAuth 2.0 Client -*OAuth2Api* | [**SetOAuth2ClientLifespans**](docs/OAuth2Api.md#setoauth2clientlifespans) | **Put** /admin/clients/{id}/lifespans | Set OAuth2 Client Token Lifespans -*OAuth2Api* | [**TrustOAuth2JwtGrantIssuer**](docs/OAuth2Api.md#trustoauth2jwtgrantissuer) | **Post** /admin/trust/grants/jwt-bearer/issuers | Trust OAuth2 JWT Bearer Grant Type Issuer -*OidcApi* | [**CreateOidcDynamicClient**](docs/OidcApi.md#createoidcdynamicclient) | **Post** /oauth2/register | Register OAuth2 Client using OpenID Dynamic Client Registration -*OidcApi* | [**DeleteOidcDynamicClient**](docs/OidcApi.md#deleteoidcdynamicclient) | **Delete** /oauth2/register/{id} | Delete OAuth 2.0 Client using the OpenID Dynamic Client Registration Management Protocol -*OidcApi* | [**DiscoverOidcConfiguration**](docs/OidcApi.md#discoveroidcconfiguration) | **Get** /.well-known/openid-configuration | OpenID Connect Discovery -*OidcApi* | [**GetOidcDynamicClient**](docs/OidcApi.md#getoidcdynamicclient) | **Get** /oauth2/register/{id} | Get OAuth2 Client using OpenID Dynamic Client Registration -*OidcApi* | [**GetOidcUserInfo**](docs/OidcApi.md#getoidcuserinfo) | **Get** /userinfo | OpenID Connect Userinfo -*OidcApi* | [**RevokeOidcSession**](docs/OidcApi.md#revokeoidcsession) | 
**Get** /oauth2/sessions/logout | OpenID Connect Front- and Back-channel Enabled Logout -*OidcApi* | [**SetOidcDynamicClient**](docs/OidcApi.md#setoidcdynamicclient) | **Put** /oauth2/register/{id} | Set OAuth2 Client using OpenID Dynamic Client Registration -*WellknownApi* | [**DiscoverJsonWebKeys**](docs/WellknownApi.md#discoverjsonwebkeys) | **Get** /.well-known/jwks.json | Discover Well-Known JSON Web Keys +*JwkAPI* | [**CreateJsonWebKeySet**](docs/JwkAPI.md#createjsonwebkeyset) | **Post** /admin/keys/{set} | Create JSON Web Key +*JwkAPI* | [**DeleteJsonWebKey**](docs/JwkAPI.md#deletejsonwebkey) | **Delete** /admin/keys/{set}/{kid} | Delete JSON Web Key +*JwkAPI* | [**DeleteJsonWebKeySet**](docs/JwkAPI.md#deletejsonwebkeyset) | **Delete** /admin/keys/{set} | Delete JSON Web Key Set +*JwkAPI* | [**GetJsonWebKey**](docs/JwkAPI.md#getjsonwebkey) | **Get** /admin/keys/{set}/{kid} | Get JSON Web Key +*JwkAPI* | [**GetJsonWebKeySet**](docs/JwkAPI.md#getjsonwebkeyset) | **Get** /admin/keys/{set} | Retrieve a JSON Web Key Set +*JwkAPI* | [**SetJsonWebKey**](docs/JwkAPI.md#setjsonwebkey) | **Put** /admin/keys/{set}/{kid} | Set JSON Web Key +*JwkAPI* | [**SetJsonWebKeySet**](docs/JwkAPI.md#setjsonwebkeyset) | **Put** /admin/keys/{set} | Update a JSON Web Key Set +*MetadataAPI* | [**GetVersion**](docs/MetadataAPI.md#getversion) | **Get** /version | Return Running Software Version. +*MetadataAPI* | [**IsAlive**](docs/MetadataAPI.md#isalive) | **Get** /health/alive | Check HTTP Server Status +*MetadataAPI* | [**IsReady**](docs/MetadataAPI.md#isready) | **Get** /health/ready | Check HTTP Server and Database Status +*OAuth2API* | [**AcceptOAuth2ConsentRequest**](docs/OAuth2API.md#acceptoauth2consentrequest) | **Put** /admin/oauth2/auth/requests/consent/accept | Accept OAuth 2.0 Consent Request +*OAuth2API* | [**AcceptOAuth2LoginRequest**](docs/OAuth2API.md#acceptoauth2loginrequest) | **Put** /admin/oauth2/auth/requests/login/accept | Accept OAuth 2.0 Login Request +*OAuth2API* | [**AcceptOAuth2LogoutRequest**](docs/OAuth2API.md#acceptoauth2logoutrequest) | **Put** /admin/oauth2/auth/requests/logout/accept | Accept OAuth 2.0 Session Logout Request +*OAuth2API* | [**AcceptUserCodeRequest**](docs/OAuth2API.md#acceptusercoderequest) | **Put** /admin/oauth2/auth/requests/device/accept | Accepts a device grant user_code request +*OAuth2API* | [**CreateOAuth2Client**](docs/OAuth2API.md#createoauth2client) | **Post** /admin/clients | Create OAuth 2.0 Client +*OAuth2API* | [**DeleteOAuth2Client**](docs/OAuth2API.md#deleteoauth2client) | **Delete** /admin/clients/{id} | Delete OAuth 2.0 Client +*OAuth2API* | [**DeleteOAuth2Token**](docs/OAuth2API.md#deleteoauth2token) | **Delete** /admin/oauth2/tokens | Delete OAuth 2.0 Access Tokens from specific OAuth 2.0 Client +*OAuth2API* | [**DeleteTrustedOAuth2JwtGrantIssuer**](docs/OAuth2API.md#deletetrustedoauth2jwtgrantissuer) | **Delete** /admin/trust/grants/jwt-bearer/issuers/{id} | Delete Trusted OAuth2 JWT Bearer Grant Type Issuer +*OAuth2API* | [**GetOAuth2Client**](docs/OAuth2API.md#getoauth2client) | **Get** /admin/clients/{id} | Get an OAuth 2.0 Client +*OAuth2API* | [**GetOAuth2ConsentRequest**](docs/OAuth2API.md#getoauth2consentrequest) | **Get** /admin/oauth2/auth/requests/consent | Get OAuth 2.0 Consent Request +*OAuth2API* | [**GetOAuth2LoginRequest**](docs/OAuth2API.md#getoauth2loginrequest) | **Get** /admin/oauth2/auth/requests/login | Get OAuth 2.0 Login Request +*OAuth2API* | [**GetOAuth2LogoutRequest**](docs/OAuth2API.md#getoauth2logoutrequest) | 
**Get** /admin/oauth2/auth/requests/logout | Get OAuth 2.0 Session Logout Request +*OAuth2API* | [**GetTrustedOAuth2JwtGrantIssuer**](docs/OAuth2API.md#gettrustedoauth2jwtgrantissuer) | **Get** /admin/trust/grants/jwt-bearer/issuers/{id} | Get Trusted OAuth2 JWT Bearer Grant Type Issuer +*OAuth2API* | [**IntrospectOAuth2Token**](docs/OAuth2API.md#introspectoauth2token) | **Post** /admin/oauth2/introspect | Introspect OAuth2 Access and Refresh Tokens +*OAuth2API* | [**ListOAuth2Clients**](docs/OAuth2API.md#listoauth2clients) | **Get** /admin/clients | List OAuth 2.0 Clients +*OAuth2API* | [**ListOAuth2ConsentSessions**](docs/OAuth2API.md#listoauth2consentsessions) | **Get** /admin/oauth2/auth/sessions/consent | List OAuth 2.0 Consent Sessions of a Subject +*OAuth2API* | [**ListTrustedOAuth2JwtGrantIssuers**](docs/OAuth2API.md#listtrustedoauth2jwtgrantissuers) | **Get** /admin/trust/grants/jwt-bearer/issuers | List Trusted OAuth2 JWT Bearer Grant Type Issuers +*OAuth2API* | [**OAuth2Authorize**](docs/OAuth2API.md#oauth2authorize) | **Get** /oauth2/auth | OAuth 2.0 Authorize Endpoint +*OAuth2API* | [**OAuth2DeviceFlow**](docs/OAuth2API.md#oauth2deviceflow) | **Post** /oauth2/device/auth | The OAuth 2.0 Device Authorize Endpoint +*OAuth2API* | [**Oauth2TokenExchange**](docs/OAuth2API.md#oauth2tokenexchange) | **Post** /oauth2/token | The OAuth 2.0 Token Endpoint +*OAuth2API* | [**PatchOAuth2Client**](docs/OAuth2API.md#patchoauth2client) | **Patch** /admin/clients/{id} | Patch OAuth 2.0 Client +*OAuth2API* | [**PerformOAuth2DeviceVerificationFlow**](docs/OAuth2API.md#performoauth2deviceverificationflow) | **Get** /oauth2/device/verify | OAuth 2.0 Device Verification Endpoint +*OAuth2API* | [**RejectOAuth2ConsentRequest**](docs/OAuth2API.md#rejectoauth2consentrequest) | **Put** /admin/oauth2/auth/requests/consent/reject | Reject OAuth 2.0 Consent Request +*OAuth2API* | [**RejectOAuth2LoginRequest**](docs/OAuth2API.md#rejectoauth2loginrequest) | **Put** /admin/oauth2/auth/requests/login/reject | Reject OAuth 2.0 Login Request +*OAuth2API* | [**RejectOAuth2LogoutRequest**](docs/OAuth2API.md#rejectoauth2logoutrequest) | **Put** /admin/oauth2/auth/requests/logout/reject | Reject OAuth 2.0 Session Logout Request +*OAuth2API* | [**RevokeOAuth2ConsentSessions**](docs/OAuth2API.md#revokeoauth2consentsessions) | **Delete** /admin/oauth2/auth/sessions/consent | Revoke OAuth 2.0 Consent Sessions of a Subject +*OAuth2API* | [**RevokeOAuth2LoginSessions**](docs/OAuth2API.md#revokeoauth2loginsessions) | **Delete** /admin/oauth2/auth/sessions/login | Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID +*OAuth2API* | [**RevokeOAuth2Token**](docs/OAuth2API.md#revokeoauth2token) | **Post** /oauth2/revoke | Revoke OAuth 2.0 Access or Refresh Token +*OAuth2API* | [**SetOAuth2Client**](docs/OAuth2API.md#setoauth2client) | **Put** /admin/clients/{id} | Set OAuth 2.0 Client +*OAuth2API* | [**SetOAuth2ClientLifespans**](docs/OAuth2API.md#setoauth2clientlifespans) | **Put** /admin/clients/{id}/lifespans | Set OAuth2 Client Token Lifespans +*OAuth2API* | [**TrustOAuth2JwtGrantIssuer**](docs/OAuth2API.md#trustoauth2jwtgrantissuer) | **Post** /admin/trust/grants/jwt-bearer/issuers | Trust OAuth2 JWT Bearer Grant Type Issuer +*OidcAPI* | [**CreateOidcDynamicClient**](docs/OidcAPI.md#createoidcdynamicclient) | **Post** /oauth2/register | Register OAuth2 Client using OpenID Dynamic Client Registration +*OidcAPI* | [**CreateVerifiableCredential**](docs/OidcAPI.md#createverifiablecredential) | **Post** 
/credentials | Issues a Verifiable Credential +*OidcAPI* | [**DeleteOidcDynamicClient**](docs/OidcAPI.md#deleteoidcdynamicclient) | **Delete** /oauth2/register/{id} | Delete OAuth 2.0 Client using the OpenID Dynamic Client Registration Management Protocol +*OidcAPI* | [**DiscoverOidcConfiguration**](docs/OidcAPI.md#discoveroidcconfiguration) | **Get** /.well-known/openid-configuration | OpenID Connect Discovery +*OidcAPI* | [**GetOidcDynamicClient**](docs/OidcAPI.md#getoidcdynamicclient) | **Get** /oauth2/register/{id} | Get OAuth2 Client using OpenID Dynamic Client Registration +*OidcAPI* | [**GetOidcUserInfo**](docs/OidcAPI.md#getoidcuserinfo) | **Get** /userinfo | OpenID Connect Userinfo +*OidcAPI* | [**RevokeOidcSession**](docs/OidcAPI.md#revokeoidcsession) | **Get** /oauth2/sessions/logout | OpenID Connect Front- and Back-channel Enabled Logout +*OidcAPI* | [**SetOidcDynamicClient**](docs/OidcAPI.md#setoidcdynamicclient) | **Put** /oauth2/register/{id} | Set OAuth2 Client using OpenID Dynamic Client Registration +*WellknownAPI* | [**DiscoverJsonWebKeys**](docs/WellknownAPI.md#discoverjsonwebkeys) | **Get** /.well-known/jwks.json | Discover Well-Known JSON Web Keys ## Documentation For Models + - [AcceptDeviceUserCodeRequest](docs/AcceptDeviceUserCodeRequest.md) - [AcceptOAuth2ConsentRequest](docs/AcceptOAuth2ConsentRequest.md) - [AcceptOAuth2ConsentRequestSession](docs/AcceptOAuth2ConsentRequestSession.md) - [AcceptOAuth2LoginRequest](docs/AcceptOAuth2LoginRequest.md) - [CreateJsonWebKeySet](docs/CreateJsonWebKeySet.md) + - [CreateVerifiableCredentialRequestBody](docs/CreateVerifiableCredentialRequestBody.md) + - [CredentialSupportedDraft00](docs/CredentialSupportedDraft00.md) + - [DeviceAuthorization](docs/DeviceAuthorization.md) + - [DeviceUserAuthRequest](docs/DeviceUserAuthRequest.md) - [ErrorOAuth2](docs/ErrorOAuth2.md) - [GenericError](docs/GenericError.md) - [GetVersion200Response](docs/GetVersion200Response.md) @@ -144,20 +154,20 @@ Class | Method | HTTP request | Description - [JsonPatch](docs/JsonPatch.md) - [JsonWebKey](docs/JsonWebKey.md) - [JsonWebKeySet](docs/JsonWebKeySet.md) + - [KeysetPaginationRequestParameters](docs/KeysetPaginationRequestParameters.md) + - [KeysetPaginationResponseHeaders](docs/KeysetPaginationResponseHeaders.md) - [OAuth2Client](docs/OAuth2Client.md) - [OAuth2ClientTokenLifespans](docs/OAuth2ClientTokenLifespans.md) - [OAuth2ConsentRequest](docs/OAuth2ConsentRequest.md) - [OAuth2ConsentRequestOpenIDConnectContext](docs/OAuth2ConsentRequestOpenIDConnectContext.md) - [OAuth2ConsentSession](docs/OAuth2ConsentSession.md) - - [OAuth2ConsentSessionExpiresAt](docs/OAuth2ConsentSessionExpiresAt.md) - [OAuth2LoginRequest](docs/OAuth2LoginRequest.md) - [OAuth2LogoutRequest](docs/OAuth2LogoutRequest.md) - [OAuth2RedirectTo](docs/OAuth2RedirectTo.md) - [OAuth2TokenExchange](docs/OAuth2TokenExchange.md) - [OidcConfiguration](docs/OidcConfiguration.md) - [OidcUserInfo](docs/OidcUserInfo.md) - - [Pagination](docs/Pagination.md) - - [PaginationHeaders](docs/PaginationHeaders.md) + - [RFC6749ErrorJson](docs/RFC6749ErrorJson.md) - [RejectOAuth2Request](docs/RejectOAuth2Request.md) - [TokenPagination](docs/TokenPagination.md) - [TokenPaginationHeaders](docs/TokenPaginationHeaders.md) @@ -166,40 +176,42 @@ Class | Method | HTTP request | Description - [TrustOAuth2JwtGrantIssuer](docs/TrustOAuth2JwtGrantIssuer.md) - [TrustedOAuth2JwtGrantIssuer](docs/TrustedOAuth2JwtGrantIssuer.md) - [TrustedOAuth2JwtGrantJsonWebKey](docs/TrustedOAuth2JwtGrantJsonWebKey.md) + - 
[VerifiableCredentialPrimingResponse](docs/VerifiableCredentialPrimingResponse.md) + - [VerifiableCredentialProof](docs/VerifiableCredentialProof.md) + - [VerifiableCredentialResponse](docs/VerifiableCredentialResponse.md) + - [VerifyUserCodeRequest](docs/VerifyUserCodeRequest.md) - [Version](docs/Version.md) ## Documentation For Authorization - +Authentication schemes defined for the API: ### basic - **Type**: HTTP basic authentication Example -```golang -auth := context.WithValue(context.Background(), sw.ContextBasicAuth, sw.BasicAuth{ - UserName: "username", - Password: "password", +```go +auth := context.WithValue(context.Background(), openapi.ContextBasicAuth, openapi.BasicAuth{ + UserName: "username", + Password: "password", }) r, err := client.Service.Operation(auth, args) ``` - ### bearer - **Type**: HTTP Bearer token authentication Example -```golang -auth := context.WithValue(context.Background(), sw.ContextAccessToken, "BEARER_TOKEN_STRING") +```go +auth := context.WithValue(context.Background(), openapi.ContextAccessToken, "BEARER_TOKEN_STRING") r, err := client.Service.Operation(auth, args) ``` - ### oauth2 @@ -213,20 +225,20 @@ r, err := client.Service.Operation(auth, args) Example -```golang -auth := context.WithValue(context.Background(), sw.ContextAccessToken, "ACCESSTOKENSTRING") +```go +auth := context.WithValue(context.Background(), openapi.ContextAccessToken, "ACCESSTOKENSTRING") r, err := client.Service.Operation(auth, args) ``` Or via OAuth2 module to automatically refresh tokens and perform user authentication. -```golang +```go import "golang.org/x/oauth2" /* Perform OAuth2 round trip request and obtain a token */ tokenSource := oauth2cfg.TokenSource(createContext(httpClient), &token) -auth := context.WithValue(oauth2.NoContext, sw.ContextOAuth2, tokenSource) +auth := context.WithValue(oauth2.NoContext, openapi.ContextOAuth2, tokenSource) r, err := client.Service.Operation(auth, args) ``` diff --git a/internal/httpclient/api/openapi.yaml b/internal/httpclient/api/openapi.yaml index b75395441f0..57353f6e31c 100644 --- a/internal/httpclient/api/openapi.yaml +++ b/internal/httpclient/api/openapi.yaml @@ -24,59 +24,66 @@ tags: paths: /.well-known/jwks.json: get: - description: "This endpoint returns JSON Web Keys required to verifying OpenID\ - \ Connect ID Tokens and,\nif enabled, OAuth 2.0 JWT Access Tokens. This endpoint\ - \ can be used with client libraries like\n[node-jwks-rsa](https://github.com/auth0/node-jwks-rsa)\ - \ among others." + description: |- + This endpoint returns JSON Web Keys required to verifying OpenID Connect ID Tokens and, + if enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like + [node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others. + + Adding custom keys requires first creating a keyset via the createJsonWebKeySet operation, + and then configuring the webfinger.jwks.broadcast_keys configuration value to include the keyset name. 
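A rough illustration of consuming the discovery endpoint described above: the sketch below fetches `/.well-known/jwks.json` over plain HTTP and prints the key metadata. It assumes Hydra's default public port 4444 (as declared in the Dockerfiles' `EXPOSE` directives) and keeps only a handful of fields from the `jsonWebKeySet` schema.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// jwks mirrors only the fields of the jsonWebKeySet schema needed here.
type jwks struct {
	Keys []struct {
		Kid string `json:"kid"`
		Kty string `json:"kty"`
		Use string `json:"use"`
		Alg string `json:"alg"`
	} `json:"keys"`
}

func main() {
	// Assumed public endpoint; adjust the host/port to your deployment.
	resp, err := http.Get("http://127.0.0.1:4444/.well-known/jwks.json")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var set jwks
	if err := json.NewDecoder(resp.Body).Decode(&set); err != nil {
		panic(err)
	}
	for _, k := range set.Keys {
		fmt.Printf("kid=%s kty=%s use=%s alg=%s\n", k.Kid, k.Kty, k.Use, k.Alg)
	}
}
```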
operationId: discoverJsonWebKeys responses: "200": content: application/json: schema: - $ref: '#/components/schemas/jsonWebKeySet' + $ref: "#/components/schemas/jsonWebKeySet" description: jsonWebKeySet default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Discover Well-Known JSON Web Keys tags: - wellknown /.well-known/openid-configuration: get: - description: "A mechanism for an OpenID Connect Relying Party to discover the\ - \ End-User's OpenID Provider and obtain information needed to interact with\ - \ it, including its OAuth 2.0 endpoint locations.\n\nPopular libraries for\ - \ OpenID Connect clients include oidc-client-js (JavaScript), go-oidc (Golang),\ - \ and others.\nFor a full list of clients go here: https://openid.net/developers/certified/" + description: |- + A mechanism for an OpenID Connect Relying Party to discover the End-User's OpenID Provider and obtain information needed to interact with it, including its OAuth 2.0 endpoint locations. + + Popular libraries for OpenID Connect clients include oidc-client-js (JavaScript), go-oidc (Golang), and others. + For a full list of clients go here: https://openid.net/developers/certified/ operationId: discoverOidcConfiguration responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oidcConfiguration' + $ref: "#/components/schemas/oidcConfiguration" description: oidcConfiguration default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: OpenID Connect Discovery tags: - oidc /admin/clients: get: - description: "This endpoint lists all clients in the database, and never returns\ - \ client secrets.\nAs a default it lists the first 100 clients." + description: |- + This endpoint lists all clients in the database, and never returns client secrets. + As a default it lists the first 100 clients. operationId: listOAuth2Clients parameters: - - description: "Items per Page\n\nThis is the number of items per page to return.\n\ - For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + - description: |- + Items per Page + + This is the number of items per page to return. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). explode: true in: query name: page_size @@ -84,19 +91,20 @@ paths: schema: default: 250 format: int64 - maximum: 500 + maximum: 1000 minimum: 1 type: integer style: form - - description: "Next Page Token\n\nThe next page token.\nFor details on pagination\ - \ please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + - description: |- + Next Page Token + + The next page token. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). explode: true in: query name: page_token required: false schema: - default: "1" - minimum: 1 type: string style: form - description: The name of the clients to filter by. 
@@ -117,32 +125,22 @@ paths: style: form responses: "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/oAuth2Client' - type: array - description: Paginated OAuth2 Client List Response + $ref: "#/components/responses/listOAuth2Clients" default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" summary: List OAuth 2.0 Clients tags: - oAuth2 post: - description: "Create a new OAuth 2.0 client. If you pass `client_secret` the\ - \ secret is used, otherwise a random secret\nis generated. The secret is echoed\ - \ in the response. It is not possible to retrieve it later on." + description: |- + Create a new OAuth 2.0 client. If you pass `client_secret` the secret is used, otherwise a random secret + is generated. The secret is echoed in the response. It is not possible to retrieve it later on. operationId: createOAuth2Client requestBody: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: OAuth 2.0 Client Request Body required: true x-originalParamName: Body @@ -151,30 +149,24 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client "400": - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Bad Request Error Response + $ref: "#/components/responses/errorOAuth2BadRequest" default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" summary: Create OAuth 2.0 Client tags: - oAuth2 /admin/clients/{id}: delete: - description: "Delete an existing OAuth 2.0 Client by its ID.\n\nOAuth 2.0 clients\ - \ are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0\ - \ clients are\ngenerated for applications which want to consume your OAuth\ - \ 2.0 or OpenID Connect capabilities.\n\nMake sure that this endpoint is well\ - \ protected and only callable by first-party components." + description: |- + Delete an existing OAuth 2.0 Client by its ID. + + OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. + + Make sure that this endpoint is well protected and only callable by first-party components. operationId: deleteOAuth2Client parameters: - description: The id of the OAuth 2.0 Client. @@ -187,22 +179,22 @@ paths: style: simple responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: Delete OAuth 2.0 Client tags: - oAuth2 get: - description: "Get an OAuth 2.0 client by its ID. This endpoint never returns\ - \ the client secret.\n\nOAuth 2.0 clients are used to perform OAuth 2.0 and\ - \ OpenID Connect flows. Usually, OAuth 2.0 clients are\ngenerated for applications\ - \ which want to consume your OAuth 2.0 or OpenID Connect capabilities." + description: |- + Get an OAuth 2.0 client by its ID. 
This endpoint never returns the client secret. + + OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. operationId: getOAuth2Client parameters: - description: The id of the OAuth 2.0 Client. @@ -218,25 +210,21 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" summary: Get an OAuth 2.0 Client tags: - oAuth2 patch: - description: "Patch an existing OAuth 2.0 Client using JSON Patch. If you pass\ - \ `client_secret`\nthe secret will be updated and returned via the API. This\ - \ is the\nonly time you will be able to retrieve the client secret, so write\ - \ it down and keep it safe.\n\nOAuth 2.0 clients are used to perform OAuth\ - \ 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are\ngenerated\ - \ for applications which want to consume your OAuth 2.0 or OpenID Connect\ - \ capabilities." + description: |- + Patch an existing OAuth 2.0 Client using JSON Patch. If you pass `client_secret` + the secret will be updated and returned via the API. This is the + only time you will be able to retrieve the client secret, so write it down and keep it safe. + + OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. operationId: patchOAuth2Client parameters: - description: The id of the OAuth 2.0 Client. @@ -251,7 +239,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/jsonPatchDocument' + $ref: "#/components/schemas/jsonPatchDocument" description: OAuth 2.0 Client JSON Patch Body required: true x-originalParamName: Body @@ -260,31 +248,24 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client "404": - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Not Found Error Response + $ref: "#/components/responses/errorOAuth2NotFound" default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" summary: Patch OAuth 2.0 Client tags: - oAuth2 put: - description: "Replaces an existing OAuth 2.0 Client with the payload you send.\ - \ If you pass `client_secret` the secret is used,\notherwise the existing\ - \ secret is used.\n\nIf set, the secret is echoed in the response. It is not\ - \ possible to retrieve it later on.\n\nOAuth 2.0 Clients are used to perform\ - \ OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are\ngenerated\ - \ for applications which want to consume your OAuth 2.0 or OpenID Connect\ - \ capabilities." + description: |- + Replaces an existing OAuth 2.0 Client with the payload you send. If you pass `client_secret` the secret is used, + otherwise the existing secret is used. + + If set, the secret is echoed in the response. It is not possible to retrieve it later on. + + OAuth 2.0 Clients are used to perform OAuth 2.0 and OpenID Connect flows. 
Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. operationId: setOAuth2Client parameters: - description: OAuth 2.0 Client ID @@ -299,7 +280,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: OAuth 2.0 Client Request Body required: true x-originalParamName: Body @@ -308,26 +289,14 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client "400": - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Bad Request Error Response + $ref: "#/components/responses/errorOAuth2BadRequest" "404": - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Not Found Error Response + $ref: "#/components/responses/errorOAuth2NotFound" default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" summary: Set OAuth 2.0 Client tags: - oAuth2 @@ -349,33 +318,30 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2ClientTokenLifespans' + $ref: "#/components/schemas/oAuth2ClientTokenLifespans" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: Set OAuth2 Client Token Lifespans tags: - oAuth2 /admin/keys/{set}: delete: - description: "Use this endpoint to delete a complete JSON Web Key Set and all\ - \ the keys in that set.\n\nA JSON Web Key (JWK) is a JavaScript Object Notation\ - \ (JSON) data structure that represents a cryptographic key. A JWK Set is\ - \ a JSON data structure that represents a set of JWKs. A JSON Web Key is identified\ - \ by its set and key id. ORY Hydra uses this functionality to store cryptographic\ - \ keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens),\ - \ and allows storing user-defined keys as well." + description: |- + Use this endpoint to delete a complete JSON Web Key Set and all the keys in that set. + + A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. operationId: deleteJsonWebKeySet parameters: - description: The JSON Web Key Set @@ -388,25 +354,21 @@ paths: style: simple responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." 
+ $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Delete JSON Web Key Set tags: - jwk get: - description: "This endpoint can be used to retrieve JWK Sets stored in ORY Hydra.\n\ - \nA JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure\ - \ that represents a cryptographic key. A JWK Set is a JSON data structure\ - \ that represents a set of JWKs. A JSON Web Key is identified by its set and\ - \ key id. ORY Hydra uses this functionality to store cryptographic keys used\ - \ for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows\ - \ storing user-defined keys as well." + description: |- + This endpoint can be used to retrieve JWK Sets stored in ORY Hydra. + + A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. operationId: getJsonWebKeySet parameters: - description: JSON Web Key Set ID @@ -422,28 +384,22 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/jsonWebKeySet' + $ref: "#/components/schemas/jsonWebKeySet" description: jsonWebKeySet default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Retrieve a JSON Web Key Set tags: - jwk post: - description: "This endpoint is capable of generating JSON Web Key Sets for you.\ - \ There a different strategies available, such as symmetric cryptographic\ - \ keys (HS256, HS512) and asymetric cryptographic keys (RS256, ECDSA). If\ - \ the specified JSON Web Key Set does not exist, it will be created.\n\nA\ - \ JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure\ - \ that represents a cryptographic key. A JWK Set is a JSON data structure\ - \ that represents a set of JWKs. A JSON Web Key is identified by its set and\ - \ key id. ORY Hydra uses this functionality to store cryptographic keys used\ - \ for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows\ - \ storing user-defined keys as well." + description: |- + This endpoint is capable of generating JSON Web Key Sets for you. There are different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymmetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created. + + A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. 
operationId: createJsonWebKeySet parameters: - description: The JSON Web Key Set ID @@ -458,7 +414,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/createJsonWebKeySet' + $ref: "#/components/schemas/createJsonWebKeySet" required: true x-originalParamName: Body responses: @@ -466,25 +422,22 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/jsonWebKeySet' + $ref: "#/components/schemas/jsonWebKeySet" description: jsonWebKeySet default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Create JSON Web Key tags: - jwk put: - description: "Use this method if you do not want to let Hydra generate the JWKs\ - \ for you, but instead save your own.\n\nA JSON Web Key (JWK) is a JavaScript\ - \ Object Notation (JSON) data structure that represents a cryptographic key.\ - \ A JWK Set is a JSON data structure that represents a set of JWKs. A JSON\ - \ Web Key is identified by its set and key id. ORY Hydra uses this functionality\ - \ to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID\ - \ Connect ID tokens), and allows storing user-defined keys as well." + description: |- + Use this method if you do not want to let Hydra generate the JWKs for you, but instead save your own. + + A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. operationId: setJsonWebKeySet parameters: - description: The JSON Web Key Set ID @@ -499,33 +452,33 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/jsonWebKeySet' + $ref: "#/components/schemas/jsonWebKeySet" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/jsonWebKeySet' + $ref: "#/components/schemas/jsonWebKeySet" description: jsonWebKeySet default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Update a JSON Web Key Set tags: - jwk /admin/keys/{set}/{kid}: delete: - description: "Use this endpoint to delete a single JSON Web Key.\n\nA JSON Web\ - \ Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents\ - \ a cryptographic key. A\nJWK Set is a JSON data structure that represents\ - \ a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra\ - \ uses\nthis functionality to store cryptographic keys used for TLS and JSON\ - \ Web Tokens (such as OpenID Connect ID tokens),\nand allows storing user-defined\ - \ keys as well." + description: |- + Use this endpoint to delete a single JSON Web Key. + + A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A + JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses + this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), + and allows storing user-defined keys as well. 
operationId: deleteJsonWebKey parameters: - description: The JSON Web Key Set @@ -546,13 +499,12 @@ paths: style: simple responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Delete JSON Web Key tags: @@ -583,25 +535,22 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/jsonWebKeySet' + $ref: "#/components/schemas/jsonWebKeySet" description: jsonWebKeySet default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Get JSON Web Key tags: - jwk put: - description: "Use this method if you do not want to let Hydra generate the JWKs\ - \ for you, but instead save your own.\n\nA JSON Web Key (JWK) is a JavaScript\ - \ Object Notation (JSON) data structure that represents a cryptographic key.\ - \ A JWK Set is a JSON data structure that represents a set of JWKs. A JSON\ - \ Web Key is identified by its set and key id. ORY Hydra uses this functionality\ - \ to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID\ - \ Connect ID tokens), and allows storing user-defined keys as well." + description: |- + Use this method if you do not want to let Hydra generate the JWKs for you, but instead save your own. + + A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. operationId: setJsonWebKey parameters: - description: The JSON Web Key Set ID @@ -624,37 +573,37 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/jsonWebKey' + $ref: "#/components/schemas/jsonWebKey" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/jsonWebKey' + $ref: "#/components/schemas/jsonWebKey" description: jsonWebKey default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Set JSON Web Key tags: - jwk /admin/oauth2/auth/requests/consent: get: - description: "When an authorization code, hybrid, or implicit OAuth 2.0 Flow\ - \ is initiated, Ory asks the login provider\nto authenticate the subject and\ - \ then tell Ory now about it. If the subject authenticated, he/she must now\ - \ be asked if\nthe OAuth 2.0 Client which initiated the flow should be allowed\ - \ to access the resources on the subject's behalf.\n\nThe consent challenge\ - \ is appended to the consent provider's URL to which the subject's user-agent\ - \ (browser) is redirected to. The consent\nprovider uses that challenge to\ - \ fetch information on the OAuth2 request and then tells Ory if the subject\ - \ accepted\nor rejected the request.\n\nThe default consent provider is available\ - \ via the Ory Managed Account Experience. To customize the consent provider,\ - \ please\nhead over to the OAuth 2.0 documentation." 
+ description: |- + When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider + to authenticate the subject and then tell Ory now about it. If the subject authenticated, he/she must now be asked if + the OAuth 2.0 Client which initiated the flow should be allowed to access the resources on the subject's behalf. + + The consent challenge is appended to the consent provider's URL to which the subject's user-agent (browser) is redirected to. The consent + provider uses that challenge to fetch information on the OAuth2 request and then tells Ory if the subject accepted + or rejected the request. + + The default consent provider is available via the Ory Managed Account Experience. To customize the consent provider, please + head over to the OAuth 2.0 documentation. operationId: getOAuth2ConsentRequest parameters: - description: OAuth 2.0 Consent Request Challenge @@ -670,41 +619,42 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2ConsentRequest' + $ref: "#/components/schemas/oAuth2ConsentRequest" description: oAuth2ConsentRequest "410": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Get OAuth 2.0 Consent Request tags: - oAuth2 /admin/oauth2/auth/requests/consent/accept: put: - description: "When an authorization code, hybrid, or implicit OAuth 2.0 Flow\ - \ is initiated, Ory asks the login provider\nto authenticate the subject and\ - \ then tell Ory now about it. If the subject authenticated, he/she must now\ - \ be asked if\nthe OAuth 2.0 Client which initiated the flow should be allowed\ - \ to access the resources on the subject's behalf.\n\nThe consent challenge\ - \ is appended to the consent provider's URL to which the subject's user-agent\ - \ (browser) is redirected to. The consent\nprovider uses that challenge to\ - \ fetch information on the OAuth2 request and then tells Ory if the subject\ - \ accepted\nor rejected the request.\n\nThis endpoint tells Ory that the subject\ - \ has authorized the OAuth 2.0 client to access resources on his/her behalf.\n\ - The consent provider includes additional information, such as session data\ - \ for access and ID tokens, and if the\nconsent request should be used as\ - \ basis for future requests.\n\nThe response contains a redirect URL which\ - \ the consent provider should redirect the user-agent to.\n\nThe default consent\ - \ provider is available via the Ory Managed Account Experience. To customize\ - \ the consent provider, please\nhead over to the OAuth 2.0 documentation." + description: |- + When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider + to authenticate the subject and then tell Ory now about it. If the subject authenticated, he/she must now be asked if + the OAuth 2.0 Client which initiated the flow should be allowed to access the resources on the subject's behalf. + + The consent challenge is appended to the consent provider's URL to which the subject's user-agent (browser) is redirected to. The consent + provider uses that challenge to fetch information on the OAuth2 request and then tells Ory if the subject accepted + or rejected the request. 
+ + This endpoint tells Ory that the subject has authorized the OAuth 2.0 client to access resources on his/her behalf. + The consent provider includes additional information, such as session data for access and ID tokens, and if the + consent request should be used as basis for future requests. + + The response contains a redirect URL which the consent provider should redirect the user-agent to. + + The default consent provider is available via the Ory Managed Account Experience. To customize the consent provider, please + head over to the OAuth 2.0 documentation. operationId: acceptOAuth2ConsentRequest parameters: - description: OAuth 2.0 Consent Request Challenge @@ -719,41 +669,42 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/acceptOAuth2ConsentRequest' + $ref: "#/components/schemas/acceptOAuth2ConsentRequest" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Accept OAuth 2.0 Consent Request tags: - oAuth2 /admin/oauth2/auth/requests/consent/reject: put: - description: "When an authorization code, hybrid, or implicit OAuth 2.0 Flow\ - \ is initiated, Ory asks the login provider\nto authenticate the subject and\ - \ then tell Ory now about it. If the subject authenticated, he/she must now\ - \ be asked if\nthe OAuth 2.0 Client which initiated the flow should be allowed\ - \ to access the resources on the subject's behalf.\n\nThe consent challenge\ - \ is appended to the consent provider's URL to which the subject's user-agent\ - \ (browser) is redirected to. The consent\nprovider uses that challenge to\ - \ fetch information on the OAuth2 request and then tells Ory if the subject\ - \ accepted\nor rejected the request.\n\nThis endpoint tells Ory that the subject\ - \ has not authorized the OAuth 2.0 client to access resources on his/her behalf.\n\ - The consent provider must include a reason why the consent was not granted.\n\ - \nThe response contains a redirect URL which the consent provider should redirect\ - \ the user-agent to.\n\nThe default consent provider is available via the\ - \ Ory Managed Account Experience. To customize the consent provider, please\n\ - head over to the OAuth 2.0 documentation." + description: |- + When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider + to authenticate the subject and then tell Ory now about it. If the subject authenticated, he/she must now be asked if + the OAuth 2.0 Client which initiated the flow should be allowed to access the resources on the subject's behalf. + + The consent challenge is appended to the consent provider's URL to which the subject's user-agent (browser) is redirected to. The consent + provider uses that challenge to fetch information on the OAuth2 request and then tells Ory if the subject accepted + or rejected the request. + + This endpoint tells Ory that the subject has not authorized the OAuth 2.0 client to access resources on his/her behalf. + The consent provider must include a reason why the consent was not granted. + + The response contains a redirect URL which the consent provider should redirect the user-agent to. + + The default consent provider is available via the Ory Managed Account Experience. 
To customize the consent provider, please + head over to the OAuth 2.0 documentation. operationId: rejectOAuth2ConsentRequest parameters: - description: OAuth 2.0 Consent Request Challenge @@ -768,37 +719,70 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/rejectOAuth2Request' + $ref: "#/components/schemas/rejectOAuth2Request" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Reject OAuth 2.0 Consent Request tags: - oAuth2 + /admin/oauth2/auth/requests/device/accept: + put: + description: Accepts a device grant user_code request + operationId: acceptUserCodeRequest + parameters: + - explode: true + in: query + name: device_challenge + required: true + schema: + type: string + style: form + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/acceptDeviceUserCodeRequest" + x-originalParamName: Body + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/oAuth2RedirectTo" + description: oAuth2RedirectTo + default: + content: + application/json: + schema: + $ref: "#/components/schemas/errorOAuth2" + description: errorOAuth2 + summary: Accepts a device grant user_code request + tags: + - oAuth2 /admin/oauth2/auth/requests/login: get: - description: "When an authorization code, hybrid, or implicit OAuth 2.0 Flow\ - \ is initiated, Ory asks the login provider\nto authenticate the subject and\ - \ then tell the Ory OAuth2 Service about it.\n\nPer default, the login provider\ - \ is Ory itself. You may use a different login provider which needs to be\ - \ a web-app\nyou write and host, and it must be able to authenticate (\"show\ - \ the subject a login screen\")\na subject (in OAuth2 the proper name for\ - \ subject is \"resource owner\").\n\nThe authentication challenge is appended\ - \ to the login provider URL to which the subject's user-agent (browser) is\ - \ redirected to. The login\nprovider uses that challenge to fetch information\ - \ on the OAuth2 request and then accept or reject the requested authentication\ - \ process." + description: |- + When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider + to authenticate the subject and then tell the Ory OAuth2 Service about it. + + Per default, the login provider is Ory itself. You may use a different login provider which needs to be a web-app + you write and host, and it must be able to authenticate ("show the subject a login screen") + a subject (in OAuth2 the proper name for subject is "resource owner"). + + The authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login + provider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process. 
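A sketch of how a custom login provider might fetch the login request described above, assuming the challenge query parameter is named `login_challenge` (the value arrives as a query parameter when Ory redirects the browser to the login UI) and the default admin port 4445.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// The challenge is taken from the redirect to the login UI; placeholder value here.
	challenge := "CHALLENGE_FROM_REDIRECT"
	endpoint := "http://127.0.0.1:4445/admin/oauth2/auth/requests/login?login_challenge=" +
		url.QueryEscape(challenge)

	resp, err := http.Get(endpoint)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Decode only a few fields of the oAuth2LoginRequest response.
	var loginReq struct {
		Challenge string `json:"challenge"`
		Skip      bool   `json:"skip"`
		Subject   string `json:"subject"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&loginReq); err != nil {
		panic(err)
	}
	fmt.Printf("skip=%v subject=%q\n", loginReq.Skip, loginReq.Subject)
}
```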
operationId: getOAuth2LoginRequest parameters: - description: OAuth 2.0 Login Request Challenge @@ -814,36 +798,37 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2LoginRequest' + $ref: "#/components/schemas/oAuth2LoginRequest" description: oAuth2LoginRequest "410": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Get OAuth 2.0 Login Request tags: - oAuth2 /admin/oauth2/auth/requests/login/accept: put: - description: "When an authorization code, hybrid, or implicit OAuth 2.0 Flow\ - \ is initiated, Ory asks the login provider\nto authenticate the subject and\ - \ then tell the Ory OAuth2 Service about it.\n\nThe authentication challenge\ - \ is appended to the login provider URL to which the subject's user-agent\ - \ (browser) is redirected to. The login\nprovider uses that challenge to fetch\ - \ information on the OAuth2 request and then accept or reject the requested\ - \ authentication process.\n\nThis endpoint tells Ory that the subject has\ - \ successfully authenticated and includes additional information such as\n\ - the subject's ID and if Ory should remember the subject's subject agent for\ - \ future authentication attempts by setting\na cookie.\n\nThe response contains\ - \ a redirect URL which the login provider should redirect the user-agent to." + description: |- + When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider + to authenticate the subject and then tell the Ory OAuth2 Service about it. + + The authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login + provider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process. + + This endpoint tells Ory that the subject has successfully authenticated and includes additional information such as + the subject's ID and if Ory should remember the subject's subject agent for future authentication attempts by setting + a cookie. + + The response contains a redirect URL which the login provider should redirect the user-agent to. operationId: acceptOAuth2LoginRequest parameters: - description: OAuth 2.0 Login Request Challenge @@ -858,36 +843,37 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/acceptOAuth2LoginRequest' + $ref: "#/components/schemas/acceptOAuth2LoginRequest" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Accept OAuth 2.0 Login Request tags: - oAuth2 /admin/oauth2/auth/requests/login/reject: put: - description: "When an authorization code, hybrid, or implicit OAuth 2.0 Flow\ - \ is initiated, Ory asks the login provider\nto authenticate the subject and\ - \ then tell the Ory OAuth2 Service about it.\n\nThe authentication challenge\ - \ is appended to the login provider URL to which the subject's user-agent\ - \ (browser) is redirected to. 
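Continuing the login-provider flow, the sketch below accepts the login request and follows the returned redirect URL. The body fields (`subject`, `remember`, `remember_for`) mirror the `acceptOAuth2LoginRequest` model; the subject value and challenge are placeholders, and the `login_challenge` parameter name and admin port are assumptions as before.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	challenge := "CHALLENGE_FROM_REDIRECT" // placeholder value

	// acceptOAuth2LoginRequest body: the authenticated subject plus optional remember settings.
	body, err := json.Marshal(map[string]any{
		"subject":      "user:12345", // hypothetical subject identifier
		"remember":     true,
		"remember_for": 3600,
	})
	if err != nil {
		panic(err)
	}

	req, err := http.NewRequest(http.MethodPut,
		"http://127.0.0.1:4445/admin/oauth2/auth/requests/login/accept?login_challenge="+
			url.QueryEscape(challenge),
		bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// oAuth2RedirectTo carries the URL the login provider should send the browser to.
	var redirect struct {
		RedirectTo string `json:"redirect_to"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&redirect); err != nil {
		panic(err)
	}
	fmt.Println("redirect the browser to:", redirect.RedirectTo)
}
```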
The login\nprovider uses that challenge to fetch\ - \ information on the OAuth2 request and then accept or reject the requested\ - \ authentication process.\n\nThis endpoint tells Ory that the subject has\ - \ not authenticated and includes a reason why the authentication\nwas denied.\n\ - \nThe response contains a redirect URL which the login provider should redirect\ - \ the user-agent to." + description: |- + When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider + to authenticate the subject and then tell the Ory OAuth2 Service about it. + + The authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login + provider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process. + + This endpoint tells Ory that the subject has not authenticated and includes a reason why the authentication + was denied. + + The response contains a redirect URL which the login provider should redirect the user-agent to. operationId: rejectOAuth2LoginRequest parameters: - description: OAuth 2.0 Login Request Challenge @@ -902,20 +888,20 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/rejectOAuth2Request' + $ref: "#/components/schemas/rejectOAuth2Request" x-originalParamName: Body responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Reject OAuth 2.0 Login Request tags: @@ -937,29 +923,29 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2LogoutRequest' + $ref: "#/components/schemas/oAuth2LogoutRequest" description: oAuth2LogoutRequest "410": content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Get OAuth 2.0 Session Logout Request tags: - oAuth2 /admin/oauth2/auth/requests/logout/accept: put: - description: "When a user or an application requests Ory OAuth 2.0 to remove\ - \ the session state of a subject, this endpoint is used to confirm that logout\ - \ request.\n\nThe response contains a redirect URL which the consent provider\ - \ should redirect the user-agent to." + description: |- + When a user or an application requests Ory OAuth 2.0 to remove the session state of a subject, this endpoint is used to confirm that logout request. + + The response contains a redirect URL which the consent provider should redirect the user-agent to. 
operationId: acceptOAuth2LogoutRequest parameters: - description: OAuth 2.0 Logout Request Challenge @@ -975,23 +961,24 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2RedirectTo' + $ref: "#/components/schemas/oAuth2RedirectTo" description: oAuth2RedirectTo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Accept OAuth 2.0 Session Logout Request tags: - oAuth2 /admin/oauth2/auth/requests/logout/reject: put: - description: "When a user or an application requests Ory OAuth 2.0 to remove\ - \ the session state of a subject, this endpoint is used to deny that logout\ - \ request.\nNo HTTP request body is required.\n\nThe response is empty as\ - \ the logout provider has to chose what action to perform next." + description: |- + When a user or an application requests Ory OAuth 2.0 to remove the session state of a subject, this endpoint is used to deny that logout request. + No HTTP request body is required. + + The response is empty as the logout provider has to chose what action to perform next. operationId: rejectOAuth2LogoutRequest parameters: - explode: true @@ -1003,13 +990,12 @@ paths: style: form responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Reject OAuth 2.0 Session Logout Request tags: @@ -1028,12 +1014,14 @@ paths: explode: true in: query name: subject - required: true + required: false schema: type: string style: form - - description: "OAuth 2.0 Client ID\n\nIf set, deletes only those consent sessions\ - \ that have been granted to the specified OAuth 2.0 Client ID." + - description: |- + OAuth 2.0 Client ID + + If set, deletes only those consent sessions that have been granted to the specified OAuth 2.0 Client ID. explode: true in: query name: client @@ -1041,6 +1029,17 @@ paths: schema: type: string style: form + - description: |- + Consent Request ID + + If set, revoke all token chains derived from this particular consent request ID. + explode: true + in: query + name: consent_request_id + required: false + schema: + type: string + style: form - description: |- Revoke All Consent Sessions @@ -1054,26 +1053,28 @@ paths: style: form responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Revoke OAuth 2.0 Consent Sessions of a Subject tags: - oAuth2 get: - description: "This endpoint lists all subject's granted consent sessions, including\ - \ client and granted scope.\nIf the subject is unknown or has not granted\ - \ any consent sessions yet, the endpoint returns an\nempty JSON array with\ - \ status code 200 OK." + description: |- + This endpoint lists all subject's granted consent sessions, including client and granted scope. + If the subject is unknown or has not granted any consent sessions yet, the endpoint returns an + empty JSON array with status code 200 OK. 
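A sketch of listing a subject's consent sessions with the keyset pagination parameters shown below. The `subject` query parameter, the admin port, and reading the next-page token from the `Link` response header are assumptions based on the Ory pagination conventions referenced in the parameter descriptions.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("subject", "user:12345") // hypothetical subject identifier
	q.Set("page_size", "50")

	resp, err := http.Get("http://127.0.0.1:4445/admin/oauth2/auth/sessions/consent?" + q.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Decode only the granted scopes from each oAuth2ConsentSession entry.
	var sessions []struct {
		GrantScope []string `json:"grant_scope"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&sessions); err != nil {
		panic(err)
	}
	fmt.Println("consent sessions:", len(sessions))

	// If a further page exists, it is announced via the Link response header.
	fmt.Println("Link:", resp.Header.Get("Link"))
}
```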
operationId: listOAuth2ConsentSessions parameters: - - description: "Items per Page\n\nThis is the number of items per page to return.\n\ - For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + - description: |- + Items per Page + + This is the number of items per page to return. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). explode: true in: query name: page_size @@ -1085,8 +1086,11 @@ paths: minimum: 1 type: integer style: form - - description: "Next Page Token\n\nThe next page token.\nFor details on pagination\ - \ please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + - description: |- + Next Page Token + + The next page token. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). explode: true in: query name: page_token @@ -1117,23 +1121,30 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2ConsentSessions' + $ref: "#/components/schemas/oAuth2ConsentSessions" description: oAuth2ConsentSessions default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: List OAuth 2.0 Consent Sessions of a Subject tags: - oAuth2 /admin/oauth2/auth/sessions/login: delete: - description: "This endpoint invalidates a subject's authentication session.\ - \ After revoking the authentication session, the subject\nhas to re-authenticate\ - \ at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens\ - \ and\ndoes not work with OpenID Connect Front- or Back-channel logout." + description: |- + This endpoint invalidates authentication sessions. After revoking the authentication session(s), the subject + has to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens. + + If you send the subject in a query param, all authentication sessions that belong to that subject are revoked. + No OpenID Connect Front- or Back-channel logout is performed in this case. + + Alternatively, you can send a SessionID via `sid` query param, in which case, only the session that is connected + to that SessionID is revoked. OpenID Connect Back-channel logout is performed in this case. + + When using Ory for the identity provider, the login provider will also invalidate the session cookie. operationId: revokeOAuth2LoginSessions parameters: - description: |- @@ -1143,48 +1154,57 @@ paths: explode: true in: query name: subject - required: true + required: false + schema: + type: string + style: form + - description: |- + Login Session ID + + The login session to revoke. + explode: true + in: query + name: sid + required: false schema: type: string style: form responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." 
+ $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 - summary: Revokes All OAuth 2.0 Login Sessions of a Subject + summary: Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID tags: - oAuth2 /admin/oauth2/introspect: post: - description: "The introspection endpoint allows to check if a token (both refresh\ - \ and access) is active or not. An active token\nis neither expired nor revoked.\ - \ If a token is active, additional information on the token will be included.\ - \ You can\nset additional data for a token by setting `session.access_token`\ - \ during the consent flow." + description: |- + The introspection endpoint allows to check if a token (both refresh and access) is active or not. An active token + is neither expired nor revoked. If a token is active, additional information on the token will be included. You can + set additional data for a token by setting `session.access_token` during the consent flow. operationId: introspectOAuth2Token requestBody: content: application/x-www-form-urlencoded: schema: - $ref: '#/components/schemas/introspectOAuth2Token_request' + $ref: "#/components/schemas/introspectOAuth2Token_request" responses: "200": content: application/json: schema: - $ref: '#/components/schemas/introspectedOAuth2Token' + $ref: "#/components/schemas/introspectedOAuth2Token" description: introspectedOAuth2Token default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Introspect OAuth2 Access and Refresh Tokens tags: @@ -1205,13 +1225,12 @@ paths: style: form responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: Delete OAuth 2.0 Access Tokens from specific OAuth 2.0 Client tags: @@ -1221,21 +1240,33 @@ paths: description: Use this endpoint to list all trusted JWT Bearer Grant Type Issuers. operationId: listTrustedOAuth2JwtGrantIssuers parameters: - - explode: true + - description: |- + Items per Page + + This is the number of items per page to return. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + explode: true in: query - name: MaxItems + name: page_size required: false schema: + default: 250 format: int64 + maximum: 1000 + minimum: 1 type: integer style: form - - explode: true + - description: |- + Next Page Token + + The next page token. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + explode: true in: query - name: DefaultItems + name: page_token required: false schema: - format: int64 - type: integer + type: string style: form - description: "If optional \"issuer\" is supplied, only jwt-bearer grants with\ \ this issuer will be returned." 
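A hedged Go sketch of calling the introspection endpoint documented earlier in this hunk (POST /admin/oauth2/introspect). The spec does not expand introspectOAuth2Token_request here, so the `token` form field (from RFC 7662) and the optional `scope` field are assumptions; the admin base URL is likewise assumed.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{}
	form.Set("token", "ory_at_...") // access or refresh token to introspect
	form.Set("scope", "openid")     // optional: scope the token must carry

	resp, err := http.Post(
		"http://127.0.0.1:4445/admin/oauth2/introspect",
		"application/x-www-form-urlencoded",
		strings.NewReader(form.Encode()),
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Active bool   `json:"active"`
		Sub    string `json:"sub"`
		Scope  string `json:"scope"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	// Only trust the other claims when active is true.
	fmt.Printf("active=%v sub=%q scope=%q\n", out.Active, out.Sub, out.Scope)
}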
@@ -1251,51 +1282,53 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/trustedOAuth2JwtGrantIssuers' + $ref: "#/components/schemas/trustedOAuth2JwtGrantIssuers" description: trustedOAuth2JwtGrantIssuers default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: List Trusted OAuth2 JWT Bearer Grant Type Issuers tags: - oAuth2 post: - description: "Use this endpoint to establish a trust relationship for a JWT\ - \ issuer\nto perform JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication\n\ - and Authorization Grants [RFC7523](https://datatracker.ietf.org/doc/html/rfc7523)." + description: |- + Use this endpoint to establish a trust relationship for a JWT issuer + to perform JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication + and Authorization Grants [RFC7523](https://datatracker.ietf.org/doc/html/rfc7523). operationId: trustOAuth2JwtGrantIssuer requestBody: content: application/json: schema: - $ref: '#/components/schemas/trustOAuth2JwtGrantIssuer' + $ref: "#/components/schemas/trustOAuth2JwtGrantIssuer" x-originalParamName: Body responses: "201": content: application/json: schema: - $ref: '#/components/schemas/trustedOAuth2JwtGrantIssuer' + $ref: "#/components/schemas/trustedOAuth2JwtGrantIssuer" description: trustedOAuth2JwtGrantIssuer default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: Trust OAuth2 JWT Bearer Grant Type Issuer tags: - oAuth2 /admin/trust/grants/jwt-bearer/issuers/{id}: delete: - description: "Use this endpoint to delete trusted JWT Bearer Grant Type Issuer.\ - \ The ID is the one returned when you\ncreated the trust relationship.\n\n\ - Once deleted, the associated issuer will no longer be able to perform the\ - \ JSON Web Token (JWT) Profile\nfor OAuth 2.0 Client Authentication and Authorization\ - \ Grant." + description: |- + Use this endpoint to delete trusted JWT Bearer Grant Type Issuer. The ID is the one returned when you + created the trust relationship. + + Once deleted, the associated issuer will no longer be able to perform the JSON Web Token (JWT) Profile + for OAuth 2.0 Client Authentication and Authorization Grant. operationId: deleteTrustedOAuth2JwtGrantIssuer parameters: - description: The id of the desired grant @@ -1308,13 +1341,12 @@ paths: style: simple responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." 
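A sketch of paging through listTrustedOAuth2JwtGrantIssuers with the new page_size/page_token parameters introduced above. The next page token is read from the `Link` response header (rel="next") as described by the keysetPaginationResponseHeaders schema later in this spec; the admin base URL is an assumption, and a real client may need to resolve a relative rel="next" target against it.

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

// nextPageURL extracts the rel="next" target from a Link header such as
// `<https://host/path?page_token=abc>; rel="next", <...>; rel="first"`.
func nextPageURL(link string) string {
	for _, part := range strings.Split(link, ",") {
		section := strings.Split(part, ";")
		if len(section) < 2 {
			continue
		}
		target := strings.Trim(strings.TrimSpace(section[0]), "<>")
		if strings.Contains(section[1], `rel="next"`) {
			return target
		}
	}
	return ""
}

func main() {
	next := "http://127.0.0.1:4445/admin/trust/grants/jwt-bearer/issuers?" +
		url.Values{"page_size": {"250"}}.Encode()

	for page := 1; next != ""; page++ {
		resp, err := http.Get(next)
		if err != nil {
			panic(err)
		}
		resp.Body.Close() // a real client would decode trustedOAuth2JwtGrantIssuers here
		fmt.Println("fetched page", page, "status", resp.StatusCode)

		// Pages are omitted when they do not exist, so this loop ends naturally.
		next = nextPageURL(resp.Header.Get("Link"))
	}
}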
+ $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: Delete Trusted OAuth2 JWT Bearer Grant Type Issuer tags: @@ -1338,64 +1370,107 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/trustedOAuth2JwtGrantIssuer' + $ref: "#/components/schemas/trustedOAuth2JwtGrantIssuer" description: trustedOAuth2JwtGrantIssuer default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: Get Trusted OAuth2 JWT Bearer Grant Type Issuer tags: - oAuth2 + /credentials: + post: + description: |- + This endpoint creates a verifiable credential that attests that the user + authenticated with the provided access token owns a certain public/private key + pair. + + More information can be found at + https://openid.net/specs/openid-connect-userinfo-vc-1_0.html. + operationId: createVerifiableCredential + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/CreateVerifiableCredentialRequestBody" + x-originalParamName: Body + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/verifiableCredentialResponse" + description: verifiableCredentialResponse + "400": + content: + application/json: + schema: + $ref: "#/components/schemas/verifiableCredentialPrimingResponse" + description: verifiableCredentialPrimingResponse + default: + content: + application/json: + schema: + $ref: "#/components/schemas/errorOAuth2" + description: errorOAuth2 + summary: Issues a Verifiable Credential + tags: + - oidc /health/alive: get: - description: "This endpoint returns a HTTP 200 status code when Ory Hydra is\ - \ accepting incoming\nHTTP requests. This status does currently not include\ - \ checks whether the database connection is working.\n\nIf the service supports\ - \ TLS Edge Termination, this endpoint does not require the\n`X-Forwarded-Proto`\ - \ header to be set.\n\nBe aware that if you are running multiple nodes of\ - \ this service, the health status will never\nrefer to the cluster state,\ - \ only to a single instance." + description: |- + This endpoint returns a HTTP 200 status code when Ory Hydra is accepting incoming + HTTP requests. This status does currently not include checks whether the database connection is working. + + If the service supports TLS Edge Termination, this endpoint does not require the + `X-Forwarded-Proto` header to be set. + + Be aware that if you are running multiple nodes of this service, the health status will never + refer to the cluster state, only to a single instance. operationId: isAlive responses: "200": content: application/json: schema: - $ref: '#/components/schemas/healthStatus' + $ref: "#/components/schemas/healthStatus" description: Ory Hydra is ready to accept connections. 
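A small sketch of probing the health endpoints documented here: /health/alive answers 200 while the process accepts HTTP requests, and /health/ready additionally checks dependencies such as the database (503 when not ready). The base URL is an assumption, and in multi-node setups each instance reports only its own state.

package main

import (
	"fmt"
	"net/http"
)

func check(url string) {
	resp, err := http.Get(url)
	if err != nil {
		fmt.Println(url, "unreachable:", err)
		return
	}
	defer resp.Body.Close()
	// 200 means healthy; /health/ready returns 503 when dependencies are down.
	fmt.Println(url, "->", resp.Status)
}

func main() {
	check("http://127.0.0.1:4444/health/alive")
	check("http://127.0.0.1:4444/health/ready")
}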
"500": content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError summary: Check HTTP Server Status tags: - metadata /health/ready: get: - description: "This endpoint returns a HTTP 200 status code when Ory Hydra is\ - \ up running and the environment dependencies (e.g.\nthe database) are responsive\ - \ as well.\n\nIf the service supports TLS Edge Termination, this endpoint\ - \ does not require the\n`X-Forwarded-Proto` header to be set.\n\nBe aware\ - \ that if you are running multiple nodes of Ory Hydra, the health status will\ - \ never\nrefer to the cluster state, only to a single instance." + description: |- + This endpoint returns a HTTP 200 status code when Ory Hydra is up running and the environment dependencies (e.g. + the database) are responsive as well. + + If the service supports TLS Edge Termination, this endpoint does not require the + `X-Forwarded-Proto` header to be set. + + Be aware that if you are running multiple nodes of Ory Hydra, the health status will never + refer to the cluster state, only to a single instance. operationId: isReady responses: "200": content: application/json: schema: - $ref: '#/components/schemas/isReady_200_response' + $ref: "#/components/schemas/isReady_200_response" description: Ory Hydra is ready to accept requests. "503": content: application/json: schema: - $ref: '#/components/schemas/isReady_503_response' + $ref: "#/components/schemas/isReady_503_response" description: Ory Kratos is not yet ready to accept requests. summary: Check HTTP Server and Database Status tags: @@ -1406,21 +1481,62 @@ paths: Use open source libraries to perform OAuth 2.0 and OpenID Connect available for any programming language. You can find a list of libraries at https://oauth.net/code/ - The Ory SDK is not yet able to this endpoint properly. + This endpoint should not be used via the Ory SDK and is only included for technical reasons. + Instead, use one of the libraries linked above. operationId: oAuth2Authorize responses: "302": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 summary: OAuth 2.0 Authorize Endpoint tags: - oAuth2 + /oauth2/device/auth: + post: + description: |- + This endpoint is not documented here because you should never use your own implementation to perform OAuth2 flows. + OAuth2 is a very popular protocol and a library for your programming language will exist. + + To learn more about this flow please refer to the specification: https://tools.ietf.org/html/rfc8628 + operationId: oAuth2DeviceFlow + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/deviceAuthorization" + description: deviceAuthorization + default: + content: + application/json: + schema: + $ref: "#/components/schemas/errorOAuth2" + description: errorOAuth2 + summary: The OAuth 2.0 Device Authorize Endpoint + tags: + - oAuth2 + /oauth2/device/verify: + get: + description: This is the device user verification endpoint. The user is redirected + here when trying to log in using the device flow. 
+ operationId: performOAuth2DeviceVerificationFlow + responses: + "302": + $ref: "#/components/responses/emptyResponse" + default: + content: + application/json: + schema: + $ref: "#/components/schemas/errorOAuth2" + description: errorOAuth2 + summary: OAuth 2.0 Device Verification Endpoint + tags: + - oAuth2 /oauth2/register: post: description: |- @@ -1440,7 +1556,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: Dynamic Client Registration Request Body required: true x-originalParamName: Body @@ -1449,37 +1565,29 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client "400": - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Bad Request Error Response + $ref: "#/components/responses/errorOAuth2BadRequest" default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" summary: Register OAuth2 Client using OpenID Dynamic Client Registration tags: - oidc /oauth2/register/{id}: delete: - description: "This endpoint behaves like the administrative counterpart (`deleteOAuth2Client`)\ - \ but is capable of facing the\npublic internet directly and can be used in\ - \ self-service. It implements the OpenID Connect\nDynamic Client Registration\ - \ Protocol. This feature needs to be enabled in the configuration. This endpoint\n\ - is disabled by default. It can be enabled by an administrator.\n\nTo use this\ - \ endpoint, you will need to present the client's authentication credentials.\ - \ If the OAuth2 Client\nuses the Token Endpoint Authentication Method `client_secret_post`,\ - \ you need to present the client secret in the URL query.\nIf it uses `client_secret_basic`,\ - \ present the Client ID and the Client Secret in the Authorization header.\n\ - \nOAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows.\ - \ Usually, OAuth 2.0 clients are\ngenerated for applications which want to\ - \ consume your OAuth 2.0 or OpenID Connect capabilities." + description: |- + This endpoint behaves like the administrative counterpart (`deleteOAuth2Client`) but is capable of facing the + public internet directly and can be used in self-service. It implements the OpenID Connect + Dynamic Client Registration Protocol. This feature needs to be enabled in the configuration. This endpoint + is disabled by default. It can be enabled by an administrator. + + To use this endpoint, you will need to present the client's authentication credentials. If the OAuth2 Client + uses the Token Endpoint Authentication Method `client_secret_post`, you need to present the client secret in the URL query. + If it uses `client_secret_basic`, present the Client ID and the Client Secret in the Authorization header. + + OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. operationId: deleteOidcDynamicClient parameters: - description: The id of the OAuth 2.0 Client. @@ -1492,13 +1600,12 @@ paths: style: simple responses: "204": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." 
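A minimal sketch of OpenID Dynamic Client Registration against POST /oauth2/register, as documented above. Only a handful of oAuth2Client fields are set; the public base URL and the example values are assumptions.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body, _ := json.Marshal(map[string]any{
		"client_name":   "example-app",
		"redirect_uris": []string{"https://example.com/callback"},
		"grant_types":   []string{"authorization_code", "refresh_token"},
		"scope":         "openid offline_access",
	})

	resp, err := http.Post("http://127.0.0.1:4444/oauth2/register",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var client map[string]any
	if err := json.NewDecoder(resp.Body).Decode(&client); err != nil {
		panic(err)
	}
	// On 201 the response carries the generated client_id and client_secret,
	// plus the registration_access_token used for later self-service calls.
	fmt.Println("status:", resp.StatusCode, "client_id:", client["client_id"])
}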
+ $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/genericError' + $ref: "#/components/schemas/genericError" description: genericError security: - bearer: [] @@ -1507,14 +1614,14 @@ paths: tags: - oidc get: - description: "This endpoint behaves like the administrative counterpart (`getOAuth2Client`)\ - \ but is capable of facing the\npublic internet directly and can be used in\ - \ self-service. It implements the OpenID Connect\nDynamic Client Registration\ - \ Protocol.\n\nTo use this endpoint, you will need to present the client's\ - \ authentication credentials. If the OAuth2 Client\nuses the Token Endpoint\ - \ Authentication Method `client_secret_post`, you need to present the client\ - \ secret in the URL query.\nIf it uses `client_secret_basic`, present the\ - \ Client ID and the Client Secret in the Authorization header." + description: |- + This endpoint behaves like the administrative counterpart (`getOAuth2Client`) but is capable of facing the + public internet directly and can be used in self-service. It implements the OpenID Connect + Dynamic Client Registration Protocol. + + To use this endpoint, you will need to present the client's authentication credentials. If the OAuth2 Client + uses the Token Endpoint Authentication Method `client_secret_post`, you need to present the client secret in the URL query. + If it uses `client_secret_basic`, present the Client ID and the Client Secret in the Authorization header. operationId: getOidcDynamicClient parameters: - description: The id of the OAuth 2.0 Client. @@ -1530,34 +1637,32 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" security: - bearer: [] summary: Get OAuth2 Client using OpenID Dynamic Client Registration tags: - oidc put: - description: "This endpoint behaves like the administrative counterpart (`setOAuth2Client`)\ - \ but is capable of facing the\npublic internet directly to be used by third\ - \ parties. It implements the OpenID Connect\nDynamic Client Registration Protocol.\n\ - \nThis feature is disabled per default. It can be enabled by a system administrator.\n\ - \nIf you pass `client_secret` the secret is used, otherwise the existing secret\ - \ is used. If set, the secret is echoed in the response.\nIt is not possible\ - \ to retrieve it later on.\n\nTo use this endpoint, you will need to present\ - \ the client's authentication credentials. If the OAuth2 Client\nuses the\ - \ Token Endpoint Authentication Method `client_secret_post`, you need to present\ - \ the client secret in the URL query.\nIf it uses `client_secret_basic`, present\ - \ the Client ID and the Client Secret in the Authorization header.\n\nOAuth\ - \ 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually,\ - \ OAuth 2.0 clients are\ngenerated for applications which want to consume\ - \ your OAuth 2.0 or OpenID Connect capabilities." + description: |- + This endpoint behaves like the administrative counterpart (`setOAuth2Client`) but is capable of facing the + public internet directly to be used by third parties. It implements the OpenID Connect + Dynamic Client Registration Protocol. + + This feature is disabled per default. 
It can be enabled by a system administrator. + + If you pass `client_secret` the secret is used, otherwise the existing secret is used. If set, the secret is echoed in the response. + It is not possible to retrieve it later on. + + To use this endpoint, you will need to present the client's authentication credentials. If the OAuth2 Client + uses the Token Endpoint Authentication Method `client_secret_post`, you need to present the client secret in the URL query. + If it uses `client_secret_basic`, present the Client ID and the Client Secret in the Authorization header. + + OAuth 2.0 clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. operationId: setOidcDynamicClient parameters: - description: OAuth 2.0 Client ID @@ -1572,7 +1677,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: OAuth 2.0 Client Request Body required: true x-originalParamName: Body @@ -1581,20 +1686,12 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" description: oAuth2Client "404": - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Not Found Error Response + $ref: "#/components/responses/errorOAuth2NotFound" default: - content: - application/json: - schema: - $ref: '#/components/schemas/errorOAuth2' - description: Default Error Response + $ref: "#/components/responses/errorOAuth2Default" security: - bearer: [] summary: Set OAuth2 Client using OpenID Dynamic Client Registration @@ -1602,27 +1699,25 @@ paths: - oidc /oauth2/revoke: post: - description: "Revoking a token (both access and refresh) means that the tokens\ - \ will be invalid. A revoked access token can no\nlonger be used to make access\ - \ requests, and a revoked refresh token can no longer be used to refresh an\ - \ access token.\nRevoking a refresh token also invalidates the access token\ - \ that was created with it. A token may only be revoked by\nthe client the\ - \ token was generated for." + description: |- + Revoking a token (both access and refresh) means that the tokens will be invalid. A revoked access token can no + longer be used to make access requests, and a revoked refresh token can no longer be used to refresh an access token. + Revoking a refresh token also invalidates the access token that was created with it. A token may only be revoked by + the client the token was generated for. operationId: revokeOAuth2Token requestBody: content: application/x-www-form-urlencoded: schema: - $ref: '#/components/schemas/revokeOAuth2Token_request' + $ref: "#/components/schemas/revokeOAuth2Token_request" responses: "200": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." + $ref: "#/components/responses/emptyResponse" default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 security: - basic: [] @@ -1642,8 +1737,7 @@ paths: operationId: revokeOidcSession responses: "302": - description: "Empty responses are sent when, for example, resources are\ - \ deleted. The HTTP status code for empty responses is\ntypically 201." 
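A hedged sketch of revoking a token at POST /oauth2/revoke as described above. The `token` form field follows RFC 7009 (the revokeOAuth2Token_request schema is the source of truth), and the client authenticates with HTTP Basic to match the `basic` security scheme on the operation; base URL and credentials are assumptions.

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{"token": {"ory_at_token-to-revoke"}}

	req, err := http.NewRequest(http.MethodPost,
		"http://127.0.0.1:4444/oauth2/revoke", strings.NewReader(form.Encode()))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	// Only the client the token was issued for may revoke it.
	req.SetBasicAuth("my-client-id", "my-client-secret")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode) // 200 on success
}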
+ $ref: "#/components/responses/emptyResponse" summary: OpenID Connect Front- and Back-channel Enabled Logout tags: - oidc @@ -1653,25 +1747,26 @@ paths: Use open source libraries to perform OAuth 2.0 and OpenID Connect available for any programming language. You can find a list of libraries here https://oauth.net/code/ - The Ory SDK is not yet able to this endpoint properly. + This endpoint should not be used via the Ory SDK and is only included for technical reasons. + Instead, use one of the libraries linked above. operationId: oauth2TokenExchange requestBody: content: application/x-www-form-urlencoded: schema: - $ref: '#/components/schemas/oauth2TokenExchange_request' + $ref: "#/components/schemas/oauth2TokenExchange_request" responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oAuth2TokenExchange' + $ref: "#/components/schemas/oAuth2TokenExchange" description: oAuth2TokenExchange default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 security: - basic: [] @@ -1681,24 +1776,26 @@ paths: - oAuth2 /userinfo: get: - description: "This endpoint returns the payload of the ID Token, including `session.id_token`\ - \ values, of\nthe provided OAuth 2.0 Access Token's consent request.\n\nIn\ - \ the case of authentication error, a WWW-Authenticate header might be set\ - \ in the response\nwith more information about the error. See [the spec](https://datatracker.ietf.org/doc/html/rfc6750#section-3)\n\ - for more details about header format." + description: |- + This endpoint returns the payload of the ID Token, including `session.id_token` values, of + the provided OAuth 2.0 Access Token's consent request. + + In the case of authentication error, a WWW-Authenticate header might be set in the response + with more information about the error. See [the spec](https://datatracker.ietf.org/doc/html/rfc6750#section-3) + for more details about header format. operationId: getOidcUserInfo responses: "200": content: application/json: schema: - $ref: '#/components/schemas/oidcUserInfo' + $ref: "#/components/schemas/oidcUserInfo" description: oidcUserInfo default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: errorOAuth2 security: - oauth2: [] @@ -1707,18 +1804,21 @@ paths: - oidc /version: get: - description: "This endpoint returns the version of Ory Hydra.\n\nIf the service\ - \ supports TLS Edge Termination, this endpoint does not require the\n`X-Forwarded-Proto`\ - \ header to be set.\n\nBe aware that if you are running multiple nodes of\ - \ this service, the version will never\nrefer to the cluster state, only to\ - \ a single instance." + description: |- + This endpoint returns the version of Ory Hydra. + + If the service supports TLS Edge Termination, this endpoint does not require the + `X-Forwarded-Proto` header to be set. + + Be aware that if you are running multiple nodes of this service, the version will never + refer to the cluster state, only to a single instance. operationId: getVersion responses: "200": content: application/json: schema: - $ref: '#/components/schemas/getVersion_200_response' + $ref: "#/components/schemas/getVersion_200_response" description: Returns the Ory Hydra version. summary: Return Running Software Version. 
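A sketch of calling GET /userinfo with an OAuth 2.0 Access Token, as described above. On authentication errors the server may set a WWW-Authenticate header (RFC 6750, section 3), which is printed for debugging. The public base URL and token value are placeholders.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "http://127.0.0.1:4444/userinfo", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer ory_at_...")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		fmt.Println("error:", resp.Status, resp.Header.Get("WWW-Authenticate"))
		return
	}
	var claims map[string]any
	if err := json.NewDecoder(resp.Body).Decode(&claims); err != nil {
		panic(err)
	}
	fmt.Println("sub:", claims["sub"])
}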
tags: @@ -1726,35 +1826,90 @@ paths: components: responses: emptyResponse: - description: "Empty responses are sent when, for example, resources are deleted.\ - \ The HTTP status code for empty responses is\ntypically 201." + description: |- + Empty responses are sent when, for example, resources are deleted. The HTTP status code for empty responses is + typically 204. errorOAuth2BadRequest: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: Bad Request Error Response errorOAuth2Default: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: Default Error Response errorOAuth2NotFound: content: application/json: schema: - $ref: '#/components/schemas/errorOAuth2' + $ref: "#/components/schemas/errorOAuth2" description: Not Found Error Response listOAuth2Clients: content: application/json: schema: items: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" type: array description: Paginated OAuth2 Client List Response schemas: + CreateVerifiableCredentialRequestBody: + example: + types: + - types + - types + format: format + proof: + proof_type: proof_type + jwt: jwt + properties: + format: + type: string + proof: + $ref: "#/components/schemas/VerifiableCredentialProof" + types: + items: + type: string + type: array + title: CreateVerifiableCredentialRequestBody contains the request body to request + a verifiable credential. + type: object + DefaultError: {} + DeviceUserAuthRequest: + properties: + challenge: + description: |- + ID is the identifier ("device challenge") of the device grant request. It is used to + identify the session. + type: string + client: + $ref: "#/components/schemas/oAuth2Client" + handled_at: + format: date-time + title: NullTime implements sql.NullTime functionality. + type: string + request_url: + description: RequestURL is the original Device Authorization URL requested. + type: string + requested_access_token_audience: + items: + type: string + title: "StringSliceJSONFormat represents []string{} which is encoded to/from\ + \ JSON for SQL storage." + type: array + requested_scope: + items: + type: string + title: "StringSliceJSONFormat represents []string{} which is encoded to/from\ + \ JSON for SQL storage." + type: array + required: + - challenge + title: Contains information on an ongoing device grant request. + type: object JSONRawMessage: title: "JSONRawMessage represents a json.RawMessage that works well with JSON,\ \ SQL, and Swagger." @@ -1780,6 +1935,21 @@ components: format: uuid4 nullable: true type: string + RFC6749ErrorJson: + properties: + error: + type: string + error_debug: + type: string + error_description: + type: string + error_hint: + type: string + status_code: + format: int64 + type: integer + title: RFC6749ErrorJson is a helper struct for JSON encoding/decoding of RFC6749Error. + type: object StringSliceJSONFormat: items: type: string @@ -1792,8 +1962,28 @@ components: UUID: format: uuid4 type: string + VerifiableCredentialProof: + example: + proof_type: proof_type + jwt: jwt + properties: + jwt: + type: string + proof_type: + type: string + title: VerifiableCredentialProof contains the proof of a verifiable credential. 
+ type: object + acceptDeviceUserCodeRequest: + description: Contains information on an device verification + properties: + user_code: + type: string + type: object acceptOAuth2ConsentRequest: properties: + context: + title: "JSONRawMessage represents a json.RawMessage that works well with\ + \ JSON, SQL, and Swagger." grant_access_token_audience: items: type: string @@ -1806,23 +1996,19 @@ components: title: "StringSliceJSONFormat represents []string{} which is encoded to/from\ \ JSON for SQL storage." type: array - handled_at: - format: date-time - title: NullTime implements sql.NullTime functionality. - type: string remember: - description: "Remember, if set to true, tells ORY Hydra to remember this\ - \ consent authorization and reuse it if the same\nclient asks the same\ - \ user for the same, or a subset of, scope." + description: |- + Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same + client asks the same user for the same, or a subset of, scope. type: boolean remember_for: - description: "RememberFor sets how long the consent authorization should\ - \ be remembered for in seconds. If set to `0`, the\nauthorization will\ - \ be remembered indefinitely." + description: |- + RememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the + authorization will be remembered indefinitely. format: int64 type: integer session: - $ref: '#/components/schemas/acceptOAuth2ConsentRequestSession' + $ref: "#/components/schemas/acceptOAuth2ConsentRequestSession" title: The request payload used to accept a consent request. type: object acceptOAuth2ConsentRequestSession: @@ -1831,13 +2017,11 @@ components: id_token: "" properties: access_token: - description: "AccessToken sets session data for the access and refresh token,\ - \ as well as any future tokens issued by the\nrefresh grant. Keep in mind\ - \ that this data will be available to anyone performing OAuth 2.0 Challenge\ - \ Introspection.\nIf only your services can perform OAuth 2.0 Challenge\ - \ Introspection, this is usually fine. But if third parties\ncan access\ - \ that endpoint as well, sensitive data from the session might be exposed\ - \ to them. Use with care!" + description: |- + AccessToken sets session data for the access and refresh token, as well as any future tokens issued by the + refresh grant. Keep in mind that this data will be available to anyone performing OAuth 2.0 Challenge Introspection. + If only your services can perform OAuth 2.0 Challenge Introspection, this is usually fine. But if third parties + can access that endpoint as well, sensitive data from the session might be exposed to them. Use with care! id_token: description: |- IDToken sets session data for the OpenID Connect ID token. Keep in mind that the session'id payloads are readable @@ -1847,9 +2031,9 @@ components: acceptOAuth2LoginRequest: properties: acr: - description: "ACR sets the Authentication AuthorizationContext Class Reference\ - \ value for this authentication session. You can use it\nto express that,\ - \ for example, a user authenticated using two factor authentication." + description: |- + ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it + to express that, for example, a user authenticated using two-factor authentication. 
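A sketch of how a consent UI might build and submit the acceptOAuth2ConsentRequest payload described above. The body fields match the schema; the accept endpoint path and its consent_challenge query parameter are defined earlier in this spec and are assumptions here, as is the admin base URL.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type acceptConsent struct {
	GrantScope               []string       `json:"grant_scope"`
	GrantAccessTokenAudience []string       `json:"grant_access_token_audience,omitempty"`
	Remember                 bool           `json:"remember"`
	RememberFor              int64          `json:"remember_for"` // 0 = remember indefinitely
	Session                  map[string]any `json:"session,omitempty"`
}

func main() {
	payload := acceptConsent{
		GrantScope:  []string{"openid", "offline_access"},
		Remember:    true,
		RememberFor: 3600,
		Session: map[string]any{
			// id_token data is readable by the end user; access_token data is
			// visible to anyone performing token introspection. Use with care.
			"id_token": map[string]any{"email": "user@example.com"},
		},
	}
	body, _ := json.Marshal(payload)

	// Assumed endpoint path; consult the consent flow operations in this spec.
	endpoint := "http://127.0.0.1:4445/admin/oauth2/auth/requests/consent/accept?consent_challenge=abc123"
	req, _ := http.NewRequest(http.MethodPut, endpoint, bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	// The response contains a redirect_to URL the consent UI must redirect the browser to.
	fmt.Println("status:", resp.StatusCode)
}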
type: string amr: items: @@ -1860,36 +2044,49 @@ components: context: title: "JSONRawMessage represents a json.RawMessage that works well with\ \ JSON, SQL, and Swagger." + extend_session_lifespan: + description: |- + Extend OAuth2 authentication session lifespan + + If set to `true`, the OAuth2 authentication cookie lifespan is extended. This is for example useful if you want the user to be able to use `prompt=none` continuously. + + This value can only be set to `true` if the user has an authentication, which is the case if the `skip` value is `true`. + type: boolean force_subject_identifier: - description: "ForceSubjectIdentifier forces the \"pairwise\" user ID of\ - \ the end-user that authenticated. The \"pairwise\" user ID refers to\ - \ the\n(Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg]\ - \ of the OpenID\nConnect specification. It allows you to set an obfuscated\ - \ subject (\"user\") identifier that is unique to the client.\n\nPlease\ - \ note that this changes the user ID on endpoint /userinfo and sub claim\ - \ of the ID Token. It does not change the\nsub claim in the OAuth 2.0\ - \ Introspection.\n\nPer default, ORY Hydra handles this value with its\ - \ own algorithm. In case you want to set this yourself\nyou can use this\ - \ field. Please note that setting this field has no effect if `pairwise`\ - \ is not configured in\nORY Hydra or the OAuth 2.0 Client does not expect\ - \ a pairwise identifier (set via `subject_type` key in the client's\n\ - configuration).\n\nPlease also be aware that ORY Hydra is unable to properly\ - \ compute this value during authentication. This implies\nthat you have\ - \ to compute this value on every authentication process (probably depending\ - \ on the client ID or some\nother unique value).\n\nIf you fail to compute\ - \ the proper value, then authentication processes which have id_token_hint\ - \ set might fail." + description: |- + ForceSubjectIdentifier forces the "pairwise" user ID of the end-user that authenticated. The "pairwise" user ID refers to the + (Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID + Connect specification. It allows you to set an obfuscated subject ("user") identifier that is unique to the client. + + Please note that this changes the user ID on endpoint /userinfo and sub claim of the ID Token. It does not change the + sub claim in the OAuth 2.0 Introspection. + + Per default, ORY Hydra handles this value with its own algorithm. In case you want to set this yourself + you can use this field. Please note that setting this field has no effect if `pairwise` is not configured in + ORY Hydra or the OAuth 2.0 Client does not expect a pairwise identifier (set via `subject_type` key in the client's + configuration). + + Please also be aware that ORY Hydra is unable to properly compute this value during authentication. This implies + that you have to compute this value on every authentication process (probably depending on the client ID or some + other unique value). + + If you fail to compute the proper value, then authentication processes which have id_token_hint set might fail. + type: string + identity_provider_session_id: + description: |- + IdentityProviderSessionID is the session ID of the end-user that authenticated. + If specified, we will use this value to propagate the logout. 
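To make the acceptOAuth2LoginRequest fields above concrete, a sketch that builds the JSON body a login UI would send back to the login-accept admin endpoint (defined earlier in this spec). All values are illustrative; only `subject` is required.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	payload := map[string]any{
		"subject":                 "user-1234",            // required
		"remember":                true,                   // set an authentication cookie
		"remember_for":            3600,                   // seconds; 0 = browser session
		"acr":                     "urn:example:mfa",      // e.g. two-factor authentication
		"amr":                     []string{"pwd", "otp"}, // authentication methods used
		"extend_session_lifespan": true,                   // useful for continuous prompt=none
		// Propagates logout to the identity provider session, if set.
		"identity_provider_session_id": "idp-session-5678",
	}

	body, err := json.MarshalIndent(payload, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}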
type: string remember: - description: "Remember, if set to true, tells ORY Hydra to remember this\ - \ user by telling the user agent (browser) to store\na cookie with authentication\ - \ data. If the same user performs another OAuth 2.0 Authorization Request,\ - \ he/she\nwill not be asked to log in again." + description: |- + Remember, if set to true, tells Ory Hydra to remember this user by telling the user agent (browser) to store + a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, they + will not be asked to log in again. type: boolean remember_for: - description: "RememberFor sets how long the authentication should be remembered\ - \ for in seconds. If set to `0`, the\nauthorization will be remembered\ - \ for the duration of the browser session (using a session cookie)." + description: |- + RememberFor sets how long the authentication should be remembered for in seconds. If set to `0`, the + authorization will be remembered for the duration of the browser session (using a session cookie). format: int64 type: integer subject: @@ -1903,8 +2100,10 @@ components: description: Create JSON Web Key Set Request Body properties: alg: - description: "JSON Web Key Algorithm\n\nThe algorithm to be used for creating\ - \ the key. Supports `RS256`, `ES256`, `ES512`, `HS512`, and `HS256`." + description: |- + JSON Web Key Algorithm + + The algorithm to be used for creating the key. Supports `RS256`, `ES256`, `ES512`, `HS512`, and `HS256`. type: string kid: description: |- @@ -1926,6 +2125,99 @@ components: - kid - use type: object + credentialSupportedDraft00: + description: Includes information about the supported verifiable credentials. + example: + types: + - types + - types + cryptographic_suites_supported: + - cryptographic_suites_supported + - cryptographic_suites_supported + cryptographic_binding_methods_supported: + - cryptographic_binding_methods_supported + - cryptographic_binding_methods_supported + format: format + properties: + cryptographic_binding_methods_supported: + description: |- + OpenID Connect Verifiable Credentials Cryptographic Binding Methods Supported + + Contains a list of cryptographic binding methods supported for signing the proof. + items: + type: string + type: array + cryptographic_suites_supported: + description: |- + OpenID Connect Verifiable Credentials Cryptographic Suites Supported + + Contains a list of cryptographic suites methods supported for signing the proof. + items: + type: string + type: array + format: + description: |- + OpenID Connect Verifiable Credentials Format + + Contains the format that is supported by this authorization server. + type: string + types: + description: |- + OpenID Connect Verifiable Credentials Types + + Contains the types of verifiable credentials supported. + items: + type: string + type: array + title: Verifiable Credentials Metadata (Draft 00) + type: object + deviceAuthorization: + description: "# Ory's OAuth 2.0 Device Authorization API" + example: + user_code: AAAAAA + device_code: ory_dc_smldfksmdfkl.mslkmlkmlk + interval: 5 + verification_uri_complete: https://auth.ory.sh/tv?user_code=AAAAAA + verification_uri: https://auth.ory.sh/tv + expires_in: 16830 + properties: + device_code: + description: The device verification code. + example: ory_dc_smldfksmdfkl.mslkmlkmlk + type: string + expires_in: + description: The lifetime in seconds of the "device_code" and "user_code". 
+ example: 16830 + format: int64 + type: integer + interval: + description: |- + The minimum amount of time in seconds that the client + SHOULD wait between polling requests to the token endpoint. If no + value is provided, clients MUST use 5 as the default. + example: 5 + format: int64 + type: integer + user_code: + description: The end-user verification code. + example: AAAAAA + type: string + verification_uri: + description: |- + The end-user verification URI on the authorization + server. The URI should be short and easy to remember as end users + will be asked to manually type it into their user agent. + example: https://auth.ory.sh/tv + type: string + verification_uri_complete: + description: |- + A verification URI that includes the "user_code" (or + other information with the same function as the "user_code"), + which is designed for non-textual transmission. + example: https://auth.ory.sh/tv?user_code=AAAAAA + type: string + title: OAuth2 Device Flow + type: object errorOAuth2: description: Error example: @@ -1961,6 +2253,15 @@ components: type: integer type: object genericError: + example: + reason: User with ID 1234 does not exist. + request: d7ef54b1-ec15-46e6-bccb-524b82c035e6 + code: 404 + debug: SQL field "foo" is not a bool. + details: "" + id: id + message: The resource could not be found + status: Not Found properties: code: description: The status code @@ -2017,6 +2318,7 @@ components: description: Errors contains a list of errors that caused the not ready status. type: object + title: The not ready status of the service. type: object healthStatus: example: @@ -2025,10 +2327,12 @@ components: status: description: Status always contains "ok". type: string + title: The health status of the service. type: object introspectedOAuth2Token: - description: "Introspection contains an access token's session data as specified\ - \ by\n[IETF RFC 7662](https://tools.ietf.org/html/rfc7662)" + description: |- + Introspection contains an access token's session data as specified by + [IETF RFC 7662](https://tools.ietf.org/html/rfc7662) example: ext: key: "" @@ -2049,14 +2353,16 @@ components: username: username properties: active: - description: "Active is a boolean indicator of whether or not the presented\ - \ token\nis currently active. The specifics of a token's \"active\" state\n\ - will vary depending on the implementation of the authorization\nserver\ - \ and the information it keeps about its tokens, but a \"true\"\nvalue\ - \ return for the \"active\" property will generally indicate\nthat a given\ - \ token has been issued by this authorization server,\nhas not been revoked\ - \ by the resource owner, and is within its\ngiven time window of validity\ - \ (e.g., after its issuance time and\nbefore its expiration time)." + description: |- + Active is a boolean indicator of whether or not the presented token + is currently active. The specifics of a token's "active" state + will vary depending on the implementation of the authorization + server and the information it keeps about its tokens, but a "true" + value return for the "active" property will generally indicate + that a given token has been issued by this authorization server, + has not been revoked by the resource owner, and is within its + given time window of validity (e.g., after its issuance time and + before its expiration time). type: boolean aud: description: Audience contains a list of the token's intended audiences. 
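A hedged sketch of polling the token endpoint while the user finishes the device flow, honoring the `interval` (default 5 seconds when absent) and `expires_in` fields of the deviceAuthorization response above. The urn:ietf:params:oauth:grant-type:device_code grant type and the authorization_pending/slow_down error semantics come from RFC 8628 and are assumptions, not spelled out in this spec.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"strings"
	"time"
)

func pollForToken(tokenURL, clientID, deviceCode string, interval, expiresIn int64) (string, error) {
	if interval <= 0 {
		interval = 5 // RFC 8628: clients MUST default to 5 seconds
	}
	deadline := time.Now().Add(time.Duration(expiresIn) * time.Second)

	for time.Now().Before(deadline) {
		time.Sleep(time.Duration(interval) * time.Second)

		form := url.Values{
			"grant_type":  {"urn:ietf:params:oauth:grant-type:device_code"},
			"device_code": {deviceCode},
			"client_id":   {clientID},
		}
		resp, err := http.Post(tokenURL, "application/x-www-form-urlencoded",
			strings.NewReader(form.Encode()))
		if err != nil {
			return "", err
		}
		var body struct {
			AccessToken string `json:"access_token"`
			Error       string `json:"error"`
		}
		err = json.NewDecoder(resp.Body).Decode(&body)
		resp.Body.Close()
		if err != nil {
			return "", err
		}
		if body.AccessToken != "" {
			return body.AccessToken, nil
		}
		// "authorization_pending" means keep polling; "slow_down" means back off.
		if body.Error == "slow_down" {
			interval += 5
		}
	}
	return "", fmt.Errorf("device code expired before authorization completed")
}

func main() {
	token, err := pollForToken("http://127.0.0.1:4444/oauth2/token",
		"my-tv-app", "ory_dc_smldfksmdfkl.mslkmlkmlk", 5, 16830)
	if err != nil {
		panic(err)
	}
	fmt.Println("access token:", token)
}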
@@ -2065,13 +2371,13 @@ components: type: array client_id: description: |- - ID is aclient identifier for the OAuth 2.0 client that + ID is a client identifier for the OAuth 2.0 client that requested this token. type: string exp: - description: "Expires at is an integer timestamp, measured in the number\ - \ of seconds\nsince January 1 1970 UTC, indicating when this token will\ - \ expire." + description: |- + Expires at is an integer timestamp, measured in the number of seconds + since January 1 1970 UTC, indicating when this token will expire. format: int64 type: integer ext: @@ -2079,18 +2385,20 @@ components: description: Extra is arbitrary data set by the session. type: object iat: - description: "Issued at is an integer timestamp, measured in the number\ - \ of seconds\nsince January 1 1970 UTC, indicating when this token was\n\ - originally issued." + description: |- + Issued at is an integer timestamp, measured in the number of seconds + since January 1 1970 UTC, indicating when this token was + originally issued. format: int64 type: integer iss: description: IssuerURL is a string representing the issuer of this token type: string nbf: - description: "NotBefore is an integer timestamp, measured in the number\ - \ of seconds\nsince January 1 1970 UTC, indicating when this token is\ - \ not to be\nused before." + description: |- + NotBefore is an integer timestamp, measured in the number of seconds + since January 1 1970 UTC, indicating when this token is not to be + used before. format: int64 type: integer obfuscated_subject: @@ -2104,9 +2412,10 @@ components: scopes associated with this token. type: string sub: - description: "Subject of the token, as defined in JWT [RFC7519].\nUsually\ - \ a machine-readable identifier of the resource owner who\nauthorized\ - \ this token." + description: |- + Subject of the token, as defined in JWT [RFC7519]. + Usually a machine-readable identifier of the resource owner who + authorized this token. type: string token_type: description: "TokenType is the introspected token's type, typically `Bearer`." @@ -2127,8 +2436,10 @@ components: description: A JSONPatch document as defined by RFC 6902 properties: from: - description: "This field is used together with operation \"move\" and uses\ - \ JSON Pointer notation.\n\nLearn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5)." + description: |- + This field is used together with operation "move" and uses JSON Pointer notation. + + Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). example: /name type: string op: @@ -2137,13 +2448,17 @@ components: example: replace type: string path: - description: "The path to the target path. Uses JSON pointer notation.\n\ - \nLearn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5)." + description: |- + The path to the target path. Uses JSON pointer notation. + + Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). example: /name type: string value: - description: "The value to be used within the operations.\n\nLearn more\ - \ [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5)." + description: |- + The value to be used within the operations. + + Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). 
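A sketch of building a jsonPatchDocument (RFC 6902) as defined in the schemas above: an array of operations with `op`, `path` (JSON Pointer), an optional `value`, and `from` for "move". The target endpoint (for example, patching an OAuth2 client) is defined elsewhere in this spec; here the document is only assembled and marshaled, and the field paths are illustrative.

package main

import (
	"encoding/json"
	"fmt"
)

type jsonPatch struct {
	Op    string `json:"op"`              // add, remove, replace, move, copy, test
	Path  string `json:"path"`            // JSON Pointer, e.g. /name
	Value any    `json:"value,omitempty"` // used by add/replace/test
	From  string `json:"from,omitempty"`  // used by move/copy
}

func main() {
	doc := []jsonPatch{
		{Op: "replace", Path: "/client_name", Value: "renamed-app"},
		{Op: "add", Path: "/redirect_uris/-", Value: "https://example.com/alt-callback"},
	}
	out, err := json.MarshalIndent(doc, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}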
example: foobar required: - op @@ -2152,7 +2467,7 @@ components: jsonPatchDocument: description: A JSONPatchDocument request items: - $ref: '#/components/schemas/jsonPatch' + $ref: "#/components/schemas/jsonPatch" type: array jsonWebKey: example: @@ -2177,11 +2492,12 @@ components: alg: RS256 properties: alg: - description: "The \"alg\" (algorithm) parameter identifies the algorithm\ - \ intended for\nuse with the key. The values used should either be registered\ - \ in the\nIANA \"JSON Web Signature and Encryption Algorithms\" registry\n\ - established by [JWA] or be a value that contains a Collision-\nResistant\ - \ Name." + description: |- + The "alg" (algorithm) parameter identifies the algorithm intended for + use with the key. The values used should either be registered in the + IANA "JSON Web Signature and Encryption Algorithms" registry + established by [JWA] or be a value that contains a Collision- + Resistant Name. example: RS256 type: string crv: @@ -2203,23 +2519,25 @@ components: example: GawgguFyGrWKav7AX4VKUg type: string kid: - description: "The \"kid\" (key ID) parameter is used to match a specific\ - \ key. This\nis used, for instance, to choose among a set of keys within\ - \ a JWK Set\nduring key rollover. The structure of the \"kid\" value\ - \ is\nunspecified. When \"kid\" values are used within a JWK Set, different\n\ - keys within the JWK Set SHOULD use distinct \"kid\" values. (One\nexample\ - \ in which different keys might use the same \"kid\" value is if\nthey\ - \ have different \"kty\" (key type) values but are considered to be\n\ - equivalent alternatives by the application using them.) The \"kid\"\n\ - value is a case-sensitive string." + description: |- + The "kid" (key ID) parameter is used to match a specific key. This + is used, for instance, to choose among a set of keys within a JWK Set + during key rollover. The structure of the "kid" value is + unspecified. When "kid" values are used within a JWK Set, different + keys within the JWK Set SHOULD use distinct "kid" values. (One + example in which different keys might use the same "kid" value is if + they have different "kty" (key type) values but are considered to be + equivalent alternatives by the application using them.) The "kid" + value is a case-sensitive string. example: 1603dfe0af8f4596 type: string kty: - description: "The \"kty\" (key type) parameter identifies the cryptographic\ - \ algorithm\nfamily used with the key, such as \"RSA\" or \"EC\". \"kty\"\ - \ values should\neither be registered in the IANA \"JSON Web Key Types\"\ - \ registry\nestablished by [JWA] or be a value that contains a Collision-\n\ - Resistant Name. The \"kty\" value is a case-sensitive string." + description: |- + The "kty" (key type) parameter identifies the cryptographic algorithm + family used with the key, such as "RSA" or "EC". "kty" values should + either be registered in the IANA "JSON Web Key Types" registry + established by [JWA] or be a value that contains a Collision- + Resistant Name. The "kty" value is a case-sensitive string. example: RSA type: string "n": @@ -2246,12 +2564,14 @@ components: example: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU type: string x5c: - description: "The \"x5c\" (X.509 certificate chain) parameter contains a\ - \ chain of one\nor more PKIX certificates [RFC5280]. 
The certificate\ - \ chain is\nrepresented as a JSON array of certificate value strings.\ - \ Each\nstring in the array is a base64-encoded (Section 4 of [RFC4648]\ - \ --\nnot base64url-encoded) DER [ITU.X690.1994] PKIX certificate value.\n\ - The PKIX certificate containing the key value MUST be the first\ncertificate." + description: |- + The "x5c" (X.509 certificate chain) parameter contains a chain of one + or more PKIX certificates [RFC5280]. The certificate chain is + represented as a JSON array of certificate value strings. Each + string in the array is a base64-encoded (Section 4 of [RFC4648] -- + not base64url-encoded) DER [ITU.X690.1994] PKIX certificate value. + The PKIX certificate containing the key value MUST be the first + certificate. items: type: string type: array @@ -2308,15 +2628,64 @@ components: alg: RS256 properties: keys: - description: "List of JSON Web Keys\n\nThe value of the \"keys\" parameter\ - \ is an array of JSON Web Key (JWK)\nvalues. By default, the order of\ - \ the JWK values within the array does\nnot imply an order of preference\ - \ among them, although applications\nof JWK Sets can choose to assign\ - \ a meaning to the order for their\npurposes, if desired." + description: |- + List of JSON Web Keys + + The value of the "keys" parameter is an array of JSON Web Key (JWK) + values. By default, the order of the JWK values within the array does + not imply an order of preference among them, although applications + of JWK Sets can choose to assign a meaning to the order for their + purposes, if desired. items: - $ref: '#/components/schemas/jsonWebKey' + $ref: "#/components/schemas/jsonWebKey" type: array type: object + keysetPaginationRequestParameters: + description: "For details on pagination please head over to the [pagination\ + \ documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + properties: + page_size: + default: 250 + description: |- + Items per Page + + This is the number of items per page to return. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + format: int64 + maximum: 1000 + minimum: 1 + type: integer + page_token: + description: |- + Next Page Token + + The next page token. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + type: string + title: Pagination Request Parameters + type: object + keysetPaginationResponseHeaders: + description: |- + The `Link` HTTP header contains multiple links (`first`, `next`) formatted as: + `; rel="first"` + + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + properties: + link: + description: |- + The Link HTTP Header + + The `Link` header contains a comma-delimited list of links to the following pages: + + first: The first page of results. + next: The next page of results. + + Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples: + + ; rel="next" + type: string + title: Pagination Response Header + type: object nullDuration: nullable: true pattern: "^[0-9]+(ns|us|ms|s|m|h)$" @@ -2329,51 +2698,33 @@ components: title: NullTime implements sql.NullTime functionality. type: string oAuth2Client: - description: "OAuth 2.0 Clients are used to perform OAuth 2.0 and OpenID Connect\ - \ flows. 
Usually, OAuth 2.0 clients are\ngenerated for applications which\ - \ want to consume your OAuth 2.0 or OpenID Connect capabilities." + description: |- + OAuth 2.0 Clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are + generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. example: metadata: "" - token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg - client_uri: client_uri - jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan - jwks: "" logo_uri: logo_uri - created_at: 2000-01-23T04:56:07.000+00:00 - registration_client_uri: registration_client_uri allowed_cors_origins: - allowed_cors_origins - allowed_cors_origins refresh_token_grant_access_token_lifespan: refresh_token_grant_access_token_lifespan - registration_access_token: registration_access_token client_id: client_id - token_endpoint_auth_method: token_endpoint_auth_method - userinfo_signed_response_alg: userinfo_signed_response_alg - authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan authorization_code_grant_refresh_token_lifespan: authorization_code_grant_refresh_token_lifespan client_credentials_grant_access_token_lifespan: client_credentials_grant_access_token_lifespan - updated_at: 2000-01-23T04:56:07.000+00:00 - scope: scope1 scope-2 scope.3 scope:4 request_uris: - request_uris - request_uris client_secret: client_secret backchannel_logout_session_required: true backchannel_logout_uri: backchannel_logout_uri - client_name: client_name - policy_uri: policy_uri - owner: owner audience: - audience - audience - authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan post_logout_redirect_uris: - post_logout_redirect_uris - post_logout_redirect_uris - grant_types: - - grant_types - - grant_types - subject_type: subject_type + device_authorization_grant_id_token_lifespan: device_authorization_grant_id_token_lifespan + device_authorization_grant_access_token_lifespan: device_authorization_grant_access_token_lifespan refresh_token_grant_refresh_token_lifespan: refresh_token_grant_refresh_token_lifespan redirect_uris: - redirect_uris @@ -2382,19 +2733,90 @@ components: frontchannel_logout_session_required: true frontchannel_logout_uri: frontchannel_logout_uri refresh_token_grant_id_token_lifespan: refresh_token_grant_id_token_lifespan + access_token_strategy: access_token_strategy + request_object_signing_alg: request_object_signing_alg + tos_uri: tos_uri + response_types: + - response_types + - response_types + token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg + client_uri: client_uri + jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan + jwks: + keys: + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c 
+ - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": 
vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + created_at: 2000-01-23T04:56:07.000+00:00 + registration_client_uri: registration_client_uri + registration_access_token: registration_access_token + token_endpoint_auth_method: client_secret_basic + userinfo_signed_response_alg: userinfo_signed_response_alg + authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan + updated_at: 2000-01-23T04:56:07.000+00:00 + scope: scope1 scope-2 scope.3 scope:4 + device_authorization_grant_refresh_token_lifespan: device_authorization_grant_refresh_token_lifespan + client_name: client_name + policy_uri: policy_uri + owner: owner + skip_consent: true + authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan + grant_types: + - grant_types + - grant_types + subject_type: subject_type + skip_logout_consent: true implicit_grant_id_token_lifespan: implicit_grant_id_token_lifespan client_secret_expires_at: 0 implicit_grant_access_token_lifespan: implicit_grant_access_token_lifespan jwks_uri: jwks_uri - request_object_signing_alg: request_object_signing_alg - tos_uri: tos_uri contacts: - contacts - contacts - response_types: - - response_types - - response_types properties: + access_token_strategy: + description: |- + OAuth 2.0 Access Token Strategy + + AccessTokenStrategy is the strategy used to generate access tokens. + Valid options are `jwt` and `opaque`. `jwt` is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token + Setting the strategy here overrides the global setting in `strategies.access_token`. 
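A short, hedged sketch of how a caller might use the `access_token_strategy` field documented above when registering a client. The JSON property names are taken directly from the `oAuth2Client` schema in this spec; the admin host, port, and `/admin/clients` path are assumptions about a typical deployment and may differ in yours — this is not the definitive SDK usage.

```go
// Sketch: build a client-creation payload that sets the per-client
// access_token_strategy override. Property names mirror the schema above;
// the admin URL is an assumption, not taken from this spec.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type oAuth2Client struct {
	ClientName              string   `json:"client_name,omitempty"`
	GrantTypes              []string `json:"grant_types,omitempty"`
	ResponseTypes           []string `json:"response_types,omitempty"`
	RedirectURIs            []string `json:"redirect_uris,omitempty"`
	Scope                   string   `json:"scope,omitempty"`
	TokenEndpointAuthMethod string   `json:"token_endpoint_auth_method,omitempty"`
	// Overrides the global strategies.access_token setting for this client only.
	AccessTokenStrategy string `json:"access_token_strategy,omitempty"`
}

func main() {
	c := oAuth2Client{
		ClientName:              "example-app",
		GrantTypes:              []string{"authorization_code", "refresh_token"},
		ResponseTypes:           []string{"code"},
		RedirectURIs:            []string{"https://example.com/callback"},
		Scope:                   "openid offline",
		TokenEndpointAuthMethod: "client_secret_basic", // documented default
		AccessTokenStrategy:     "opaque",              // "jwt" is also valid but discouraged above
	}

	body, _ := json.Marshal(c)
	// Assumed admin endpoint; adjust host and path to your Hydra deployment.
	resp, err := http.Post("http://127.0.0.1:4445/admin/clients", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```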
+ type: string allowed_cors_origins: items: type: string @@ -2426,11 +2848,12 @@ components: title: Time duration type: string backchannel_logout_session_required: - description: "OpenID Connect Back-Channel Logout Session Required\n\nBoolean\ - \ value specifying whether the RP requires that a sid (session ID) Claim\ - \ be included in the Logout\nToken to identify the RP session with the\ - \ OP when the backchannel_logout_uri is used.\nIf omitted, the default\ - \ value is false." + description: |- + OpenID Connect Back-Channel Logout Session Required + + Boolean value specifying whether the RP requires that a sid (session ID) Claim be included in the Logout + Token to identify the RP session with the OP when the backchannel_logout_uri is used. + If omitted, the default value is false. type: boolean backchannel_logout_uri: description: |- @@ -2448,7 +2871,7 @@ components: description: |- OAuth 2.0 Client ID - The ID is autogenerated and immutable. + The ID is immutable. If no ID is provided, a UUID4 will be generated. type: string client_name: description: |- @@ -2458,9 +2881,11 @@ components: end-user during authorization. type: string client_secret: - description: "OAuth 2.0 Client Secret\n\nThe secret will be included in\ - \ the create request as cleartext, and then\nnever again. The secret is\ - \ kept in hashed format and is not recoverable once lost." + description: |- + OAuth 2.0 Client Secret + + The secret will be included in the create request as cleartext, and then + never again. The secret is kept in hashed format and is not recoverable once lost. type: string client_secret_expires_at: description: |- @@ -2470,9 +2895,12 @@ components: format: int64 type: integer client_uri: - description: "OAuth 2.0 Client URI\n\nClientURI is a URL string of a web\ - \ page providing information about the client.\nIf present, the server\ - \ SHOULD display this URL to the end-user in\na clickable fashion." + description: |- + OAuth 2.0 Client URI + + ClientURI is a URL string of a web page providing information about the client. + If present, the server SHOULD display this URL to the end-user in + a clickable fashion. type: string contacts: items: @@ -2487,20 +2915,40 @@ components: CreatedAt returns the timestamp of the client's creation. format: date-time type: string + device_authorization_grant_access_token_lifespan: + description: "Specify a time duration in milliseconds, seconds, minutes,\ + \ hours." + pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" + title: Time duration + type: string + device_authorization_grant_id_token_lifespan: + description: "Specify a time duration in milliseconds, seconds, minutes,\ + \ hours." + pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" + title: Time duration + type: string + device_authorization_grant_refresh_token_lifespan: + description: "Specify a time duration in milliseconds, seconds, minutes,\ + \ hours." + pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" + title: Time duration + type: string frontchannel_logout_session_required: - description: "OpenID Connect Front-Channel Logout Session Required\n\nBoolean\ - \ value specifying whether the RP requires that iss (issuer) and sid (session\ - \ ID) query parameters be\nincluded to identify the RP session with the\ - \ OP when the frontchannel_logout_uri is used.\nIf omitted, the default\ - \ value is false." 
+ description: |- + OpenID Connect Front-Channel Logout Session Required + + Boolean value specifying whether the RP requires that iss (issuer) and sid (session ID) query parameters be + included to identify the RP session with the OP when the frontchannel_logout_uri is used. + If omitted, the default value is false. type: boolean frontchannel_logout_uri: - description: "OpenID Connect Front-Channel Logout URI\n\nRP URL that will\ - \ cause the RP to log itself out when rendered in an iframe by the OP.\ - \ An iss (issuer) query\nparameter and a sid (session ID) query parameter\ - \ MAY be included by the OP to enable the RP to validate the\nrequest\ - \ and to determine which of the potentially multiple sessions is to be\ - \ logged out; if either is\nincluded, both MUST be." + description: |- + OpenID Connect Front-Channel Logout URI + + RP URL that will cause the RP to log itself out when rendered in an iframe by the OP. An iss (issuer) query + parameter and a sid (session ID) query parameter MAY be included by the OP to enable the RP to validate the + request and to determine which of the potentially multiple sessions is to be logged out; if either is + included, both MUST be. type: string grant_types: items: @@ -2521,31 +2969,19 @@ components: title: Time duration type: string jwks: - description: "OAuth 2.0 Client JSON Web Key Set\n\nClient's JSON Web Key\ - \ Set [JWK] document, passed by value. The semantics of the jwks parameter\ - \ are the same as\nthe jwks_uri parameter, other than that the JWK Set\ - \ is passed by value, rather than by reference. This parameter\nis intended\ - \ only to be used by Clients that, for some reason, are unable to use\ - \ the jwks_uri parameter, for\ninstance, by native applications that might\ - \ not have a location to host the contents of the JWK Set. If a Client\n\ - can use jwks_uri, it MUST NOT use jwks. One significant downside of jwks\ - \ is that it does not enable key rotation\n(which jwks_uri does, as described\ - \ in Section 10 of OpenID Connect Core 1.0 [OpenID.Core]). The jwks_uri\ - \ and jwks\nparameters MUST NOT be used together." + $ref: "#/components/schemas/jsonWebKeySet" jwks_uri: - description: "OAuth 2.0 Client JSON Web Key Set URL\n\nURL for the Client's\ - \ JSON Web Key Set [JWK] document. If the Client signs requests to the\ - \ Server, it contains\nthe signing key(s) the Server uses to validate\ - \ signatures from the Client. The JWK Set MAY also contain the\nClient's\ - \ encryption keys(s), which are used by the Server to encrypt responses\ - \ to the Client. When both signing\nand encryption keys are made available,\ - \ a use (Key Use) parameter value is REQUIRED for all keys in the referenced\n\ - JWK Set to indicate each key's intended usage. Although some algorithms\ - \ allow the same key to be used for both\nsignatures and encryption, doing\ - \ so is NOT RECOMMENDED, as it is less secure. The JWK x5c parameter MAY\ - \ be used\nto provide X.509 representations of keys provided. When used,\ - \ the bare key values MUST still be present and MUST\nmatch those in the\ - \ certificate." + description: |- + OAuth 2.0 Client JSON Web Key Set URL + + URL for the Client's JSON Web Key Set [JWK] document. If the Client signs requests to the Server, it contains + the signing key(s) the Server uses to validate signatures from the Client. The JWK Set MAY also contain the + Client's encryption keys(s), which are used by the Server to encrypt responses to the Client. 
When both signing + and encryption keys are made available, a use (Key Use) parameter value is REQUIRED for all keys in the referenced + JWK Set to indicate each key's intended usage. Although some algorithms allow the same key to be used for both + signatures and encryption, doing so is NOT RECOMMENDED, as it is less secure. The JWK x5c parameter MAY be used + to provide X.509 representations of keys provided. When used, the bare key values MUST still be present and MUST + match those in the certificate. type: string jwt_bearer_grant_access_token_lifespan: description: "Specify a time duration in milliseconds, seconds, minutes,\ @@ -2569,10 +3005,12 @@ components: Owner is a string identifying the owner of the OAuth 2.0 Client. type: string policy_uri: - description: "OAuth 2.0 Client Policy URI\n\nPolicyURI is a URL string that\ - \ points to a human-readable privacy policy document\nthat describes how\ - \ the deployment organization collects, uses,\nretains, and discloses\ - \ personal data." + description: |- + OAuth 2.0 Client Policy URI + + PolicyURI is a URL string that points to a human-readable privacy policy document + that describes how the deployment organization collects, uses, + retains, and discloses personal data. type: string post_logout_redirect_uris: items: @@ -2605,19 +3043,24 @@ components: title: Time duration type: string registration_access_token: - description: "OpenID Connect Dynamic Client Registration Access Token\n\n\ - RegistrationAccessToken can be used to update, get, or delete the OAuth2\ - \ Client. It is sent when creating a client\nusing Dynamic Client Registration." + description: |- + OpenID Connect Dynamic Client Registration Access Token + + RegistrationAccessToken can be used to update, get, or delete the OAuth2 Client. It is sent when creating a client + using Dynamic Client Registration. type: string registration_client_uri: - description: "OpenID Connect Dynamic Client Registration URL\n\nRegistrationClientURI\ - \ is the URL used to update, get, or delete the OAuth2 Client." + description: |- + OpenID Connect Dynamic Client Registration URL + + RegistrationClientURI is the URL used to update, get, or delete the OAuth2 Client. type: string request_object_signing_alg: - description: "OpenID Connect Request Object Signing Algorithm\n\nJWS [JWS]\ - \ alg algorithm [JWA] that MUST be used for signing Request Objects sent\ - \ to the OP. All Request Objects\nfrom this Client MUST be rejected, if\ - \ not signed with this algorithm." + description: |- + OpenID Connect Request Object Signing Algorithm + + JWS [JWS] alg algorithm [JWA] that MUST be used for signing Request Objects sent to the OP. All Request Objects + from this Client MUST be rejected, if not signed with this algorithm. type: string request_uris: items: @@ -2632,9 +3075,12 @@ components: \ JSON for SQL storage." type: array scope: - description: "OAuth 2.0 Client Scope\n\nScope is a string containing a space-separated\ - \ list of scope values (as\ndescribed in Section 3.3 of OAuth 2.0 [RFC6749])\ - \ that the client\ncan use when requesting access tokens." + description: |- + OAuth 2.0 Client Scope + + Scope is a string containing a space-separated list of scope values (as + described in Section 3.3 of OAuth 2.0 [RFC6749]) that the client + can use when requesting access tokens. example: scope1 scope-2 scope.3 scope:4 type: string sector_identifier_uri: @@ -2644,6 +3090,16 @@ components: URL using the https scheme to be used in calculating Pseudonymous Identifiers by the OP. 
The URL references a file with a single JSON array of redirect_uri values. type: string + skip_consent: + description: |- + SkipConsent skips the consent screen for this client. This field can only + be set from the admin API. + type: boolean + skip_logout_consent: + description: |- + SkipLogoutConsent skips the logout consent screen for this client. This field can only + be set from the admin API. + type: boolean subject_type: description: |- OpenID Connect Subject Type @@ -2652,14 +3108,16 @@ components: list of the supported subject_type values for this server. Valid types include `pairwise` and `public`. type: string token_endpoint_auth_method: - description: "OAuth 2.0 Token Endpoint Authentication Method\n\nRequested\ - \ Client Authentication method for the Token Endpoint. The options are:\n\ - \n`client_secret_post`: (default) Send `client_id` and `client_secret`\ - \ as `application/x-www-form-urlencoded` in the HTTP body.\n`client_secret_basic`:\ - \ Send `client_id` and `client_secret` as `application/x-www-form-urlencoded`\ - \ encoded in the HTTP Authorization header.\n`private_key_jwt`: Use JSON\ - \ Web Tokens to authenticate the client.\n`none`: Used for public clients\ - \ (native apps, mobile apps) which can not have secrets." + default: client_secret_basic + description: |- + OAuth 2.0 Token Endpoint Authentication Method + + Requested Client Authentication method for the Token Endpoint. The options are: + + `client_secret_basic`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. + `client_secret_post`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. + `private_key_jwt`: Use JSON Web Tokens to authenticate the client. + `none`: Used for public clients (native apps, mobile apps) which can not have secrets. type: string token_endpoint_auth_signing_alg: description: |- @@ -2684,12 +3142,12 @@ components: format: date-time type: string userinfo_signed_response_alg: - description: "OpenID Connect Request Userinfo Signed Response Algorithm\n\ - \nJWS alg algorithm [JWA] REQUIRED for signing UserInfo Responses. If\ - \ this is specified, the response will be JWT\n[JWT] serialized, and signed\ - \ using JWS. The default, if omitted, is for the UserInfo Response to\ - \ return the Claims\nas a UTF-8 encoded JSON object using the application/json\ - \ content-type." + description: |- + OpenID Connect Request Userinfo Signed Response Algorithm + + JWS alg algorithm [JWA] REQUIRED for signing UserInfo Responses. If this is specified, the response will be JWT + [JWT] serialized, and signed using JWS. The default, if omitted, is for the UserInfo Response to return the Claims + as a UTF-8 encoded JSON object using the application/json content-type. type: string title: OAuth 2.0 Client type: object @@ -2720,6 +3178,24 @@ components: pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" title: Time duration type: string + device_authorization_grant_access_token_lifespan: + description: "Specify a time duration in milliseconds, seconds, minutes,\ + \ hours." + pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" + title: Time duration + type: string + device_authorization_grant_id_token_lifespan: + description: "Specify a time duration in milliseconds, seconds, minutes,\ + \ hours." 
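The device-authorization lifespan fields added here all reuse the spec's "Time duration" string format with the pattern `^([0-9]+(ns|us|ms|s|m|h))*$`. A minimal sketch of client-side validation for such values follows; the `validLifespan` helper is illustrative and not part of any SDK.

```go
// Sketch: check a lifespan string against the duration pattern declared in
// this spec before sending it, and confirm Go can parse it as a duration.
package main

import (
	"fmt"
	"regexp"
	"time"
)

var lifespanPattern = regexp.MustCompile(`^([0-9]+(ns|us|ms|s|m|h))*$`)

// validLifespan reports whether s matches the spec's pattern and is a
// duration Go itself can parse (e.g. "1h30m", "720h").
func validLifespan(s string) bool {
	if !lifespanPattern.MatchString(s) {
		return false
	}
	_, err := time.ParseDuration(s)
	return err == nil
}

func main() {
	for _, s := range []string{"1h30m", "720h", "90", "1.5h"} {
		fmt.Printf("%-8q valid: %v\n", s, validLifespan(s))
	}
}
```

Note that the pattern requires an explicit unit, so a bare "90" or a fractional "1.5h" is rejected even though Go's `time.ParseDuration` would accept the latter.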
+ pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" + title: Time duration + type: string + device_authorization_grant_refresh_token_lifespan: + description: "Specify a time duration in milliseconds, seconds, minutes,\ + \ hours." + pattern: "^([0-9]+(ns|us|ms|s|m|h))*$" + title: Time duration + type: string implicit_grant_access_token_lifespan: description: "Specify a time duration in milliseconds, seconds, minutes,\ \ hours." @@ -2760,6 +3236,7 @@ components: type: object oAuth2ConsentRequest: example: + consent_request_id: consent_request_id requested_access_token_audience: - requested_access_token_audience - requested_access_token_audience @@ -2786,46 +3263,28 @@ components: challenge: challenge client: metadata: "" - token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg - client_uri: client_uri - jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan - jwks: "" logo_uri: logo_uri - created_at: 2000-01-23T04:56:07.000+00:00 - registration_client_uri: registration_client_uri allowed_cors_origins: - allowed_cors_origins - allowed_cors_origins refresh_token_grant_access_token_lifespan: refresh_token_grant_access_token_lifespan - registration_access_token: registration_access_token client_id: client_id - token_endpoint_auth_method: token_endpoint_auth_method - userinfo_signed_response_alg: userinfo_signed_response_alg - authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan authorization_code_grant_refresh_token_lifespan: authorization_code_grant_refresh_token_lifespan client_credentials_grant_access_token_lifespan: client_credentials_grant_access_token_lifespan - updated_at: 2000-01-23T04:56:07.000+00:00 - scope: scope1 scope-2 scope.3 scope:4 request_uris: - request_uris - request_uris client_secret: client_secret backchannel_logout_session_required: true backchannel_logout_uri: backchannel_logout_uri - client_name: client_name - policy_uri: policy_uri - owner: owner audience: - audience - audience - authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan post_logout_redirect_uris: - post_logout_redirect_uris - post_logout_redirect_uris - grant_types: - - grant_types - - grant_types - subject_type: subject_type + device_authorization_grant_id_token_lifespan: device_authorization_grant_id_token_lifespan + device_authorization_grant_access_token_lifespan: device_authorization_grant_access_token_lifespan refresh_token_grant_refresh_token_lifespan: refresh_token_grant_refresh_token_lifespan redirect_uris: - redirect_uris @@ -2834,27 +3293,90 @@ components: frontchannel_logout_session_required: true frontchannel_logout_uri: frontchannel_logout_uri refresh_token_grant_id_token_lifespan: refresh_token_grant_id_token_lifespan + access_token_strategy: access_token_strategy + request_object_signing_alg: request_object_signing_alg + tos_uri: tos_uri + response_types: + - response_types + - response_types + token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg + client_uri: client_uri + jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan + jwks: + keys: + - d: 
T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + 
use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + created_at: 2000-01-23T04:56:07.000+00:00 + registration_client_uri: registration_client_uri + registration_access_token: registration_access_token + token_endpoint_auth_method: client_secret_basic + userinfo_signed_response_alg: userinfo_signed_response_alg + authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan + updated_at: 2000-01-23T04:56:07.000+00:00 + scope: scope1 scope-2 scope.3 scope:4 + device_authorization_grant_refresh_token_lifespan: device_authorization_grant_refresh_token_lifespan + client_name: client_name + policy_uri: policy_uri + owner: owner + skip_consent: true + authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan + grant_types: + - grant_types + - grant_types + subject_type: subject_type + skip_logout_consent: true implicit_grant_id_token_lifespan: implicit_grant_id_token_lifespan client_secret_expires_at: 0 implicit_grant_access_token_lifespan: implicit_grant_access_token_lifespan jwks_uri: jwks_uri - request_object_signing_alg: request_object_signing_alg - tos_uri: tos_uri contacts: - contacts - contacts - response_types: - - response_types - - response_types login_session_id: login_session_id requested_scope: - requested_scope - requested_scope properties: acr: - description: "ACR represents the Authentication AuthorizationContext Class\ - \ Reference value for this authentication session. 
You can use it\nto\ - \ express that, for example, a user authenticated using two factor authentication." + description: |- + ACR represents the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it + to express that, for example, a user authenticated using two factor authentication. type: string amr: items: @@ -2863,12 +3385,13 @@ components: \ JSON for SQL storage." type: array challenge: - description: |- - ID is the identifier ("authorization challenge") of the consent authorization request. It is used to - identify the session. + description: Challenge is used to retrieve/accept/deny the consent request. type: string client: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" + consent_request_id: + description: ConsentRequestID is the ID of the consent request. + type: string context: title: "JSONRawMessage represents a json.RawMessage that works well with\ \ JSON, SQL, and Swagger." @@ -2885,13 +3408,12 @@ components: channel logout. It's value can generally be used to associate consecutive login requests by a certain user. type: string oidc_context: - $ref: '#/components/schemas/oAuth2ConsentRequestOpenIDConnectContext' + $ref: "#/components/schemas/oAuth2ConsentRequestOpenIDConnectContext" request_url: - description: "RequestURL is the original OAuth 2.0 Authorization URL requested\ - \ by the OAuth 2.0 client. It is the URL which\ninitiates the OAuth 2.0\ - \ Authorization Code or OAuth 2.0 Implicit flow. This URL is typically\ - \ not needed, but\nmight come in handy if you want to deal with additional\ - \ request parameters." + description: |- + RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. It is the URL which + initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but + might come in handy if you want to deal with additional request parameters. type: string requested_access_token_audience: items: @@ -2906,15 +3428,15 @@ components: \ JSON for SQL storage." type: array skip: - description: "Skip, if true, implies that the client has requested the same\ - \ scopes from the same user previously.\nIf true, you must not ask the\ - \ user to grant the requested scopes. You must however either allow or\ - \ deny the\nconsent request using the usual API call." + description: |- + Skip, if true, implies that the client has requested the same scopes from the same user previously. + If true, you must not ask the user to grant the requested scopes. You must however either allow or deny the + consent request using the usual API call. type: boolean subject: - description: "Subject is the user ID of the end-user that authenticated.\ - \ Now, that end user needs to grant or deny the scope\nrequested by the\ - \ OAuth 2.0 client." + description: |- + Subject is the user ID of the end-user that authenticated. Now, that end user needs to grant or deny the scope + requested by the OAuth 2.0 client. type: string required: - challenge @@ -2934,36 +3456,29 @@ components: display: display properties: acr_values: - description: "ACRValues is the Authentication AuthorizationContext Class\ - \ Reference requested in the OAuth 2.0 Authorization request.\nIt is a\ - \ parameter defined by OpenID Connect and expresses which level of authentication\ - \ (e.g. 
2FA) is required.\n\nOpenID Connect defines it as follows:\n>\ - \ Requested Authentication AuthorizationContext Class Reference values.\ - \ Space-separated string that specifies the acr values\nthat the Authorization\ - \ Server is being requested to use for processing this Authentication\ - \ Request, with the\nvalues appearing in order of preference. The Authentication\ - \ AuthorizationContext Class satisfied by the authentication\nperformed\ - \ is returned as the acr Claim Value, as specified in Section 2. The acr\ - \ Claim is requested as a\nVoluntary Claim by this parameter." + description: |- + ACRValues is the Authentication AuthorizationContext Class Reference requested in the OAuth 2.0 Authorization request. + It is a parameter defined by OpenID Connect and expresses which level of authentication (e.g. 2FA) is required. + + OpenID Connect defines it as follows: + > Requested Authentication AuthorizationContext Class Reference values. Space-separated string that specifies the acr values + that the Authorization Server is being requested to use for processing this Authentication Request, with the + values appearing in order of preference. The Authentication AuthorizationContext Class satisfied by the authentication + performed is returned as the acr Claim Value, as specified in Section 2. The acr Claim is requested as a + Voluntary Claim by this parameter. items: type: string type: array display: - description: "Display is a string value that specifies how the Authorization\ - \ Server displays the authentication and consent user interface pages\ - \ to the End-User.\nThe defined values are:\npage: The Authorization Server\ - \ SHOULD display the authentication and consent UI consistent with a full\ - \ User Agent page view. If the display parameter is not specified, this\ - \ is the default display mode.\npopup: The Authorization Server SHOULD\ - \ display the authentication and consent UI consistent with a popup User\ - \ Agent window. The popup User Agent window should be of an appropriate\ - \ size for a login-focused dialog and should not obscure the entire window\ - \ that it is popping up over.\ntouch: The Authorization Server SHOULD\ - \ display the authentication and consent UI consistent with a device that\ - \ leverages a touch interface.\nwap: The Authorization Server SHOULD display\ - \ the authentication and consent UI consistent with a \"feature phone\"\ - \ type display.\n\nThe Authorization Server MAY also attempt to detect\ - \ the capabilities of the User Agent and present an appropriate display." + description: |- + Display is a string value that specifies how the Authorization Server displays the authentication and consent user interface pages to the End-User. + The defined values are: + page: The Authorization Server SHOULD display the authentication and consent UI consistent with a full User Agent page view. If the display parameter is not specified, this is the default display mode. + popup: The Authorization Server SHOULD display the authentication and consent UI consistent with a popup User Agent window. The popup User Agent window should be of an appropriate size for a login-focused dialog and should not obscure the entire window that it is popping up over. + touch: The Authorization Server SHOULD display the authentication and consent UI consistent with a device that leverages a touch interface. + wap: The Authorization Server SHOULD display the authentication and consent UI consistent with a "feature phone" type display. 
+ + The Authorization Server MAY also attempt to detect the capabilities of the User Agent and present an appropriate display. type: string id_token_hint_claims: additionalProperties: {} @@ -2979,14 +3494,12 @@ components: phone number in the format specified for the phone_number Claim. The use of this parameter is optional. type: string ui_locales: - description: "UILocales is the End-User'id preferred languages and scripts\ - \ for the user interface, represented as a\nspace-separated list of BCP47\ - \ [RFC5646] language tag values, ordered by preference. For instance,\ - \ the value\n\"fr-CA fr en\" represents a preference for French as spoken\ - \ in Canada, then French (without a region designation),\nfollowed by\ - \ English (without a region designation). An error SHOULD NOT result if\ - \ some or all of the requested\nlocales are not supported by the OpenID\ - \ Provider." + description: |- + UILocales is the End-User'id preferred languages and scripts for the user interface, represented as a + space-separated list of BCP47 [RFC5646] language tag values, ordered by preference. For instance, the value + "fr-CA fr en" represents a preference for French as spoken in Canada, then French (without a region designation), + followed by English (without a region designation). An error SHOULD NOT result if some or all of the requested + locales are not supported by the OpenID Provider. items: type: string type: array @@ -2995,8 +3508,10 @@ components: oAuth2ConsentSession: description: A completed OAuth 2.0 Consent Session. example: + consent_request_id: consent_request_id remember: true consent_request: + consent_request_id: consent_request_id requested_access_token_audience: - requested_access_token_audience - requested_access_token_audience @@ -3023,46 +3538,28 @@ components: challenge: challenge client: metadata: "" - token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg - client_uri: client_uri - jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan - jwks: "" logo_uri: logo_uri - created_at: 2000-01-23T04:56:07.000+00:00 - registration_client_uri: registration_client_uri allowed_cors_origins: - allowed_cors_origins - allowed_cors_origins refresh_token_grant_access_token_lifespan: refresh_token_grant_access_token_lifespan - registration_access_token: registration_access_token client_id: client_id - token_endpoint_auth_method: token_endpoint_auth_method - userinfo_signed_response_alg: userinfo_signed_response_alg - authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan authorization_code_grant_refresh_token_lifespan: authorization_code_grant_refresh_token_lifespan client_credentials_grant_access_token_lifespan: client_credentials_grant_access_token_lifespan - updated_at: 2000-01-23T04:56:07.000+00:00 - scope: scope1 scope-2 scope.3 scope:4 request_uris: - request_uris - request_uris client_secret: client_secret backchannel_logout_session_required: true backchannel_logout_uri: backchannel_logout_uri - client_name: client_name - policy_uri: policy_uri - owner: owner audience: - audience - audience - authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan post_logout_redirect_uris: - post_logout_redirect_uris - post_logout_redirect_uris - grant_types: - - grant_types - - grant_types - subject_type: subject_type + device_authorization_grant_id_token_lifespan: device_authorization_grant_id_token_lifespan + device_authorization_grant_access_token_lifespan: 
device_authorization_grant_access_token_lifespan refresh_token_grant_refresh_token_lifespan: refresh_token_grant_refresh_token_lifespan redirect_uris: - redirect_uris @@ -3071,31 +3568,89 @@ components: frontchannel_logout_session_required: true frontchannel_logout_uri: frontchannel_logout_uri refresh_token_grant_id_token_lifespan: refresh_token_grant_id_token_lifespan + access_token_strategy: access_token_strategy + request_object_signing_alg: request_object_signing_alg + tos_uri: tos_uri + response_types: + - response_types + - response_types + token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg + client_uri: client_uri + jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan + jwks: + keys: + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": 
x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + created_at: 2000-01-23T04:56:07.000+00:00 + registration_client_uri: registration_client_uri + registration_access_token: registration_access_token + token_endpoint_auth_method: client_secret_basic + userinfo_signed_response_alg: userinfo_signed_response_alg + authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan + updated_at: 2000-01-23T04:56:07.000+00:00 + scope: scope1 scope-2 scope.3 scope:4 + device_authorization_grant_refresh_token_lifespan: device_authorization_grant_refresh_token_lifespan + client_name: client_name + policy_uri: policy_uri + owner: owner + skip_consent: true + 
authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan + grant_types: + - grant_types + - grant_types + subject_type: subject_type + skip_logout_consent: true implicit_grant_id_token_lifespan: implicit_grant_id_token_lifespan client_secret_expires_at: 0 implicit_grant_access_token_lifespan: implicit_grant_access_token_lifespan jwks_uri: jwks_uri - request_object_signing_alg: request_object_signing_alg - tos_uri: tos_uri contacts: - contacts - contacts - response_types: - - response_types - - response_types login_session_id: login_session_id requested_scope: - requested_scope - requested_scope - expires_at: - access_token: 2000-01-23T04:56:07.000+00:00 - refresh_token: 2000-01-23T04:56:07.000+00:00 - par_context: 2000-01-23T04:56:07.000+00:00 - id_token: 2000-01-23T04:56:07.000+00:00 - authorize_code: 2000-01-23T04:56:07.000+00:00 session: access_token: "" id_token: "" + context: "" grant_access_token_audience: - grant_access_token_audience - grant_access_token_audience @@ -3106,9 +3661,14 @@ components: remember_for: 0 properties: consent_request: - $ref: '#/components/schemas/oAuth2ConsentRequest' - expires_at: - $ref: '#/components/schemas/oAuth2ConsentSession_expires_at' + $ref: "#/components/schemas/oAuth2ConsentRequest" + consent_request_id: + description: ConsentRequestID is the identifier of the consent request that + initiated this consent session. + type: string + context: + title: "JSONRawMessage represents a json.RawMessage that works well with\ + \ JSON, SQL, and Swagger." grant_access_token_audience: items: type: string @@ -3126,24 +3686,28 @@ components: title: NullTime implements sql.NullTime functionality. type: string remember: - description: "Remember Consent\n\nRemember, if set to true, tells ORY Hydra\ - \ to remember this consent authorization and reuse it if the same\nclient\ - \ asks the same user for the same, or a subset of, scope." + description: |- + Remember Consent + + Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same + client asks the same user for the same, or a subset of, scope. type: boolean remember_for: - description: "Remember Consent For\n\nRememberFor sets how long the consent\ - \ authorization should be remembered for in seconds. If set to `0`, the\n\ - authorization will be remembered indefinitely." + description: |- + Remember Consent For + + RememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the + authorization will be remembered indefinitely. 
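A hedged sketch of interpreting the remember semantics described above when reading back a consent session: `remember_for` of `0` means the consent is remembered indefinitely. Only fields visible in this schema are mapped, and the embedded JSON payload is illustrative, not a real API response.

```go
// Sketch: decode an OAuth 2.0 Consent Session (subset of the schema above)
// and apply the documented remember/remember_for semantics.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type consentSession struct {
	ConsentRequestID         string   `json:"consent_request_id"`
	GrantAccessTokenAudience []string `json:"grant_access_token_audience"`
	HandledAt                string   `json:"handled_at"` // RFC 3339 timestamp (NullTime)
	Remember                 bool     `json:"remember"`
	RememberFor              int64    `json:"remember_for"` // seconds; 0 = indefinitely
}

func main() {
	raw := []byte(`{
		"consent_request_id": "abc123",
		"grant_access_token_audience": ["https://api.example.com"],
		"handled_at": "2000-01-23T04:56:07Z",
		"remember": true,
		"remember_for": 0
	}`)

	var s consentSession
	if err := json.Unmarshal(raw, &s); err != nil {
		panic(err)
	}

	switch {
	case !s.Remember:
		fmt.Println("consent not remembered; the user will be asked again")
	case s.RememberFor == 0:
		fmt.Println("consent remembered indefinitely")
	default:
		fmt.Printf("consent remembered for %s after handling\n",
			time.Duration(s.RememberFor)*time.Second)
	}
}
```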
format: int64 type: integer session: - $ref: '#/components/schemas/acceptOAuth2ConsentRequestSession' + $ref: "#/components/schemas/acceptOAuth2ConsentRequestSession" title: OAuth 2.0 Consent Session type: object oAuth2ConsentSessions: description: List of OAuth 2.0 Consent Sessions items: - $ref: '#/components/schemas/oAuth2ConsentSession' + $ref: "#/components/schemas/oAuth2ConsentSession" type: array oAuth2LoginRequest: example: @@ -3165,46 +3729,28 @@ components: challenge: challenge client: metadata: "" - token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg - client_uri: client_uri - jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan - jwks: "" logo_uri: logo_uri - created_at: 2000-01-23T04:56:07.000+00:00 - registration_client_uri: registration_client_uri allowed_cors_origins: - allowed_cors_origins - allowed_cors_origins refresh_token_grant_access_token_lifespan: refresh_token_grant_access_token_lifespan - registration_access_token: registration_access_token client_id: client_id - token_endpoint_auth_method: token_endpoint_auth_method - userinfo_signed_response_alg: userinfo_signed_response_alg - authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan authorization_code_grant_refresh_token_lifespan: authorization_code_grant_refresh_token_lifespan client_credentials_grant_access_token_lifespan: client_credentials_grant_access_token_lifespan - updated_at: 2000-01-23T04:56:07.000+00:00 - scope: scope1 scope-2 scope.3 scope:4 request_uris: - request_uris - request_uris client_secret: client_secret backchannel_logout_session_required: true backchannel_logout_uri: backchannel_logout_uri - client_name: client_name - policy_uri: policy_uri - owner: owner audience: - audience - audience - authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan post_logout_redirect_uris: - post_logout_redirect_uris - post_logout_redirect_uris - grant_types: - - grant_types - - grant_types - subject_type: subject_type + device_authorization_grant_id_token_lifespan: device_authorization_grant_id_token_lifespan + device_authorization_grant_access_token_lifespan: device_authorization_grant_access_token_lifespan refresh_token_grant_refresh_token_lifespan: refresh_token_grant_refresh_token_lifespan redirect_uris: - redirect_uris @@ -3213,18 +3759,81 @@ components: frontchannel_logout_session_required: true frontchannel_logout_uri: frontchannel_logout_uri refresh_token_grant_id_token_lifespan: refresh_token_grant_id_token_lifespan + access_token_strategy: access_token_strategy + request_object_signing_alg: request_object_signing_alg + tos_uri: tos_uri + response_types: + - response_types + - response_types + token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg + client_uri: client_uri + jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan + jwks: + keys: + - d: 
T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + 
use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + created_at: 2000-01-23T04:56:07.000+00:00 + registration_client_uri: registration_client_uri + registration_access_token: registration_access_token + token_endpoint_auth_method: client_secret_basic + userinfo_signed_response_alg: userinfo_signed_response_alg + authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan + updated_at: 2000-01-23T04:56:07.000+00:00 + scope: scope1 scope-2 scope.3 scope:4 + device_authorization_grant_refresh_token_lifespan: device_authorization_grant_refresh_token_lifespan + client_name: client_name + policy_uri: policy_uri + owner: owner + skip_consent: true + authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan + grant_types: + - grant_types + - grant_types + subject_type: subject_type + skip_logout_consent: true implicit_grant_id_token_lifespan: implicit_grant_id_token_lifespan client_secret_expires_at: 0 implicit_grant_access_token_lifespan: implicit_grant_access_token_lifespan jwks_uri: jwks_uri - request_object_signing_alg: request_object_signing_alg - tos_uri: tos_uri contacts: - contacts - contacts - response_types: - - response_types - - response_types session_id: session_id skip: true request_url: request_url @@ -3233,20 +3842,17 @@ components: - requested_scope properties: challenge: - description: |- - ID is the identifier ("login challenge") of the login request. 
It is used to - identify the session. + description: ID is the identifier of the login request. type: string client: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" oidc_context: - $ref: '#/components/schemas/oAuth2ConsentRequestOpenIDConnectContext' + $ref: "#/components/schemas/oAuth2ConsentRequestOpenIDConnectContext" request_url: - description: "RequestURL is the original OAuth 2.0 Authorization URL requested\ - \ by the OAuth 2.0 client. It is the URL which\ninitiates the OAuth 2.0\ - \ Authorization Code or OAuth 2.0 Implicit flow. This URL is typically\ - \ not needed, but\nmight come in handy if you want to deal with additional\ - \ request parameters." + description: |- + RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. It is the URL which + initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but + might come in handy if you want to deal with additional request parameters. type: string requested_access_token_audience: items: @@ -3268,75 +3874,55 @@ components: channel logout. It's value can generally be used to associate consecutive login requests by a certain user. type: string skip: - description: "Skip, if true, implies that the client has requested the same\ - \ scopes from the same user previously.\nIf true, you can skip asking\ - \ the user to grant the requested scopes, and simply forward the user\ - \ to the redirect URL.\n\nThis feature allows you to update / set session\ - \ information." + description: |- + Skip, if true, implies that the client has requested the same scopes from the same user previously. + If true, you can skip asking the user to grant the requested scopes, and simply forward the user to the redirect URL. + + This feature allows you to update / set session information. type: boolean subject: - description: "Subject is the user ID of the end-user that authenticated.\ - \ Now, that end user needs to grant or deny the scope\nrequested by the\ - \ OAuth 2.0 client. If this value is set and `skip` is true, you MUST\ - \ include this subject type\nwhen accepting the login request, or the\ - \ request will fail." + description: |- + Subject is the user ID of the end-user that authenticated. Now, that end user needs to grant or deny the scope + requested by the OAuth 2.0 client. If this value is set and `skip` is true, you MUST include this subject type + when accepting the login request, or the request will fail. type: string required: - challenge - client - request_url - - requested_access_token_audience - - requested_scope - skip - subject title: Contains information on an ongoing login request. 
type: object oAuth2LogoutRequest: example: + expires_at: 2000-01-23T04:56:07.000+00:00 subject: subject challenge: challenge client: metadata: "" - token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg - client_uri: client_uri - jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan - jwks: "" logo_uri: logo_uri - created_at: 2000-01-23T04:56:07.000+00:00 - registration_client_uri: registration_client_uri allowed_cors_origins: - allowed_cors_origins - allowed_cors_origins refresh_token_grant_access_token_lifespan: refresh_token_grant_access_token_lifespan - registration_access_token: registration_access_token client_id: client_id - token_endpoint_auth_method: token_endpoint_auth_method - userinfo_signed_response_alg: userinfo_signed_response_alg - authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan authorization_code_grant_refresh_token_lifespan: authorization_code_grant_refresh_token_lifespan client_credentials_grant_access_token_lifespan: client_credentials_grant_access_token_lifespan - updated_at: 2000-01-23T04:56:07.000+00:00 - scope: scope1 scope-2 scope.3 scope:4 request_uris: - request_uris - request_uris client_secret: client_secret backchannel_logout_session_required: true backchannel_logout_uri: backchannel_logout_uri - client_name: client_name - policy_uri: policy_uri - owner: owner audience: - audience - audience - authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan post_logout_redirect_uris: - post_logout_redirect_uris - post_logout_redirect_uris - grant_types: - - grant_types - - grant_types - subject_type: subject_type + device_authorization_grant_id_token_lifespan: device_authorization_grant_id_token_lifespan + device_authorization_grant_access_token_lifespan: device_authorization_grant_access_token_lifespan refresh_token_grant_refresh_token_lifespan: refresh_token_grant_refresh_token_lifespan redirect_uris: - redirect_uris @@ -3345,32 +3931,102 @@ components: frontchannel_logout_session_required: true frontchannel_logout_uri: frontchannel_logout_uri refresh_token_grant_id_token_lifespan: refresh_token_grant_id_token_lifespan + access_token_strategy: access_token_strategy + request_object_signing_alg: request_object_signing_alg + tos_uri: tos_uri + response_types: + - response_types + - response_types + token_endpoint_auth_signing_alg: token_endpoint_auth_signing_alg + client_uri: client_uri + jwt_bearer_grant_access_token_lifespan: jwt_bearer_grant_access_token_lifespan + jwks: + keys: + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: 
s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + - d: T_N8I-6He3M8a7X1vWt6TGIx4xB_GP3Mb4SsZSA4v-orvJzzRiQhLlRR81naWYxfQAYt5isDI6_C2L9bdWo4FFPjGQFvNoRX-_sBJyBI_rl-TBgsZYoUlAj3J92WmY2inbA-PwyJfsaIIDceYBC-eX-xiCu6qMqkZi3MwQAFL6bMdPEM0z4JBcwFT3VdiWAIRUuACWQwrXMq672x7fMuaIaHi7XDGgt1ith23CLfaREmJku9PQcchbt_uEY-hqrFY6ntTtS4paWWQj86xLL94S-Tf6v6xkL918PfLSOTq6XCzxvlFwzBJqApnAhbwqLjpPhgUG04EDRrqrSBc5Y1BLevn6Ip5h1AhessBp3wLkQgz_roeckt-ybvzKTjESMuagnpqLvOT7Y9veIug2MwPJZI2VjczRc1vzMs25XrFQ8DpUy-bNdp89TmvAXwctUMiJdgHloJw23Cv03gIUAkDnsTqZmkpbIf-crpgNKFmQP_EDKoe8p_PXZZgfbRri3NoEVGP7Mk6yEu8LjJhClhZaBNjuWw2-KlBfOA3g79mhfBnkInee5KO9mGR50qPk1V-MorUYNTFMZIm0kFE6eYVWFBwJHLKYhHU34DoiK1VP-svZpC2uAMFNA_UJEwM9CQ2b8qe4-5e9aywMvwcuArRkAB5mBIfOaOJao3mfukKAE + e: AQAB + crv: P-256 + use: sig + kid: 1603dfe0af8f4596 + x5c: + - x5c + - x5c + k: GawgguFyGrWKav7AX4VKUg + dp: G4sPXkc6Ya9y8oJW9_ILj4xuppu0lzi_H7VTkS8xj5SdX3coE0oimYwxIi2emTAue0UOa5dpgFGyBJ4c8tQ2VF402XRugKDTP8akYhFo5tAA77Qe_NmtuYZc3C3m3I24G2GvR5sSDxUyAN2zq8Lfn9EUms6rY3Ob8YeiKkTiBj0 + dq: s9lAH9fggBsoFR8Oac2R_E2gw282rT2kGOAhvIllETE1efrA6huUUvMfBcMpn8lqeW6vzznYY5SSQF7pMdC_agI3nG8Ibp1BUb0JUiraRNqUfLhcQb_d9GF4Dh7e74WbRsobRonujTYN1xCaP6TO61jvWrX-L18txXw494Q_cgk + "n": 
vTqrxUyQPl_20aqf5kXHwDZrel-KovIp8s7ewJod2EXHl8tWlRB3_Rem34KwBfqlKQGp1nqah-51H4Jzruqe0cFP58hPEIt6WqrvnmJCXxnNuIB53iX_uUUXXHDHBeaPCSRoNJzNysjoJ30TIUsKBiirhBa7f235PXbKiHducLevV6PcKxJ5cY8zO286qJLBWSPm-OIevwqsIsSIH44Qtm9sioFikhkbLwoqwWORGAY0nl6XvVOlhADdLjBSqSAeT1FPuCDCnXwzCDR8N9IFB_IjdStFkC-rVt2K5BYfPd0c3yFp_vHR15eRd0zJ8XQ7woBC8Vnsac6Et1pKS59pX6256DPWu8UDdEOolKAPgcd_g2NpA76cAaF_jcT80j9KrEzw8Tv0nJBGesuCjPNjGs_KzdkWTUXt23Hn9QJsdc1MZuaW0iqXBepHYfYoqNelzVte117t4BwVp0kUM6we0IqyXClaZgOI8S-WDBw2_Ovdm8e5NmhYAblEVoygcX8Y46oH6bKiaCQfKCFDMcRgChme7AoE1yZZYsPbaG_3IjPrC4LBMHQw8rM9dWjJ8ImjicvZ1pAm0dx-KHCP3y5PVKrxBDf1zSOsBRkOSjB8TPODnJMz6-jd5hTtZxpZPwPoIdCanTZ3ZD6uRBpTmDwtpRGm63UQs1m5FWPwb0T2IF0 + p: 6NbkXwDWUhi-eR55Cgbf27FkQDDWIamOaDr0rj1q0f1fFEz1W5A_09YvG09Fiv1AO2-D8Rl8gS1Vkz2i0zCSqnyy8A025XOcRviOMK7nIxE4OH_PEsko8dtIrb3TmE2hUXvCkmzw9EsTF1LQBOGC6iusLTXepIC1x9ukCKFZQvdgtEObQ5kzd9Nhq-cdqmSeMVLoxPLd1blviVT9Vm8-y12CtYpeJHOaIDtVPLlBhJiBoPKWg3vxSm4XxIliNOefqegIlsmTIa3MpS6WWlCK3yHhat0Q-rRxDxdyiVdG_wzJvp0Iw_2wms7pe-PgNPYvUWH9JphWP5K38YqEBiJFXQ + kty: RSA + q: 0A1FmpOWR91_RAWpqreWSavNaZb9nXeKiBo0DQGBz32DbqKqQ8S4aBJmbRhJcctjCLjain-ivut477tAUMmzJwVJDDq2MZFwC9Q-4VYZmFU4HJityQuSzHYe64RjN-E_NQ02TWhG3QGW6roq6c57c99rrUsETwJJiwS8M5p15Miuz53DaOjv-uqqFAFfywN5WkxHbraBcjHtMiQuyQbQqkCFh-oanHkwYNeytsNhTu2mQmwR5DR2roZ2nPiFjC6nsdk-A7E3S3wMzYYFw7jvbWWoYWo9vB40_MY2Y0FYQSqcDzcBIcq_0tnnasf3VW4Fdx6m80RzOb2Fsnln7vKXAQ + qi: GyM_p6JrXySiz1toFgKbWV-JdI3jQ4ypu9rbMWx3rQJBfmt0FoYzgUIZEVFEcOqwemRN81zoDAaa-Bk0KWNGDjJHZDdDmFhW3AN7lI-puxk_mHZGJ11rxyR8O55XLSe3SPmRfKwZI6yU24ZxvQKFYItdldUKGzO6Ia6zTKhAVRU + x: f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU + "y": x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0 + alg: RS256 + created_at: 2000-01-23T04:56:07.000+00:00 + registration_client_uri: registration_client_uri + registration_access_token: registration_access_token + token_endpoint_auth_method: client_secret_basic + userinfo_signed_response_alg: userinfo_signed_response_alg + authorization_code_grant_id_token_lifespan: authorization_code_grant_id_token_lifespan + updated_at: 2000-01-23T04:56:07.000+00:00 + scope: scope1 scope-2 scope.3 scope:4 + device_authorization_grant_refresh_token_lifespan: device_authorization_grant_refresh_token_lifespan + client_name: client_name + policy_uri: policy_uri + owner: owner + skip_consent: true + authorization_code_grant_access_token_lifespan: authorization_code_grant_access_token_lifespan + grant_types: + - grant_types + - grant_types + subject_type: subject_type + skip_logout_consent: true implicit_grant_id_token_lifespan: implicit_grant_id_token_lifespan client_secret_expires_at: 0 implicit_grant_access_token_lifespan: implicit_grant_access_token_lifespan jwks_uri: jwks_uri - request_object_signing_alg: request_object_signing_alg - tos_uri: tos_uri contacts: - contacts - contacts - response_types: - - response_types - - response_types rp_initiated: true request_url: request_url + requested_at: 2000-01-23T04:56:07.000+00:00 sid: sid properties: challenge: - description: |- - Challenge is the identifier ("logout challenge") of the logout authentication request. It is used to - identify the session. + description: Challenge is the identifier of the logout authentication request. type: string client: - $ref: '#/components/schemas/oAuth2Client' + $ref: "#/components/schemas/oAuth2Client" + expires_at: + format: date-time + title: NullTime implements sql.NullTime functionality. + type: string request_url: description: RequestURL is the original Logout URL requested. 
type: string + requested_at: + format: date-time + title: NullTime implements sql.NullTime functionality. + type: string rp_initiated: description: "RPInitiated is set to true if the request was initiated by\ \ a Relying Party (RP), also known as an OAuth 2.0 Client." @@ -3404,7 +4060,7 @@ components: access_token: access_token refresh_token: refresh_token scope: scope - id_token: 6 + id_token: id_token token_type: token_type expires_in: 0 properties: @@ -3412,19 +4068,19 @@ components: description: The access token issued by the authorization server. type: string expires_in: - description: "The lifetime in seconds of the access token. For\nexample,\ - \ the value \"3600\" denotes that the access token will\nexpire in one\ - \ hour from the time the response was generated." + description: |- + The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. format: int64 type: integer id_token: description: To retrieve a refresh token request the id_token scope. - format: int64 - type: integer + type: string refresh_token: - description: "The refresh token, which can be used to obtain new\naccess\ - \ tokens. To retrieve it add the scope \"offline\" to your access token\ - \ request." + description: |- + The refresh token, which can be used to obtain new + access tokens. To retrieve it add the scope "offline" to your access token request. type: string scope: description: The scope of the access token @@ -3449,6 +4105,7 @@ components: - userinfo_signed_response_alg - userinfo_signed_response_alg authorization_endpoint: https://playground.ory.sh/ory-hydra/public/oauth2/auth + device_authorization_endpoint: https://playground.ory.sh/ory-hydra/public/oauth2/device/oauth claims_supported: - claims_supported - claims_supported @@ -3481,8 +4138,30 @@ components: code_challenge_methods_supported: - code_challenge_methods_supported - code_challenge_methods_supported + credentials_endpoint_draft_00: credentials_endpoint_draft_00 frontchannel_logout_session_supported: true jwks_uri: "https://{slug}.projects.oryapis.com/.well-known/jwks.json" + credentials_supported_draft_00: + - types: + - types + - types + cryptographic_suites_supported: + - cryptographic_suites_supported + - cryptographic_suites_supported + cryptographic_binding_methods_supported: + - cryptographic_binding_methods_supported + - cryptographic_binding_methods_supported + format: format + - types: + - types + - types + cryptographic_suites_supported: + - cryptographic_suites_supported + - cryptographic_suites_supported + cryptographic_binding_methods_supported: + - cryptographic_binding_methods_supported + - cryptographic_binding_methods_supported + format: format subject_types_supported: - subject_types_supported - subject_types_supported @@ -3499,36 +4178,60 @@ components: example: https://playground.ory.sh/ory-hydra/public/oauth2/auth type: string backchannel_logout_session_supported: - description: "OpenID Connect Back-Channel Logout Session Required\n\nBoolean\ - \ value specifying whether the OP can pass a sid (session ID) Claim in\ - \ the Logout Token to identify the RP\nsession with the OP. If supported,\ - \ the sid Claim is also included in ID Tokens issued by the OP" + description: |- + OpenID Connect Back-Channel Logout Session Required + + Boolean value specifying whether the OP can pass a sid (session ID) Claim in the Logout Token to identify the RP + session with the OP. 
If supported, the sid Claim is also included in ID Tokens issued by the OP type: boolean backchannel_logout_supported: - description: "OpenID Connect Back-Channel Logout Supported\n\nBoolean value\ - \ specifying whether the OP supports back-channel logout, with true indicating\ - \ support." + description: |- + OpenID Connect Back-Channel Logout Supported + + Boolean value specifying whether the OP supports back-channel logout, with true indicating support. type: boolean claims_parameter_supported: - description: "OpenID Connect Claims Parameter Parameter Supported\n\nBoolean\ - \ value specifying whether the OP supports use of the claims parameter,\ - \ with true indicating support." + description: |- + OpenID Connect Claims Parameter Parameter Supported + + Boolean value specifying whether the OP supports use of the claims parameter, with true indicating support. type: boolean claims_supported: - description: "OpenID Connect Supported Claims\n\nJSON array containing a\ - \ list of the Claim Names of the Claims that the OpenID Provider MAY be\ - \ able to supply\nvalues for. Note that for privacy or other reasons,\ - \ this might not be an exhaustive list." + description: |- + OpenID Connect Supported Claims + + JSON array containing a list of the Claim Names of the Claims that the OpenID Provider MAY be able to supply + values for. Note that for privacy or other reasons, this might not be an exhaustive list. items: type: string type: array code_challenge_methods_supported: - description: "OAuth 2.0 PKCE Supported Code Challenge Methods\n\nJSON array\ - \ containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code\ - \ challenge methods supported\nby this authorization server." + description: |- + OAuth 2.0 PKCE Supported Code Challenge Methods + + JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported + by this authorization server. items: type: string type: array + credentials_endpoint_draft_00: + description: |- + OpenID Connect Verifiable Credentials Endpoint + + Contains the URL of the Verifiable Credentials Endpoint. + type: string + credentials_supported_draft_00: + description: |- + OpenID Connect Verifiable Credentials Supported + + JSON array containing a list of the Verifiable Credentials supported by this authorization server. + items: + $ref: "#/components/schemas/credentialSupportedDraft00" + type: array + device_authorization_endpoint: + description: OAuth 2.0 Device Authorization Endpoint URL + example: https://playground.ory.sh/ory-hydra/public/oauth2/device/oauth + type: string end_session_endpoint: description: |- OpenID Connect End-Session Endpoint @@ -3536,16 +4239,18 @@ components: URL at the OP to which an RP can perform a redirect to request that the End-User be logged out at the OP. type: string frontchannel_logout_session_supported: - description: "OpenID Connect Front-Channel Logout Session Required\n\nBoolean\ - \ value specifying whether the OP can pass iss (issuer) and sid (session\ - \ ID) query parameters to identify\nthe RP session with the OP when the\ - \ frontchannel_logout_uri is used. If supported, the sid Claim is also\n\ - included in ID Tokens issued by the OP." + description: |- + OpenID Connect Front-Channel Logout Session Required + + Boolean value specifying whether the OP can pass iss (issuer) and sid (session ID) query parameters to identify + the RP session with the OP when the frontchannel_logout_uri is used. 
If supported, the sid Claim is also + included in ID Tokens issued by the OP. type: boolean frontchannel_logout_supported: - description: "OpenID Connect Front-Channel Logout Supported\n\nBoolean value\ - \ specifying whether the OP supports HTTP-based logout, with true indicating\ - \ support." + description: |- + OpenID Connect Front-Channel Logout Supported + + Boolean value specifying whether the OP supports HTTP-based logout, with true indicating support. type: boolean grant_types_supported: description: |- @@ -3573,26 +4278,25 @@ components: type: string type: array issuer: - description: "OpenID Connect Issuer URL\n\nAn URL using the https scheme\ - \ with no query or fragment component that the OP asserts as its IssuerURL\ - \ Identifier.\nIf IssuerURL discovery is supported , this value MUST be\ - \ identical to the issuer value returned\nby WebFinger. This also MUST\ - \ be identical to the iss Claim value in ID Tokens issued from this IssuerURL." + description: |- + OpenID Connect Issuer URL + + An URL using the https scheme with no query or fragment component that the OP asserts as its IssuerURL Identifier. + If IssuerURL discovery is supported , this value MUST be identical to the issuer value returned + by WebFinger. This also MUST be identical to the iss Claim value in ID Tokens issued from this IssuerURL. example: https://playground.ory.sh/ory-hydra/public/ type: string jwks_uri: - description: "OpenID Connect Well-Known JSON Web Keys URL\n\nURL of the\ - \ OP's JSON Web Key Set [JWK] document. This contains the signing key(s)\ - \ the RP uses to validate\nsignatures from the OP. The JWK Set MAY also\ - \ contain the Server's encryption key(s), which are used by RPs\nto encrypt\ - \ requests to the Server. When both signing and encryption keys are made\ - \ available, a use (Key Use)\nparameter value is REQUIRED for all keys\ - \ in the referenced JWK Set to indicate each key's intended usage.\nAlthough\ - \ some algorithms allow the same key to be used for both signatures and\ - \ encryption, doing so is\nNOT RECOMMENDED, as it is less secure. The\ - \ JWK x5c parameter MAY be used to provide X.509 representations of\n\ - keys provided. When used, the bare key values MUST still be present and\ - \ MUST match those in the certificate." + description: |- + OpenID Connect Well-Known JSON Web Keys URL + + URL of the OP's JSON Web Key Set [JWK] document. This contains the signing key(s) the RP uses to validate + signatures from the OP. The JWK Set MAY also contain the Server's encryption key(s), which are used by RPs + to encrypt requests to the Server. When both signing and encryption keys are made available, a use (Key Use) + parameter value is REQUIRED for all keys in the referenced JWK Set to indicate each key's intended usage. + Although some algorithms allow the same key to be used for both signatures and encryption, doing so is + NOT RECOMMENDED, as it is less secure. The JWK x5c parameter MAY be used to provide X.509 representations of + keys provided. When used, the bare key values MUST still be present and MUST match those in the certificate. 
example: "https://{slug}.projects.oryapis.com/.well-known/jwks.json" type: string registration_endpoint: @@ -3600,25 +4304,27 @@ components: example: https://playground.ory.sh/ory-hydra/admin/client type: string request_object_signing_alg_values_supported: - description: "OpenID Connect Supported Request Object Signing Algorithms\n\ - \nJSON array containing a list of the JWS signing algorithms (alg values)\ - \ supported by the OP for Request Objects,\nwhich are described in Section\ - \ 6.1 of OpenID Connect Core 1.0 [OpenID.Core]. These algorithms are used\ - \ both when\nthe Request Object is passed by value (using the request\ - \ parameter) and when it is passed by reference\n(using the request_uri\ - \ parameter)." + description: |- + OpenID Connect Supported Request Object Signing Algorithms + + JSON array containing a list of the JWS signing algorithms (alg values) supported by the OP for Request Objects, + which are described in Section 6.1 of OpenID Connect Core 1.0 [OpenID.Core]. These algorithms are used both when + the Request Object is passed by value (using the request parameter) and when it is passed by reference + (using the request_uri parameter). items: type: string type: array request_parameter_supported: - description: "OpenID Connect Request Parameter Supported\n\nBoolean value\ - \ specifying whether the OP supports use of the request parameter, with\ - \ true indicating support." + description: |- + OpenID Connect Request Parameter Supported + + Boolean value specifying whether the OP supports use of the request parameter, with true indicating support. type: boolean request_uri_parameter_supported: - description: "OpenID Connect Request URI Parameter Supported\n\nBoolean\ - \ value specifying whether the OP supports use of the request_uri parameter,\ - \ with true indicating support." + description: |- + OpenID Connect Request URI Parameter Supported + + Boolean value specifying whether the OP supports use of the request_uri parameter, with true indicating support. type: boolean require_request_uri_registration: description: |- @@ -3636,10 +4342,11 @@ components: type: string type: array response_types_supported: - description: "OAuth 2.0 Supported Response Types\n\nJSON array containing\ - \ a list of the OAuth 2.0 response_type values that this OP supports.\ - \ Dynamic OpenID\nProviders MUST support the code, id_token, and the token\ - \ id_token Response Type values." + description: |- + OAuth 2.0 Supported Response Types + + JSON array containing a list of the OAuth 2.0 response_type values that this OP supports. Dynamic OpenID + Providers MUST support the code, id_token, and the token id_token Response Type values. items: type: string type: array @@ -3650,11 +4357,11 @@ components: URL of the authorization server's OAuth 2.0 revocation endpoint. type: string scopes_supported: - description: "OAuth 2.0 Supported Scope Values\n\nJSON array containing\ - \ a list of the OAuth 2.0 [RFC6749] scope values that this server supports.\ - \ The server MUST\nsupport the openid scope value. Servers MAY choose\ - \ not to advertise some supported scope values even when this parameter\ - \ is used" + description: |- + OAuth 2.0 Supported Scope Values + + JSON array containing a list of the OAuth 2.0 [RFC6749] scope values that this server supports. The server MUST + support the openid scope value. 
Servers MAY choose not to advertise some supported scope values even when this parameter is used items: type: string type: array @@ -3672,11 +4379,11 @@ components: example: https://playground.ory.sh/ory-hydra/public/oauth2/token type: string token_endpoint_auth_methods_supported: - description: "OAuth 2.0 Supported Client Authentication Methods\n\nJSON\ - \ array containing a list of Client Authentication methods supported by\ - \ this Token Endpoint. The options are\nclient_secret_post, client_secret_basic,\ - \ client_secret_jwt, and private_key_jwt, as described in Section 9 of\ - \ OpenID Connect Core 1.0" + description: |- + OAuth 2.0 Supported Client Authentication Methods + + JSON array containing a list of Client Authentication methods supported by this Token Endpoint. The options are + client_secret_post, client_secret_basic, client_secret_jwt, and private_key_jwt, as described in Section 9 of OpenID Connect Core 1.0 items: type: string type: array @@ -3695,15 +4402,16 @@ components: type: string type: array userinfo_signing_alg_values_supported: - description: "OpenID Connect Supported Userinfo Signing Algorithm\n\nJSON\ - \ array containing a list of the JWS [JWS] signing algorithms (alg values)\ - \ [JWA] supported by the UserInfo Endpoint to encode the Claims in a JWT\ - \ [JWT]." + description: |- + OpenID Connect Supported Userinfo Signing Algorithm + + JSON array containing a list of the JWS [JWS] signing algorithms (alg values) [JWA] supported by the UserInfo Endpoint to encode the Claims in a JWT [JWT]. items: type: string type: array required: - authorization_endpoint + - device_authorization_endpoint - id_token_signed_response_alg - id_token_signing_alg_values_supported - issuer @@ -3852,40 +4560,13 @@ components: \ the End-User's time zone. For example, Europe/Paris or America/Los_Angeles." type: string type: object - pagination: - properties: - page_size: - default: 250 - description: "Items per page\n\nThis is the number of items per page to\ - \ return.\nFor details on pagination please head over to the [pagination\ - \ documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." - format: int64 - maximum: 1000 - minimum: 1 - type: integer - page_token: - default: "1" - description: "Next Page Token\n\nThe next page token.\nFor details on pagination\ - \ please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." - minimum: 1 - type: string - type: object - paginationHeaders: - properties: - link: - description: "The link header contains pagination links.\n\nFor details\ - \ on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).\n\ - \nin: header" - type: string - x-total-count: - description: "The total number of clients.\n\nin: header" - type: string - type: object rejectOAuth2Request: properties: error: - description: "The error should follow the OAuth2 error format (e.g. `invalid_request`,\ - \ `login_required`).\n\nDefaults to `request_denied`." + description: |- + The error should follow the OAuth2 error format (e.g. `invalid_request`, `login_required`). + + Defaults to `request_denied`. type: string error_debug: description: |- @@ -3911,69 +4592,92 @@ components: properties: page_size: default: 250 - description: "Items per page\n\nThis is the number of items per page to\ - \ return.\nFor details on pagination please head over to the [pagination\ - \ documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." 
+ description: |- + Items per page + + This is the number of items per page to return. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). format: int64 maximum: 1000 minimum: 1 type: integer page_token: default: "1" - description: "Next Page Token\n\nThe next page token.\nFor details on pagination\ - \ please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + description: |- + Next Page Token + + The next page token. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). minimum: 1 type: string type: object tokenPaginationHeaders: properties: link: - description: "The link header contains pagination links.\n\nFor details\ - \ on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).\n\ - \nin: header" + description: |- + The link header contains pagination links. + + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + + in: header type: string x-total-count: - description: "The total number of clients.\n\nin: header" + description: |- + The total number of clients. + + in: header type: string type: object tokenPaginationRequestParameters: - description: "The `Link` HTTP header contains multiple links (`first`, `next`,\ - \ `last`, `previous`) formatted as:\n`;\ - \ rel=\"{page}\"`\n\nFor details on pagination please head over to the [pagination\ - \ documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + description: |- + The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: + `; rel="{page}"` + + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). properties: page_size: default: 250 - description: "Items per Page\n\nThis is the number of items per page to\ - \ return.\nFor details on pagination please head over to the [pagination\ - \ documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + description: |- + Items per Page + + This is the number of items per page to return. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). format: int64 maximum: 500 minimum: 1 type: integer page_token: default: "1" - description: "Next Page Token\n\nThe next page token.\nFor details on pagination\ - \ please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." + description: |- + Next Page Token + + The next page token. + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). minimum: 1 type: string title: Pagination Request Parameters type: object tokenPaginationResponseHeaders: - description: "The `Link` HTTP header contains multiple links (`first`, `next`,\ - \ `last`, `previous`) formatted as:\n`;\ - \ rel=\"{page}\"`\n\nFor details on pagination please head over to the [pagination\ - \ documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination)." 
+ description: |- + The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: + `; rel="{page}"` + + For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). properties: link: - description: "The Link HTTP Header\n\nThe `Link` header contains a comma-delimited\ - \ list of links to the following pages:\n\nfirst: The first page of results.\n\ - next: The next page of results.\nprev: The previous page of results.\n\ - last: The last page of results.\n\nPages are omitted if they do not exist.\ - \ For example, if there is no next page, the `next` link is omitted. Examples:\n\ - \n; rel=\"first\",;\ - \ rel=\"next\",; rel=\"prev\",;\ - \ rel=\"last\"" + description: |- + The Link HTTP Header + + The `Link` header contains a comma-delimited list of links to the following pages: + + first: The first page of results. + next: The next page of results. + prev: The previous page of results. + last: The last page of results. + + Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples: + + ; rel="first",; rel="next",; rel="prev",; rel="last" type: string x-total-count: description: |- @@ -4002,7 +4706,7 @@ components: example: https://jwt-idp.example.com type: string jwk: - $ref: '#/components/schemas/jsonWebKey' + $ref: "#/components/schemas/jsonWebKey" scope: description: "The \"scope\" contains list of scope values (as described\ \ in Section 3.3 of OAuth 2.0 [RFC6749])" @@ -4061,7 +4765,7 @@ components: example: https://jwt-idp.example.com type: string public_key: - $ref: '#/components/schemas/trustedOAuth2JwtGrantJsonWebKey' + $ref: "#/components/schemas/trustedOAuth2JwtGrantJsonWebKey" scope: description: "The \"scope\" contains list of scope values (as described\ \ in Section 3.3 of OAuth 2.0 [RFC6749])" @@ -4080,7 +4784,7 @@ components: trustedOAuth2JwtGrantIssuers: description: OAuth2 JWT Bearer Grant Type Issuer Trust Relationships items: - $ref: '#/components/schemas/trustedOAuth2JwtGrantIssuer' + $ref: "#/components/schemas/trustedOAuth2JwtGrantIssuer" type: array trustedOAuth2JwtGrantJsonWebKey: description: OAuth2 JWT Bearer Grant Type Issuer Trusted JSON Web Key @@ -4099,6 +4803,75 @@ components: example: https://jwt-idp.example.com type: string type: object + unexpectedError: + type: string + verifiableCredentialPrimingResponse: + example: + c_nonce_expires_in: 0 + error_debug: error_debug + status_code: 6 + c_nonce: c_nonce + error_description: error_description + format: format + error: error + error_hint: error_hint + properties: + c_nonce: + type: string + c_nonce_expires_in: + format: int64 + type: integer + error: + type: string + error_debug: + type: string + error_description: + type: string + error_hint: + type: string + format: + type: string + status_code: + format: int64 + type: integer + title: VerifiableCredentialPrimingResponse contains the nonce to include in + the proof-of-possession JWT. + type: object + verifiableCredentialResponse: + example: + credential_draft_00: credential_draft_00 + format: format + properties: + credential_draft_00: + type: string + format: + type: string + title: VerifiableCredentialResponse contains the verifiable credential. + type: object + verifyUserCodeRequest: + properties: + client: + $ref: "#/components/schemas/oAuth2Client" + device_code_request_id: + type: string + request_url: + description: RequestURL is the original Device Authorization URL requested. 
+ type: string + requested_access_token_audience: + items: + type: string + title: "StringSliceJSONFormat represents []string{} which is encoded to/from\ + \ JSON for SQL storage." + type: array + requested_scope: + items: + type: string + title: "StringSliceJSONFormat represents []string{} which is encoded to/from\ + \ JSON for SQL storage." + type: array + title: HandledDeviceUserAuthRequest is the request payload used to accept a + device user_code. + type: object version: properties: version: @@ -4108,16 +4881,17 @@ components: introspectOAuth2Token_request: properties: scope: - description: "An optional, space separated list of required scopes. If the\ - \ access token was not granted one of the\nscopes, the result of active\ - \ will be false." + description: |- + An optional, space separated list of required scopes. If the access token was not granted one of the + scopes, the result of active will be false. type: string x-formData-name: scope token: - description: "The string value of the token. For access tokens, this\nis\ - \ the \"access_token\" value returned from the token endpoint\ndefined\ - \ in OAuth 2.0. For refresh tokens, this is the \"refresh_token\"\nvalue\ - \ returned." + description: |- + The string value of the token. For access tokens, this + is the "access_token" value returned from the token endpoint + defined in OAuth 2.0. For refresh tokens, this is the "refresh_token" + value returned. required: - token type: string @@ -4134,6 +4908,9 @@ components: type: string type: object isReady_503_response: + example: + errors: + key: errors properties: errors: additionalProperties: @@ -4188,30 +4965,6 @@ components: description: The version of Ory Hydra. type: string type: object - oAuth2ConsentSession_expires_at: - example: - access_token: 2000-01-23T04:56:07.000+00:00 - refresh_token: 2000-01-23T04:56:07.000+00:00 - par_context: 2000-01-23T04:56:07.000+00:00 - id_token: 2000-01-23T04:56:07.000+00:00 - authorize_code: 2000-01-23T04:56:07.000+00:00 - properties: - access_token: - format: date-time - type: string - authorize_code: - format: date-time - type: string - id_token: - format: date-time - type: string - par_context: - format: date-time - type: string - refresh_token: - format: date-time - type: string - type: object securitySchemes: basic: scheme: basic diff --git a/internal/httpclient/api_jwk.go b/internal/httpclient/api_jwk.go index eac14c93c1f..bcd97ca968b 100644 --- a/internal/httpclient/api_jwk.go +++ b/internal/httpclient/api_jwk.go @@ -14,18 +14,18 @@ package openapi import ( "bytes" "context" - "io/ioutil" + "io" "net/http" "net/url" "strings" ) -// JwkApiService JwkApi service -type JwkApiService service +// JwkAPIService JwkAPI service +type JwkAPIService service type ApiCreateJsonWebKeySetRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string createJsonWebKeySet *CreateJsonWebKeySet } @@ -42,7 +42,7 @@ func (r ApiCreateJsonWebKeySetRequest) Execute() (*JsonWebKeySet, *http.Response /* CreateJsonWebKeySet Create JSON Web Key -This endpoint is capable of generating JSON Web Key Sets for you. There a different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created. +This endpoint is capable of generating JSON Web Key Sets for you. 
There are different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymmetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created. A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. @@ -50,7 +50,7 @@ A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that @param set The JSON Web Key Set ID @return ApiCreateJsonWebKeySetRequest */ -func (a *JwkApiService) CreateJsonWebKeySet(ctx context.Context, set string) ApiCreateJsonWebKeySetRequest { +func (a *JwkAPIService) CreateJsonWebKeySet(ctx context.Context, set string) ApiCreateJsonWebKeySetRequest { return ApiCreateJsonWebKeySetRequest{ ApiService: a, ctx: ctx, @@ -61,7 +61,7 @@ func (a *JwkApiService) CreateJsonWebKeySet(ctx context.Context, set string) Api // Execute executes the request // // @return JsonWebKeySet -func (a *JwkApiService) CreateJsonWebKeySetExecute(r ApiCreateJsonWebKeySetRequest) (*JsonWebKeySet, *http.Response, error) { +func (a *JwkAPIService) CreateJsonWebKeySetExecute(r ApiCreateJsonWebKeySetRequest) (*JsonWebKeySet, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} @@ -69,13 +69,13 @@ func (a *JwkApiService) CreateJsonWebKeySetExecute(r ApiCreateJsonWebKeySetReque localVarReturnValue *JsonWebKeySet ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.CreateJsonWebKeySet") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.CreateJsonWebKeySet") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/keys/{set}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -113,9 +113,9 @@ func (a *JwkApiService) CreateJsonWebKeySetExecute(r ApiCreateJsonWebKeySetReque return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -131,6 +131,7 @@ func (a *JwkApiService) CreateJsonWebKeySetExecute(r ApiCreateJsonWebKeySetReque newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -149,7 +150,7 @@ func (a *JwkApiService) CreateJsonWebKeySetExecute(r ApiCreateJsonWebKeySetReque type ApiDeleteJsonWebKeyRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string kid string } @@ -173,7 +174,7 @@ and allows storing user-defined keys as well. 
@param kid The JSON Web Key ID (kid) @return ApiDeleteJsonWebKeyRequest */ -func (a *JwkApiService) DeleteJsonWebKey(ctx context.Context, set string, kid string) ApiDeleteJsonWebKeyRequest { +func (a *JwkAPIService) DeleteJsonWebKey(ctx context.Context, set string, kid string) ApiDeleteJsonWebKeyRequest { return ApiDeleteJsonWebKeyRequest{ ApiService: a, ctx: ctx, @@ -183,21 +184,21 @@ func (a *JwkApiService) DeleteJsonWebKey(ctx context.Context, set string, kid st } // Execute executes the request -func (a *JwkApiService) DeleteJsonWebKeyExecute(r ApiDeleteJsonWebKeyRequest) (*http.Response, error) { +func (a *JwkAPIService) DeleteJsonWebKeyExecute(r ApiDeleteJsonWebKeyRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.DeleteJsonWebKey") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.DeleteJsonWebKey") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/keys/{set}/{kid}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) - localVarPath = strings.Replace(localVarPath, "{"+"kid"+"}", url.PathEscape(parameterToString(r.kid, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"kid"+"}", url.PathEscape(parameterValueToString(r.kid, "kid")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -230,9 +231,9 @@ func (a *JwkApiService) DeleteJsonWebKeyExecute(r ApiDeleteJsonWebKeyRequest) (* return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -248,6 +249,7 @@ func (a *JwkApiService) DeleteJsonWebKeyExecute(r ApiDeleteJsonWebKeyRequest) (* newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -257,7 +259,7 @@ func (a *JwkApiService) DeleteJsonWebKeyExecute(r ApiDeleteJsonWebKeyRequest) (* type ApiDeleteJsonWebKeySetRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string } @@ -276,7 +278,7 @@ A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that @param set The JSON Web Key Set @return ApiDeleteJsonWebKeySetRequest */ -func (a *JwkApiService) DeleteJsonWebKeySet(ctx context.Context, set string) ApiDeleteJsonWebKeySetRequest { +func (a *JwkAPIService) DeleteJsonWebKeySet(ctx context.Context, set string) ApiDeleteJsonWebKeySetRequest { return ApiDeleteJsonWebKeySetRequest{ ApiService: a, ctx: ctx, @@ -285,20 +287,20 @@ func (a *JwkApiService) DeleteJsonWebKeySet(ctx context.Context, set string) Api } // Execute executes the request -func (a *JwkApiService) DeleteJsonWebKeySetExecute(r ApiDeleteJsonWebKeySetRequest) (*http.Response, error) { +func (a *JwkAPIService) DeleteJsonWebKeySetExecute(r ApiDeleteJsonWebKeySetRequest) (*http.Response, error) { var ( localVarHTTPMethod = 
http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.DeleteJsonWebKeySet") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.DeleteJsonWebKeySet") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/keys/{set}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -331,9 +333,9 @@ func (a *JwkApiService) DeleteJsonWebKeySetExecute(r ApiDeleteJsonWebKeySetReque return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -349,6 +351,7 @@ func (a *JwkApiService) DeleteJsonWebKeySetExecute(r ApiDeleteJsonWebKeySetReque newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -358,7 +361,7 @@ func (a *JwkApiService) DeleteJsonWebKeySetExecute(r ApiDeleteJsonWebKeySetReque type ApiGetJsonWebKeyRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string kid string } @@ -377,7 +380,7 @@ This endpoint returns a singular JSON Web Key contained in a set. 
It is identifi @param kid JSON Web Key ID @return ApiGetJsonWebKeyRequest */ -func (a *JwkApiService) GetJsonWebKey(ctx context.Context, set string, kid string) ApiGetJsonWebKeyRequest { +func (a *JwkAPIService) GetJsonWebKey(ctx context.Context, set string, kid string) ApiGetJsonWebKeyRequest { return ApiGetJsonWebKeyRequest{ ApiService: a, ctx: ctx, @@ -389,7 +392,7 @@ func (a *JwkApiService) GetJsonWebKey(ctx context.Context, set string, kid strin // Execute executes the request // // @return JsonWebKeySet -func (a *JwkApiService) GetJsonWebKeyExecute(r ApiGetJsonWebKeyRequest) (*JsonWebKeySet, *http.Response, error) { +func (a *JwkAPIService) GetJsonWebKeyExecute(r ApiGetJsonWebKeyRequest) (*JsonWebKeySet, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -397,14 +400,14 @@ func (a *JwkApiService) GetJsonWebKeyExecute(r ApiGetJsonWebKeyRequest) (*JsonWe localVarReturnValue *JsonWebKeySet ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.GetJsonWebKey") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.GetJsonWebKey") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/keys/{set}/{kid}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) - localVarPath = strings.Replace(localVarPath, "{"+"kid"+"}", url.PathEscape(parameterToString(r.kid, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"kid"+"}", url.PathEscape(parameterValueToString(r.kid, "kid")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -437,9 +440,9 @@ func (a *JwkApiService) GetJsonWebKeyExecute(r ApiGetJsonWebKeyRequest) (*JsonWe return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -455,6 +458,7 @@ func (a *JwkApiService) GetJsonWebKeyExecute(r ApiGetJsonWebKeyRequest) (*JsonWe newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -473,7 +477,7 @@ func (a *JwkApiService) GetJsonWebKeyExecute(r ApiGetJsonWebKeyRequest) (*JsonWe type ApiGetJsonWebKeySetRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string } @@ -492,7 +496,7 @@ A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that @param set JSON Web Key Set ID @return ApiGetJsonWebKeySetRequest */ -func (a *JwkApiService) GetJsonWebKeySet(ctx context.Context, set string) ApiGetJsonWebKeySetRequest { +func (a *JwkAPIService) GetJsonWebKeySet(ctx context.Context, set string) ApiGetJsonWebKeySetRequest { return ApiGetJsonWebKeySetRequest{ ApiService: a, ctx: ctx, @@ -503,7 +507,7 @@ func (a *JwkApiService) GetJsonWebKeySet(ctx context.Context, set string) ApiGet // Execute executes the request // // @return JsonWebKeySet -func 
(a *JwkApiService) GetJsonWebKeySetExecute(r ApiGetJsonWebKeySetRequest) (*JsonWebKeySet, *http.Response, error) { +func (a *JwkAPIService) GetJsonWebKeySetExecute(r ApiGetJsonWebKeySetRequest) (*JsonWebKeySet, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -511,13 +515,13 @@ func (a *JwkApiService) GetJsonWebKeySetExecute(r ApiGetJsonWebKeySetRequest) (* localVarReturnValue *JsonWebKeySet ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.GetJsonWebKeySet") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.GetJsonWebKeySet") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/keys/{set}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -550,9 +554,9 @@ func (a *JwkApiService) GetJsonWebKeySetExecute(r ApiGetJsonWebKeySetRequest) (* return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -568,6 +572,7 @@ func (a *JwkApiService) GetJsonWebKeySetExecute(r ApiGetJsonWebKeySetRequest) (* newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -586,7 +591,7 @@ func (a *JwkApiService) GetJsonWebKeySetExecute(r ApiGetJsonWebKeySetRequest) (* type ApiSetJsonWebKeyRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string kid string jsonWebKey *JsonWebKey @@ -613,7 +618,7 @@ A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that @param kid JSON Web Key ID @return ApiSetJsonWebKeyRequest */ -func (a *JwkApiService) SetJsonWebKey(ctx context.Context, set string, kid string) ApiSetJsonWebKeyRequest { +func (a *JwkAPIService) SetJsonWebKey(ctx context.Context, set string, kid string) ApiSetJsonWebKeyRequest { return ApiSetJsonWebKeyRequest{ ApiService: a, ctx: ctx, @@ -625,7 +630,7 @@ func (a *JwkApiService) SetJsonWebKey(ctx context.Context, set string, kid strin // Execute executes the request // // @return JsonWebKey -func (a *JwkApiService) SetJsonWebKeyExecute(r ApiSetJsonWebKeyRequest) (*JsonWebKey, *http.Response, error) { +func (a *JwkAPIService) SetJsonWebKeyExecute(r ApiSetJsonWebKeyRequest) (*JsonWebKey, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -633,14 +638,14 @@ func (a *JwkApiService) SetJsonWebKeyExecute(r ApiSetJsonWebKeyRequest) (*JsonWe localVarReturnValue *JsonWebKey ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.SetJsonWebKey") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.SetJsonWebKey") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } 
localVarPath := localBasePath + "/admin/keys/{set}/{kid}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) - localVarPath = strings.Replace(localVarPath, "{"+"kid"+"}", url.PathEscape(parameterToString(r.kid, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"kid"+"}", url.PathEscape(parameterValueToString(r.kid, "kid")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -675,9 +680,9 @@ func (a *JwkApiService) SetJsonWebKeyExecute(r ApiSetJsonWebKeyRequest) (*JsonWe return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -693,6 +698,7 @@ func (a *JwkApiService) SetJsonWebKeyExecute(r ApiSetJsonWebKeyRequest) (*JsonWe newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -711,7 +717,7 @@ func (a *JwkApiService) SetJsonWebKeyExecute(r ApiSetJsonWebKeyRequest) (*JsonWe type ApiSetJsonWebKeySetRequest struct { ctx context.Context - ApiService *JwkApiService + ApiService *JwkAPIService set string jsonWebKeySet *JsonWebKeySet } @@ -736,7 +742,7 @@ A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that @param set The JSON Web Key Set ID @return ApiSetJsonWebKeySetRequest */ -func (a *JwkApiService) SetJsonWebKeySet(ctx context.Context, set string) ApiSetJsonWebKeySetRequest { +func (a *JwkAPIService) SetJsonWebKeySet(ctx context.Context, set string) ApiSetJsonWebKeySetRequest { return ApiSetJsonWebKeySetRequest{ ApiService: a, ctx: ctx, @@ -747,7 +753,7 @@ func (a *JwkApiService) SetJsonWebKeySet(ctx context.Context, set string) ApiSet // Execute executes the request // // @return JsonWebKeySet -func (a *JwkApiService) SetJsonWebKeySetExecute(r ApiSetJsonWebKeySetRequest) (*JsonWebKeySet, *http.Response, error) { +func (a *JwkAPIService) SetJsonWebKeySetExecute(r ApiSetJsonWebKeySetRequest) (*JsonWebKeySet, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -755,13 +761,13 @@ func (a *JwkApiService) SetJsonWebKeySetExecute(r ApiSetJsonWebKeySetRequest) (* localVarReturnValue *JsonWebKeySet ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkApiService.SetJsonWebKeySet") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "JwkAPIService.SetJsonWebKeySet") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/keys/{set}" - localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterToString(r.set, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"set"+"}", url.PathEscape(parameterValueToString(r.set, "set")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -796,9 +802,9 @@ func (a *JwkApiService) SetJsonWebKeySetExecute(r ApiSetJsonWebKeySetRequest) (* 
return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -814,6 +820,7 @@ func (a *JwkApiService) SetJsonWebKeySetExecute(r ApiSetJsonWebKeySetRequest) (* newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } diff --git a/internal/httpclient/api_metadata.go b/internal/httpclient/api_metadata.go index c57ea8ff5db..5f7d316b9fc 100644 --- a/internal/httpclient/api_metadata.go +++ b/internal/httpclient/api_metadata.go @@ -14,17 +14,17 @@ package openapi import ( "bytes" "context" - "io/ioutil" + "io" "net/http" "net/url" ) -// MetadataApiService MetadataApi service -type MetadataApiService service +// MetadataAPIService MetadataAPI service +type MetadataAPIService service type ApiGetVersionRequest struct { ctx context.Context - ApiService *MetadataApiService + ApiService *MetadataAPIService } func (r ApiGetVersionRequest) Execute() (*GetVersion200Response, *http.Response, error) { @@ -45,7 +45,7 @@ refer to the cluster state, only to a single instance. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiGetVersionRequest */ -func (a *MetadataApiService) GetVersion(ctx context.Context) ApiGetVersionRequest { +func (a *MetadataAPIService) GetVersion(ctx context.Context) ApiGetVersionRequest { return ApiGetVersionRequest{ ApiService: a, ctx: ctx, @@ -55,7 +55,7 @@ func (a *MetadataApiService) GetVersion(ctx context.Context) ApiGetVersionReques // Execute executes the request // // @return GetVersion200Response -func (a *MetadataApiService) GetVersionExecute(r ApiGetVersionRequest) (*GetVersion200Response, *http.Response, error) { +func (a *MetadataAPIService) GetVersionExecute(r ApiGetVersionRequest) (*GetVersion200Response, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -63,7 +63,7 @@ func (a *MetadataApiService) GetVersionExecute(r ApiGetVersionRequest) (*GetVers localVarReturnValue *GetVersion200Response ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "MetadataApiService.GetVersion") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "MetadataAPIService.GetVersion") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -101,9 +101,9 @@ func (a *MetadataApiService) GetVersionExecute(r ApiGetVersionRequest) (*GetVers return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -130,7 +130,7 @@ func (a *MetadataApiService) GetVersionExecute(r ApiGetVersionRequest) (*GetVers type ApiIsAliveRequest struct { ctx context.Context - ApiService 
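
To put the JWK service rename above in context, here is a minimal usage sketch. It assumes the generated client is importable (aliased as `openapi` below; the import path and the `JwkAPI` field name follow the generator's usual conventions and are not part of this diff), and that `NewConfiguration`/`NewAPIClient` exist as in every openapi-generator Go client. Aside from the `JwkApiService` → `JwkAPIService` type rename and the internal switch to `parameterValueToString`, the fluent call pattern is unchanged.

```go
package main

import (
	"context"
	"fmt"
	"log"

	// Assumed import path: inside this repo the package lives under
	// internal/httpclient; external consumers typically use the published
	// hydra-client-go module generated from the same spec.
	openapi "github.com/ory/hydra-client-go/v2"
)

func main() {
	cfg := openapi.NewConfiguration()
	// The /admin/keys endpoints live on Hydra's admin port (4445 by default).
	cfg.Servers = openapi.ServerConfigurations{{URL: "http://127.0.0.1:4445"}}
	client := openapi.NewAPIClient(cfg)

	// Fluent request builder: path parameters are positional, Execute sends the call.
	set, _, err := client.JwkAPI.GetJsonWebKeySet(context.Background(), "hydra.openid.id-token").Execute()
	if err != nil {
		log.Fatal(err)
	}
	for _, key := range set.Keys {
		fmt.Println(key.Kid)
	}
}
```
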
*MetadataApiService + ApiService *MetadataAPIService } func (r ApiIsAliveRequest) Execute() (*HealthStatus, *http.Response, error) { @@ -152,7 +152,7 @@ refer to the cluster state, only to a single instance. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiIsAliveRequest */ -func (a *MetadataApiService) IsAlive(ctx context.Context) ApiIsAliveRequest { +func (a *MetadataAPIService) IsAlive(ctx context.Context) ApiIsAliveRequest { return ApiIsAliveRequest{ ApiService: a, ctx: ctx, @@ -162,7 +162,7 @@ func (a *MetadataApiService) IsAlive(ctx context.Context) ApiIsAliveRequest { // Execute executes the request // // @return HealthStatus -func (a *MetadataApiService) IsAliveExecute(r ApiIsAliveRequest) (*HealthStatus, *http.Response, error) { +func (a *MetadataAPIService) IsAliveExecute(r ApiIsAliveRequest) (*HealthStatus, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -170,7 +170,7 @@ func (a *MetadataApiService) IsAliveExecute(r ApiIsAliveRequest) (*HealthStatus, localVarReturnValue *HealthStatus ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "MetadataApiService.IsAlive") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "MetadataAPIService.IsAlive") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -208,9 +208,9 @@ func (a *MetadataApiService) IsAliveExecute(r ApiIsAliveRequest) (*HealthStatus, return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -227,6 +227,7 @@ func (a *MetadataApiService) IsAliveExecute(r ApiIsAliveRequest) (*HealthStatus, newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v } return localVarReturnValue, localVarHTTPResponse, newErr @@ -246,7 +247,7 @@ func (a *MetadataApiService) IsAliveExecute(r ApiIsAliveRequest) (*HealthStatus, type ApiIsReadyRequest struct { ctx context.Context - ApiService *MetadataApiService + ApiService *MetadataAPIService } func (r ApiIsReadyRequest) Execute() (*IsReady200Response, *http.Response, error) { @@ -268,7 +269,7 @@ refer to the cluster state, only to a single instance. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiIsReadyRequest */ -func (a *MetadataApiService) IsReady(ctx context.Context) ApiIsReadyRequest { +func (a *MetadataAPIService) IsReady(ctx context.Context) ApiIsReadyRequest { return ApiIsReadyRequest{ ApiService: a, ctx: ctx, @@ -278,7 +279,7 @@ func (a *MetadataApiService) IsReady(ctx context.Context) ApiIsReadyRequest { // Execute executes the request // // @return IsReady200Response -func (a *MetadataApiService) IsReadyExecute(r ApiIsReadyRequest) (*IsReady200Response, *http.Response, error) { +func (a *MetadataAPIService) IsReadyExecute(r ApiIsReadyRequest) (*IsReady200Response, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -286,7 +287,7 @@ func (a *MetadataApiService) IsReadyExecute(r ApiIsReadyRequest) (*IsReady200Res localVarReturnValue *IsReady200Response ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "MetadataApiService.IsReady") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "MetadataAPIService.IsReady") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -324,9 +325,9 @@ func (a *MetadataApiService) IsReadyExecute(r ApiIsReadyRequest) (*IsReady200Res return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -343,6 +344,7 @@ func (a *MetadataApiService) IsReadyExecute(r ApiIsReadyRequest) (*IsReady200Res newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v } return localVarReturnValue, localVarHTTPResponse, newErr diff --git a/internal/httpclient/api_o_auth2.go b/internal/httpclient/api_o_auth2.go index 7d4ebb8c853..b4f599bd43f 100644 --- a/internal/httpclient/api_o_auth2.go +++ b/internal/httpclient/api_o_auth2.go @@ -14,18 +14,18 @@ package openapi import ( "bytes" "context" - "io/ioutil" + "io" "net/http" "net/url" "strings" ) -// OAuth2ApiService OAuth2Api service -type OAuth2ApiService service +// OAuth2APIService OAuth2API service +type OAuth2APIService service type ApiAcceptOAuth2ConsentRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService consentChallenge *string acceptOAuth2ConsentRequest *AcceptOAuth2ConsentRequest } @@ -68,7 +68,7 @@ head over to the OAuth 2.0 documentation. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
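
A rough sketch of the renamed metadata service in use, assuming a client constructed as in the earlier example and a `MetadataAPI` field on the `APIClient` (the field name is an assumption; only the `MetadataAPIService` type appears in this diff). As the doc comments above note, these endpoints report on a single instance, never on cluster state.

```go
package examples

import (
	"context"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// reportHealth prints liveness, readiness and version information for one
// Hydra instance. A 503 from the readiness probe surfaces here as an error.
func reportHealth(ctx context.Context, client *openapi.APIClient) error {
	alive, _, err := client.MetadataAPI.IsAlive(ctx).Execute()
	if err != nil {
		return err
	}
	ready, _, err := client.MetadataAPI.IsReady(ctx).Execute()
	if err != nil {
		return err
	}
	version, _, err := client.MetadataAPI.GetVersion(ctx).Execute()
	if err != nil {
		return err
	}
	fmt.Printf("alive=%v ready=%v version=%v\n",
		alive.GetStatus(), ready.GetStatus(), version.GetVersion())
	return nil
}
```
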
@return ApiAcceptOAuth2ConsentRequestRequest */ -func (a *OAuth2ApiService) AcceptOAuth2ConsentRequest(ctx context.Context) ApiAcceptOAuth2ConsentRequestRequest { +func (a *OAuth2APIService) AcceptOAuth2ConsentRequest(ctx context.Context) ApiAcceptOAuth2ConsentRequestRequest { return ApiAcceptOAuth2ConsentRequestRequest{ ApiService: a, ctx: ctx, @@ -78,7 +78,7 @@ func (a *OAuth2ApiService) AcceptOAuth2ConsentRequest(ctx context.Context) ApiAc // Execute executes the request // // @return OAuth2RedirectTo -func (a *OAuth2ApiService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2ConsentRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { +func (a *OAuth2APIService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2ConsentRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -86,7 +86,7 @@ func (a *OAuth2ApiService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2Co localVarReturnValue *OAuth2RedirectTo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.AcceptOAuth2ConsentRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.AcceptOAuth2ConsentRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -100,7 +100,7 @@ func (a *OAuth2ApiService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2Co return localVarReturnValue, nil, reportError("consentChallenge is required and must be specified") } - localVarQueryParams.Add("consent_challenge", parameterToString(*r.consentChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "consent_challenge", r.consentChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{"application/json"} @@ -130,9 +130,9 @@ func (a *OAuth2ApiService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2Co return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -148,6 +148,7 @@ func (a *OAuth2ApiService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2Co newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -166,7 +167,7 @@ func (a *OAuth2ApiService) AcceptOAuth2ConsentRequestExecute(r ApiAcceptOAuth2Co type ApiAcceptOAuth2LoginRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService loginChallenge *string acceptOAuth2LoginRequest *AcceptOAuth2LoginRequest } @@ -204,7 +205,7 @@ The response contains a redirect URL which the login provider should redirect th @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiAcceptOAuth2LoginRequestRequest */ -func (a *OAuth2ApiService) AcceptOAuth2LoginRequest(ctx context.Context) ApiAcceptOAuth2LoginRequestRequest { +func (a *OAuth2APIService) AcceptOAuth2LoginRequest(ctx context.Context) ApiAcceptOAuth2LoginRequestRequest { return ApiAcceptOAuth2LoginRequestRequest{ ApiService: a, ctx: ctx, @@ -214,7 +215,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LoginRequest(ctx context.Context) ApiAcce // Execute executes the request // // @return OAuth2RedirectTo -func (a *OAuth2ApiService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2LoginRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { +func (a *OAuth2APIService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2LoginRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -222,7 +223,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2Logi localVarReturnValue *OAuth2RedirectTo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.AcceptOAuth2LoginRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.AcceptOAuth2LoginRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -236,7 +237,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2Logi return localVarReturnValue, nil, reportError("loginChallenge is required and must be specified") } - localVarQueryParams.Add("login_challenge", parameterToString(*r.loginChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "login_challenge", r.loginChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{"application/json"} @@ -266,9 +267,9 @@ func (a *OAuth2ApiService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2Logi return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -284,6 +285,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2Logi newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -302,7 +304,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LoginRequestExecute(r ApiAcceptOAuth2Logi type ApiAcceptOAuth2LogoutRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService logoutChallenge *string } @@ -326,7 +328,7 @@ The response contains a redirect URL which the consent provider should redirect @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
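
A sketch of how a login and consent app would call the acceptance endpoints above. The challenge values are now serialized via `parameterAddToHeaderOrQuery`, but the public builder API (`LoginChallenge`, `ConsentChallenge`, and the body setters) is unchanged. The request-model constructor and field names are assumptions based on the published client; in a real flow the two challenges arrive in separate browser redirects rather than in one call.

```go
package examples

import (
	"context"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// acceptLoginAndConsent accepts a login challenge for a known subject, then
// accepts the follow-up consent challenge, returning the redirect URL Hydra
// wants the browser sent to.
func acceptLoginAndConsent(ctx context.Context, client *openapi.APIClient, loginChallenge, consentChallenge string) (string, error) {
	_, _, err := client.OAuth2API.AcceptOAuth2LoginRequest(ctx).
		LoginChallenge(loginChallenge).
		AcceptOAuth2LoginRequest(*openapi.NewAcceptOAuth2LoginRequest("subject-123")).
		Execute()
	if err != nil {
		return "", err
	}

	redirect, _, err := client.OAuth2API.AcceptOAuth2ConsentRequest(ctx).
		ConsentChallenge(consentChallenge).
		AcceptOAuth2ConsentRequest(openapi.AcceptOAuth2ConsentRequest{
			GrantScope: []string{"openid", "offline_access"},
		}).
		Execute()
	if err != nil {
		return "", err
	}
	return redirect.RedirectTo, nil
}
```
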
@return ApiAcceptOAuth2LogoutRequestRequest */ -func (a *OAuth2ApiService) AcceptOAuth2LogoutRequest(ctx context.Context) ApiAcceptOAuth2LogoutRequestRequest { +func (a *OAuth2APIService) AcceptOAuth2LogoutRequest(ctx context.Context) ApiAcceptOAuth2LogoutRequestRequest { return ApiAcceptOAuth2LogoutRequestRequest{ ApiService: a, ctx: ctx, @@ -336,7 +338,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LogoutRequest(ctx context.Context) ApiAcc // Execute executes the request // // @return OAuth2RedirectTo -func (a *OAuth2ApiService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2LogoutRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { +func (a *OAuth2APIService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2LogoutRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -344,7 +346,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2Log localVarReturnValue *OAuth2RedirectTo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.AcceptOAuth2LogoutRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.AcceptOAuth2LogoutRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -358,7 +360,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2Log return localVarReturnValue, nil, reportError("logoutChallenge is required and must be specified") } - localVarQueryParams.Add("logout_challenge", parameterToString(*r.logoutChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "logout_challenge", r.logoutChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -386,9 +388,9 @@ func (a *OAuth2ApiService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2Log return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -404,6 +406,133 @@ func (a *OAuth2ApiService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2Log newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiAcceptUserCodeRequestRequest struct { + ctx context.Context + ApiService *OAuth2APIService + deviceChallenge *string + acceptDeviceUserCodeRequest *AcceptDeviceUserCodeRequest +} + +func (r ApiAcceptUserCodeRequestRequest) DeviceChallenge(deviceChallenge string) ApiAcceptUserCodeRequestRequest { + r.deviceChallenge = &deviceChallenge + return r +} + +func (r ApiAcceptUserCodeRequestRequest) AcceptDeviceUserCodeRequest(acceptDeviceUserCodeRequest AcceptDeviceUserCodeRequest) ApiAcceptUserCodeRequestRequest { + 
r.acceptDeviceUserCodeRequest = &acceptDeviceUserCodeRequest + return r +} + +func (r ApiAcceptUserCodeRequestRequest) Execute() (*OAuth2RedirectTo, *http.Response, error) { + return r.ApiService.AcceptUserCodeRequestExecute(r) +} + +/* +AcceptUserCodeRequest Accepts a device grant user_code request + +Accepts a device grant user_code request + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @return ApiAcceptUserCodeRequestRequest +*/ +func (a *OAuth2APIService) AcceptUserCodeRequest(ctx context.Context) ApiAcceptUserCodeRequestRequest { + return ApiAcceptUserCodeRequestRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return OAuth2RedirectTo +func (a *OAuth2APIService) AcceptUserCodeRequestExecute(r ApiAcceptUserCodeRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPut + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *OAuth2RedirectTo + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.AcceptUserCodeRequest") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/admin/oauth2/auth/requests/device/accept" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.deviceChallenge == nil { + return localVarReturnValue, nil, reportError("deviceChallenge is required and must be specified") + } + + parameterAddToHeaderOrQuery(localVarQueryParams, "device_challenge", r.deviceChallenge, "form", "") + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + // body params + localVarPostBody = r.acceptDeviceUserCodeRequest + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v ErrorOAuth2 + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, 
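
The `AcceptUserCodeRequest` operation above is newly added for the device authorization grant. The sketch below shows how a verification UI might accept a user_code; the `UserCode` field on `AcceptDeviceUserCodeRequest` is an assumption inferred from the request-body type name, not something this diff shows.

```go
package examples

import (
	"context"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// acceptUserCode confirms a user_code entered on the device-verification
// screen by accepting the device challenge and returns the redirect URL.
func acceptUserCode(ctx context.Context, client *openapi.APIClient, deviceChallenge, userCode string) (string, error) {
	body := openapi.AcceptDeviceUserCodeRequest{UserCode: &userCode} // field name assumed

	redirect, _, err := client.OAuth2API.AcceptUserCodeRequest(ctx).
		DeviceChallenge(deviceChallenge).
		AcceptDeviceUserCodeRequest(body).
		Execute()
	if err != nil {
		return "", err
	}
	return redirect.RedirectTo, nil
}
```
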
localVarHTTPResponse, newErr } @@ -422,7 +551,7 @@ func (a *OAuth2ApiService) AcceptOAuth2LogoutRequestExecute(r ApiAcceptOAuth2Log type ApiCreateOAuth2ClientRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService oAuth2Client *OAuth2Client } @@ -445,7 +574,7 @@ is generated. The secret is echoed in the response. It is not possible to retrie @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiCreateOAuth2ClientRequest */ -func (a *OAuth2ApiService) CreateOAuth2Client(ctx context.Context) ApiCreateOAuth2ClientRequest { +func (a *OAuth2APIService) CreateOAuth2Client(ctx context.Context) ApiCreateOAuth2ClientRequest { return ApiCreateOAuth2ClientRequest{ ApiService: a, ctx: ctx, @@ -455,7 +584,7 @@ func (a *OAuth2ApiService) CreateOAuth2Client(ctx context.Context) ApiCreateOAut // Execute executes the request // // @return OAuth2Client -func (a *OAuth2ApiService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OAuth2APIService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} @@ -463,7 +592,7 @@ func (a *OAuth2ApiService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequ localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.CreateOAuth2Client") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.CreateOAuth2Client") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -506,9 +635,9 @@ func (a *OAuth2ApiService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequ return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -525,6 +654,7 @@ func (a *OAuth2ApiService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequ newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -534,6 +664,7 @@ func (a *OAuth2ApiService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequ newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -552,7 +683,7 @@ func (a *OAuth2ApiService) CreateOAuth2ClientExecute(r ApiCreateOAuth2ClientRequ type ApiDeleteOAuth2ClientRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string } @@ -574,7 +705,7 @@ Make sure that this endpoint is well protected and only callable by first-party @param id The id of the OAuth 2.0 Client. 
@return ApiDeleteOAuth2ClientRequest */ -func (a *OAuth2ApiService) DeleteOAuth2Client(ctx context.Context, id string) ApiDeleteOAuth2ClientRequest { +func (a *OAuth2APIService) DeleteOAuth2Client(ctx context.Context, id string) ApiDeleteOAuth2ClientRequest { return ApiDeleteOAuth2ClientRequest{ ApiService: a, ctx: ctx, @@ -583,20 +714,20 @@ func (a *OAuth2ApiService) DeleteOAuth2Client(ctx context.Context, id string) Ap } // Execute executes the request -func (a *OAuth2ApiService) DeleteOAuth2ClientExecute(r ApiDeleteOAuth2ClientRequest) (*http.Response, error) { +func (a *OAuth2APIService) DeleteOAuth2ClientExecute(r ApiDeleteOAuth2ClientRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.DeleteOAuth2Client") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.DeleteOAuth2Client") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/clients/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -629,9 +760,9 @@ func (a *OAuth2ApiService) DeleteOAuth2ClientExecute(r ApiDeleteOAuth2ClientRequ return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -647,6 +778,7 @@ func (a *OAuth2ApiService) DeleteOAuth2ClientExecute(r ApiDeleteOAuth2ClientRequ newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -656,7 +788,7 @@ func (a *OAuth2ApiService) DeleteOAuth2ClientExecute(r ApiDeleteOAuth2ClientRequ type ApiDeleteOAuth2TokenRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService clientId *string } @@ -678,7 +810,7 @@ This endpoint deletes OAuth2 access tokens issued to an OAuth 2.0 Client from th @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
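
For the client-management endpoints above, a short sketch of registering and removing an OAuth 2.0 client. The `Set<Field>` helpers on `OAuth2Client` follow the generator's convention and are assumptions insofar as they do not appear in this diff.

```go
package examples

import (
	"context"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// createAndDeleteClient registers a client-credentials client and deletes it again.
func createAndDeleteClient(ctx context.Context, client *openapi.APIClient) error {
	c := openapi.NewOAuth2Client()
	c.SetClientName("example-service")
	c.SetGrantTypes([]string{"client_credentials"})
	c.SetTokenEndpointAuthMethod("client_secret_post")

	created, _, err := client.OAuth2API.CreateOAuth2Client(ctx).OAuth2Client(*c).Execute()
	if err != nil {
		return err
	}
	// The secret is only echoed in this response and cannot be retrieved later.
	_ = created.GetClientSecret()

	_, err = client.OAuth2API.DeleteOAuth2Client(ctx, created.GetClientId()).Execute()
	return err
}
```
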
@return ApiDeleteOAuth2TokenRequest */ -func (a *OAuth2ApiService) DeleteOAuth2Token(ctx context.Context) ApiDeleteOAuth2TokenRequest { +func (a *OAuth2APIService) DeleteOAuth2Token(ctx context.Context) ApiDeleteOAuth2TokenRequest { return ApiDeleteOAuth2TokenRequest{ ApiService: a, ctx: ctx, @@ -686,14 +818,14 @@ func (a *OAuth2ApiService) DeleteOAuth2Token(ctx context.Context) ApiDeleteOAuth } // Execute executes the request -func (a *OAuth2ApiService) DeleteOAuth2TokenExecute(r ApiDeleteOAuth2TokenRequest) (*http.Response, error) { +func (a *OAuth2APIService) DeleteOAuth2TokenExecute(r ApiDeleteOAuth2TokenRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.DeleteOAuth2Token") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.DeleteOAuth2Token") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } @@ -707,7 +839,7 @@ func (a *OAuth2ApiService) DeleteOAuth2TokenExecute(r ApiDeleteOAuth2TokenReques return nil, reportError("clientId is required and must be specified") } - localVarQueryParams.Add("client_id", parameterToString(*r.clientId, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "client_id", r.clientId, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -735,9 +867,9 @@ func (a *OAuth2ApiService) DeleteOAuth2TokenExecute(r ApiDeleteOAuth2TokenReques return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -753,6 +885,7 @@ func (a *OAuth2ApiService) DeleteOAuth2TokenExecute(r ApiDeleteOAuth2TokenReques newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -762,7 +895,7 @@ func (a *OAuth2ApiService) DeleteOAuth2TokenExecute(r ApiDeleteOAuth2TokenReques type ApiDeleteTrustedOAuth2JwtGrantIssuerRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string } @@ -783,7 +916,7 @@ for OAuth 2.0 Client Authentication and Authorization Grant. 
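
`DeleteOAuth2Token` requires the `client_id` query parameter, as the `reportError` branch above enforces. A minimal sketch, assuming the usual `ClientId` setter on the request builder:

```go
package examples

import (
	"context"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// purgeClientTokens deletes all access tokens Hydra has issued to one client.
func purgeClientTokens(ctx context.Context, client *openapi.APIClient, clientID string) error {
	_, err := client.OAuth2API.DeleteOAuth2Token(ctx).ClientId(clientID).Execute()
	return err
}
```
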
@param id The id of the desired grant @return ApiDeleteTrustedOAuth2JwtGrantIssuerRequest */ -func (a *OAuth2ApiService) DeleteTrustedOAuth2JwtGrantIssuer(ctx context.Context, id string) ApiDeleteTrustedOAuth2JwtGrantIssuerRequest { +func (a *OAuth2APIService) DeleteTrustedOAuth2JwtGrantIssuer(ctx context.Context, id string) ApiDeleteTrustedOAuth2JwtGrantIssuerRequest { return ApiDeleteTrustedOAuth2JwtGrantIssuerRequest{ ApiService: a, ctx: ctx, @@ -792,20 +925,20 @@ func (a *OAuth2ApiService) DeleteTrustedOAuth2JwtGrantIssuer(ctx context.Context } // Execute executes the request -func (a *OAuth2ApiService) DeleteTrustedOAuth2JwtGrantIssuerExecute(r ApiDeleteTrustedOAuth2JwtGrantIssuerRequest) (*http.Response, error) { +func (a *OAuth2APIService) DeleteTrustedOAuth2JwtGrantIssuerExecute(r ApiDeleteTrustedOAuth2JwtGrantIssuerRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.DeleteTrustedOAuth2JwtGrantIssuer") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.DeleteTrustedOAuth2JwtGrantIssuer") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/trust/grants/jwt-bearer/issuers/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -838,9 +971,9 @@ func (a *OAuth2ApiService) DeleteTrustedOAuth2JwtGrantIssuerExecute(r ApiDeleteT return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -856,6 +989,7 @@ func (a *OAuth2ApiService) DeleteTrustedOAuth2JwtGrantIssuerExecute(r ApiDeleteT newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -865,7 +999,7 @@ func (a *OAuth2ApiService) DeleteTrustedOAuth2JwtGrantIssuerExecute(r ApiDeleteT type ApiGetOAuth2ClientRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string } @@ -885,7 +1019,7 @@ generated for applications which want to consume your OAuth 2.0 or OpenID Connec @param id The id of the OAuth 2.0 Client. 
@return ApiGetOAuth2ClientRequest */ -func (a *OAuth2ApiService) GetOAuth2Client(ctx context.Context, id string) ApiGetOAuth2ClientRequest { +func (a *OAuth2APIService) GetOAuth2Client(ctx context.Context, id string) ApiGetOAuth2ClientRequest { return ApiGetOAuth2ClientRequest{ ApiService: a, ctx: ctx, @@ -896,7 +1030,7 @@ func (a *OAuth2ApiService) GetOAuth2Client(ctx context.Context, id string) ApiGe // Execute executes the request // // @return OAuth2Client -func (a *OAuth2ApiService) GetOAuth2ClientExecute(r ApiGetOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OAuth2APIService) GetOAuth2ClientExecute(r ApiGetOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -904,13 +1038,13 @@ func (a *OAuth2ApiService) GetOAuth2ClientExecute(r ApiGetOAuth2ClientRequest) ( localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.GetOAuth2Client") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.GetOAuth2Client") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/clients/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -943,9 +1077,9 @@ func (a *OAuth2ApiService) GetOAuth2ClientExecute(r ApiGetOAuth2ClientRequest) ( return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -961,6 +1095,7 @@ func (a *OAuth2ApiService) GetOAuth2ClientExecute(r ApiGetOAuth2ClientRequest) ( newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -979,7 +1114,7 @@ func (a *OAuth2ApiService) GetOAuth2ClientExecute(r ApiGetOAuth2ClientRequest) ( type ApiGetOAuth2ConsentRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService consentChallenge *string } @@ -1010,7 +1145,7 @@ head over to the OAuth 2.0 documentation. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
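
Beyond the rename, the recurring change in these hunks is that decoded error payloads are now passed through `formatErrorMessage`, so `GenericOpenAPIError.Error()` presumably carries the HTTP status plus details from the decoded `ErrorOAuth2` model rather than the bare status line. A hedged sketch of surfacing that on a client lookup:

```go
package examples

import (
	"context"
	"errors"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// getClient fetches a client and unwraps the enriched API error on failure.
func getClient(ctx context.Context, client *openapi.APIClient, id string) (*openapi.OAuth2Client, error) {
	c, _, err := client.OAuth2API.GetOAuth2Client(ctx, id).Execute()
	if err != nil {
		var apiErr *openapi.GenericOpenAPIError
		if errors.As(err, &apiErr) {
			// Error() now includes details from the decoded error model;
			// Body() still holds the raw response bytes.
			return nil, fmt.Errorf("get client %q: %s (body: %s)", id, apiErr.Error(), apiErr.Body())
		}
		return nil, err
	}
	return c, nil
}
```
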
@return ApiGetOAuth2ConsentRequestRequest */ -func (a *OAuth2ApiService) GetOAuth2ConsentRequest(ctx context.Context) ApiGetOAuth2ConsentRequestRequest { +func (a *OAuth2APIService) GetOAuth2ConsentRequest(ctx context.Context) ApiGetOAuth2ConsentRequestRequest { return ApiGetOAuth2ConsentRequestRequest{ ApiService: a, ctx: ctx, @@ -1020,7 +1155,7 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequest(ctx context.Context) ApiGetOA // Execute executes the request // // @return OAuth2ConsentRequest -func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentRequestRequest) (*OAuth2ConsentRequest, *http.Response, error) { +func (a *OAuth2APIService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentRequestRequest) (*OAuth2ConsentRequest, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1028,7 +1163,7 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentR localVarReturnValue *OAuth2ConsentRequest ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.GetOAuth2ConsentRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.GetOAuth2ConsentRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1042,7 +1177,7 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentR return localVarReturnValue, nil, reportError("consentChallenge is required and must be specified") } - localVarQueryParams.Add("consent_challenge", parameterToString(*r.consentChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "consent_challenge", r.consentChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -1070,9 +1205,9 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentR return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1089,6 +1224,7 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentR newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1098,6 +1234,7 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentR newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1116,7 +1253,7 @@ func (a *OAuth2ApiService) GetOAuth2ConsentRequestExecute(r ApiGetOAuth2ConsentR type ApiGetOAuth2LoginRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService loginChallenge *string } @@ -1146,7 +1283,7 @@ provider uses that challenge to fetch information on the OAuth2 request and then @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiGetOAuth2LoginRequestRequest */ -func (a *OAuth2ApiService) GetOAuth2LoginRequest(ctx context.Context) ApiGetOAuth2LoginRequestRequest { +func (a *OAuth2APIService) GetOAuth2LoginRequest(ctx context.Context) ApiGetOAuth2LoginRequestRequest { return ApiGetOAuth2LoginRequestRequest{ ApiService: a, ctx: ctx, @@ -1156,7 +1293,7 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequest(ctx context.Context) ApiGetOAut // Execute executes the request // // @return OAuth2LoginRequest -func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginRequestRequest) (*OAuth2LoginRequest, *http.Response, error) { +func (a *OAuth2APIService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginRequestRequest) (*OAuth2LoginRequest, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1164,7 +1301,7 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginReque localVarReturnValue *OAuth2LoginRequest ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.GetOAuth2LoginRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.GetOAuth2LoginRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1178,7 +1315,7 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginReque return localVarReturnValue, nil, reportError("loginChallenge is required and must be specified") } - localVarQueryParams.Add("login_challenge", parameterToString(*r.loginChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "login_challenge", r.loginChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -1206,9 +1343,9 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginReque return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1225,6 +1362,7 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginReque newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1234,6 +1372,7 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginReque newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1252,7 +1391,7 @@ func (a *OAuth2ApiService) GetOAuth2LoginRequestExecute(r ApiGetOAuth2LoginReque type ApiGetOAuth2LogoutRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService logoutChallenge *string } @@ -1273,7 +1412,7 @@ Use this endpoint to fetch an Ory OAuth 2.0 logout request. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
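
The read-side counterparts of the acceptance endpoints are shown above (`GetOAuth2ConsentRequest`, `GetOAuth2LoginRequest`). A brief sketch of a login UI fetching the pending request by challenge, under the same import-path and field-name assumptions as the earlier examples:

```go
package examples

import (
	"context"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// inspectLoginRequest fetches the pending login request so the login UI can
// decide whether authentication can be skipped for this subject.
func inspectLoginRequest(ctx context.Context, client *openapi.APIClient, challenge string) error {
	lr, _, err := client.OAuth2API.GetOAuth2LoginRequest(ctx).LoginChallenge(challenge).Execute()
	if err != nil {
		return err
	}
	fmt.Printf("client=%s skip=%v subject=%s\n",
		lr.Client.GetClientId(), lr.GetSkip(), lr.GetSubject())
	return nil
}
```
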
@return ApiGetOAuth2LogoutRequestRequest */ -func (a *OAuth2ApiService) GetOAuth2LogoutRequest(ctx context.Context) ApiGetOAuth2LogoutRequestRequest { +func (a *OAuth2APIService) GetOAuth2LogoutRequest(ctx context.Context) ApiGetOAuth2LogoutRequestRequest { return ApiGetOAuth2LogoutRequestRequest{ ApiService: a, ctx: ctx, @@ -1283,7 +1422,7 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequest(ctx context.Context) ApiGetOAu // Execute executes the request // // @return OAuth2LogoutRequest -func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutRequestRequest) (*OAuth2LogoutRequest, *http.Response, error) { +func (a *OAuth2APIService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutRequestRequest) (*OAuth2LogoutRequest, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1291,7 +1430,7 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutReq localVarReturnValue *OAuth2LogoutRequest ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.GetOAuth2LogoutRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.GetOAuth2LogoutRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1305,7 +1444,7 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutReq return localVarReturnValue, nil, reportError("logoutChallenge is required and must be specified") } - localVarQueryParams.Add("logout_challenge", parameterToString(*r.logoutChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "logout_challenge", r.logoutChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -1333,9 +1472,9 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutReq return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1352,6 +1491,7 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutReq newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1361,6 +1501,7 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutReq newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1379,7 +1520,7 @@ func (a *OAuth2ApiService) GetOAuth2LogoutRequestExecute(r ApiGetOAuth2LogoutReq type ApiGetTrustedOAuth2JwtGrantIssuerRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string } @@ -1397,7 +1538,7 @@ created the trust relationship. 
@param id The id of the desired grant @return ApiGetTrustedOAuth2JwtGrantIssuerRequest */ -func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuer(ctx context.Context, id string) ApiGetTrustedOAuth2JwtGrantIssuerRequest { +func (a *OAuth2APIService) GetTrustedOAuth2JwtGrantIssuer(ctx context.Context, id string) ApiGetTrustedOAuth2JwtGrantIssuerRequest { return ApiGetTrustedOAuth2JwtGrantIssuerRequest{ ApiService: a, ctx: ctx, @@ -1408,7 +1549,7 @@ func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuer(ctx context.Context, i // Execute executes the request // // @return TrustedOAuth2JwtGrantIssuer -func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuerExecute(r ApiGetTrustedOAuth2JwtGrantIssuerRequest) (*TrustedOAuth2JwtGrantIssuer, *http.Response, error) { +func (a *OAuth2APIService) GetTrustedOAuth2JwtGrantIssuerExecute(r ApiGetTrustedOAuth2JwtGrantIssuerRequest) (*TrustedOAuth2JwtGrantIssuer, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1416,13 +1557,13 @@ func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuerExecute(r ApiGetTrusted localVarReturnValue *TrustedOAuth2JwtGrantIssuer ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.GetTrustedOAuth2JwtGrantIssuer") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.GetTrustedOAuth2JwtGrantIssuer") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/trust/grants/jwt-bearer/issuers/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1455,9 +1596,9 @@ func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuerExecute(r ApiGetTrusted return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1473,6 +1614,7 @@ func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuerExecute(r ApiGetTrusted newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1491,7 +1633,7 @@ func (a *OAuth2ApiService) GetTrustedOAuth2JwtGrantIssuerExecute(r ApiGetTrusted type ApiIntrospectOAuth2TokenRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService token *string scope *string } @@ -1522,7 +1664,7 @@ set additional data for a token by setting `session.access_token` during the con @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
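
For the jwt-bearer trust-relationship endpoints above, a short sketch of looking up a trusted issuer by ID and revoking it; the getter names on `TrustedOAuth2JwtGrantIssuer` follow the generated `Get<Field>` convention and are assumptions.

```go
package examples

import (
	"context"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// showAndRevokeTrustedIssuer prints a trust relationship and then removes it,
// so assertions signed by that issuer are no longer accepted.
func showAndRevokeTrustedIssuer(ctx context.Context, client *openapi.APIClient, id string) error {
	issuer, _, err := client.OAuth2API.GetTrustedOAuth2JwtGrantIssuer(ctx, id).Execute()
	if err != nil {
		return err
	}
	fmt.Printf("issuer=%s scopes=%v\n", issuer.GetIssuer(), issuer.GetScope())

	_, err = client.OAuth2API.DeleteTrustedOAuth2JwtGrantIssuer(ctx, id).Execute()
	return err
}
```
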
@return ApiIntrospectOAuth2TokenRequest */ -func (a *OAuth2ApiService) IntrospectOAuth2Token(ctx context.Context) ApiIntrospectOAuth2TokenRequest { +func (a *OAuth2APIService) IntrospectOAuth2Token(ctx context.Context) ApiIntrospectOAuth2TokenRequest { return ApiIntrospectOAuth2TokenRequest{ ApiService: a, ctx: ctx, @@ -1532,7 +1674,7 @@ func (a *OAuth2ApiService) IntrospectOAuth2Token(ctx context.Context) ApiIntrosp // Execute executes the request // // @return IntrospectedOAuth2Token -func (a *OAuth2ApiService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2TokenRequest) (*IntrospectedOAuth2Token, *http.Response, error) { +func (a *OAuth2APIService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2TokenRequest) (*IntrospectedOAuth2Token, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} @@ -1540,7 +1682,7 @@ func (a *OAuth2ApiService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2Tok localVarReturnValue *IntrospectedOAuth2Token ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.IntrospectOAuth2Token") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.IntrospectOAuth2Token") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1572,9 +1714,9 @@ func (a *OAuth2ApiService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2Tok localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } if r.scope != nil { - localVarFormParams.Add("scope", parameterToString(*r.scope, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "scope", r.scope, "", "") } - localVarFormParams.Add("token", parameterToString(*r.token, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "token", r.token, "", "") req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) if err != nil { return localVarReturnValue, nil, err @@ -1585,9 +1727,9 @@ func (a *OAuth2ApiService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2Tok return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1603,6 +1745,7 @@ func (a *OAuth2ApiService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2Tok newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1621,7 +1764,7 @@ func (a *OAuth2ApiService) IntrospectOAuth2TokenExecute(r ApiIntrospectOAuth2Tok type ApiListOAuth2ClientsRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService pageSize *int64 pageToken *string clientName *string @@ -1665,7 +1808,7 @@ As a default it lists the first 100 clients. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
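
In `IntrospectOAuth2TokenExecute` above, `token` and the optional `scope` now flow through `parameterAddToHeaderOrQuery` into the form body; the builder surface is unchanged. A minimal sketch:

```go
package examples

import (
	"context"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

// introspect checks whether a token is active and whether it was granted a
// specific scope; both values are sent as form fields.
func introspect(ctx context.Context, client *openapi.APIClient, token string) (bool, error) {
	res, _, err := client.OAuth2API.IntrospectOAuth2Token(ctx).
		Token(token).
		Scope("offline_access").
		Execute()
	if err != nil {
		return false, err
	}
	fmt.Printf("active=%v subject=%s\n", res.Active, res.GetSub())
	return res.Active, nil
}
```
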
@return ApiListOAuth2ClientsRequest */ -func (a *OAuth2ApiService) ListOAuth2Clients(ctx context.Context) ApiListOAuth2ClientsRequest { +func (a *OAuth2APIService) ListOAuth2Clients(ctx context.Context) ApiListOAuth2ClientsRequest { return ApiListOAuth2ClientsRequest{ ApiService: a, ctx: ctx, @@ -1675,7 +1818,7 @@ func (a *OAuth2ApiService) ListOAuth2Clients(ctx context.Context) ApiListOAuth2C // Execute executes the request // // @return []OAuth2Client -func (a *OAuth2ApiService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsRequest) ([]OAuth2Client, *http.Response, error) { +func (a *OAuth2APIService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsRequest) ([]OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1683,7 +1826,7 @@ func (a *OAuth2ApiService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsReques localVarReturnValue []OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.ListOAuth2Clients") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.ListOAuth2Clients") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1695,16 +1838,19 @@ func (a *OAuth2ApiService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsReques localVarFormParams := url.Values{} if r.pageSize != nil { - localVarQueryParams.Add("page_size", parameterToString(*r.pageSize, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "page_size", r.pageSize, "form", "") + } else { + var defaultValue int64 = 250 + r.pageSize = &defaultValue } if r.pageToken != nil { - localVarQueryParams.Add("page_token", parameterToString(*r.pageToken, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "page_token", r.pageToken, "form", "") } if r.clientName != nil { - localVarQueryParams.Add("client_name", parameterToString(*r.clientName, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "client_name", r.clientName, "form", "") } if r.owner != nil { - localVarQueryParams.Add("owner", parameterToString(*r.owner, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "owner", r.owner, "form", "") } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -1733,9 +1879,9 @@ func (a *OAuth2ApiService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsReques return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1751,6 +1897,7 @@ func (a *OAuth2ApiService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsReques newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1769,7 +1916,7 @@ func (a *OAuth2ApiService) ListOAuth2ClientsExecute(r ApiListOAuth2ClientsReques type ApiListOAuth2ConsentSessionsRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService subject *string pageSize *int64 pageToken *string @@ -1814,7 +1961,7 @@ empty JSON array with status code 200 OK. 
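And for consent sessions, a sketch using the same client; subject is the required query parameter, and the Subject setter name is assumed from the generated builder pattern.

func listConsentSessions(ctx context.Context, hydra *openapi.APIClient, subject string) error {
	sessions, _, err := hydra.OAuth2API.
		ListOAuth2ConsentSessions(ctx).
		Subject(subject). // required query parameter
		Execute()
	if err != nil {
		return err
	}
	for _, s := range sessions {
		// Each OAuth2ConsentSession describes one remembered consent grant.
		fmt.Printf("%+v\n", s)
	}
	return nil
}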
@param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiListOAuth2ConsentSessionsRequest */ -func (a *OAuth2ApiService) ListOAuth2ConsentSessions(ctx context.Context) ApiListOAuth2ConsentSessionsRequest { +func (a *OAuth2APIService) ListOAuth2ConsentSessions(ctx context.Context) ApiListOAuth2ConsentSessionsRequest { return ApiListOAuth2ConsentSessionsRequest{ ApiService: a, ctx: ctx, @@ -1824,7 +1971,7 @@ func (a *OAuth2ApiService) ListOAuth2ConsentSessions(ctx context.Context) ApiLis // Execute executes the request // // @return []OAuth2ConsentSession -func (a *OAuth2ApiService) ListOAuth2ConsentSessionsExecute(r ApiListOAuth2ConsentSessionsRequest) ([]OAuth2ConsentSession, *http.Response, error) { +func (a *OAuth2APIService) ListOAuth2ConsentSessionsExecute(r ApiListOAuth2ConsentSessionsRequest) ([]OAuth2ConsentSession, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1832,7 +1979,7 @@ func (a *OAuth2ApiService) ListOAuth2ConsentSessionsExecute(r ApiListOAuth2Conse localVarReturnValue []OAuth2ConsentSession ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.ListOAuth2ConsentSessions") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.ListOAuth2ConsentSessions") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1847,14 +1994,20 @@ func (a *OAuth2ApiService) ListOAuth2ConsentSessionsExecute(r ApiListOAuth2Conse } if r.pageSize != nil { - localVarQueryParams.Add("page_size", parameterToString(*r.pageSize, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "page_size", r.pageSize, "form", "") + } else { + var defaultValue int64 = 250 + r.pageSize = &defaultValue } if r.pageToken != nil { - localVarQueryParams.Add("page_token", parameterToString(*r.pageToken, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "page_token", r.pageToken, "form", "") + } else { + var defaultValue string = "1" + r.pageToken = &defaultValue } - localVarQueryParams.Add("subject", parameterToString(*r.subject, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "subject", r.subject, "form", "") if r.loginSessionId != nil { - localVarQueryParams.Add("login_session_id", parameterToString(*r.loginSessionId, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "login_session_id", r.loginSessionId, "form", "") } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -1883,9 +2036,9 @@ func (a *OAuth2ApiService) ListOAuth2ConsentSessionsExecute(r ApiListOAuth2Conse return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -1901,6 +2054,7 @@ func (a *OAuth2ApiService) ListOAuth2ConsentSessionsExecute(r ApiListOAuth2Conse newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1918,20 +2072,22 @@ func (a *OAuth2ApiService) ListOAuth2ConsentSessionsExecute(r 
ApiListOAuth2Conse } type ApiListTrustedOAuth2JwtGrantIssuersRequest struct { - ctx context.Context - ApiService *OAuth2ApiService - maxItems *int64 - defaultItems *int64 - issuer *string + ctx context.Context + ApiService *OAuth2APIService + pageSize *int64 + pageToken *string + issuer *string } -func (r ApiListTrustedOAuth2JwtGrantIssuersRequest) MaxItems(maxItems int64) ApiListTrustedOAuth2JwtGrantIssuersRequest { - r.maxItems = &maxItems +// Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +func (r ApiListTrustedOAuth2JwtGrantIssuersRequest) PageSize(pageSize int64) ApiListTrustedOAuth2JwtGrantIssuersRequest { + r.pageSize = &pageSize return r } -func (r ApiListTrustedOAuth2JwtGrantIssuersRequest) DefaultItems(defaultItems int64) ApiListTrustedOAuth2JwtGrantIssuersRequest { - r.defaultItems = &defaultItems +// Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +func (r ApiListTrustedOAuth2JwtGrantIssuersRequest) PageToken(pageToken string) ApiListTrustedOAuth2JwtGrantIssuersRequest { + r.pageToken = &pageToken return r } @@ -1953,7 +2109,7 @@ Use this endpoint to list all trusted JWT Bearer Grant Type Issuers. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiListTrustedOAuth2JwtGrantIssuersRequest */ -func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuers(ctx context.Context) ApiListTrustedOAuth2JwtGrantIssuersRequest { +func (a *OAuth2APIService) ListTrustedOAuth2JwtGrantIssuers(ctx context.Context) ApiListTrustedOAuth2JwtGrantIssuersRequest { return ApiListTrustedOAuth2JwtGrantIssuersRequest{ ApiService: a, ctx: ctx, @@ -1963,7 +2119,7 @@ func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuers(ctx context.Context) // Execute executes the request // // @return []TrustedOAuth2JwtGrantIssuer -func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrustedOAuth2JwtGrantIssuersRequest) ([]TrustedOAuth2JwtGrantIssuer, *http.Response, error) { +func (a *OAuth2APIService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrustedOAuth2JwtGrantIssuersRequest) ([]TrustedOAuth2JwtGrantIssuer, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -1971,7 +2127,7 @@ func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrus localVarReturnValue []TrustedOAuth2JwtGrantIssuer ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.ListTrustedOAuth2JwtGrantIssuers") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.ListTrustedOAuth2JwtGrantIssuers") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -1982,14 +2138,17 @@ func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrus localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if r.maxItems != nil { - localVarQueryParams.Add("MaxItems", parameterToString(*r.maxItems, "")) + if r.pageSize != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "page_size", r.pageSize, "form", "") + } else { + var defaultValue int64 = 250 + r.pageSize = &defaultValue } - if r.defaultItems != nil { - localVarQueryParams.Add("DefaultItems", 
parameterToString(*r.defaultItems, "")) + if r.pageToken != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "page_token", r.pageToken, "form", "") } if r.issuer != nil { - localVarQueryParams.Add("issuer", parameterToString(*r.issuer, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "issuer", r.issuer, "form", "") } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -2018,9 +2177,9 @@ func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrus return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -2036,6 +2195,7 @@ func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrus newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2054,7 +2214,7 @@ func (a *OAuth2ApiService) ListTrustedOAuth2JwtGrantIssuersExecute(r ApiListTrus type ApiOAuth2AuthorizeRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService } func (r ApiOAuth2AuthorizeRequest) Execute() (*ErrorOAuth2, *http.Response, error) { @@ -2067,12 +2227,13 @@ OAuth2Authorize OAuth 2.0 Authorize Endpoint Use open source libraries to perform OAuth 2.0 and OpenID Connect available for any programming language. You can find a list of libraries at https://oauth.net/code/ -The Ory SDK is not yet able to this endpoint properly. +This endpoint should not be used via the Ory SDK and is only included for technical reasons. +Instead, use one of the libraries linked above. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
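As the reworded comment says, the authorize and token endpoints should be driven by a regular OAuth 2.0 library rather than this SDK. A minimal sketch with golang.org/x/oauth2 against Hydra's public /oauth2/auth and /oauth2/token routes; client ID, secret, redirect URL, state, and host are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"golang.org/x/oauth2"
)

func main() {
	conf := &oauth2.Config{
		ClientID:     "my-client",                      // placeholder
		ClientSecret: "my-secret",                      // placeholder
		RedirectURL:  "http://127.0.0.1:5555/callback", // placeholder
		Scopes:       []string{"openid", "offline"},
		Endpoint: oauth2.Endpoint{
			AuthURL:  "https://hydra.example.org/oauth2/auth",
			TokenURL: "https://hydra.example.org/oauth2/token",
		},
	}

	// Send the browser here instead of calling OAuth2Authorize through the SDK.
	fmt.Println("open:", conf.AuthCodeURL("some-random-state"))

	// After Hydra redirects back with ?code=..., exchange it for tokens.
	token, err := conf.Exchange(context.Background(), "code-from-callback")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("access token expires at:", token.Expiry)
}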
@return ApiOAuth2AuthorizeRequest */ -func (a *OAuth2ApiService) OAuth2Authorize(ctx context.Context) ApiOAuth2AuthorizeRequest { +func (a *OAuth2APIService) OAuth2Authorize(ctx context.Context) ApiOAuth2AuthorizeRequest { return ApiOAuth2AuthorizeRequest{ ApiService: a, ctx: ctx, @@ -2082,7 +2243,7 @@ func (a *OAuth2ApiService) OAuth2Authorize(ctx context.Context) ApiOAuth2Authori // Execute executes the request // // @return ErrorOAuth2 -func (a *OAuth2ApiService) OAuth2AuthorizeExecute(r ApiOAuth2AuthorizeRequest) (*ErrorOAuth2, *http.Response, error) { +func (a *OAuth2APIService) OAuth2AuthorizeExecute(r ApiOAuth2AuthorizeRequest) (*ErrorOAuth2, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -2090,7 +2251,7 @@ func (a *OAuth2ApiService) OAuth2AuthorizeExecute(r ApiOAuth2AuthorizeRequest) ( localVarReturnValue *ErrorOAuth2 ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.OAuth2Authorize") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.OAuth2Authorize") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -2128,9 +2289,9 @@ func (a *OAuth2ApiService) OAuth2AuthorizeExecute(r ApiOAuth2AuthorizeRequest) ( return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -2146,6 +2307,118 @@ func (a *OAuth2ApiService) OAuth2AuthorizeExecute(r ApiOAuth2AuthorizeRequest) ( newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiOAuth2DeviceFlowRequest struct { + ctx context.Context + ApiService *OAuth2APIService +} + +func (r ApiOAuth2DeviceFlowRequest) Execute() (*DeviceAuthorization, *http.Response, error) { + return r.ApiService.OAuth2DeviceFlowExecute(r) +} + +/* +OAuth2DeviceFlow The OAuth 2.0 Device Authorize Endpoint + +This endpoint is not documented here because you should never use your own implementation to perform OAuth2 flows. +OAuth2 is a very popular protocol and a library for your programming language will exist. + +To learn more about this flow please refer to the specification: https://tools.ietf.org/html/rfc8628 + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
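The new wrapper above targets the /oauth2/device/auth route, i.e. the RFC 8628 device authorization request. Because that request is normally sent with client credentials in the form body, which the zero-argument generated wrapper does not model, a plain HTTP sketch is often more practical; client ID, scope, and host are placeholders.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"net/url"
)

func main() {
	resp, err := http.PostForm("https://hydra.example.org/oauth2/device/auth", url.Values{
		"client_id": {"my-device-client"}, // placeholder public client
		"scope":     {"openid offline"},
	})
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Standard RFC 8628 response fields; the generated DeviceAuthorization model mirrors them.
	var out struct {
		DeviceCode      string `json:"device_code"`
		UserCode        string `json:"user_code"`
		VerificationURI string `json:"verification_uri"`
		Interval        int    `json:"interval"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("visit %s and enter code %s\n", out.VerificationURI, out.UserCode)
}

The user then verifies the code at /oauth2/device/verify (handled by PerformOAuth2DeviceVerificationFlow, added further down), while the device polls the token endpoint with grant_type urn:ietf:params:oauth:grant-type:device_code.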
+ @return ApiOAuth2DeviceFlowRequest +*/ +func (a *OAuth2APIService) OAuth2DeviceFlow(ctx context.Context) ApiOAuth2DeviceFlowRequest { + return ApiOAuth2DeviceFlowRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return DeviceAuthorization +func (a *OAuth2APIService) OAuth2DeviceFlowExecute(r ApiOAuth2DeviceFlowRequest) (*DeviceAuthorization, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPost + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *DeviceAuthorization + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.OAuth2DeviceFlow") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/oauth2/device/auth" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v ErrorOAuth2 + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2164,7 +2437,7 @@ func (a *OAuth2ApiService) OAuth2AuthorizeExecute(r ApiOAuth2AuthorizeRequest) ( type ApiOauth2TokenExchangeRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService grantType *string clientId *string code *string @@ -2207,12 +2480,13 @@ Oauth2TokenExchange The OAuth 2.0 Token Endpoint Use open source libraries to perform OAuth 2.0 and OpenID Connect available for any programming language. You can find a list of libraries here https://oauth.net/code/ -The Ory SDK is not yet able to this endpoint properly. +This endpoint should not be used via the Ory SDK and is only included for technical reasons. +Instead, use one of the libraries linked above. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. 
Passed from http.Request or context.Background(). @return ApiOauth2TokenExchangeRequest */ -func (a *OAuth2ApiService) Oauth2TokenExchange(ctx context.Context) ApiOauth2TokenExchangeRequest { +func (a *OAuth2APIService) Oauth2TokenExchange(ctx context.Context) ApiOauth2TokenExchangeRequest { return ApiOauth2TokenExchangeRequest{ ApiService: a, ctx: ctx, @@ -2222,7 +2496,7 @@ func (a *OAuth2ApiService) Oauth2TokenExchange(ctx context.Context) ApiOauth2Tok // Execute executes the request // // @return OAuth2TokenExchange -func (a *OAuth2ApiService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRequest) (*OAuth2TokenExchange, *http.Response, error) { +func (a *OAuth2APIService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRequest) (*OAuth2TokenExchange, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} @@ -2230,7 +2504,7 @@ func (a *OAuth2ApiService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRe localVarReturnValue *OAuth2TokenExchange ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.Oauth2TokenExchange") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.Oauth2TokenExchange") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -2262,17 +2536,17 @@ func (a *OAuth2ApiService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRe localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } if r.clientId != nil { - localVarFormParams.Add("client_id", parameterToString(*r.clientId, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "client_id", r.clientId, "", "") } if r.code != nil { - localVarFormParams.Add("code", parameterToString(*r.code, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "code", r.code, "", "") } - localVarFormParams.Add("grant_type", parameterToString(*r.grantType, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "grant_type", r.grantType, "", "") if r.redirectUri != nil { - localVarFormParams.Add("redirect_uri", parameterToString(*r.redirectUri, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "redirect_uri", r.redirectUri, "", "") } if r.refreshToken != nil { - localVarFormParams.Add("refresh_token", parameterToString(*r.refreshToken, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "refresh_token", r.refreshToken, "", "") } req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) if err != nil { @@ -2284,9 +2558,9 @@ func (a *OAuth2ApiService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRe return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -2302,6 +2576,7 @@ func (a *OAuth2ApiService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRe newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2320,7 +2595,7 @@ func (a *OAuth2ApiService) Oauth2TokenExchangeExecute(r ApiOauth2TokenExchangeRe type 
ApiPatchOAuth2ClientRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string jsonPatch *[]JsonPatch } @@ -2349,7 +2624,7 @@ generated for applications which want to consume your OAuth 2.0 or OpenID Connec @param id The id of the OAuth 2.0 Client. @return ApiPatchOAuth2ClientRequest */ -func (a *OAuth2ApiService) PatchOAuth2Client(ctx context.Context, id string) ApiPatchOAuth2ClientRequest { +func (a *OAuth2APIService) PatchOAuth2Client(ctx context.Context, id string) ApiPatchOAuth2ClientRequest { return ApiPatchOAuth2ClientRequest{ ApiService: a, ctx: ctx, @@ -2360,7 +2635,7 @@ func (a *OAuth2ApiService) PatchOAuth2Client(ctx context.Context, id string) Api // Execute executes the request // // @return OAuth2Client -func (a *OAuth2ApiService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OAuth2APIService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPatch localVarPostBody interface{} @@ -2368,13 +2643,13 @@ func (a *OAuth2ApiService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientReques localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.PatchOAuth2Client") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.PatchOAuth2Client") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/clients/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -2412,9 +2687,9 @@ func (a *OAuth2ApiService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientReques return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -2431,6 +2706,7 @@ func (a *OAuth2ApiService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientReques newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2440,6 +2716,115 @@ func (a *OAuth2ApiService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientReques newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiPerformOAuth2DeviceVerificationFlowRequest struct { + ctx context.Context + 
ApiService *OAuth2APIService +} + +func (r ApiPerformOAuth2DeviceVerificationFlowRequest) Execute() (*ErrorOAuth2, *http.Response, error) { + return r.ApiService.PerformOAuth2DeviceVerificationFlowExecute(r) +} + +/* +PerformOAuth2DeviceVerificationFlow OAuth 2.0 Device Verification Endpoint + +This is the device user verification endpoint. The user is redirected here when trying to log in using the device flow. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @return ApiPerformOAuth2DeviceVerificationFlowRequest +*/ +func (a *OAuth2APIService) PerformOAuth2DeviceVerificationFlow(ctx context.Context) ApiPerformOAuth2DeviceVerificationFlowRequest { + return ApiPerformOAuth2DeviceVerificationFlowRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return ErrorOAuth2 +func (a *OAuth2APIService) PerformOAuth2DeviceVerificationFlowExecute(r ApiPerformOAuth2DeviceVerificationFlowRequest) (*ErrorOAuth2, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *ErrorOAuth2 + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.PerformOAuth2DeviceVerificationFlow") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/oauth2/device/verify" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + var v ErrorOAuth2 + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2458,7 +2843,7 @@ func (a *OAuth2ApiService) PatchOAuth2ClientExecute(r ApiPatchOAuth2ClientReques type ApiRejectOAuth2ConsentRequestRequest struct { ctx 
context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService consentChallenge *string rejectOAuth2Request *RejectOAuth2Request } @@ -2500,7 +2885,7 @@ head over to the OAuth 2.0 documentation. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiRejectOAuth2ConsentRequestRequest */ -func (a *OAuth2ApiService) RejectOAuth2ConsentRequest(ctx context.Context) ApiRejectOAuth2ConsentRequestRequest { +func (a *OAuth2APIService) RejectOAuth2ConsentRequest(ctx context.Context) ApiRejectOAuth2ConsentRequestRequest { return ApiRejectOAuth2ConsentRequestRequest{ ApiService: a, ctx: ctx, @@ -2510,7 +2895,7 @@ func (a *OAuth2ApiService) RejectOAuth2ConsentRequest(ctx context.Context) ApiRe // Execute executes the request // // @return OAuth2RedirectTo -func (a *OAuth2ApiService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2ConsentRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { +func (a *OAuth2APIService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2ConsentRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -2518,7 +2903,7 @@ func (a *OAuth2ApiService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2Co localVarReturnValue *OAuth2RedirectTo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.RejectOAuth2ConsentRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.RejectOAuth2ConsentRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -2532,7 +2917,7 @@ func (a *OAuth2ApiService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2Co return localVarReturnValue, nil, reportError("consentChallenge is required and must be specified") } - localVarQueryParams.Add("consent_challenge", parameterToString(*r.consentChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "consent_challenge", r.consentChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{"application/json"} @@ -2562,9 +2947,9 @@ func (a *OAuth2ApiService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2Co return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -2580,6 +2965,7 @@ func (a *OAuth2ApiService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2Co newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2598,7 +2984,7 @@ func (a *OAuth2ApiService) RejectOAuth2ConsentRequestExecute(r ApiRejectOAuth2Co type ApiRejectOAuth2LoginRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService loginChallenge *string rejectOAuth2Request *RejectOAuth2Request } @@ -2635,7 +3021,7 @@ The response contains a redirect URL which the login provider should redirect th @param ctx context.Context - for authentication, logging, cancellation, deadlines, 
tracing, etc. Passed from http.Request or context.Background(). @return ApiRejectOAuth2LoginRequestRequest */ -func (a *OAuth2ApiService) RejectOAuth2LoginRequest(ctx context.Context) ApiRejectOAuth2LoginRequestRequest { +func (a *OAuth2APIService) RejectOAuth2LoginRequest(ctx context.Context) ApiRejectOAuth2LoginRequestRequest { return ApiRejectOAuth2LoginRequestRequest{ ApiService: a, ctx: ctx, @@ -2645,7 +3031,7 @@ func (a *OAuth2ApiService) RejectOAuth2LoginRequest(ctx context.Context) ApiReje // Execute executes the request // // @return OAuth2RedirectTo -func (a *OAuth2ApiService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2LoginRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { +func (a *OAuth2APIService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2LoginRequestRequest) (*OAuth2RedirectTo, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -2653,7 +3039,7 @@ func (a *OAuth2ApiService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2Logi localVarReturnValue *OAuth2RedirectTo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.RejectOAuth2LoginRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.RejectOAuth2LoginRequest") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -2667,7 +3053,7 @@ func (a *OAuth2ApiService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2Logi return localVarReturnValue, nil, reportError("loginChallenge is required and must be specified") } - localVarQueryParams.Add("login_challenge", parameterToString(*r.loginChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "login_challenge", r.loginChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{"application/json"} @@ -2697,9 +3083,9 @@ func (a *OAuth2ApiService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2Logi return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -2715,6 +3101,7 @@ func (a *OAuth2ApiService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2Logi newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -2733,7 +3120,7 @@ func (a *OAuth2ApiService) RejectOAuth2LoginRequestExecute(r ApiRejectOAuth2Logi type ApiRejectOAuth2LogoutRequestRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService logoutChallenge *string } @@ -2757,7 +3144,7 @@ The response is empty as the logout provider has to chose what action to perform @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
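For the rejection endpoints above, a sketch of denying a consent request with the same generated client. The ConsentChallenge and RejectOAuth2Request setters and the PtrString/PtrInt64 helpers are assumed from the usual generated code; the challenge is the value Hydra appends as ?consent_challenge= when redirecting to the consent UI.

func denyConsent(ctx context.Context, hydra *openapi.APIClient, challenge string) error {
	redirect, _, err := hydra.OAuth2API.
		RejectOAuth2ConsentRequest(ctx).
		ConsentChallenge(challenge). // required query parameter
		RejectOAuth2Request(openapi.RejectOAuth2Request{
			Error:            openapi.PtrString("access_denied"),
			ErrorDescription: openapi.PtrString("The resource owner denied the request"),
			StatusCode:       openapi.PtrInt64(403),
		}).
		Execute()
	if err != nil {
		return err
	}
	// Send the browser back to Hydra so the flow finishes with an error response.
	fmt.Println("redirect the user to:", redirect.GetRedirectTo())
	return nil
}

RejectOAuth2LoginRequest and RejectOAuth2LogoutRequest follow the same builder shape, keyed on login_challenge and logout_challenge respectively.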
@return ApiRejectOAuth2LogoutRequestRequest */ -func (a *OAuth2ApiService) RejectOAuth2LogoutRequest(ctx context.Context) ApiRejectOAuth2LogoutRequestRequest { +func (a *OAuth2APIService) RejectOAuth2LogoutRequest(ctx context.Context) ApiRejectOAuth2LogoutRequestRequest { return ApiRejectOAuth2LogoutRequestRequest{ ApiService: a, ctx: ctx, @@ -2765,14 +3152,14 @@ func (a *OAuth2ApiService) RejectOAuth2LogoutRequest(ctx context.Context) ApiRej } // Execute executes the request -func (a *OAuth2ApiService) RejectOAuth2LogoutRequestExecute(r ApiRejectOAuth2LogoutRequestRequest) (*http.Response, error) { +func (a *OAuth2APIService) RejectOAuth2LogoutRequestExecute(r ApiRejectOAuth2LogoutRequestRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.RejectOAuth2LogoutRequest") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.RejectOAuth2LogoutRequest") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } @@ -2786,7 +3173,7 @@ func (a *OAuth2ApiService) RejectOAuth2LogoutRequestExecute(r ApiRejectOAuth2Log return nil, reportError("logoutChallenge is required and must be specified") } - localVarQueryParams.Add("logout_challenge", parameterToString(*r.logoutChallenge, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "logout_challenge", r.logoutChallenge, "form", "") // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -2814,9 +3201,9 @@ func (a *OAuth2ApiService) RejectOAuth2LogoutRequestExecute(r ApiRejectOAuth2Log return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -2832,6 +3219,7 @@ func (a *OAuth2ApiService) RejectOAuth2LogoutRequestExecute(r ApiRejectOAuth2Log newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -2840,11 +3228,12 @@ func (a *OAuth2ApiService) RejectOAuth2LogoutRequestExecute(r ApiRejectOAuth2Log } type ApiRevokeOAuth2ConsentSessionsRequest struct { - ctx context.Context - ApiService *OAuth2ApiService - subject *string - client *string - all *bool + ctx context.Context + ApiService *OAuth2APIService + subject *string + client *string + consentRequestId *string + all *bool } // OAuth 2.0 Consent Subject The subject whose consent sessions should be deleted. @@ -2859,6 +3248,12 @@ func (r ApiRevokeOAuth2ConsentSessionsRequest) Client(client string) ApiRevokeOA return r } +// Consent Request ID If set, revoke all token chains derived from this particular consent request ID. +func (r ApiRevokeOAuth2ConsentSessionsRequest) ConsentRequestId(consentRequestId string) ApiRevokeOAuth2ConsentSessionsRequest { + r.consentRequestId = &consentRequestId + return r +} + // Revoke All Consent Sessions If set to `true` deletes all consent sessions by the Subject that have been granted. func (r ApiRevokeOAuth2ConsentSessionsRequest) All(all bool) ApiRevokeOAuth2ConsentSessionsRequest { r.all = &all @@ -2878,7 +3273,7 @@ associated OAuth 2.0 Access Tokens. 
You may also only revoke sessions for a spec @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiRevokeOAuth2ConsentSessionsRequest */ -func (a *OAuth2ApiService) RevokeOAuth2ConsentSessions(ctx context.Context) ApiRevokeOAuth2ConsentSessionsRequest { +func (a *OAuth2APIService) RevokeOAuth2ConsentSessions(ctx context.Context) ApiRevokeOAuth2ConsentSessionsRequest { return ApiRevokeOAuth2ConsentSessionsRequest{ ApiService: a, ctx: ctx, @@ -2886,14 +3281,14 @@ func (a *OAuth2ApiService) RevokeOAuth2ConsentSessions(ctx context.Context) ApiR } // Execute executes the request -func (a *OAuth2ApiService) RevokeOAuth2ConsentSessionsExecute(r ApiRevokeOAuth2ConsentSessionsRequest) (*http.Response, error) { +func (a *OAuth2APIService) RevokeOAuth2ConsentSessionsExecute(r ApiRevokeOAuth2ConsentSessionsRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.RevokeOAuth2ConsentSessions") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.RevokeOAuth2ConsentSessions") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } @@ -2903,16 +3298,18 @@ func (a *OAuth2ApiService) RevokeOAuth2ConsentSessionsExecute(r ApiRevokeOAuth2C localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if r.subject == nil { - return nil, reportError("subject is required and must be specified") - } - localVarQueryParams.Add("subject", parameterToString(*r.subject, "")) + if r.subject != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "subject", r.subject, "form", "") + } if r.client != nil { - localVarQueryParams.Add("client", parameterToString(*r.client, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "client", r.client, "form", "") + } + if r.consentRequestId != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "consent_request_id", r.consentRequestId, "form", "") } if r.all != nil { - localVarQueryParams.Add("all", parameterToString(*r.all, "")) + parameterAddToHeaderOrQuery(localVarQueryParams, "all", r.all, "form", "") } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -2941,9 +3338,9 @@ func (a *OAuth2ApiService) RevokeOAuth2ConsentSessionsExecute(r ApiRevokeOAuth2C return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -2959,6 +3356,7 @@ func (a *OAuth2ApiService) RevokeOAuth2ConsentSessionsExecute(r ApiRevokeOAuth2C newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -2968,8 +3366,9 @@ func (a *OAuth2ApiService) RevokeOAuth2ConsentSessionsExecute(r ApiRevokeOAuth2C type ApiRevokeOAuth2LoginSessionsRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService subject *string + sid *string } // OAuth 2.0 Subject The subject to revoke authentication sessions for. 
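The revocation endpoints gain new parameters in this change (consent_request_id here, sid just below). A sketch using only setters that appear verbatim in the generated code, with the same client; subject, consent request ID, and session ID are placeholders.

func revokeSessions(ctx context.Context, hydra *openapi.APIClient) error {
	// Revoke all consent sessions, and the tokens derived from them, for one subject.
	if _, err := hydra.OAuth2API.RevokeOAuth2ConsentSessions(ctx).
		Subject("user:12345").
		All(true).
		Execute(); err != nil {
		return err
	}

	// Or revoke only the token chains derived from a single consent request.
	if _, err := hydra.OAuth2API.RevokeOAuth2ConsentSessions(ctx).
		ConsentRequestId("consent-request-id").
		Execute(); err != nil {
		return err
	}

	// Revoke a single login session by its session ID (triggers back-channel logout).
	_, err := hydra.OAuth2API.RevokeOAuth2LoginSessions(ctx).
		Sid("session-id").
		Execute()
	return err
}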
@@ -2978,21 +3377,34 @@ func (r ApiRevokeOAuth2LoginSessionsRequest) Subject(subject string) ApiRevokeOA return r } +// Login Session ID The login session to revoke. +func (r ApiRevokeOAuth2LoginSessionsRequest) Sid(sid string) ApiRevokeOAuth2LoginSessionsRequest { + r.sid = &sid + return r +} + func (r ApiRevokeOAuth2LoginSessionsRequest) Execute() (*http.Response, error) { return r.ApiService.RevokeOAuth2LoginSessionsExecute(r) } /* -RevokeOAuth2LoginSessions Revokes All OAuth 2.0 Login Sessions of a Subject +RevokeOAuth2LoginSessions Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID + +This endpoint invalidates authentication sessions. After revoking the authentication session(s), the subject +has to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens. + +If you send the subject in a query param, all authentication sessions that belong to that subject are revoked. +No OpenID Connect Front- or Back-channel logout is performed in this case. + +Alternatively, you can send a SessionID via `sid` query param, in which case, only the session that is connected +to that SessionID is revoked. OpenID Connect Back-channel logout is performed in this case. -This endpoint invalidates a subject's authentication session. After revoking the authentication session, the subject -has to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens and -does not work with OpenID Connect Front- or Back-channel logout. +When using Ory for the identity provider, the login provider will also invalidate the session cookie. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiRevokeOAuth2LoginSessionsRequest */ -func (a *OAuth2ApiService) RevokeOAuth2LoginSessions(ctx context.Context) ApiRevokeOAuth2LoginSessionsRequest { +func (a *OAuth2APIService) RevokeOAuth2LoginSessions(ctx context.Context) ApiRevokeOAuth2LoginSessionsRequest { return ApiRevokeOAuth2LoginSessionsRequest{ ApiService: a, ctx: ctx, @@ -3000,14 +3412,14 @@ func (a *OAuth2ApiService) RevokeOAuth2LoginSessions(ctx context.Context) ApiRev } // Execute executes the request -func (a *OAuth2ApiService) RevokeOAuth2LoginSessionsExecute(r ApiRevokeOAuth2LoginSessionsRequest) (*http.Response, error) { +func (a *OAuth2APIService) RevokeOAuth2LoginSessionsExecute(r ApiRevokeOAuth2LoginSessionsRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.RevokeOAuth2LoginSessions") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.RevokeOAuth2LoginSessions") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } @@ -3017,11 +3429,13 @@ func (a *OAuth2ApiService) RevokeOAuth2LoginSessionsExecute(r ApiRevokeOAuth2Log localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if r.subject == nil { - return nil, reportError("subject is required and must be specified") - } - localVarQueryParams.Add("subject", parameterToString(*r.subject, "")) + if r.subject != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "subject", r.subject, "form", "") + } + if r.sid != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "sid", r.sid, "form", "") + } // to determine the Content-Type header localVarHTTPContentTypes := 
[]string{} @@ -3049,9 +3463,9 @@ func (a *OAuth2ApiService) RevokeOAuth2LoginSessionsExecute(r ApiRevokeOAuth2Log return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -3067,6 +3481,7 @@ func (a *OAuth2ApiService) RevokeOAuth2LoginSessionsExecute(r ApiRevokeOAuth2Log newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -3076,7 +3491,7 @@ func (a *OAuth2ApiService) RevokeOAuth2LoginSessionsExecute(r ApiRevokeOAuth2Log type ApiRevokeOAuth2TokenRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService token *string clientId *string clientSecret *string @@ -3112,7 +3527,7 @@ the client the token was generated for. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiRevokeOAuth2TokenRequest */ -func (a *OAuth2ApiService) RevokeOAuth2Token(ctx context.Context) ApiRevokeOAuth2TokenRequest { +func (a *OAuth2APIService) RevokeOAuth2Token(ctx context.Context) ApiRevokeOAuth2TokenRequest { return ApiRevokeOAuth2TokenRequest{ ApiService: a, ctx: ctx, @@ -3120,14 +3535,14 @@ func (a *OAuth2ApiService) RevokeOAuth2Token(ctx context.Context) ApiRevokeOAuth } // Execute executes the request -func (a *OAuth2ApiService) RevokeOAuth2TokenExecute(r ApiRevokeOAuth2TokenRequest) (*http.Response, error) { +func (a *OAuth2APIService) RevokeOAuth2TokenExecute(r ApiRevokeOAuth2TokenRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.RevokeOAuth2Token") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.RevokeOAuth2Token") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } @@ -3159,12 +3574,12 @@ func (a *OAuth2ApiService) RevokeOAuth2TokenExecute(r ApiRevokeOAuth2TokenReques localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } if r.clientId != nil { - localVarFormParams.Add("client_id", parameterToString(*r.clientId, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "client_id", r.clientId, "", "") } if r.clientSecret != nil { - localVarFormParams.Add("client_secret", parameterToString(*r.clientSecret, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "client_secret", r.clientSecret, "", "") } - localVarFormParams.Add("token", parameterToString(*r.token, "")) + parameterAddToHeaderOrQuery(localVarFormParams, "token", r.token, "", "") req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) if err != nil { return nil, err @@ -3175,9 +3590,9 @@ func (a *OAuth2ApiService) RevokeOAuth2TokenExecute(r ApiRevokeOAuth2TokenReques return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = 
ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -3193,6 +3608,7 @@ func (a *OAuth2ApiService) RevokeOAuth2TokenExecute(r ApiRevokeOAuth2TokenReques newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -3202,7 +3618,7 @@ func (a *OAuth2ApiService) RevokeOAuth2TokenExecute(r ApiRevokeOAuth2TokenReques type ApiSetOAuth2ClientRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string oAuth2Client *OAuth2Client } @@ -3232,7 +3648,7 @@ generated for applications which want to consume your OAuth 2.0 or OpenID Connec @param id OAuth 2.0 Client ID @return ApiSetOAuth2ClientRequest */ -func (a *OAuth2ApiService) SetOAuth2Client(ctx context.Context, id string) ApiSetOAuth2ClientRequest { +func (a *OAuth2APIService) SetOAuth2Client(ctx context.Context, id string) ApiSetOAuth2ClientRequest { return ApiSetOAuth2ClientRequest{ ApiService: a, ctx: ctx, @@ -3243,7 +3659,7 @@ func (a *OAuth2ApiService) SetOAuth2Client(ctx context.Context, id string) ApiSe // Execute executes the request // // @return OAuth2Client -func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OAuth2APIService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -3251,13 +3667,13 @@ func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) ( localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.SetOAuth2Client") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.SetOAuth2Client") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/clients/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -3295,9 +3711,9 @@ func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) ( return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -3314,6 +3730,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) ( newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -3324,6 +3741,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) ( newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = 
formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -3333,6 +3751,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) ( newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -3351,7 +3770,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientExecute(r ApiSetOAuth2ClientRequest) ( type ApiSetOAuth2ClientLifespansRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService id string oAuth2ClientTokenLifespans *OAuth2ClientTokenLifespans } @@ -3374,7 +3793,7 @@ Set lifespans of different token types issued for this OAuth 2.0 client. Does no @param id OAuth 2.0 Client ID @return ApiSetOAuth2ClientLifespansRequest */ -func (a *OAuth2ApiService) SetOAuth2ClientLifespans(ctx context.Context, id string) ApiSetOAuth2ClientLifespansRequest { +func (a *OAuth2APIService) SetOAuth2ClientLifespans(ctx context.Context, id string) ApiSetOAuth2ClientLifespansRequest { return ApiSetOAuth2ClientLifespansRequest{ ApiService: a, ctx: ctx, @@ -3385,7 +3804,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientLifespans(ctx context.Context, id stri // Execute executes the request // // @return OAuth2Client -func (a *OAuth2ApiService) SetOAuth2ClientLifespansExecute(r ApiSetOAuth2ClientLifespansRequest) (*OAuth2Client, *http.Response, error) { +func (a *OAuth2APIService) SetOAuth2ClientLifespansExecute(r ApiSetOAuth2ClientLifespansRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -3393,13 +3812,13 @@ func (a *OAuth2ApiService) SetOAuth2ClientLifespansExecute(r ApiSetOAuth2ClientL localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.SetOAuth2ClientLifespans") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.SetOAuth2ClientLifespans") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/admin/clients/{id}/lifespans" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -3434,9 +3853,9 @@ func (a *OAuth2ApiService) SetOAuth2ClientLifespansExecute(r ApiSetOAuth2ClientL return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -3452,6 +3871,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientLifespansExecute(r ApiSetOAuth2ClientL newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -3470,7 +3890,7 @@ func (a *OAuth2ApiService) SetOAuth2ClientLifespansExecute(r 
ApiSetOAuth2ClientL type ApiTrustOAuth2JwtGrantIssuerRequest struct { ctx context.Context - ApiService *OAuth2ApiService + ApiService *OAuth2APIService trustOAuth2JwtGrantIssuer *TrustOAuth2JwtGrantIssuer } @@ -3493,7 +3913,7 @@ and Authorization Grants [RFC7523](https://datatracker.ietf.org/doc/html/rfc7523 @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @return ApiTrustOAuth2JwtGrantIssuerRequest */ -func (a *OAuth2ApiService) TrustOAuth2JwtGrantIssuer(ctx context.Context) ApiTrustOAuth2JwtGrantIssuerRequest { +func (a *OAuth2APIService) TrustOAuth2JwtGrantIssuer(ctx context.Context) ApiTrustOAuth2JwtGrantIssuerRequest { return ApiTrustOAuth2JwtGrantIssuerRequest{ ApiService: a, ctx: ctx, @@ -3503,7 +3923,7 @@ func (a *OAuth2ApiService) TrustOAuth2JwtGrantIssuer(ctx context.Context) ApiTru // Execute executes the request // // @return TrustedOAuth2JwtGrantIssuer -func (a *OAuth2ApiService) TrustOAuth2JwtGrantIssuerExecute(r ApiTrustOAuth2JwtGrantIssuerRequest) (*TrustedOAuth2JwtGrantIssuer, *http.Response, error) { +func (a *OAuth2APIService) TrustOAuth2JwtGrantIssuerExecute(r ApiTrustOAuth2JwtGrantIssuerRequest) (*TrustedOAuth2JwtGrantIssuer, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} @@ -3511,7 +3931,7 @@ func (a *OAuth2ApiService) TrustOAuth2JwtGrantIssuerExecute(r ApiTrustOAuth2JwtG localVarReturnValue *TrustedOAuth2JwtGrantIssuer ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2ApiService.TrustOAuth2JwtGrantIssuer") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OAuth2APIService.TrustOAuth2JwtGrantIssuer") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -3551,9 +3971,9 @@ func (a *OAuth2ApiService) TrustOAuth2JwtGrantIssuerExecute(r ApiTrustOAuth2JwtG return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -3569,6 +3989,7 @@ func (a *OAuth2ApiService) TrustOAuth2JwtGrantIssuerExecute(r ApiTrustOAuth2JwtG newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } diff --git a/internal/httpclient/api_oidc.go b/internal/httpclient/api_oidc.go index 285774a2535..406711e3192 100644 --- a/internal/httpclient/api_oidc.go +++ b/internal/httpclient/api_oidc.go @@ -14,18 +14,18 @@ package openapi import ( "bytes" "context" - "io/ioutil" + "io" "net/http" "net/url" "strings" ) -// OidcApiService OidcApi service -type OidcApiService service +// OidcAPIService OidcAPI service +type OidcAPIService service type ApiCreateOidcDynamicClientRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService oAuth2Client *OAuth2Client } @@ -57,7 +57,7 @@ Write the secret down and keep it somewhere safe. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiCreateOidcDynamicClientRequest */ -func (a *OidcApiService) CreateOidcDynamicClient(ctx context.Context) ApiCreateOidcDynamicClientRequest { +func (a *OidcAPIService) CreateOidcDynamicClient(ctx context.Context) ApiCreateOidcDynamicClientRequest { return ApiCreateOidcDynamicClientRequest{ ApiService: a, ctx: ctx, @@ -67,7 +67,7 @@ func (a *OidcApiService) CreateOidcDynamicClient(ctx context.Context) ApiCreateO // Execute executes the request // // @return OAuth2Client -func (a *OidcApiService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OidcAPIService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} @@ -75,7 +75,7 @@ func (a *OidcApiService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicCl localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.CreateOidcDynamicClient") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.CreateOidcDynamicClient") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -118,9 +118,9 @@ func (a *OidcApiService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicCl return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -137,6 +137,7 @@ func (a *OidcApiService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicCl newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -146,6 +147,139 @@ func (a *OidcApiService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicCl newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiCreateVerifiableCredentialRequest struct { + ctx context.Context + ApiService *OidcAPIService + createVerifiableCredentialRequestBody *CreateVerifiableCredentialRequestBody +} + +func (r ApiCreateVerifiableCredentialRequest) CreateVerifiableCredentialRequestBody(createVerifiableCredentialRequestBody CreateVerifiableCredentialRequestBody) ApiCreateVerifiableCredentialRequest { + r.createVerifiableCredentialRequestBody = &createVerifiableCredentialRequestBody + return r +} + +func (r ApiCreateVerifiableCredentialRequest) Execute() (*VerifiableCredentialResponse, *http.Response, error) { + return r.ApiService.CreateVerifiableCredentialExecute(r) +} + +/* +CreateVerifiableCredential Issues a Verifiable Credential + +This 
endpoint creates a verifiable credential that attests that the user +authenticated with the provided access token owns a certain public/private key +pair. + +More information can be found at +https://openid.net/specs/openid-connect-userinfo-vc-1_0.html. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @return ApiCreateVerifiableCredentialRequest +*/ +func (a *OidcAPIService) CreateVerifiableCredential(ctx context.Context) ApiCreateVerifiableCredentialRequest { + return ApiCreateVerifiableCredentialRequest{ + ApiService: a, + ctx: ctx, + } +} + +// Execute executes the request +// +// @return VerifiableCredentialResponse +func (a *OidcAPIService) CreateVerifiableCredentialExecute(r ApiCreateVerifiableCredentialRequest) (*VerifiableCredentialResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPost + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *VerifiableCredentialResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.CreateVerifiableCredential") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/credentials" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + // body params + localVarPostBody = r.createVerifiableCredentialRequestBody + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v VerifiableCredentialPrimingResponse + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + var v ErrorOAuth2 + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, 
localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -164,7 +298,7 @@ func (a *OidcApiService) CreateOidcDynamicClientExecute(r ApiCreateOidcDynamicCl type ApiDeleteOidcDynamicClientRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService id string } @@ -191,7 +325,7 @@ generated for applications which want to consume your OAuth 2.0 or OpenID Connec @param id The id of the OAuth 2.0 Client. @return ApiDeleteOidcDynamicClientRequest */ -func (a *OidcApiService) DeleteOidcDynamicClient(ctx context.Context, id string) ApiDeleteOidcDynamicClientRequest { +func (a *OidcAPIService) DeleteOidcDynamicClient(ctx context.Context, id string) ApiDeleteOidcDynamicClientRequest { return ApiDeleteOidcDynamicClientRequest{ ApiService: a, ctx: ctx, @@ -200,20 +334,20 @@ func (a *OidcApiService) DeleteOidcDynamicClient(ctx context.Context, id string) } // Execute executes the request -func (a *OidcApiService) DeleteOidcDynamicClientExecute(r ApiDeleteOidcDynamicClientRequest) (*http.Response, error) { +func (a *OidcAPIService) DeleteOidcDynamicClientExecute(r ApiDeleteOidcDynamicClientRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodDelete localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.DeleteOidcDynamicClient") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.DeleteOidcDynamicClient") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/oauth2/register/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -246,9 +380,9 @@ func (a *OidcApiService) DeleteOidcDynamicClientExecute(r ApiDeleteOidcDynamicCl return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -264,6 +398,7 @@ func (a *OidcApiService) DeleteOidcDynamicClientExecute(r ApiDeleteOidcDynamicCl newErr.error = err.Error() return localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarHTTPResponse, newErr } @@ -273,7 +408,7 @@ func (a *OidcApiService) DeleteOidcDynamicClientExecute(r ApiDeleteOidcDynamicCl type ApiDiscoverOidcConfigurationRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService } func (r ApiDiscoverOidcConfigurationRequest) Execute() (*OidcConfiguration, *http.Response, error) { @@ -291,7 +426,7 @@ For a full list of clients go here: https://openid.net/developers/certified/ @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiDiscoverOidcConfigurationRequest */ -func (a *OidcApiService) DiscoverOidcConfiguration(ctx context.Context) ApiDiscoverOidcConfigurationRequest { +func (a *OidcAPIService) DiscoverOidcConfiguration(ctx context.Context) ApiDiscoverOidcConfigurationRequest { return ApiDiscoverOidcConfigurationRequest{ ApiService: a, ctx: ctx, @@ -301,7 +436,7 @@ func (a *OidcApiService) DiscoverOidcConfiguration(ctx context.Context) ApiDisco // Execute executes the request // // @return OidcConfiguration -func (a *OidcApiService) DiscoverOidcConfigurationExecute(r ApiDiscoverOidcConfigurationRequest) (*OidcConfiguration, *http.Response, error) { +func (a *OidcAPIService) DiscoverOidcConfigurationExecute(r ApiDiscoverOidcConfigurationRequest) (*OidcConfiguration, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -309,7 +444,7 @@ func (a *OidcApiService) DiscoverOidcConfigurationExecute(r ApiDiscoverOidcConfi localVarReturnValue *OidcConfiguration ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.DiscoverOidcConfiguration") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.DiscoverOidcConfiguration") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -347,9 +482,9 @@ func (a *OidcApiService) DiscoverOidcConfigurationExecute(r ApiDiscoverOidcConfi return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -365,6 +500,7 @@ func (a *OidcApiService) DiscoverOidcConfigurationExecute(r ApiDiscoverOidcConfi newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -383,7 +519,7 @@ func (a *OidcApiService) DiscoverOidcConfigurationExecute(r ApiDiscoverOidcConfi type ApiGetOidcDynamicClientRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService id string } @@ -406,7 +542,7 @@ If it uses `client_secret_basic`, present the Client ID and the Client Secret in @param id The id of the OAuth 2.0 Client. 
@return ApiGetOidcDynamicClientRequest */ -func (a *OidcApiService) GetOidcDynamicClient(ctx context.Context, id string) ApiGetOidcDynamicClientRequest { +func (a *OidcAPIService) GetOidcDynamicClient(ctx context.Context, id string) ApiGetOidcDynamicClientRequest { return ApiGetOidcDynamicClientRequest{ ApiService: a, ctx: ctx, @@ -417,7 +553,7 @@ func (a *OidcApiService) GetOidcDynamicClient(ctx context.Context, id string) Ap // Execute executes the request // // @return OAuth2Client -func (a *OidcApiService) GetOidcDynamicClientExecute(r ApiGetOidcDynamicClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OidcAPIService) GetOidcDynamicClientExecute(r ApiGetOidcDynamicClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -425,13 +561,13 @@ func (a *OidcApiService) GetOidcDynamicClientExecute(r ApiGetOidcDynamicClientRe localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.GetOidcDynamicClient") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.GetOidcDynamicClient") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/oauth2/register/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -464,9 +600,9 @@ func (a *OidcApiService) GetOidcDynamicClientExecute(r ApiGetOidcDynamicClientRe return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -482,6 +618,7 @@ func (a *OidcApiService) GetOidcDynamicClientExecute(r ApiGetOidcDynamicClientRe newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -500,7 +637,7 @@ func (a *OidcApiService) GetOidcDynamicClientExecute(r ApiGetOidcDynamicClientRe type ApiGetOidcUserInfoRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService } func (r ApiGetOidcUserInfoRequest) Execute() (*OidcUserInfo, *http.Response, error) { @@ -520,7 +657,7 @@ for more details about header format. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiGetOidcUserInfoRequest */ -func (a *OidcApiService) GetOidcUserInfo(ctx context.Context) ApiGetOidcUserInfoRequest { +func (a *OidcAPIService) GetOidcUserInfo(ctx context.Context) ApiGetOidcUserInfoRequest { return ApiGetOidcUserInfoRequest{ ApiService: a, ctx: ctx, @@ -530,7 +667,7 @@ func (a *OidcApiService) GetOidcUserInfo(ctx context.Context) ApiGetOidcUserInfo // Execute executes the request // // @return OidcUserInfo -func (a *OidcApiService) GetOidcUserInfoExecute(r ApiGetOidcUserInfoRequest) (*OidcUserInfo, *http.Response, error) { +func (a *OidcAPIService) GetOidcUserInfoExecute(r ApiGetOidcUserInfoRequest) (*OidcUserInfo, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -538,7 +675,7 @@ func (a *OidcApiService) GetOidcUserInfoExecute(r ApiGetOidcUserInfoRequest) (*O localVarReturnValue *OidcUserInfo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.GetOidcUserInfo") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.GetOidcUserInfo") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -576,9 +713,9 @@ func (a *OidcApiService) GetOidcUserInfoExecute(r ApiGetOidcUserInfoRequest) (*O return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -594,6 +731,7 @@ func (a *OidcApiService) GetOidcUserInfoExecute(r ApiGetOidcUserInfoRequest) (*O newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -612,7 +750,7 @@ func (a *OidcApiService) GetOidcUserInfoExecute(r ApiGetOidcUserInfoRequest) (*O type ApiRevokeOidcSessionRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService } func (r ApiRevokeOidcSessionRequest) Execute() (*http.Response, error) { @@ -632,7 +770,7 @@ Back-channel logout is performed asynchronously and does not affect logout flow. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiRevokeOidcSessionRequest */ -func (a *OidcApiService) RevokeOidcSession(ctx context.Context) ApiRevokeOidcSessionRequest { +func (a *OidcAPIService) RevokeOidcSession(ctx context.Context) ApiRevokeOidcSessionRequest { return ApiRevokeOidcSessionRequest{ ApiService: a, ctx: ctx, @@ -640,14 +778,14 @@ func (a *OidcApiService) RevokeOidcSession(ctx context.Context) ApiRevokeOidcSes } // Execute executes the request -func (a *OidcApiService) RevokeOidcSessionExecute(r ApiRevokeOidcSessionRequest) (*http.Response, error) { +func (a *OidcAPIService) RevokeOidcSessionExecute(r ApiRevokeOidcSessionRequest) (*http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} formFiles []formFile ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.RevokeOidcSession") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.RevokeOidcSession") if err != nil { return nil, &GenericOpenAPIError{error: err.Error()} } @@ -685,9 +823,9 @@ func (a *OidcApiService) RevokeOidcSessionExecute(r ApiRevokeOidcSessionRequest) return localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarHTTPResponse, err } @@ -705,7 +843,7 @@ func (a *OidcApiService) RevokeOidcSessionExecute(r ApiRevokeOidcSessionRequest) type ApiSetOidcDynamicClientRequest struct { ctx context.Context - ApiService *OidcApiService + ApiService *OidcAPIService id string oAuth2Client *OAuth2Client } @@ -743,7 +881,7 @@ generated for applications which want to consume your OAuth 2.0 or OpenID Connec @param id OAuth 2.0 Client ID @return ApiSetOidcDynamicClientRequest */ -func (a *OidcApiService) SetOidcDynamicClient(ctx context.Context, id string) ApiSetOidcDynamicClientRequest { +func (a *OidcAPIService) SetOidcDynamicClient(ctx context.Context, id string) ApiSetOidcDynamicClientRequest { return ApiSetOidcDynamicClientRequest{ ApiService: a, ctx: ctx, @@ -754,7 +892,7 @@ func (a *OidcApiService) SetOidcDynamicClient(ctx context.Context, id string) Ap // Execute executes the request // // @return OAuth2Client -func (a *OidcApiService) SetOidcDynamicClientExecute(r ApiSetOidcDynamicClientRequest) (*OAuth2Client, *http.Response, error) { +func (a *OidcAPIService) SetOidcDynamicClientExecute(r ApiSetOidcDynamicClientRequest) (*OAuth2Client, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} @@ -762,13 +900,13 @@ func (a *OidcApiService) SetOidcDynamicClientExecute(r ApiSetOidcDynamicClientRe localVarReturnValue *OAuth2Client ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcApiService.SetOidcDynamicClient") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "OidcAPIService.SetOidcDynamicClient") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } localVarPath := localBasePath + "/oauth2/register/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterToString(r.id, "")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", url.PathEscape(parameterValueToString(r.id, "id")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -806,9 +944,9 @@ func 
(a *OidcApiService) SetOidcDynamicClientExecute(r ApiSetOidcDynamicClientRe return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -825,6 +963,7 @@ func (a *OidcApiService) SetOidcDynamicClientExecute(r ApiSetOidcDynamicClientRe newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } @@ -834,6 +973,7 @@ func (a *OidcApiService) SetOidcDynamicClientExecute(r ApiSetOidcDynamicClientRe newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } diff --git a/internal/httpclient/api_wellknown.go b/internal/httpclient/api_wellknown.go index 2ba904fa4ef..ed51efc44b9 100644 --- a/internal/httpclient/api_wellknown.go +++ b/internal/httpclient/api_wellknown.go @@ -14,17 +14,17 @@ package openapi import ( "bytes" "context" - "io/ioutil" + "io" "net/http" "net/url" ) -// WellknownApiService WellknownApi service -type WellknownApiService service +// WellknownAPIService WellknownAPI service +type WellknownAPIService service type ApiDiscoverJsonWebKeysRequest struct { ctx context.Context - ApiService *WellknownApiService + ApiService *WellknownAPIService } func (r ApiDiscoverJsonWebKeysRequest) Execute() (*JsonWebKeySet, *http.Response, error) { @@ -38,10 +38,13 @@ This endpoint returns JSON Web Keys required to verifying OpenID Connect ID Toke if enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like [node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others. +Adding custom keys requires first creating a keyset via the createJsonWebKeySet operation, +and then configuring the webfinger.jwks.broadcast_keys configuration value to include the keyset name. + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@return ApiDiscoverJsonWebKeysRequest */ -func (a *WellknownApiService) DiscoverJsonWebKeys(ctx context.Context) ApiDiscoverJsonWebKeysRequest { +func (a *WellknownAPIService) DiscoverJsonWebKeys(ctx context.Context) ApiDiscoverJsonWebKeysRequest { return ApiDiscoverJsonWebKeysRequest{ ApiService: a, ctx: ctx, @@ -51,7 +54,7 @@ func (a *WellknownApiService) DiscoverJsonWebKeys(ctx context.Context) ApiDiscov // Execute executes the request // // @return JsonWebKeySet -func (a *WellknownApiService) DiscoverJsonWebKeysExecute(r ApiDiscoverJsonWebKeysRequest) (*JsonWebKeySet, *http.Response, error) { +func (a *WellknownAPIService) DiscoverJsonWebKeysExecute(r ApiDiscoverJsonWebKeysRequest) (*JsonWebKeySet, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} @@ -59,7 +62,7 @@ func (a *WellknownApiService) DiscoverJsonWebKeysExecute(r ApiDiscoverJsonWebKey localVarReturnValue *JsonWebKeySet ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "WellknownApiService.DiscoverJsonWebKeys") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "WellknownAPIService.DiscoverJsonWebKeys") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } @@ -97,9 +100,9 @@ func (a *WellknownApiService) DiscoverJsonWebKeysExecute(r ApiDiscoverJsonWebKey return localVarReturnValue, localVarHTTPResponse, err } - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) if err != nil { return localVarReturnValue, localVarHTTPResponse, err } @@ -115,6 +118,7 @@ func (a *WellknownApiService) DiscoverJsonWebKeysExecute(r ApiDiscoverJsonWebKey newErr.error = err.Error() return localVarReturnValue, localVarHTTPResponse, newErr } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v return localVarReturnValue, localVarHTTPResponse, newErr } diff --git a/internal/httpclient/client.go b/internal/httpclient/client.go index fe7ccccad0b..bdee9192521 100644 --- a/internal/httpclient/client.go +++ b/internal/httpclient/client.go @@ -19,7 +19,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "log" "mime/multipart" "net/http" @@ -38,8 +37,10 @@ import ( ) var ( - jsonCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:vnd\.[^;]+\+)?json)`) - xmlCheck = regexp.MustCompile(`(?i:(?:application|text)/xml)`) + JsonCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:[^;]+\+)?json)`) + XmlCheck = regexp.MustCompile(`(?i:(?:application|text)/(?:[^;]+\+)?xml)`) + queryParamSplit = regexp.MustCompile(`(^|&)([^&]+)`) + queryDescape = strings.NewReplacer("%5B", "[", "%5D", "]") ) // APIClient manages communication with the Ory Hydra API API v @@ -50,15 +51,15 @@ type APIClient struct { // API Services - JwkApi *JwkApiService + JwkAPI *JwkAPIService - MetadataApi *MetadataApiService + MetadataAPI *MetadataAPIService - OAuth2Api *OAuth2ApiService + OAuth2API *OAuth2APIService - OidcApi *OidcApiService + OidcAPI *OidcAPIService - WellknownApi *WellknownApiService + WellknownAPI *WellknownAPIService } type service struct { @@ -77,11 +78,11 @@ func NewAPIClient(cfg *Configuration) *APIClient { c.common.client = c // API Services - c.JwkApi = (*JwkApiService)(&c.common) - c.MetadataApi = (*MetadataApiService)(&c.common) - c.OAuth2Api = 
(*OAuth2ApiService)(&c.common) - c.OidcApi = (*OidcApiService)(&c.common) - c.WellknownApi = (*WellknownApiService)(&c.common) + c.JwkAPI = (*JwkAPIService)(&c.common) + c.MetadataAPI = (*MetadataAPIService)(&c.common) + c.OAuth2API = (*OAuth2APIService)(&c.common) + c.OidcAPI = (*OidcAPIService)(&c.common) + c.WellknownAPI = (*WellknownAPIService)(&c.common) return c } @@ -133,33 +134,119 @@ func typeCheckParameter(obj interface{}, expected string, name string) error { // Check the type is as expected. if reflect.TypeOf(obj).String() != expected { - return fmt.Errorf("Expected %s to be of type %s but received %s.", name, expected, reflect.TypeOf(obj).String()) + return fmt.Errorf("expected %s to be of type %s but received %s", name, expected, reflect.TypeOf(obj).String()) } return nil } -// parameterToString convert interface{} parameters to string, using a delimiter if format is provided. -func parameterToString(obj interface{}, collectionFormat string) string { - var delimiter string +func parameterValueToString(obj interface{}, key string) string { + if reflect.TypeOf(obj).Kind() != reflect.Ptr { + if actualObj, ok := obj.(interface{ GetActualInstanceValue() interface{} }); ok { + return fmt.Sprintf("%v", actualObj.GetActualInstanceValue()) + } - switch collectionFormat { - case "pipes": - delimiter = "|" - case "ssv": - delimiter = " " - case "tsv": - delimiter = "\t" - case "csv": - delimiter = "," + return fmt.Sprintf("%v", obj) + } + var param, ok = obj.(MappedNullable) + if !ok { + return "" } + dataMap, err := param.ToMap() + if err != nil { + return "" + } + return fmt.Sprintf("%v", dataMap[key]) +} - if reflect.TypeOf(obj).Kind() == reflect.Slice { - return strings.Trim(strings.Replace(fmt.Sprint(obj), " ", delimiter, -1), "[]") - } else if t, ok := obj.(time.Time); ok { - return t.Format(time.RFC3339) +// parameterAddToHeaderOrQuery adds the provided object to the request header or url query +// supporting deep object syntax +func parameterAddToHeaderOrQuery(headerOrQueryParams interface{}, keyPrefix string, obj interface{}, style string, collectionType string) { + var v = reflect.ValueOf(obj) + var value = "" + if v == reflect.ValueOf(nil) { + value = "null" + } else { + switch v.Kind() { + case reflect.Invalid: + value = "invalid" + + case reflect.Struct: + if t, ok := obj.(MappedNullable); ok { + dataMap, err := t.ToMap() + if err != nil { + return + } + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, dataMap, style, collectionType) + return + } + if t, ok := obj.(time.Time); ok { + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, t.Format(time.RFC3339Nano), style, collectionType) + return + } + value = v.Type().String() + " value" + case reflect.Slice: + var indValue = reflect.ValueOf(obj) + if indValue == reflect.ValueOf(nil) { + return + } + var lenIndValue = indValue.Len() + for i := 0; i < lenIndValue; i++ { + var arrayValue = indValue.Index(i) + var keyPrefixForCollectionType = keyPrefix + if style == "deepObject" { + keyPrefixForCollectionType = keyPrefix + "[" + strconv.Itoa(i) + "]" + } + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefixForCollectionType, arrayValue.Interface(), style, collectionType) + } + return + + case reflect.Map: + var indValue = reflect.ValueOf(obj) + if indValue == reflect.ValueOf(nil) { + return + } + iter := indValue.MapRange() + for iter.Next() { + k, v := iter.Key(), iter.Value() + parameterAddToHeaderOrQuery(headerOrQueryParams, fmt.Sprintf("%s[%s]", keyPrefix, k.String()), v.Interface(), style, 
collectionType) + } + return + + case reflect.Interface: + fallthrough + case reflect.Ptr: + parameterAddToHeaderOrQuery(headerOrQueryParams, keyPrefix, v.Elem().Interface(), style, collectionType) + return + + case reflect.Int, reflect.Int8, reflect.Int16, + reflect.Int32, reflect.Int64: + value = strconv.FormatInt(v.Int(), 10) + case reflect.Uint, reflect.Uint8, reflect.Uint16, + reflect.Uint32, reflect.Uint64, reflect.Uintptr: + value = strconv.FormatUint(v.Uint(), 10) + case reflect.Float32, reflect.Float64: + value = strconv.FormatFloat(v.Float(), 'g', -1, 32) + case reflect.Bool: + value = strconv.FormatBool(v.Bool()) + case reflect.String: + value = v.String() + default: + value = v.Type().String() + " value" + } } - return fmt.Sprintf("%v", obj) + switch valuesMap := headerOrQueryParams.(type) { + case url.Values: + if collectionType == "csv" && valuesMap.Get(keyPrefix) != "" { + valuesMap.Set(keyPrefix, valuesMap.Get(keyPrefix)+","+value) + } else { + valuesMap.Add(keyPrefix, value) + } + break + case map[string]string: + valuesMap[keyPrefix] = value + break + } } // helper for converting interface{} parameters to json strings @@ -311,7 +398,11 @@ func (c *APIClient) prepareRequest( } // Encode the parameters. - url.RawQuery = query.Encode() + url.RawQuery = queryParamSplit.ReplaceAllStringFunc(query.Encode(), func(s string) string { + pieces := strings.Split(s, "=") + pieces[0] = queryDescape.Replace(pieces[0]) + return strings.Join(pieces, "=") + }) // Generate a new request if body != nil { @@ -378,8 +469,20 @@ func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err err *s = string(b) return nil } + if f, ok := v.(*os.File); ok { + f, err = os.CreateTemp("", "HttpClientFile") + if err != nil { + return + } + _, err = f.Write(b) + if err != nil { + return + } + _, err = f.Seek(0, io.SeekStart) + return + } if f, ok := v.(**os.File); ok { - *f, err = ioutil.TempFile("", "HttpClientFile") + *f, err = os.CreateTemp("", "HttpClientFile") if err != nil { return } @@ -390,13 +493,13 @@ func (c *APIClient) decode(v interface{}, b []byte, contentType string) (err err _, err = (*f).Seek(0, io.SeekStart) return } - if xmlCheck.MatchString(contentType) { + if XmlCheck.MatchString(contentType) { if err = xml.Unmarshal(b, v); err != nil { return err } return nil } - if jsonCheck.MatchString(contentType) { + if JsonCheck.MatchString(contentType) { if actualObj, ok := v.(interface{ GetActualInstance() interface{} }); ok { // oneOf, anyOf schemas if unmarshalObj, ok := actualObj.(interface{ UnmarshalJSON([]byte) error }); ok { // make sure it has UnmarshalJSON defined if err = unmarshalObj.UnmarshalJSON(b); err != nil { @@ -433,18 +536,6 @@ func addFile(w *multipart.Writer, fieldName, path string) error { return err } -// Prevent trying to import "fmt" -func reportError(format string, a ...interface{}) error { - return fmt.Errorf(format, a...) 
-} - -// A wrapper for strict JSON decoding -func newStrictDecoder(data []byte) *json.Decoder { - dec := json.NewDecoder(bytes.NewBuffer(data)) - dec.DisallowUnknownFields() - return dec -} - // Set request body from an interface{} func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err error) { if bodyBuf == nil { @@ -453,18 +544,22 @@ func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err e if reader, ok := body.(io.Reader); ok { _, err = bodyBuf.ReadFrom(reader) - } else if fp, ok := body.(**os.File); ok { - _, err = bodyBuf.ReadFrom(*fp) + } else if fp, ok := body.(*os.File); ok { + _, err = bodyBuf.ReadFrom(fp) } else if b, ok := body.([]byte); ok { _, err = bodyBuf.Write(b) } else if s, ok := body.(string); ok { _, err = bodyBuf.WriteString(s) } else if s, ok := body.(*string); ok { _, err = bodyBuf.WriteString(*s) - } else if jsonCheck.MatchString(contentType) { + } else if JsonCheck.MatchString(contentType) { err = json.NewEncoder(bodyBuf).Encode(body) - } else if xmlCheck.MatchString(contentType) { - err = xml.NewEncoder(bodyBuf).Encode(body) + } else if XmlCheck.MatchString(contentType) { + var bs []byte + bs, err = xml.Marshal(body) + if err == nil { + bodyBuf.Write(bs) + } } if err != nil { @@ -472,7 +567,7 @@ func setBody(body interface{}, contentType string) (bodyBuf *bytes.Buffer, err e } if bodyBuf.Len() == 0 { - err = fmt.Errorf("Invalid body type %s\n", contentType) + err = fmt.Errorf("invalid body type %s\n", contentType) return nil, err } return bodyBuf, nil @@ -574,3 +669,23 @@ func (e GenericOpenAPIError) Body() []byte { func (e GenericOpenAPIError) Model() interface{} { return e.model } + +// format error message using title and detail when model implements rfc7807 +func formatErrorMessage(status string, v interface{}) string { + str := "" + metaValue := reflect.ValueOf(v).Elem() + + if metaValue.Kind() == reflect.Struct { + field := metaValue.FieldByName("Title") + if field != (reflect.Value{}) { + str = fmt.Sprintf("%s", field.Interface()) + } + + field = metaValue.FieldByName("Detail") + if field != (reflect.Value{}) { + str = fmt.Sprintf("%s (%s)", str, field.Interface()) + } + } + + return strings.TrimSpace(fmt.Sprintf("%s %s", status, str)) +} diff --git a/internal/httpclient/configuration.go b/internal/httpclient/configuration.go index 548fdbb05c1..4a85bc09c1a 100644 --- a/internal/httpclient/configuration.go +++ b/internal/httpclient/configuration.go @@ -38,12 +38,6 @@ var ( // ContextAccessToken takes a string oauth2 access token as authentication for the request. ContextAccessToken = contextKey("accesstoken") - // ContextAPIKeys takes a string apikey as authentication for the request - ContextAPIKeys = contextKey("apiKeys") - - // ContextHttpSignatureAuth takes HttpSignatureAuth as authentication for the request. - ContextHttpSignatureAuth = contextKey("httpsignature") - // ContextServerIndex uses a server configuration from the index. 
ContextServerIndex = contextKey("serverIndex") @@ -123,7 +117,7 @@ func (c *Configuration) AddDefaultHeader(key string, value string) { // URL formats template on a index using given variables func (sc ServerConfigurations) URL(index int, variables map[string]string) (string, error) { if index < 0 || len(sc) <= index { - return "", fmt.Errorf("Index %v out of range %v", index, len(sc)-1) + return "", fmt.Errorf("index %v out of range %v", index, len(sc)-1) } server := sc[index] url := server.URL @@ -138,7 +132,7 @@ func (sc ServerConfigurations) URL(index int, variables map[string]string) (stri } } if !found { - return "", fmt.Errorf("The variable %s in the server URL has invalid value %v. Must be %v", name, value, variable.EnumValues) + return "", fmt.Errorf("the variable %s in the server URL has invalid value %v. Must be %v", name, value, variable.EnumValues) } url = strings.Replace(url, "{"+name+"}", value, -1) } else { diff --git a/internal/httpclient/docs/AcceptDeviceUserCodeRequest.md b/internal/httpclient/docs/AcceptDeviceUserCodeRequest.md new file mode 100644 index 00000000000..2f892922a77 --- /dev/null +++ b/internal/httpclient/docs/AcceptDeviceUserCodeRequest.md @@ -0,0 +1,56 @@ +# AcceptDeviceUserCodeRequest + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**UserCode** | Pointer to **string** | | [optional] + +## Methods + +### NewAcceptDeviceUserCodeRequest + +`func NewAcceptDeviceUserCodeRequest() *AcceptDeviceUserCodeRequest` + +NewAcceptDeviceUserCodeRequest instantiates a new AcceptDeviceUserCodeRequest object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewAcceptDeviceUserCodeRequestWithDefaults + +`func NewAcceptDeviceUserCodeRequestWithDefaults() *AcceptDeviceUserCodeRequest` + +NewAcceptDeviceUserCodeRequestWithDefaults instantiates a new AcceptDeviceUserCodeRequest object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetUserCode + +`func (o *AcceptDeviceUserCodeRequest) GetUserCode() string` + +GetUserCode returns the UserCode field if non-nil, zero value otherwise. + +### GetUserCodeOk + +`func (o *AcceptDeviceUserCodeRequest) GetUserCodeOk() (*string, bool)` + +GetUserCodeOk returns a tuple with the UserCode field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetUserCode + +`func (o *AcceptDeviceUserCodeRequest) SetUserCode(v string)` + +SetUserCode sets UserCode field to given value. + +### HasUserCode + +`func (o *AcceptDeviceUserCodeRequest) HasUserCode() bool` + +HasUserCode returns a boolean if a field has been set. 
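For illustration, a minimal Go sketch of building the AcceptDeviceUserCodeRequest model documented above. The constructor, setter, and Has helper come from the generated doc itself; the import path and the JSON field name in the comment are assumptions, not confirmed by this diff.

```go
package main

import (
	"encoding/json"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path for the published client
)

func main() {
	// UserCode is the only documented property, and it is optional.
	body := openapi.NewAcceptDeviceUserCodeRequest()
	body.SetUserCode("ABCD-EFGH") // hypothetical code the end user typed in

	if body.HasUserCode() {
		b, _ := json.Marshal(body)
		fmt.Println(string(b)) // expected to serialize roughly as {"user_code":"ABCD-EFGH"}
	}
}
```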
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/AcceptOAuth2ConsentRequest.md b/internal/httpclient/docs/AcceptOAuth2ConsentRequest.md index ec518b5d77d..1bcf19aea86 100644 --- a/internal/httpclient/docs/AcceptOAuth2ConsentRequest.md +++ b/internal/httpclient/docs/AcceptOAuth2ConsentRequest.md @@ -4,9 +4,9 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**Context** | Pointer to **interface{}** | | [optional] **GrantAccessTokenAudience** | Pointer to **[]string** | | [optional] **GrantScope** | Pointer to **[]string** | | [optional] -**HandledAt** | Pointer to **time.Time** | | [optional] **Remember** | Pointer to **bool** | Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same client asks the same user for the same, or a subset of, scope. | [optional] **RememberFor** | Pointer to **int64** | RememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the authorization will be remembered indefinitely. | [optional] **Session** | Pointer to [**AcceptOAuth2ConsentRequestSession**](AcceptOAuth2ConsentRequestSession.md) | | [optional] @@ -30,6 +30,41 @@ NewAcceptOAuth2ConsentRequestWithDefaults instantiates a new AcceptOAuth2Consent This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set +### GetContext + +`func (o *AcceptOAuth2ConsentRequest) GetContext() interface{}` + +GetContext returns the Context field if non-nil, zero value otherwise. + +### GetContextOk + +`func (o *AcceptOAuth2ConsentRequest) GetContextOk() (*interface{}, bool)` + +GetContextOk returns a tuple with the Context field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetContext + +`func (o *AcceptOAuth2ConsentRequest) SetContext(v interface{})` + +SetContext sets Context field to given value. + +### HasContext + +`func (o *AcceptOAuth2ConsentRequest) HasContext() bool` + +HasContext returns a boolean if a field has been set. + +### SetContextNil + +`func (o *AcceptOAuth2ConsentRequest) SetContextNil(b bool)` + + SetContextNil sets the value for Context to be an explicit nil + +### UnsetContext +`func (o *AcceptOAuth2ConsentRequest) UnsetContext()` + +UnsetContext ensures that no value is present for Context, not even an explicit nil ### GetGrantAccessTokenAudience `func (o *AcceptOAuth2ConsentRequest) GetGrantAccessTokenAudience() []string` @@ -80,31 +115,6 @@ SetGrantScope sets GrantScope field to given value. HasGrantScope returns a boolean if a field has been set. -### GetHandledAt - -`func (o *AcceptOAuth2ConsentRequest) GetHandledAt() time.Time` - -GetHandledAt returns the HandledAt field if non-nil, zero value otherwise. - -### GetHandledAtOk - -`func (o *AcceptOAuth2ConsentRequest) GetHandledAtOk() (*time.Time, bool)` - -GetHandledAtOk returns a tuple with the HandledAt field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetHandledAt - -`func (o *AcceptOAuth2ConsentRequest) SetHandledAt(v time.Time)` - -SetHandledAt sets HandledAt field to given value. - -### HasHandledAt - -`func (o *AcceptOAuth2ConsentRequest) HasHandledAt() bool` - -HasHandledAt returns a boolean if a field has been set. 
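The Context property added to AcceptOAuth2ConsentRequest above is a nullable interface{}, so the generated model distinguishes between "unset", "explicitly null", and "set to a value". A hedged sketch of the three states, reusing the assumed openapi import from the previous sketch; the constructor follows the generator's usual naming and is not shown in this hunk.

```go
// Assumed constructor, per the generator's pattern for other models in this diff.
consent := openapi.NewAcceptOAuth2ConsentRequest()
consent.SetContext(map[string]interface{}{"tenant": "acme"}) // arbitrary example payload

consent.SetContextNil(true) // keeps the field present but serializes it as an explicit null
consent.UnsetContext()      // removes the field so it is omitted from the request body entirely
```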
- ### GetRemember `func (o *AcceptOAuth2ConsentRequest) GetRemember() bool` diff --git a/internal/httpclient/docs/AcceptOAuth2LoginRequest.md b/internal/httpclient/docs/AcceptOAuth2LoginRequest.md index 80bf2c5ef5e..93b47523a32 100644 --- a/internal/httpclient/docs/AcceptOAuth2LoginRequest.md +++ b/internal/httpclient/docs/AcceptOAuth2LoginRequest.md @@ -4,11 +4,13 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**Acr** | Pointer to **string** | ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two factor authentication. | [optional] +**Acr** | Pointer to **string** | ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two-factor authentication. | [optional] **Amr** | Pointer to **[]string** | | [optional] **Context** | Pointer to **interface{}** | | [optional] +**ExtendSessionLifespan** | Pointer to **bool** | Extend OAuth2 authentication session lifespan If set to `true`, the OAuth2 authentication cookie lifespan is extended. This is for example useful if you want the user to be able to use `prompt=none` continuously. This value can only be set to `true` if the user has an authentication, which is the case if the `skip` value is `true`. | [optional] **ForceSubjectIdentifier** | Pointer to **string** | ForceSubjectIdentifier forces the \"pairwise\" user ID of the end-user that authenticated. The \"pairwise\" user ID refers to the (Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID Connect specification. It allows you to set an obfuscated subject (\"user\") identifier that is unique to the client. Please note that this changes the user ID on endpoint /userinfo and sub claim of the ID Token. It does not change the sub claim in the OAuth 2.0 Introspection. Per default, ORY Hydra handles this value with its own algorithm. In case you want to set this yourself you can use this field. Please note that setting this field has no effect if `pairwise` is not configured in ORY Hydra or the OAuth 2.0 Client does not expect a pairwise identifier (set via `subject_type` key in the client's configuration). Please also be aware that ORY Hydra is unable to properly compute this value during authentication. This implies that you have to compute this value on every authentication process (probably depending on the client ID or some other unique value). If you fail to compute the proper value, then authentication processes which have id_token_hint set might fail. | [optional] -**Remember** | Pointer to **bool** | Remember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she will not be asked to log in again. | [optional] +**IdentityProviderSessionId** | Pointer to **string** | IdentityProviderSessionID is the session ID of the end-user that authenticated. If specified, we will use this value to propagate the logout. | [optional] +**Remember** | Pointer to **bool** | Remember, if set to true, tells Ory Hydra to remember this user by telling the user agent (browser) to store a cookie with authentication data. 
If the same user performs another OAuth 2.0 Authorization Request, they will not be asked to log in again. | [optional] **RememberFor** | Pointer to **int64** | RememberFor sets how long the authentication should be remembered for in seconds. If set to `0`, the authorization will be remembered for the duration of the browser session (using a session cookie). | [optional] **Subject** | **string** | Subject is the user ID of the end-user that authenticated. | @@ -116,6 +118,31 @@ HasContext returns a boolean if a field has been set. `func (o *AcceptOAuth2LoginRequest) UnsetContext()` UnsetContext ensures that no value is present for Context, not even an explicit nil +### GetExtendSessionLifespan + +`func (o *AcceptOAuth2LoginRequest) GetExtendSessionLifespan() bool` + +GetExtendSessionLifespan returns the ExtendSessionLifespan field if non-nil, zero value otherwise. + +### GetExtendSessionLifespanOk + +`func (o *AcceptOAuth2LoginRequest) GetExtendSessionLifespanOk() (*bool, bool)` + +GetExtendSessionLifespanOk returns a tuple with the ExtendSessionLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetExtendSessionLifespan + +`func (o *AcceptOAuth2LoginRequest) SetExtendSessionLifespan(v bool)` + +SetExtendSessionLifespan sets ExtendSessionLifespan field to given value. + +### HasExtendSessionLifespan + +`func (o *AcceptOAuth2LoginRequest) HasExtendSessionLifespan() bool` + +HasExtendSessionLifespan returns a boolean if a field has been set. + ### GetForceSubjectIdentifier `func (o *AcceptOAuth2LoginRequest) GetForceSubjectIdentifier() string` @@ -141,6 +168,31 @@ SetForceSubjectIdentifier sets ForceSubjectIdentifier field to given value. HasForceSubjectIdentifier returns a boolean if a field has been set. +### GetIdentityProviderSessionId + +`func (o *AcceptOAuth2LoginRequest) GetIdentityProviderSessionId() string` + +GetIdentityProviderSessionId returns the IdentityProviderSessionId field if non-nil, zero value otherwise. + +### GetIdentityProviderSessionIdOk + +`func (o *AcceptOAuth2LoginRequest) GetIdentityProviderSessionIdOk() (*string, bool)` + +GetIdentityProviderSessionIdOk returns a tuple with the IdentityProviderSessionId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetIdentityProviderSessionId + +`func (o *AcceptOAuth2LoginRequest) SetIdentityProviderSessionId(v string)` + +SetIdentityProviderSessionId sets IdentityProviderSessionId field to given value. + +### HasIdentityProviderSessionId + +`func (o *AcceptOAuth2LoginRequest) HasIdentityProviderSessionId() bool` + +HasIdentityProviderSessionId returns a boolean if a field has been set. 
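ExtendSessionLifespan and IdentityProviderSessionId are the new knobs on the login-acceptance payload: the first keeps the OAuth2 authentication cookie alive (useful for continuous prompt=none flows), the second records the upstream identity-provider session so logout can be propagated. A hedged sketch using the setters documented above; the constructor and the admin call that consumes this payload are assumed from the generator's conventions and are not part of this hunk.

```go
// Assumed constructor; per the property table above, Subject is the only required field.
login := openapi.NewAcceptOAuth2LoginRequest("user-123")
login.SetExtendSessionLifespan(true)                 // extend the authentication cookie, e.g. for prompt=none clients
login.SetIdentityProviderSessionId("idp-session-42") // upstream session ID so logout can be propagated
```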
+ ### GetRemember `func (o *AcceptOAuth2LoginRequest) GetRemember() bool` diff --git a/internal/httpclient/docs/CreateVerifiableCredentialRequestBody.md b/internal/httpclient/docs/CreateVerifiableCredentialRequestBody.md new file mode 100644 index 00000000000..24a2834e86b --- /dev/null +++ b/internal/httpclient/docs/CreateVerifiableCredentialRequestBody.md @@ -0,0 +1,108 @@ +# CreateVerifiableCredentialRequestBody + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Format** | Pointer to **string** | | [optional] +**Proof** | Pointer to [**VerifiableCredentialProof**](VerifiableCredentialProof.md) | | [optional] +**Types** | Pointer to **[]string** | | [optional] + +## Methods + +### NewCreateVerifiableCredentialRequestBody + +`func NewCreateVerifiableCredentialRequestBody() *CreateVerifiableCredentialRequestBody` + +NewCreateVerifiableCredentialRequestBody instantiates a new CreateVerifiableCredentialRequestBody object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewCreateVerifiableCredentialRequestBodyWithDefaults + +`func NewCreateVerifiableCredentialRequestBodyWithDefaults() *CreateVerifiableCredentialRequestBody` + +NewCreateVerifiableCredentialRequestBodyWithDefaults instantiates a new CreateVerifiableCredentialRequestBody object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetFormat + +`func (o *CreateVerifiableCredentialRequestBody) GetFormat() string` + +GetFormat returns the Format field if non-nil, zero value otherwise. + +### GetFormatOk + +`func (o *CreateVerifiableCredentialRequestBody) GetFormatOk() (*string, bool)` + +GetFormatOk returns a tuple with the Format field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFormat + +`func (o *CreateVerifiableCredentialRequestBody) SetFormat(v string)` + +SetFormat sets Format field to given value. + +### HasFormat + +`func (o *CreateVerifiableCredentialRequestBody) HasFormat() bool` + +HasFormat returns a boolean if a field has been set. + +### GetProof + +`func (o *CreateVerifiableCredentialRequestBody) GetProof() VerifiableCredentialProof` + +GetProof returns the Proof field if non-nil, zero value otherwise. + +### GetProofOk + +`func (o *CreateVerifiableCredentialRequestBody) GetProofOk() (*VerifiableCredentialProof, bool)` + +GetProofOk returns a tuple with the Proof field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetProof + +`func (o *CreateVerifiableCredentialRequestBody) SetProof(v VerifiableCredentialProof)` + +SetProof sets Proof field to given value. + +### HasProof + +`func (o *CreateVerifiableCredentialRequestBody) HasProof() bool` + +HasProof returns a boolean if a field has been set. + +### GetTypes + +`func (o *CreateVerifiableCredentialRequestBody) GetTypes() []string` + +GetTypes returns the Types field if non-nil, zero value otherwise. + +### GetTypesOk + +`func (o *CreateVerifiableCredentialRequestBody) GetTypesOk() (*[]string, bool)` + +GetTypesOk returns a tuple with the Types field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetTypes + +`func (o *CreateVerifiableCredentialRequestBody) SetTypes(v []string)` + +SetTypes sets Types field to given value. + +### HasTypes + +`func (o *CreateVerifiableCredentialRequestBody) HasTypes() bool` + +HasTypes returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/CredentialSupportedDraft00.md b/internal/httpclient/docs/CredentialSupportedDraft00.md new file mode 100644 index 00000000000..1738f518fa8 --- /dev/null +++ b/internal/httpclient/docs/CredentialSupportedDraft00.md @@ -0,0 +1,134 @@ +# CredentialSupportedDraft00 + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CryptographicBindingMethodsSupported** | Pointer to **[]string** | OpenID Connect Verifiable Credentials Cryptographic Binding Methods Supported Contains a list of cryptographic binding methods supported for signing the proof. | [optional] +**CryptographicSuitesSupported** | Pointer to **[]string** | OpenID Connect Verifiable Credentials Cryptographic Suites Supported Contains a list of cryptographic suites methods supported for signing the proof. | [optional] +**Format** | Pointer to **string** | OpenID Connect Verifiable Credentials Format Contains the format that is supported by this authorization server. | [optional] +**Types** | Pointer to **[]string** | OpenID Connect Verifiable Credentials Types Contains the types of verifiable credentials supported. | [optional] + +## Methods + +### NewCredentialSupportedDraft00 + +`func NewCredentialSupportedDraft00() *CredentialSupportedDraft00` + +NewCredentialSupportedDraft00 instantiates a new CredentialSupportedDraft00 object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewCredentialSupportedDraft00WithDefaults + +`func NewCredentialSupportedDraft00WithDefaults() *CredentialSupportedDraft00` + +NewCredentialSupportedDraft00WithDefaults instantiates a new CredentialSupportedDraft00 object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetCryptographicBindingMethodsSupported + +`func (o *CredentialSupportedDraft00) GetCryptographicBindingMethodsSupported() []string` + +GetCryptographicBindingMethodsSupported returns the CryptographicBindingMethodsSupported field if non-nil, zero value otherwise. + +### GetCryptographicBindingMethodsSupportedOk + +`func (o *CredentialSupportedDraft00) GetCryptographicBindingMethodsSupportedOk() (*[]string, bool)` + +GetCryptographicBindingMethodsSupportedOk returns a tuple with the CryptographicBindingMethodsSupported field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCryptographicBindingMethodsSupported + +`func (o *CredentialSupportedDraft00) SetCryptographicBindingMethodsSupported(v []string)` + +SetCryptographicBindingMethodsSupported sets CryptographicBindingMethodsSupported field to given value. + +### HasCryptographicBindingMethodsSupported + +`func (o *CredentialSupportedDraft00) HasCryptographicBindingMethodsSupported() bool` + +HasCryptographicBindingMethodsSupported returns a boolean if a field has been set. 
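To make the accessor pattern concrete, a short sketch that populates and reads a `CredentialSupportedDraft00` entry. All values are illustrative only, not defaults advertised by any authorization server:

```go
package main

import (
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	cred := openapiclient.NewCredentialSupportedDraft00()
	cred.SetFormat("jwt_vc_json")                   // illustrative format
	cred.SetTypes([]string{"VerifiableCredential"}) // illustrative type list
	cred.SetCryptographicBindingMethodsSupported([]string{"jwk"})
	cred.SetCryptographicSuitesSupported([]string{"ES256"})

	if cred.HasFormat() {
		fmt.Printf("format=%s types=%v\n", cred.GetFormat(), cred.GetTypes())
	}
}
```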
+ +### GetCryptographicSuitesSupported + +`func (o *CredentialSupportedDraft00) GetCryptographicSuitesSupported() []string` + +GetCryptographicSuitesSupported returns the CryptographicSuitesSupported field if non-nil, zero value otherwise. + +### GetCryptographicSuitesSupportedOk + +`func (o *CredentialSupportedDraft00) GetCryptographicSuitesSupportedOk() (*[]string, bool)` + +GetCryptographicSuitesSupportedOk returns a tuple with the CryptographicSuitesSupported field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCryptographicSuitesSupported + +`func (o *CredentialSupportedDraft00) SetCryptographicSuitesSupported(v []string)` + +SetCryptographicSuitesSupported sets CryptographicSuitesSupported field to given value. + +### HasCryptographicSuitesSupported + +`func (o *CredentialSupportedDraft00) HasCryptographicSuitesSupported() bool` + +HasCryptographicSuitesSupported returns a boolean if a field has been set. + +### GetFormat + +`func (o *CredentialSupportedDraft00) GetFormat() string` + +GetFormat returns the Format field if non-nil, zero value otherwise. + +### GetFormatOk + +`func (o *CredentialSupportedDraft00) GetFormatOk() (*string, bool)` + +GetFormatOk returns a tuple with the Format field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFormat + +`func (o *CredentialSupportedDraft00) SetFormat(v string)` + +SetFormat sets Format field to given value. + +### HasFormat + +`func (o *CredentialSupportedDraft00) HasFormat() bool` + +HasFormat returns a boolean if a field has been set. + +### GetTypes + +`func (o *CredentialSupportedDraft00) GetTypes() []string` + +GetTypes returns the Types field if non-nil, zero value otherwise. + +### GetTypesOk + +`func (o *CredentialSupportedDraft00) GetTypesOk() (*[]string, bool)` + +GetTypesOk returns a tuple with the Types field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTypes + +`func (o *CredentialSupportedDraft00) SetTypes(v []string)` + +SetTypes sets Types field to given value. + +### HasTypes + +`func (o *CredentialSupportedDraft00) HasTypes() bool` + +HasTypes returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/DeviceAuthorization.md b/internal/httpclient/docs/DeviceAuthorization.md new file mode 100644 index 00000000000..4ba933a4b24 --- /dev/null +++ b/internal/httpclient/docs/DeviceAuthorization.md @@ -0,0 +1,186 @@ +# DeviceAuthorization + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**DeviceCode** | Pointer to **string** | The device verification code. | [optional] +**ExpiresIn** | Pointer to **int64** | The lifetime in seconds of the \"device_code\" and \"user_code\". | [optional] +**Interval** | Pointer to **int64** | The minimum amount of time in seconds that the client SHOULD wait between polling requests to the token endpoint. If no value is provided, clients MUST use 5 as the default. | [optional] +**UserCode** | Pointer to **string** | The end-user verification code. | [optional] +**VerificationUri** | Pointer to **string** | The end-user verification URI on the authorization server. The URI should be short and easy to remember as end users will be asked to manually type it into their user agent. 
| [optional] +**VerificationUriComplete** | Pointer to **string** | A verification URI that includes the \"user_code\" (or other information with the same function as the \"user_code\"), which is designed for non-textual transmission. | [optional] + +## Methods + +### NewDeviceAuthorization + +`func NewDeviceAuthorization() *DeviceAuthorization` + +NewDeviceAuthorization instantiates a new DeviceAuthorization object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewDeviceAuthorizationWithDefaults + +`func NewDeviceAuthorizationWithDefaults() *DeviceAuthorization` + +NewDeviceAuthorizationWithDefaults instantiates a new DeviceAuthorization object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetDeviceCode + +`func (o *DeviceAuthorization) GetDeviceCode() string` + +GetDeviceCode returns the DeviceCode field if non-nil, zero value otherwise. + +### GetDeviceCodeOk + +`func (o *DeviceAuthorization) GetDeviceCodeOk() (*string, bool)` + +GetDeviceCodeOk returns a tuple with the DeviceCode field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceCode + +`func (o *DeviceAuthorization) SetDeviceCode(v string)` + +SetDeviceCode sets DeviceCode field to given value. + +### HasDeviceCode + +`func (o *DeviceAuthorization) HasDeviceCode() bool` + +HasDeviceCode returns a boolean if a field has been set. + +### GetExpiresIn + +`func (o *DeviceAuthorization) GetExpiresIn() int64` + +GetExpiresIn returns the ExpiresIn field if non-nil, zero value otherwise. + +### GetExpiresInOk + +`func (o *DeviceAuthorization) GetExpiresInOk() (*int64, bool)` + +GetExpiresInOk returns a tuple with the ExpiresIn field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetExpiresIn + +`func (o *DeviceAuthorization) SetExpiresIn(v int64)` + +SetExpiresIn sets ExpiresIn field to given value. + +### HasExpiresIn + +`func (o *DeviceAuthorization) HasExpiresIn() bool` + +HasExpiresIn returns a boolean if a field has been set. + +### GetInterval + +`func (o *DeviceAuthorization) GetInterval() int64` + +GetInterval returns the Interval field if non-nil, zero value otherwise. + +### GetIntervalOk + +`func (o *DeviceAuthorization) GetIntervalOk() (*int64, bool)` + +GetIntervalOk returns a tuple with the Interval field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetInterval + +`func (o *DeviceAuthorization) SetInterval(v int64)` + +SetInterval sets Interval field to given value. + +### HasInterval + +`func (o *DeviceAuthorization) HasInterval() bool` + +HasInterval returns a boolean if a field has been set. + +### GetUserCode + +`func (o *DeviceAuthorization) GetUserCode() string` + +GetUserCode returns the UserCode field if non-nil, zero value otherwise. + +### GetUserCodeOk + +`func (o *DeviceAuthorization) GetUserCodeOk() (*string, bool)` + +GetUserCodeOk returns a tuple with the UserCode field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetUserCode + +`func (o *DeviceAuthorization) SetUserCode(v string)` + +SetUserCode sets UserCode field to given value. 
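A minimal device-flow sketch using these accessors. The user code and verification URI below are placeholders and the object is constructed by hand purely for illustration; in practice a `DeviceAuthorization` comes back from the device authorization endpoint:

```go
package main

import (
	"fmt"
	"time"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Hand-constructed for illustration only.
	da := openapiclient.NewDeviceAuthorization()
	da.SetUserCode("WDJB-MJHT")
	da.SetVerificationUri("https://auth.example.org/oauth2/device/verify")

	fmt.Printf("Visit %s and enter code %s\n", da.GetVerificationUri(), da.GetUserCode())

	// The Interval description says clients MUST use 5 seconds when no value
	// is provided, so fall back to that default.
	pollEvery := int64(5)
	if v, ok := da.GetIntervalOk(); ok {
		pollEvery = *v
	}
	fmt.Println("poll the token endpoint every", time.Duration(pollEvery)*time.Second)
}
```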
+ +### HasUserCode + +`func (o *DeviceAuthorization) HasUserCode() bool` + +HasUserCode returns a boolean if a field has been set. + +### GetVerificationUri + +`func (o *DeviceAuthorization) GetVerificationUri() string` + +GetVerificationUri returns the VerificationUri field if non-nil, zero value otherwise. + +### GetVerificationUriOk + +`func (o *DeviceAuthorization) GetVerificationUriOk() (*string, bool)` + +GetVerificationUriOk returns a tuple with the VerificationUri field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetVerificationUri + +`func (o *DeviceAuthorization) SetVerificationUri(v string)` + +SetVerificationUri sets VerificationUri field to given value. + +### HasVerificationUri + +`func (o *DeviceAuthorization) HasVerificationUri() bool` + +HasVerificationUri returns a boolean if a field has been set. + +### GetVerificationUriComplete + +`func (o *DeviceAuthorization) GetVerificationUriComplete() string` + +GetVerificationUriComplete returns the VerificationUriComplete field if non-nil, zero value otherwise. + +### GetVerificationUriCompleteOk + +`func (o *DeviceAuthorization) GetVerificationUriCompleteOk() (*string, bool)` + +GetVerificationUriCompleteOk returns a tuple with the VerificationUriComplete field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetVerificationUriComplete + +`func (o *DeviceAuthorization) SetVerificationUriComplete(v string)` + +SetVerificationUriComplete sets VerificationUriComplete field to given value. + +### HasVerificationUriComplete + +`func (o *DeviceAuthorization) HasVerificationUriComplete() bool` + +HasVerificationUriComplete returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/DeviceUserAuthRequest.md b/internal/httpclient/docs/DeviceUserAuthRequest.md new file mode 100644 index 00000000000..ae99e6223ff --- /dev/null +++ b/internal/httpclient/docs/DeviceUserAuthRequest.md @@ -0,0 +1,181 @@ +# DeviceUserAuthRequest + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Challenge** | **string** | ID is the identifier (\"device challenge\") of the device grant request. It is used to identify the session. | +**Client** | Pointer to [**OAuth2Client**](OAuth2Client.md) | | [optional] +**HandledAt** | Pointer to **time.Time** | | [optional] +**RequestUrl** | Pointer to **string** | RequestURL is the original Device Authorization URL requested. 
| [optional] +**RequestedAccessTokenAudience** | Pointer to **[]string** | | [optional] +**RequestedScope** | Pointer to **[]string** | | [optional] + +## Methods + +### NewDeviceUserAuthRequest + +`func NewDeviceUserAuthRequest(challenge string, ) *DeviceUserAuthRequest` + +NewDeviceUserAuthRequest instantiates a new DeviceUserAuthRequest object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewDeviceUserAuthRequestWithDefaults + +`func NewDeviceUserAuthRequestWithDefaults() *DeviceUserAuthRequest` + +NewDeviceUserAuthRequestWithDefaults instantiates a new DeviceUserAuthRequest object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetChallenge + +`func (o *DeviceUserAuthRequest) GetChallenge() string` + +GetChallenge returns the Challenge field if non-nil, zero value otherwise. + +### GetChallengeOk + +`func (o *DeviceUserAuthRequest) GetChallengeOk() (*string, bool)` + +GetChallengeOk returns a tuple with the Challenge field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetChallenge + +`func (o *DeviceUserAuthRequest) SetChallenge(v string)` + +SetChallenge sets Challenge field to given value. + + +### GetClient + +`func (o *DeviceUserAuthRequest) GetClient() OAuth2Client` + +GetClient returns the Client field if non-nil, zero value otherwise. + +### GetClientOk + +`func (o *DeviceUserAuthRequest) GetClientOk() (*OAuth2Client, bool)` + +GetClientOk returns a tuple with the Client field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetClient + +`func (o *DeviceUserAuthRequest) SetClient(v OAuth2Client)` + +SetClient sets Client field to given value. + +### HasClient + +`func (o *DeviceUserAuthRequest) HasClient() bool` + +HasClient returns a boolean if a field has been set. + +### GetHandledAt + +`func (o *DeviceUserAuthRequest) GetHandledAt() time.Time` + +GetHandledAt returns the HandledAt field if non-nil, zero value otherwise. + +### GetHandledAtOk + +`func (o *DeviceUserAuthRequest) GetHandledAtOk() (*time.Time, bool)` + +GetHandledAtOk returns a tuple with the HandledAt field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetHandledAt + +`func (o *DeviceUserAuthRequest) SetHandledAt(v time.Time)` + +SetHandledAt sets HandledAt field to given value. + +### HasHandledAt + +`func (o *DeviceUserAuthRequest) HasHandledAt() bool` + +HasHandledAt returns a boolean if a field has been set. + +### GetRequestUrl + +`func (o *DeviceUserAuthRequest) GetRequestUrl() string` + +GetRequestUrl returns the RequestUrl field if non-nil, zero value otherwise. + +### GetRequestUrlOk + +`func (o *DeviceUserAuthRequest) GetRequestUrlOk() (*string, bool)` + +GetRequestUrlOk returns a tuple with the RequestUrl field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestUrl + +`func (o *DeviceUserAuthRequest) SetRequestUrl(v string)` + +SetRequestUrl sets RequestUrl field to given value. + +### HasRequestUrl + +`func (o *DeviceUserAuthRequest) HasRequestUrl() bool` + +HasRequestUrl returns a boolean if a field has been set. 
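As a rough sketch of how a login/consent app might inspect such a request through these accessors. The challenge and scopes are placeholders; in practice the object is obtained from Ory Hydra rather than constructed by hand:

```go
package main

import (
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Constructed by hand purely for illustration.
	req := openapiclient.NewDeviceUserAuthRequest("device-challenge-placeholder")
	req.SetRequestedScope([]string{"openid", "offline"})

	fmt.Println("challenge:", req.GetChallenge())
	if req.HasRequestedScope() {
		fmt.Println("requested scope:", req.GetRequestedScope())
	}
}
```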
+ +### GetRequestedAccessTokenAudience + +`func (o *DeviceUserAuthRequest) GetRequestedAccessTokenAudience() []string` + +GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field if non-nil, zero value otherwise. + +### GetRequestedAccessTokenAudienceOk + +`func (o *DeviceUserAuthRequest) GetRequestedAccessTokenAudienceOk() (*[]string, bool)` + +GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestedAccessTokenAudience + +`func (o *DeviceUserAuthRequest) SetRequestedAccessTokenAudience(v []string)` + +SetRequestedAccessTokenAudience sets RequestedAccessTokenAudience field to given value. + +### HasRequestedAccessTokenAudience + +`func (o *DeviceUserAuthRequest) HasRequestedAccessTokenAudience() bool` + +HasRequestedAccessTokenAudience returns a boolean if a field has been set. + +### GetRequestedScope + +`func (o *DeviceUserAuthRequest) GetRequestedScope() []string` + +GetRequestedScope returns the RequestedScope field if non-nil, zero value otherwise. + +### GetRequestedScopeOk + +`func (o *DeviceUserAuthRequest) GetRequestedScopeOk() (*[]string, bool)` + +GetRequestedScopeOk returns a tuple with the RequestedScope field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestedScope + +`func (o *DeviceUserAuthRequest) SetRequestedScope(v []string)` + +SetRequestedScope sets RequestedScope field to given value. + +### HasRequestedScope + +`func (o *DeviceUserAuthRequest) HasRequestedScope() bool` + +HasRequestedScope returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/IntrospectedOAuth2Token.md b/internal/httpclient/docs/IntrospectedOAuth2Token.md index fc71cd578e2..573ce34c91c 100644 --- a/internal/httpclient/docs/IntrospectedOAuth2Token.md +++ b/internal/httpclient/docs/IntrospectedOAuth2Token.md @@ -6,7 +6,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Active** | **bool** | Active is a boolean indicator of whether or not the presented token is currently active. The specifics of a token's \"active\" state will vary depending on the implementation of the authorization server and the information it keeps about its tokens, but a \"true\" value return for the \"active\" property will generally indicate that a given token has been issued by this authorization server, has not been revoked by the resource owner, and is within its given time window of validity (e.g., after its issuance time and before its expiration time). | **Aud** | Pointer to **[]string** | Audience contains a list of the token's intended audiences. | [optional] -**ClientId** | Pointer to **string** | ID is aclient identifier for the OAuth 2.0 client that requested this token. | [optional] +**ClientId** | Pointer to **string** | ID is a client identifier for the OAuth 2.0 client that requested this token. | [optional] **Exp** | Pointer to **int64** | Expires at is an integer timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token will expire. | [optional] **Ext** | Pointer to **map[string]interface{}** | Extra is arbitrary data set by the session. 
| [optional] **Iat** | Pointer to **int64** | Issued at is an integer timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token was originally issued. | [optional] diff --git a/internal/httpclient/docs/JwkApi.md b/internal/httpclient/docs/JwkAPI.md similarity index 61% rename from internal/httpclient/docs/JwkApi.md rename to internal/httpclient/docs/JwkAPI.md index 3527e0b9622..73db4fd70ee 100644 --- a/internal/httpclient/docs/JwkApi.md +++ b/internal/httpclient/docs/JwkAPI.md @@ -1,16 +1,16 @@ -# \JwkApi +# \JwkAPI All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- -[**CreateJsonWebKeySet**](JwkApi.md#CreateJsonWebKeySet) | **Post** /admin/keys/{set} | Create JSON Web Key -[**DeleteJsonWebKey**](JwkApi.md#DeleteJsonWebKey) | **Delete** /admin/keys/{set}/{kid} | Delete JSON Web Key -[**DeleteJsonWebKeySet**](JwkApi.md#DeleteJsonWebKeySet) | **Delete** /admin/keys/{set} | Delete JSON Web Key Set -[**GetJsonWebKey**](JwkApi.md#GetJsonWebKey) | **Get** /admin/keys/{set}/{kid} | Get JSON Web Key -[**GetJsonWebKeySet**](JwkApi.md#GetJsonWebKeySet) | **Get** /admin/keys/{set} | Retrieve a JSON Web Key Set -[**SetJsonWebKey**](JwkApi.md#SetJsonWebKey) | **Put** /admin/keys/{set}/{kid} | Set JSON Web Key -[**SetJsonWebKeySet**](JwkApi.md#SetJsonWebKeySet) | **Put** /admin/keys/{set} | Update a JSON Web Key Set +[**CreateJsonWebKeySet**](JwkAPI.md#CreateJsonWebKeySet) | **Post** /admin/keys/{set} | Create JSON Web Key +[**DeleteJsonWebKey**](JwkAPI.md#DeleteJsonWebKey) | **Delete** /admin/keys/{set}/{kid} | Delete JSON Web Key +[**DeleteJsonWebKeySet**](JwkAPI.md#DeleteJsonWebKeySet) | **Delete** /admin/keys/{set} | Delete JSON Web Key Set +[**GetJsonWebKey**](JwkAPI.md#GetJsonWebKey) | **Get** /admin/keys/{set}/{kid} | Get JSON Web Key +[**GetJsonWebKeySet**](JwkAPI.md#GetJsonWebKeySet) | **Get** /admin/keys/{set} | Retrieve a JSON Web Key Set +[**SetJsonWebKey**](JwkAPI.md#SetJsonWebKey) | **Put** /admin/keys/{set}/{kid} | Set JSON Web Key +[**SetJsonWebKeySet**](JwkAPI.md#SetJsonWebKeySet) | **Put** /admin/keys/{set} | Update a JSON Web Key Set @@ -28,25 +28,25 @@ Create JSON Web Key package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | The JSON Web Key Set ID - createJsonWebKeySet := *openapiclient.NewCreateJsonWebKeySet("Alg_example", "Kid_example", "Use_example") // CreateJsonWebKeySet | - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.CreateJsonWebKeySet(context.Background(), set).CreateJsonWebKeySet(createJsonWebKeySet).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.CreateJsonWebKeySet``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `CreateJsonWebKeySet`: JsonWebKeySet - fmt.Fprintf(os.Stdout, "Response from `JwkApi.CreateJsonWebKeySet`: %v\n", resp) + set := "set_example" // string | The JSON Web Key Set ID + createJsonWebKeySet := *openapiclient.NewCreateJsonWebKeySet("Alg_example", "Kid_example", "Use_example") // CreateJsonWebKeySet | + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.JwkAPI.CreateJsonWebKeySet(context.Background(), 
set).CreateJsonWebKeySet(createJsonWebKeySet).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.CreateJsonWebKeySet``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `CreateJsonWebKeySet`: JsonWebKeySet + fmt.Fprintf(os.Stdout, "Response from `JwkAPI.CreateJsonWebKeySet`: %v\n", resp) } ``` @@ -100,23 +100,23 @@ Delete JSON Web Key package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | The JSON Web Key Set - kid := "kid_example" // string | The JSON Web Key ID (kid) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.DeleteJsonWebKey(context.Background(), set, kid).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.DeleteJsonWebKey``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + set := "set_example" // string | The JSON Web Key Set + kid := "kid_example" // string | The JSON Web Key ID (kid) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.JwkAPI.DeleteJsonWebKey(context.Background(), set, kid).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.DeleteJsonWebKey``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -171,22 +171,22 @@ Delete JSON Web Key Set package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | The JSON Web Key Set - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.DeleteJsonWebKeySet(context.Background(), set).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.DeleteJsonWebKeySet``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + set := "set_example" // string | The JSON Web Key Set + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.JwkAPI.DeleteJsonWebKeySet(context.Background(), set).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.DeleteJsonWebKeySet``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -239,25 +239,25 @@ Get JSON Web Key package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | JSON Web Key Set ID - kid := "kid_example" // string | JSON Web Key ID - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.GetJsonWebKey(context.Background(), set, kid).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.GetJsonWebKey``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetJsonWebKey`: JsonWebKeySet - fmt.Fprintf(os.Stdout, "Response from `JwkApi.GetJsonWebKey`: %v\n", resp) + set := "set_example" // string | JSON Web Key Set ID + kid := "kid_example" // string 
| JSON Web Key ID + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.JwkAPI.GetJsonWebKey(context.Background(), set, kid).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.GetJsonWebKey``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetJsonWebKey`: JsonWebKeySet + fmt.Fprintf(os.Stdout, "Response from `JwkAPI.GetJsonWebKey`: %v\n", resp) } ``` @@ -312,24 +312,24 @@ Retrieve a JSON Web Key Set package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | JSON Web Key Set ID - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.GetJsonWebKeySet(context.Background(), set).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.GetJsonWebKeySet``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetJsonWebKeySet`: JsonWebKeySet - fmt.Fprintf(os.Stdout, "Response from `JwkApi.GetJsonWebKeySet`: %v\n", resp) + set := "set_example" // string | JSON Web Key Set ID + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.JwkAPI.GetJsonWebKeySet(context.Background(), set).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.GetJsonWebKeySet``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetJsonWebKeySet`: JsonWebKeySet + fmt.Fprintf(os.Stdout, "Response from `JwkAPI.GetJsonWebKeySet`: %v\n", resp) } ``` @@ -382,26 +382,26 @@ Set JSON Web Key package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | The JSON Web Key Set ID - kid := "kid_example" // string | JSON Web Key ID - jsonWebKey := *openapiclient.NewJsonWebKey("RS256", "1603dfe0af8f4596", "RSA", "sig") // JsonWebKey | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.SetJsonWebKey(context.Background(), set, kid).JsonWebKey(jsonWebKey).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.SetJsonWebKey``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `SetJsonWebKey`: JsonWebKey - fmt.Fprintf(os.Stdout, "Response from `JwkApi.SetJsonWebKey`: %v\n", resp) + set := "set_example" // string | The JSON Web Key Set ID + kid := "kid_example" // string | JSON Web Key ID + jsonWebKey := *openapiclient.NewJsonWebKey("RS256", "1603dfe0af8f4596", "RSA", "sig") // JsonWebKey | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.JwkAPI.SetJsonWebKey(context.Background(), set, kid).JsonWebKey(jsonWebKey).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.SetJsonWebKey``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `SetJsonWebKey`: JsonWebKey + fmt.Fprintf(os.Stdout, "Response from `JwkAPI.SetJsonWebKey`: %v\n", resp) } 
``` @@ -457,25 +457,25 @@ Update a JSON Web Key Set package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - set := "set_example" // string | The JSON Web Key Set ID - jsonWebKeySet := *openapiclient.NewJsonWebKeySet() // JsonWebKeySet | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.JwkApi.SetJsonWebKeySet(context.Background(), set).JsonWebKeySet(jsonWebKeySet).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `JwkApi.SetJsonWebKeySet``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `SetJsonWebKeySet`: JsonWebKeySet - fmt.Fprintf(os.Stdout, "Response from `JwkApi.SetJsonWebKeySet`: %v\n", resp) + set := "set_example" // string | The JSON Web Key Set ID + jsonWebKeySet := *openapiclient.NewJsonWebKeySet() // JsonWebKeySet | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.JwkAPI.SetJsonWebKeySet(context.Background(), set).JsonWebKeySet(jsonWebKeySet).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `JwkAPI.SetJsonWebKeySet``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `SetJsonWebKeySet`: JsonWebKeySet + fmt.Fprintf(os.Stdout, "Response from `JwkAPI.SetJsonWebKeySet`: %v\n", resp) } ``` diff --git a/internal/httpclient/docs/Pagination.md b/internal/httpclient/docs/KeysetPaginationRequestParameters.md similarity index 61% rename from internal/httpclient/docs/Pagination.md rename to internal/httpclient/docs/KeysetPaginationRequestParameters.md index e3978ee5f58..e8449257d2c 100644 --- a/internal/httpclient/docs/Pagination.md +++ b/internal/httpclient/docs/KeysetPaginationRequestParameters.md @@ -1,78 +1,78 @@ -# Pagination +# KeysetPaginationRequestParameters ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**PageSize** | Pointer to **int64** | Items per page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | [optional] [default to 250] -**PageToken** | Pointer to **string** | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | [optional] [default to "1"] +**PageSize** | Pointer to **int64** | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | [optional] [default to 250] +**PageToken** | Pointer to **string** | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). 
| [optional] ## Methods -### NewPagination +### NewKeysetPaginationRequestParameters -`func NewPagination() *Pagination` +`func NewKeysetPaginationRequestParameters() *KeysetPaginationRequestParameters` -NewPagination instantiates a new Pagination object +NewKeysetPaginationRequestParameters instantiates a new KeysetPaginationRequestParameters object This constructor will assign default values to properties that have it defined, and makes sure properties required by API are set, but the set of arguments will change when the set of required properties is changed -### NewPaginationWithDefaults +### NewKeysetPaginationRequestParametersWithDefaults -`func NewPaginationWithDefaults() *Pagination` +`func NewKeysetPaginationRequestParametersWithDefaults() *KeysetPaginationRequestParameters` -NewPaginationWithDefaults instantiates a new Pagination object +NewKeysetPaginationRequestParametersWithDefaults instantiates a new KeysetPaginationRequestParameters object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set ### GetPageSize -`func (o *Pagination) GetPageSize() int64` +`func (o *KeysetPaginationRequestParameters) GetPageSize() int64` GetPageSize returns the PageSize field if non-nil, zero value otherwise. ### GetPageSizeOk -`func (o *Pagination) GetPageSizeOk() (*int64, bool)` +`func (o *KeysetPaginationRequestParameters) GetPageSizeOk() (*int64, bool)` GetPageSizeOk returns a tuple with the PageSize field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetPageSize -`func (o *Pagination) SetPageSize(v int64)` +`func (o *KeysetPaginationRequestParameters) SetPageSize(v int64)` SetPageSize sets PageSize field to given value. ### HasPageSize -`func (o *Pagination) HasPageSize() bool` +`func (o *KeysetPaginationRequestParameters) HasPageSize() bool` HasPageSize returns a boolean if a field has been set. ### GetPageToken -`func (o *Pagination) GetPageToken() string` +`func (o *KeysetPaginationRequestParameters) GetPageToken() string` GetPageToken returns the PageToken field if non-nil, zero value otherwise. ### GetPageTokenOk -`func (o *Pagination) GetPageTokenOk() (*string, bool)` +`func (o *KeysetPaginationRequestParameters) GetPageTokenOk() (*string, bool)` GetPageTokenOk returns a tuple with the PageToken field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetPageToken -`func (o *Pagination) SetPageToken(v string)` +`func (o *KeysetPaginationRequestParameters) SetPageToken(v string)` SetPageToken sets PageToken field to given value. ### HasPageToken -`func (o *Pagination) HasPageToken() bool` +`func (o *KeysetPaginationRequestParameters) HasPageToken() bool` HasPageToken returns a boolean if a field has been set. diff --git a/internal/httpclient/docs/KeysetPaginationResponseHeaders.md b/internal/httpclient/docs/KeysetPaginationResponseHeaders.md new file mode 100644 index 00000000000..649856b493b --- /dev/null +++ b/internal/httpclient/docs/KeysetPaginationResponseHeaders.md @@ -0,0 +1,56 @@ +# KeysetPaginationResponseHeaders + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Link** | Pointer to **string** | The Link HTTP Header The `Link` header contains a comma-delimited list of links to the following pages: first: The first page of results. next: The next page of results. Pages are omitted if they do not exist. 
For example, if there is no next page, the `next` link is omitted. Examples: </admin/sessions?page_size=250&page_token={last_item_uuid}; rel=\"first\",/admin/sessions?page_size=250&page_token=>; rel=\"next\" | [optional] + +## Methods + +### NewKeysetPaginationResponseHeaders + +`func NewKeysetPaginationResponseHeaders() *KeysetPaginationResponseHeaders` + +NewKeysetPaginationResponseHeaders instantiates a new KeysetPaginationResponseHeaders object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewKeysetPaginationResponseHeadersWithDefaults + +`func NewKeysetPaginationResponseHeadersWithDefaults() *KeysetPaginationResponseHeaders` + +NewKeysetPaginationResponseHeadersWithDefaults instantiates a new KeysetPaginationResponseHeaders object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetLink + +`func (o *KeysetPaginationResponseHeaders) GetLink() string` + +GetLink returns the Link field if non-nil, zero value otherwise. + +### GetLinkOk + +`func (o *KeysetPaginationResponseHeaders) GetLinkOk() (*string, bool)` + +GetLinkOk returns a tuple with the Link field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetLink + +`func (o *KeysetPaginationResponseHeaders) SetLink(v string)` + +SetLink sets Link field to given value. + +### HasLink + +`func (o *KeysetPaginationResponseHeaders) HasLink() bool` + +HasLink returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/MetadataApi.md b/internal/httpclient/docs/MetadataAPI.md similarity index 56% rename from internal/httpclient/docs/MetadataApi.md rename to internal/httpclient/docs/MetadataAPI.md index 064272fd09b..6280313b670 100644 --- a/internal/httpclient/docs/MetadataApi.md +++ b/internal/httpclient/docs/MetadataAPI.md @@ -1,12 +1,12 @@ -# \MetadataApi +# \MetadataAPI All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- -[**GetVersion**](MetadataApi.md#GetVersion) | **Get** /version | Return Running Software Version. -[**IsAlive**](MetadataApi.md#IsAlive) | **Get** /health/alive | Check HTTP Server Status -[**IsReady**](MetadataApi.md#IsReady) | **Get** /health/ready | Check HTTP Server and Database Status +[**GetVersion**](MetadataAPI.md#GetVersion) | **Get** /version | Return Running Software Version. +[**IsAlive**](MetadataAPI.md#IsAlive) | **Get** /health/alive | Check HTTP Server Status +[**IsReady**](MetadataAPI.md#IsReady) | **Get** /health/ready | Check HTTP Server and Database Status @@ -24,23 +24,23 @@ Return Running Software Version. 
package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.MetadataApi.GetVersion(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `MetadataApi.GetVersion``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetVersion`: GetVersion200Response - fmt.Fprintf(os.Stdout, "Response from `MetadataApi.GetVersion`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.MetadataAPI.GetVersion(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetadataAPI.GetVersion``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetVersion`: GetVersion200Response + fmt.Fprintf(os.Stdout, "Response from `MetadataAPI.GetVersion`: %v\n", resp) } ``` @@ -85,23 +85,23 @@ Check HTTP Server Status package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.MetadataApi.IsAlive(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `MetadataApi.IsAlive``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `IsAlive`: HealthStatus - fmt.Fprintf(os.Stdout, "Response from `MetadataApi.IsAlive`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.MetadataAPI.IsAlive(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetadataAPI.IsAlive``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `IsAlive`: HealthStatus + fmt.Fprintf(os.Stdout, "Response from `MetadataAPI.IsAlive`: %v\n", resp) } ``` @@ -146,23 +146,23 @@ Check HTTP Server and Database Status package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.MetadataApi.IsReady(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `MetadataApi.IsReady``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `IsReady`: IsReady200Response - fmt.Fprintf(os.Stdout, "Response from `MetadataApi.IsReady`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.MetadataAPI.IsReady(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetadataAPI.IsReady``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `IsReady`: IsReady200Response + fmt.Fprintf(os.Stdout, "Response from `MetadataAPI.IsReady`: %v\n", resp) } ``` diff --git 
a/internal/httpclient/docs/OAuth2Api.md b/internal/httpclient/docs/OAuth2API.md similarity index 51% rename from internal/httpclient/docs/OAuth2Api.md rename to internal/httpclient/docs/OAuth2API.md index bdee97d6a53..125db1f312f 100644 --- a/internal/httpclient/docs/OAuth2Api.md +++ b/internal/httpclient/docs/OAuth2API.md @@ -1,37 +1,40 @@ -# \OAuth2Api +# \OAuth2API All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- -[**AcceptOAuth2ConsentRequest**](OAuth2Api.md#AcceptOAuth2ConsentRequest) | **Put** /admin/oauth2/auth/requests/consent/accept | Accept OAuth 2.0 Consent Request -[**AcceptOAuth2LoginRequest**](OAuth2Api.md#AcceptOAuth2LoginRequest) | **Put** /admin/oauth2/auth/requests/login/accept | Accept OAuth 2.0 Login Request -[**AcceptOAuth2LogoutRequest**](OAuth2Api.md#AcceptOAuth2LogoutRequest) | **Put** /admin/oauth2/auth/requests/logout/accept | Accept OAuth 2.0 Session Logout Request -[**CreateOAuth2Client**](OAuth2Api.md#CreateOAuth2Client) | **Post** /admin/clients | Create OAuth 2.0 Client -[**DeleteOAuth2Client**](OAuth2Api.md#DeleteOAuth2Client) | **Delete** /admin/clients/{id} | Delete OAuth 2.0 Client -[**DeleteOAuth2Token**](OAuth2Api.md#DeleteOAuth2Token) | **Delete** /admin/oauth2/tokens | Delete OAuth 2.0 Access Tokens from specific OAuth 2.0 Client -[**DeleteTrustedOAuth2JwtGrantIssuer**](OAuth2Api.md#DeleteTrustedOAuth2JwtGrantIssuer) | **Delete** /admin/trust/grants/jwt-bearer/issuers/{id} | Delete Trusted OAuth2 JWT Bearer Grant Type Issuer -[**GetOAuth2Client**](OAuth2Api.md#GetOAuth2Client) | **Get** /admin/clients/{id} | Get an OAuth 2.0 Client -[**GetOAuth2ConsentRequest**](OAuth2Api.md#GetOAuth2ConsentRequest) | **Get** /admin/oauth2/auth/requests/consent | Get OAuth 2.0 Consent Request -[**GetOAuth2LoginRequest**](OAuth2Api.md#GetOAuth2LoginRequest) | **Get** /admin/oauth2/auth/requests/login | Get OAuth 2.0 Login Request -[**GetOAuth2LogoutRequest**](OAuth2Api.md#GetOAuth2LogoutRequest) | **Get** /admin/oauth2/auth/requests/logout | Get OAuth 2.0 Session Logout Request -[**GetTrustedOAuth2JwtGrantIssuer**](OAuth2Api.md#GetTrustedOAuth2JwtGrantIssuer) | **Get** /admin/trust/grants/jwt-bearer/issuers/{id} | Get Trusted OAuth2 JWT Bearer Grant Type Issuer -[**IntrospectOAuth2Token**](OAuth2Api.md#IntrospectOAuth2Token) | **Post** /admin/oauth2/introspect | Introspect OAuth2 Access and Refresh Tokens -[**ListOAuth2Clients**](OAuth2Api.md#ListOAuth2Clients) | **Get** /admin/clients | List OAuth 2.0 Clients -[**ListOAuth2ConsentSessions**](OAuth2Api.md#ListOAuth2ConsentSessions) | **Get** /admin/oauth2/auth/sessions/consent | List OAuth 2.0 Consent Sessions of a Subject -[**ListTrustedOAuth2JwtGrantIssuers**](OAuth2Api.md#ListTrustedOAuth2JwtGrantIssuers) | **Get** /admin/trust/grants/jwt-bearer/issuers | List Trusted OAuth2 JWT Bearer Grant Type Issuers -[**OAuth2Authorize**](OAuth2Api.md#OAuth2Authorize) | **Get** /oauth2/auth | OAuth 2.0 Authorize Endpoint -[**Oauth2TokenExchange**](OAuth2Api.md#Oauth2TokenExchange) | **Post** /oauth2/token | The OAuth 2.0 Token Endpoint -[**PatchOAuth2Client**](OAuth2Api.md#PatchOAuth2Client) | **Patch** /admin/clients/{id} | Patch OAuth 2.0 Client -[**RejectOAuth2ConsentRequest**](OAuth2Api.md#RejectOAuth2ConsentRequest) | **Put** /admin/oauth2/auth/requests/consent/reject | Reject OAuth 2.0 Consent Request -[**RejectOAuth2LoginRequest**](OAuth2Api.md#RejectOAuth2LoginRequest) | **Put** /admin/oauth2/auth/requests/login/reject | Reject OAuth 2.0 Login 
Request -[**RejectOAuth2LogoutRequest**](OAuth2Api.md#RejectOAuth2LogoutRequest) | **Put** /admin/oauth2/auth/requests/logout/reject | Reject OAuth 2.0 Session Logout Request -[**RevokeOAuth2ConsentSessions**](OAuth2Api.md#RevokeOAuth2ConsentSessions) | **Delete** /admin/oauth2/auth/sessions/consent | Revoke OAuth 2.0 Consent Sessions of a Subject -[**RevokeOAuth2LoginSessions**](OAuth2Api.md#RevokeOAuth2LoginSessions) | **Delete** /admin/oauth2/auth/sessions/login | Revokes All OAuth 2.0 Login Sessions of a Subject -[**RevokeOAuth2Token**](OAuth2Api.md#RevokeOAuth2Token) | **Post** /oauth2/revoke | Revoke OAuth 2.0 Access or Refresh Token -[**SetOAuth2Client**](OAuth2Api.md#SetOAuth2Client) | **Put** /admin/clients/{id} | Set OAuth 2.0 Client -[**SetOAuth2ClientLifespans**](OAuth2Api.md#SetOAuth2ClientLifespans) | **Put** /admin/clients/{id}/lifespans | Set OAuth2 Client Token Lifespans -[**TrustOAuth2JwtGrantIssuer**](OAuth2Api.md#TrustOAuth2JwtGrantIssuer) | **Post** /admin/trust/grants/jwt-bearer/issuers | Trust OAuth2 JWT Bearer Grant Type Issuer +[**AcceptOAuth2ConsentRequest**](OAuth2API.md#AcceptOAuth2ConsentRequest) | **Put** /admin/oauth2/auth/requests/consent/accept | Accept OAuth 2.0 Consent Request +[**AcceptOAuth2LoginRequest**](OAuth2API.md#AcceptOAuth2LoginRequest) | **Put** /admin/oauth2/auth/requests/login/accept | Accept OAuth 2.0 Login Request +[**AcceptOAuth2LogoutRequest**](OAuth2API.md#AcceptOAuth2LogoutRequest) | **Put** /admin/oauth2/auth/requests/logout/accept | Accept OAuth 2.0 Session Logout Request +[**AcceptUserCodeRequest**](OAuth2API.md#AcceptUserCodeRequest) | **Put** /admin/oauth2/auth/requests/device/accept | Accepts a device grant user_code request +[**CreateOAuth2Client**](OAuth2API.md#CreateOAuth2Client) | **Post** /admin/clients | Create OAuth 2.0 Client +[**DeleteOAuth2Client**](OAuth2API.md#DeleteOAuth2Client) | **Delete** /admin/clients/{id} | Delete OAuth 2.0 Client +[**DeleteOAuth2Token**](OAuth2API.md#DeleteOAuth2Token) | **Delete** /admin/oauth2/tokens | Delete OAuth 2.0 Access Tokens from specific OAuth 2.0 Client +[**DeleteTrustedOAuth2JwtGrantIssuer**](OAuth2API.md#DeleteTrustedOAuth2JwtGrantIssuer) | **Delete** /admin/trust/grants/jwt-bearer/issuers/{id} | Delete Trusted OAuth2 JWT Bearer Grant Type Issuer +[**GetOAuth2Client**](OAuth2API.md#GetOAuth2Client) | **Get** /admin/clients/{id} | Get an OAuth 2.0 Client +[**GetOAuth2ConsentRequest**](OAuth2API.md#GetOAuth2ConsentRequest) | **Get** /admin/oauth2/auth/requests/consent | Get OAuth 2.0 Consent Request +[**GetOAuth2LoginRequest**](OAuth2API.md#GetOAuth2LoginRequest) | **Get** /admin/oauth2/auth/requests/login | Get OAuth 2.0 Login Request +[**GetOAuth2LogoutRequest**](OAuth2API.md#GetOAuth2LogoutRequest) | **Get** /admin/oauth2/auth/requests/logout | Get OAuth 2.0 Session Logout Request +[**GetTrustedOAuth2JwtGrantIssuer**](OAuth2API.md#GetTrustedOAuth2JwtGrantIssuer) | **Get** /admin/trust/grants/jwt-bearer/issuers/{id} | Get Trusted OAuth2 JWT Bearer Grant Type Issuer +[**IntrospectOAuth2Token**](OAuth2API.md#IntrospectOAuth2Token) | **Post** /admin/oauth2/introspect | Introspect OAuth2 Access and Refresh Tokens +[**ListOAuth2Clients**](OAuth2API.md#ListOAuth2Clients) | **Get** /admin/clients | List OAuth 2.0 Clients +[**ListOAuth2ConsentSessions**](OAuth2API.md#ListOAuth2ConsentSessions) | **Get** /admin/oauth2/auth/sessions/consent | List OAuth 2.0 Consent Sessions of a Subject +[**ListTrustedOAuth2JwtGrantIssuers**](OAuth2API.md#ListTrustedOAuth2JwtGrantIssuers) | **Get** 
/admin/trust/grants/jwt-bearer/issuers | List Trusted OAuth2 JWT Bearer Grant Type Issuers +[**OAuth2Authorize**](OAuth2API.md#OAuth2Authorize) | **Get** /oauth2/auth | OAuth 2.0 Authorize Endpoint +[**OAuth2DeviceFlow**](OAuth2API.md#OAuth2DeviceFlow) | **Post** /oauth2/device/auth | The OAuth 2.0 Device Authorize Endpoint +[**Oauth2TokenExchange**](OAuth2API.md#Oauth2TokenExchange) | **Post** /oauth2/token | The OAuth 2.0 Token Endpoint +[**PatchOAuth2Client**](OAuth2API.md#PatchOAuth2Client) | **Patch** /admin/clients/{id} | Patch OAuth 2.0 Client +[**PerformOAuth2DeviceVerificationFlow**](OAuth2API.md#PerformOAuth2DeviceVerificationFlow) | **Get** /oauth2/device/verify | OAuth 2.0 Device Verification Endpoint +[**RejectOAuth2ConsentRequest**](OAuth2API.md#RejectOAuth2ConsentRequest) | **Put** /admin/oauth2/auth/requests/consent/reject | Reject OAuth 2.0 Consent Request +[**RejectOAuth2LoginRequest**](OAuth2API.md#RejectOAuth2LoginRequest) | **Put** /admin/oauth2/auth/requests/login/reject | Reject OAuth 2.0 Login Request +[**RejectOAuth2LogoutRequest**](OAuth2API.md#RejectOAuth2LogoutRequest) | **Put** /admin/oauth2/auth/requests/logout/reject | Reject OAuth 2.0 Session Logout Request +[**RevokeOAuth2ConsentSessions**](OAuth2API.md#RevokeOAuth2ConsentSessions) | **Delete** /admin/oauth2/auth/sessions/consent | Revoke OAuth 2.0 Consent Sessions of a Subject +[**RevokeOAuth2LoginSessions**](OAuth2API.md#RevokeOAuth2LoginSessions) | **Delete** /admin/oauth2/auth/sessions/login | Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID +[**RevokeOAuth2Token**](OAuth2API.md#RevokeOAuth2Token) | **Post** /oauth2/revoke | Revoke OAuth 2.0 Access or Refresh Token +[**SetOAuth2Client**](OAuth2API.md#SetOAuth2Client) | **Put** /admin/clients/{id} | Set OAuth 2.0 Client +[**SetOAuth2ClientLifespans**](OAuth2API.md#SetOAuth2ClientLifespans) | **Put** /admin/clients/{id}/lifespans | Set OAuth2 Client Token Lifespans +[**TrustOAuth2JwtGrantIssuer**](OAuth2API.md#TrustOAuth2JwtGrantIssuer) | **Post** /admin/trust/grants/jwt-bearer/issuers | Trust OAuth2 JWT Bearer Grant Type Issuer @@ -49,25 +52,25 @@ Accept OAuth 2.0 Consent Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - consentChallenge := "consentChallenge_example" // string | OAuth 2.0 Consent Request Challenge - acceptOAuth2ConsentRequest := *openapiclient.NewAcceptOAuth2ConsentRequest() // AcceptOAuth2ConsentRequest | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.AcceptOAuth2ConsentRequest(context.Background()).ConsentChallenge(consentChallenge).AcceptOAuth2ConsentRequest(acceptOAuth2ConsentRequest).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.AcceptOAuth2ConsentRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `AcceptOAuth2ConsentRequest`: OAuth2RedirectTo - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.AcceptOAuth2ConsentRequest`: %v\n", resp) + consentChallenge := "consentChallenge_example" // string | OAuth 2.0 Consent Request Challenge + acceptOAuth2ConsentRequest := *openapiclient.NewAcceptOAuth2ConsentRequest() // AcceptOAuth2ConsentRequest | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err 
:= apiClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()).ConsentChallenge(consentChallenge).AcceptOAuth2ConsentRequest(acceptOAuth2ConsentRequest).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.AcceptOAuth2ConsentRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `AcceptOAuth2ConsentRequest`: OAuth2RedirectTo + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.AcceptOAuth2ConsentRequest`: %v\n", resp) } ``` @@ -117,25 +120,25 @@ Accept OAuth 2.0 Login Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - loginChallenge := "loginChallenge_example" // string | OAuth 2.0 Login Request Challenge - acceptOAuth2LoginRequest := *openapiclient.NewAcceptOAuth2LoginRequest("Subject_example") // AcceptOAuth2LoginRequest | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.AcceptOAuth2LoginRequest(context.Background()).LoginChallenge(loginChallenge).AcceptOAuth2LoginRequest(acceptOAuth2LoginRequest).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.AcceptOAuth2LoginRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `AcceptOAuth2LoginRequest`: OAuth2RedirectTo - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.AcceptOAuth2LoginRequest`: %v\n", resp) + loginChallenge := "loginChallenge_example" // string | OAuth 2.0 Login Request Challenge + acceptOAuth2LoginRequest := *openapiclient.NewAcceptOAuth2LoginRequest("Subject_example") // AcceptOAuth2LoginRequest | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()).LoginChallenge(loginChallenge).AcceptOAuth2LoginRequest(acceptOAuth2LoginRequest).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.AcceptOAuth2LoginRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `AcceptOAuth2LoginRequest`: OAuth2RedirectTo + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.AcceptOAuth2LoginRequest`: %v\n", resp) } ``` @@ -185,24 +188,24 @@ Accept OAuth 2.0 Session Logout Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - logoutChallenge := "logoutChallenge_example" // string | OAuth 2.0 Logout Request Challenge - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.AcceptOAuth2LogoutRequest(context.Background()).LogoutChallenge(logoutChallenge).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.AcceptOAuth2LogoutRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `AcceptOAuth2LogoutRequest`: OAuth2RedirectTo - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.AcceptOAuth2LogoutRequest`: %v\n", resp) + logoutChallenge := "logoutChallenge_example" // string | OAuth 2.0 Logout Request Challenge + + configuration := openapiclient.NewConfiguration() + apiClient := 
openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.AcceptOAuth2LogoutRequest(context.Background()).LogoutChallenge(logoutChallenge).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.AcceptOAuth2LogoutRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `AcceptOAuth2LogoutRequest`: OAuth2RedirectTo + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.AcceptOAuth2LogoutRequest`: %v\n", resp) } ``` @@ -237,6 +240,74 @@ No authorization required [[Back to README]](../README.md) +## AcceptUserCodeRequest + +> OAuth2RedirectTo AcceptUserCodeRequest(ctx).DeviceChallenge(deviceChallenge).AcceptDeviceUserCodeRequest(acceptDeviceUserCodeRequest).Execute() + +Accepts a device grant user_code request + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" +) + +func main() { + deviceChallenge := "deviceChallenge_example" // string | + acceptDeviceUserCodeRequest := *openapiclient.NewAcceptDeviceUserCodeRequest() // AcceptDeviceUserCodeRequest | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.AcceptUserCodeRequest(context.Background()).DeviceChallenge(deviceChallenge).AcceptDeviceUserCodeRequest(acceptDeviceUserCodeRequest).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.AcceptUserCodeRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `AcceptUserCodeRequest`: OAuth2RedirectTo + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.AcceptUserCodeRequest`: %v\n", resp) +} +``` + +### Path Parameters + + + +### Other Parameters + +Other parameters are passed through a pointer to a apiAcceptUserCodeRequestRequest struct via the builder pattern + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **deviceChallenge** | **string** | | + **acceptDeviceUserCodeRequest** | [**AcceptDeviceUserCodeRequest**](AcceptDeviceUserCodeRequest.md) | | + +### Return type + +[**OAuth2RedirectTo**](OAuth2RedirectTo.md) + +### Authorization + +No authorization required + +### HTTP request headers + +- **Content-Type**: application/json +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## CreateOAuth2Client > OAuth2Client CreateOAuth2Client(ctx).OAuth2Client(oAuth2Client).Execute() @@ -251,24 +322,24 @@ Create OAuth 2.0 Client package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | OAuth 2.0 Client Request Body - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.CreateOAuth2Client(context.Background()).OAuth2Client(oAuth2Client).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.CreateOAuth2Client``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `CreateOAuth2Client`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.CreateOAuth2Client`: 
%v\n", resp) + oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | OAuth 2.0 Client Request Body + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.CreateOAuth2Client(context.Background()).OAuth2Client(oAuth2Client).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.CreateOAuth2Client``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `CreateOAuth2Client`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.CreateOAuth2Client`: %v\n", resp) } ``` @@ -317,22 +388,22 @@ Delete OAuth 2.0 Client package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the OAuth 2.0 Client. - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.DeleteOAuth2Client(context.Background(), id).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.DeleteOAuth2Client``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + id := "id_example" // string | The id of the OAuth 2.0 Client. + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.DeleteOAuth2Client(context.Background(), id).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.DeleteOAuth2Client``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -385,22 +456,22 @@ Delete OAuth 2.0 Access Tokens from specific OAuth 2.0 Client package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - clientId := "clientId_example" // string | OAuth 2.0 Client ID - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.DeleteOAuth2Token(context.Background()).ClientId(clientId).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.DeleteOAuth2Token``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + clientId := "clientId_example" // string | OAuth 2.0 Client ID + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.DeleteOAuth2Token(context.Background()).ClientId(clientId).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.DeleteOAuth2Token``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -449,22 +520,22 @@ Delete Trusted OAuth2 JWT Bearer Grant Type Issuer package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the desired grant - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.DeleteTrustedOAuth2JwtGrantIssuer(context.Background(), id).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling 
`OAuth2Api.DeleteTrustedOAuth2JwtGrantIssuer``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + id := "id_example" // string | The id of the desired grant + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.DeleteTrustedOAuth2JwtGrantIssuer(context.Background(), id).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.DeleteTrustedOAuth2JwtGrantIssuer``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -517,24 +588,24 @@ Get an OAuth 2.0 Client package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the OAuth 2.0 Client. - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.GetOAuth2Client(context.Background(), id).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.GetOAuth2Client``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetOAuth2Client`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.GetOAuth2Client`: %v\n", resp) + id := "id_example" // string | The id of the OAuth 2.0 Client. + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.GetOAuth2Client(context.Background(), id).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.GetOAuth2Client``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetOAuth2Client`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.GetOAuth2Client`: %v\n", resp) } ``` @@ -587,24 +658,24 @@ Get OAuth 2.0 Consent Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - consentChallenge := "consentChallenge_example" // string | OAuth 2.0 Consent Request Challenge - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(consentChallenge).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.GetOAuth2ConsentRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetOAuth2ConsentRequest`: OAuth2ConsentRequest - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.GetOAuth2ConsentRequest`: %v\n", resp) + consentChallenge := "consentChallenge_example" // string | OAuth 2.0 Consent Request Challenge + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(consentChallenge).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.GetOAuth2ConsentRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetOAuth2ConsentRequest`: OAuth2ConsentRequest + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.GetOAuth2ConsentRequest`: %v\n", resp) } ``` @@ -653,24 +724,24 @@ Get 
OAuth 2.0 Login Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - loginChallenge := "loginChallenge_example" // string | OAuth 2.0 Login Request Challenge - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.GetOAuth2LoginRequest(context.Background()).LoginChallenge(loginChallenge).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.GetOAuth2LoginRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetOAuth2LoginRequest`: OAuth2LoginRequest - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.GetOAuth2LoginRequest`: %v\n", resp) + loginChallenge := "loginChallenge_example" // string | OAuth 2.0 Login Request Challenge + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.GetOAuth2LoginRequest(context.Background()).LoginChallenge(loginChallenge).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.GetOAuth2LoginRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetOAuth2LoginRequest`: OAuth2LoginRequest + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.GetOAuth2LoginRequest`: %v\n", resp) } ``` @@ -719,24 +790,24 @@ Get OAuth 2.0 Session Logout Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - logoutChallenge := "logoutChallenge_example" // string | - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.GetOAuth2LogoutRequest(context.Background()).LogoutChallenge(logoutChallenge).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.GetOAuth2LogoutRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetOAuth2LogoutRequest`: OAuth2LogoutRequest - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.GetOAuth2LogoutRequest`: %v\n", resp) + logoutChallenge := "logoutChallenge_example" // string | + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.GetOAuth2LogoutRequest(context.Background()).LogoutChallenge(logoutChallenge).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.GetOAuth2LogoutRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetOAuth2LogoutRequest`: OAuth2LogoutRequest + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.GetOAuth2LogoutRequest`: %v\n", resp) } ``` @@ -785,24 +856,24 @@ Get Trusted OAuth2 JWT Bearer Grant Type Issuer package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the desired grant - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.GetTrustedOAuth2JwtGrantIssuer(context.Background(), id).Execute() - if err != nil 
{ - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.GetTrustedOAuth2JwtGrantIssuer``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetTrustedOAuth2JwtGrantIssuer`: TrustedOAuth2JwtGrantIssuer - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.GetTrustedOAuth2JwtGrantIssuer`: %v\n", resp) + id := "id_example" // string | The id of the desired grant + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.GetTrustedOAuth2JwtGrantIssuer(context.Background(), id).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.GetTrustedOAuth2JwtGrantIssuer``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetTrustedOAuth2JwtGrantIssuer`: TrustedOAuth2JwtGrantIssuer + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.GetTrustedOAuth2JwtGrantIssuer`: %v\n", resp) } ``` @@ -855,25 +926,25 @@ Introspect OAuth2 Access and Refresh Tokens package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - token := "token_example" // string | The string value of the token. For access tokens, this is the \\\"access_token\\\" value returned from the token endpoint defined in OAuth 2.0. For refresh tokens, this is the \\\"refresh_token\\\" value returned. - scope := "scope_example" // string | An optional, space separated list of required scopes. If the access token was not granted one of the scopes, the result of active will be false. (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.IntrospectOAuth2Token(context.Background()).Token(token).Scope(scope).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.IntrospectOAuth2Token``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `IntrospectOAuth2Token`: IntrospectedOAuth2Token - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.IntrospectOAuth2Token`: %v\n", resp) + token := "token_example" // string | The string value of the token. For access tokens, this is the \\\"access_token\\\" value returned from the token endpoint defined in OAuth 2.0. For refresh tokens, this is the \\\"refresh_token\\\" value returned. + scope := "scope_example" // string | An optional, space separated list of required scopes. If the access token was not granted one of the scopes, the result of active will be false. 
(optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.IntrospectOAuth2Token(context.Background()).Token(token).Scope(scope).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.IntrospectOAuth2Token``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `IntrospectOAuth2Token`: IntrospectedOAuth2Token + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.IntrospectOAuth2Token`: %v\n", resp) } ``` @@ -923,27 +994,27 @@ List OAuth 2.0 Clients package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - pageSize := int64(789) // int64 | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to 250) - pageToken := "pageToken_example" // string | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to "1") - clientName := "clientName_example" // string | The name of the clients to filter by. (optional) - owner := "owner_example" // string | The owner of the clients to filter by. (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.ListOAuth2Clients(context.Background()).PageSize(pageSize).PageToken(pageToken).ClientName(clientName).Owner(owner).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.ListOAuth2Clients``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `ListOAuth2Clients`: []OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.ListOAuth2Clients`: %v\n", resp) + pageSize := int64(789) // int64 | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to 250) + pageToken := "pageToken_example" // string | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) + clientName := "clientName_example" // string | The name of the clients to filter by. (optional) + owner := "owner_example" // string | The owner of the clients to filter by. 
(optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.ListOAuth2Clients(context.Background()).PageSize(pageSize).PageToken(pageToken).ClientName(clientName).Owner(owner).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.ListOAuth2Clients``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `ListOAuth2Clients`: []OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.ListOAuth2Clients`: %v\n", resp) } ``` @@ -959,7 +1030,7 @@ Other parameters are passed through a pointer to a apiListOAuth2ClientsRequest s Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **pageSize** | **int64** | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | [default to 250] - **pageToken** | **string** | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | [default to "1"] + **pageToken** | **string** | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | **clientName** | **string** | The name of the clients to filter by. | **owner** | **string** | The owner of the clients to filter by. | @@ -995,27 +1066,27 @@ List OAuth 2.0 Consent Sessions of a Subject package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - subject := "subject_example" // string | The subject to list the consent sessions for. - pageSize := int64(789) // int64 | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to 250) - pageToken := "pageToken_example" // string | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to "1") - loginSessionId := "loginSessionId_example" // string | The login session id to list the consent sessions for. (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.ListOAuth2ConsentSessions(context.Background()).Subject(subject).PageSize(pageSize).PageToken(pageToken).LoginSessionId(loginSessionId).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.ListOAuth2ConsentSessions``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `ListOAuth2ConsentSessions`: []OAuth2ConsentSession - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.ListOAuth2ConsentSessions`: %v\n", resp) + subject := "subject_example" // string | The subject to list the consent sessions for. + pageSize := int64(789) // int64 | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). 
(optional) (default to 250) + pageToken := "pageToken_example" // string | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to "1") + loginSessionId := "loginSessionId_example" // string | The login session id to list the consent sessions for. (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.ListOAuth2ConsentSessions(context.Background()).Subject(subject).PageSize(pageSize).PageToken(pageToken).LoginSessionId(loginSessionId).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.ListOAuth2ConsentSessions``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `ListOAuth2ConsentSessions`: []OAuth2ConsentSession + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.ListOAuth2ConsentSessions`: %v\n", resp) } ``` @@ -1055,7 +1126,7 @@ No authorization required ## ListTrustedOAuth2JwtGrantIssuers -> []TrustedOAuth2JwtGrantIssuer ListTrustedOAuth2JwtGrantIssuers(ctx).MaxItems(maxItems).DefaultItems(defaultItems).Issuer(issuer).Execute() +> []TrustedOAuth2JwtGrantIssuer ListTrustedOAuth2JwtGrantIssuers(ctx).PageSize(pageSize).PageToken(pageToken).Issuer(issuer).Execute() List Trusted OAuth2 JWT Bearer Grant Type Issuers @@ -1067,26 +1138,26 @@ List Trusted OAuth2 JWT Bearer Grant Type Issuers package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - maxItems := int64(789) // int64 | (optional) - defaultItems := int64(789) // int64 | (optional) - issuer := "issuer_example" // string | If optional \"issuer\" is supplied, only jwt-bearer grants with this issuer will be returned. (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.ListTrustedOAuth2JwtGrantIssuers(context.Background()).MaxItems(maxItems).DefaultItems(defaultItems).Issuer(issuer).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.ListTrustedOAuth2JwtGrantIssuers``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `ListTrustedOAuth2JwtGrantIssuers`: []TrustedOAuth2JwtGrantIssuer - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.ListTrustedOAuth2JwtGrantIssuers`: %v\n", resp) + pageSize := int64(789) // int64 | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) (default to 250) + pageToken := "pageToken_example" // string | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). (optional) + issuer := "issuer_example" // string | If optional \"issuer\" is supplied, only jwt-bearer grants with this issuer will be returned. 
(optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.ListTrustedOAuth2JwtGrantIssuers(context.Background()).PageSize(pageSize).PageToken(pageToken).Issuer(issuer).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.ListTrustedOAuth2JwtGrantIssuers``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `ListTrustedOAuth2JwtGrantIssuers`: []TrustedOAuth2JwtGrantIssuer + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.ListTrustedOAuth2JwtGrantIssuers`: %v\n", resp) } ``` @@ -1101,8 +1172,8 @@ Other parameters are passed through a pointer to a apiListTrustedOAuth2JwtGrantI Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **maxItems** | **int64** | | - **defaultItems** | **int64** | | + **pageSize** | **int64** | Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | [default to 250] + **pageToken** | **string** | Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). | **issuer** | **string** | If optional \"issuer\" is supplied, only jwt-bearer grants with this issuer will be returned. | ### Return type @@ -1137,23 +1208,23 @@ OAuth 2.0 Authorize Endpoint package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.OAuth2Authorize(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.OAuth2Authorize``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `OAuth2Authorize`: ErrorOAuth2 - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.OAuth2Authorize`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.OAuth2Authorize(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.OAuth2Authorize``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `OAuth2Authorize`: ErrorOAuth2 + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.OAuth2Authorize`: %v\n", resp) } ``` @@ -1184,6 +1255,67 @@ No authorization required [[Back to README]](../README.md) +## OAuth2DeviceFlow + +> DeviceAuthorization OAuth2DeviceFlow(ctx).Execute() + +The OAuth 2.0 Device Authorize Endpoint + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" +) + +func main() { + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.OAuth2DeviceFlow(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.OAuth2DeviceFlow``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `OAuth2DeviceFlow`: DeviceAuthorization + fmt.Fprintf(os.Stdout, 
"Response from `OAuth2API.OAuth2DeviceFlow`: %v\n", resp) +} +``` + +### Path Parameters + +This endpoint does not need any parameter. + +### Other Parameters + +Other parameters are passed through a pointer to a apiOAuth2DeviceFlowRequest struct via the builder pattern + + +### Return type + +[**DeviceAuthorization**](DeviceAuthorization.md) + +### Authorization + +No authorization required + +### HTTP request headers + +- **Content-Type**: Not defined +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## Oauth2TokenExchange > OAuth2TokenExchange Oauth2TokenExchange(ctx).GrantType(grantType).ClientId(clientId).Code(code).RedirectUri(redirectUri).RefreshToken(refreshToken).Execute() @@ -1198,28 +1330,28 @@ The OAuth 2.0 Token Endpoint package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - grantType := "grantType_example" // string | - clientId := "clientId_example" // string | (optional) - code := "code_example" // string | (optional) - redirectUri := "redirectUri_example" // string | (optional) - refreshToken := "refreshToken_example" // string | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.Oauth2TokenExchange(context.Background()).GrantType(grantType).ClientId(clientId).Code(code).RedirectUri(redirectUri).RefreshToken(refreshToken).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.Oauth2TokenExchange``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `Oauth2TokenExchange`: OAuth2TokenExchange - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.Oauth2TokenExchange`: %v\n", resp) + grantType := "grantType_example" // string | + clientId := "clientId_example" // string | (optional) + code := "code_example" // string | (optional) + redirectUri := "redirectUri_example" // string | (optional) + refreshToken := "refreshToken_example" // string | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.Oauth2TokenExchange(context.Background()).GrantType(grantType).ClientId(clientId).Code(code).RedirectUri(redirectUri).RefreshToken(refreshToken).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.Oauth2TokenExchange``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `Oauth2TokenExchange`: OAuth2TokenExchange + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.Oauth2TokenExchange`: %v\n", resp) } ``` @@ -1272,25 +1404,25 @@ Patch OAuth 2.0 Client package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the OAuth 2.0 Client. 
- jsonPatch := []openapiclient.JsonPatch{*openapiclient.NewJsonPatch("replace", "/name")} // []JsonPatch | OAuth 2.0 Client JSON Patch Body - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.PatchOAuth2Client(context.Background(), id).JsonPatch(jsonPatch).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.PatchOAuth2Client``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `PatchOAuth2Client`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.PatchOAuth2Client`: %v\n", resp) + id := "id_example" // string | The id of the OAuth 2.0 Client. + jsonPatch := []openapiclient.JsonPatch{*openapiclient.NewJsonPatch("replace", "/name")} // []JsonPatch | OAuth 2.0 Client JSON Patch Body + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.PatchOAuth2Client(context.Background(), id).JsonPatch(jsonPatch).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.PatchOAuth2Client``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `PatchOAuth2Client`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.PatchOAuth2Client`: %v\n", resp) } ``` @@ -1330,6 +1462,67 @@ No authorization required [[Back to README]](../README.md) +## PerformOAuth2DeviceVerificationFlow + +> ErrorOAuth2 PerformOAuth2DeviceVerificationFlow(ctx).Execute() + +OAuth 2.0 Device Verification Endpoint + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" +) + +func main() { + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.PerformOAuth2DeviceVerificationFlow(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.PerformOAuth2DeviceVerificationFlow``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `PerformOAuth2DeviceVerificationFlow`: ErrorOAuth2 + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.PerformOAuth2DeviceVerificationFlow`: %v\n", resp) +} +``` + +### Path Parameters + +This endpoint does not need any parameter. 
+ +### Other Parameters + +Other parameters are passed through a pointer to a apiPerformOAuth2DeviceVerificationFlowRequest struct via the builder pattern + + +### Return type + +[**ErrorOAuth2**](ErrorOAuth2.md) + +### Authorization + +No authorization required + +### HTTP request headers + +- **Content-Type**: Not defined +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## RejectOAuth2ConsentRequest > OAuth2RedirectTo RejectOAuth2ConsentRequest(ctx).ConsentChallenge(consentChallenge).RejectOAuth2Request(rejectOAuth2Request).Execute() @@ -1344,25 +1537,25 @@ Reject OAuth 2.0 Consent Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - consentChallenge := "consentChallenge_example" // string | OAuth 2.0 Consent Request Challenge - rejectOAuth2Request := *openapiclient.NewRejectOAuth2Request() // RejectOAuth2Request | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.RejectOAuth2ConsentRequest(context.Background()).ConsentChallenge(consentChallenge).RejectOAuth2Request(rejectOAuth2Request).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.RejectOAuth2ConsentRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `RejectOAuth2ConsentRequest`: OAuth2RedirectTo - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.RejectOAuth2ConsentRequest`: %v\n", resp) + consentChallenge := "consentChallenge_example" // string | OAuth 2.0 Consent Request Challenge + rejectOAuth2Request := *openapiclient.NewRejectOAuth2Request() // RejectOAuth2Request | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.RejectOAuth2ConsentRequest(context.Background()).ConsentChallenge(consentChallenge).RejectOAuth2Request(rejectOAuth2Request).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.RejectOAuth2ConsentRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `RejectOAuth2ConsentRequest`: OAuth2RedirectTo + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.RejectOAuth2ConsentRequest`: %v\n", resp) } ``` @@ -1412,25 +1605,25 @@ Reject OAuth 2.0 Login Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - loginChallenge := "loginChallenge_example" // string | OAuth 2.0 Login Request Challenge - rejectOAuth2Request := *openapiclient.NewRejectOAuth2Request() // RejectOAuth2Request | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.RejectOAuth2LoginRequest(context.Background()).LoginChallenge(loginChallenge).RejectOAuth2Request(rejectOAuth2Request).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.RejectOAuth2LoginRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `RejectOAuth2LoginRequest`: OAuth2RedirectTo - 
fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.RejectOAuth2LoginRequest`: %v\n", resp) + loginChallenge := "loginChallenge_example" // string | OAuth 2.0 Login Request Challenge + rejectOAuth2Request := *openapiclient.NewRejectOAuth2Request() // RejectOAuth2Request | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.RejectOAuth2LoginRequest(context.Background()).LoginChallenge(loginChallenge).RejectOAuth2Request(rejectOAuth2Request).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.RejectOAuth2LoginRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `RejectOAuth2LoginRequest`: OAuth2RedirectTo + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.RejectOAuth2LoginRequest`: %v\n", resp) } ``` @@ -1480,22 +1673,22 @@ Reject OAuth 2.0 Session Logout Request package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - logoutChallenge := "logoutChallenge_example" // string | - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.RejectOAuth2LogoutRequest(context.Background()).LogoutChallenge(logoutChallenge).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.RejectOAuth2LogoutRequest``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + logoutChallenge := "logoutChallenge_example" // string | + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.RejectOAuth2LogoutRequest(context.Background()).LogoutChallenge(logoutChallenge).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.RejectOAuth2LogoutRequest``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -1532,7 +1725,7 @@ No authorization required ## RevokeOAuth2ConsentSessions -> RevokeOAuth2ConsentSessions(ctx).Subject(subject).Client(client).All(all).Execute() +> RevokeOAuth2ConsentSessions(ctx).Subject(subject).Client(client).ConsentRequestId(consentRequestId).All(all).Execute() Revoke OAuth 2.0 Consent Sessions of a Subject @@ -1544,24 +1737,25 @@ Revoke OAuth 2.0 Consent Sessions of a Subject package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - subject := "subject_example" // string | OAuth 2.0 Consent Subject The subject whose consent sessions should be deleted. - client := "client_example" // string | OAuth 2.0 Client ID If set, deletes only those consent sessions that have been granted to the specified OAuth 2.0 Client ID. (optional) - all := true // bool | Revoke All Consent Sessions If set to `true` deletes all consent sessions by the Subject that have been granted. 
(optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.RevokeOAuth2ConsentSessions(context.Background()).Subject(subject).Client(client).All(all).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.RevokeOAuth2ConsentSessions``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + subject := "subject_example" // string | OAuth 2.0 Consent Subject The subject whose consent sessions should be deleted. (optional) + client := "client_example" // string | OAuth 2.0 Client ID If set, deletes only those consent sessions that have been granted to the specified OAuth 2.0 Client ID. (optional) + consentRequestId := "consentRequestId_example" // string | Consent Request ID If set, revoke all token chains derived from this particular consent request ID. (optional) + all := true // bool | Revoke All Consent Sessions If set to `true` deletes all consent sessions by the Subject that have been granted. (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.RevokeOAuth2ConsentSessions(context.Background()).Subject(subject).Client(client).ConsentRequestId(consentRequestId).All(all).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.RevokeOAuth2ConsentSessions``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -1578,6 +1772,7 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **subject** | **string** | OAuth 2.0 Consent Subject The subject whose consent sessions should be deleted. | **client** | **string** | OAuth 2.0 Client ID If set, deletes only those consent sessions that have been granted to the specified OAuth 2.0 Client ID. | + **consentRequestId** | **string** | Consent Request ID If set, revoke all token chains derived from this particular consent request ID. | **all** | **bool** | Revoke All Consent Sessions If set to `true` deletes all consent sessions by the Subject that have been granted. | ### Return type @@ -1600,9 +1795,9 @@ No authorization required ## RevokeOAuth2LoginSessions -> RevokeOAuth2LoginSessions(ctx).Subject(subject).Execute() +> RevokeOAuth2LoginSessions(ctx).Subject(subject).Sid(sid).Execute() -Revokes All OAuth 2.0 Login Sessions of a Subject +Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID @@ -1612,22 +1807,23 @@ Revokes All OAuth 2.0 Login Sessions of a Subject package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - subject := "subject_example" // string | OAuth 2.0 Subject The subject to revoke authentication sessions for. - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.RevokeOAuth2LoginSessions(context.Background()).Subject(subject).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.RevokeOAuth2LoginSessions``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + subject := "subject_example" // string | OAuth 2.0 Subject The subject to revoke authentication sessions for. (optional) + sid := "sid_example" // string | Login Session ID The login session to revoke. 
(optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.RevokeOAuth2LoginSessions(context.Background()).Subject(subject).Sid(sid).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.RevokeOAuth2LoginSessions``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -1643,6 +1839,7 @@ Other parameters are passed through a pointer to a apiRevokeOAuth2LoginSessionsR Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **subject** | **string** | OAuth 2.0 Subject The subject to revoke authentication sessions for. | + **sid** | **string** | Login Session ID The login session to revoke. | ### Return type @@ -1676,24 +1873,24 @@ Revoke OAuth 2.0 Access or Refresh Token package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - token := "token_example" // string | - clientId := "clientId_example" // string | (optional) - clientSecret := "clientSecret_example" // string | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.RevokeOAuth2Token(context.Background()).Token(token).ClientId(clientId).ClientSecret(clientSecret).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.RevokeOAuth2Token``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + token := "token_example" // string | + clientId := "clientId_example" // string | (optional) + clientSecret := "clientSecret_example" // string | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OAuth2API.RevokeOAuth2Token(context.Background()).Token(token).ClientId(clientId).ClientSecret(clientSecret).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.RevokeOAuth2Token``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -1744,25 +1941,25 @@ Set OAuth 2.0 Client package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | OAuth 2.0 Client ID - oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | OAuth 2.0 Client Request Body - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.SetOAuth2Client(context.Background(), id).OAuth2Client(oAuth2Client).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.SetOAuth2Client``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `SetOAuth2Client`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.SetOAuth2Client`: %v\n", resp) + id := "id_example" // string | OAuth 2.0 Client ID + oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | OAuth 2.0 Client Request Body + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.SetOAuth2Client(context.Background(), id).OAuth2Client(oAuth2Client).Execute() + if err != nil { + 
fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.SetOAuth2Client``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `SetOAuth2Client`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.SetOAuth2Client`: %v\n", resp) } ``` @@ -1816,25 +2013,25 @@ Set OAuth2 Client Token Lifespans package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | OAuth 2.0 Client ID - oAuth2ClientTokenLifespans := *openapiclient.NewOAuth2ClientTokenLifespans() // OAuth2ClientTokenLifespans | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.SetOAuth2ClientLifespans(context.Background(), id).OAuth2ClientTokenLifespans(oAuth2ClientTokenLifespans).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.SetOAuth2ClientLifespans``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `SetOAuth2ClientLifespans`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.SetOAuth2ClientLifespans`: %v\n", resp) + id := "id_example" // string | OAuth 2.0 Client ID + oAuth2ClientTokenLifespans := *openapiclient.NewOAuth2ClientTokenLifespans() // OAuth2ClientTokenLifespans | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.SetOAuth2ClientLifespans(context.Background(), id).OAuth2ClientTokenLifespans(oAuth2ClientTokenLifespans).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.SetOAuth2ClientLifespans``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `SetOAuth2ClientLifespans`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.SetOAuth2ClientLifespans`: %v\n", resp) } ``` @@ -1888,25 +2085,25 @@ Trust OAuth2 JWT Bearer Grant Type Issuer package main import ( - "context" - "fmt" - "os" + "context" + "fmt" + "os" "time" - openapiclient "./openapi" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - trustOAuth2JwtGrantIssuer := *openapiclient.NewTrustOAuth2JwtGrantIssuer(time.Now(), "https://jwt-idp.example.com", *openapiclient.NewJsonWebKey("RS256", "1603dfe0af8f4596", "RSA", "sig"), []string{"Scope_example"}) // TrustOAuth2JwtGrantIssuer | (optional) - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(trustOAuth2JwtGrantIssuer).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OAuth2Api.TrustOAuth2JwtGrantIssuer``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `TrustOAuth2JwtGrantIssuer`: TrustedOAuth2JwtGrantIssuer - fmt.Fprintf(os.Stdout, "Response from `OAuth2Api.TrustOAuth2JwtGrantIssuer`: %v\n", resp) + trustOAuth2JwtGrantIssuer := *openapiclient.NewTrustOAuth2JwtGrantIssuer(time.Now(), "https://jwt-idp.example.com", *openapiclient.NewJsonWebKey("RS256", "1603dfe0af8f4596", "RSA", "sig"), []string{"Scope_example"}) // TrustOAuth2JwtGrantIssuer | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := 
openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(trustOAuth2JwtGrantIssuer).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OAuth2API.TrustOAuth2JwtGrantIssuer``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `TrustOAuth2JwtGrantIssuer`: TrustedOAuth2JwtGrantIssuer + fmt.Fprintf(os.Stdout, "Response from `OAuth2API.TrustOAuth2JwtGrantIssuer`: %v\n", resp) } ``` diff --git a/internal/httpclient/docs/OAuth2Client.md b/internal/httpclient/docs/OAuth2Client.md index 9fc94fd071c..f5debb039f1 100644 --- a/internal/httpclient/docs/OAuth2Client.md +++ b/internal/httpclient/docs/OAuth2Client.md @@ -4,6 +4,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**AccessTokenStrategy** | Pointer to **string** | OAuth 2.0 Access Token Strategy AccessTokenStrategy is the strategy used to generate access tokens. Valid options are `jwt` and `opaque`. `jwt` is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token Setting the strategy here overrides the global setting in `strategies.access_token`. | [optional] **AllowedCorsOrigins** | Pointer to **[]string** | | [optional] **Audience** | Pointer to **[]string** | | [optional] **AuthorizationCodeGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] @@ -12,19 +13,22 @@ Name | Type | Description | Notes **BackchannelLogoutSessionRequired** | Pointer to **bool** | OpenID Connect Back-Channel Logout Session Required Boolean value specifying whether the RP requires that a sid (session ID) Claim be included in the Logout Token to identify the RP session with the OP when the backchannel_logout_uri is used. If omitted, the default value is false. | [optional] **BackchannelLogoutUri** | Pointer to **string** | OpenID Connect Back-Channel Logout URI RP URL that will cause the RP to log itself out when sent a Logout Token by the OP. | [optional] **ClientCredentialsGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] -**ClientId** | Pointer to **string** | OAuth 2.0 Client ID The ID is autogenerated and immutable. | [optional] +**ClientId** | Pointer to **string** | OAuth 2.0 Client ID The ID is immutable. If no ID is provided, a UUID4 will be generated. | [optional] **ClientName** | Pointer to **string** | OAuth 2.0 Client Name The human-readable name of the client to be presented to the end-user during authorization. | [optional] **ClientSecret** | Pointer to **string** | OAuth 2.0 Client Secret The secret will be included in the create request as cleartext, and then never again. The secret is kept in hashed format and is not recoverable once lost. | [optional] **ClientSecretExpiresAt** | Pointer to **int64** | OAuth 2.0 Client Secret Expires At The field is currently not supported and its value is always 0. | [optional] **ClientUri** | Pointer to **string** | OAuth 2.0 Client URI ClientURI is a URL string of a web page providing information about the client. If present, the server SHOULD display this URL to the end-user in a clickable fashion. | [optional] **Contacts** | Pointer to **[]string** | | [optional] **CreatedAt** | Pointer to **time.Time** | OAuth 2.0 Client Creation Date CreatedAt returns the timestamp of the client's creation. 
| [optional] +**DeviceAuthorizationGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] +**DeviceAuthorizationGrantIdTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] +**DeviceAuthorizationGrantRefreshTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **FrontchannelLogoutSessionRequired** | Pointer to **bool** | OpenID Connect Front-Channel Logout Session Required Boolean value specifying whether the RP requires that iss (issuer) and sid (session ID) query parameters be included to identify the RP session with the OP when the frontchannel_logout_uri is used. If omitted, the default value is false. | [optional] **FrontchannelLogoutUri** | Pointer to **string** | OpenID Connect Front-Channel Logout URI RP URL that will cause the RP to log itself out when rendered in an iframe by the OP. An iss (issuer) query parameter and a sid (session ID) query parameter MAY be included by the OP to enable the RP to validate the request and to determine which of the potentially multiple sessions is to be logged out; if either is included, both MUST be. | [optional] **GrantTypes** | Pointer to **[]string** | | [optional] **ImplicitGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **ImplicitGrantIdTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] -**Jwks** | Pointer to **interface{}** | OAuth 2.0 Client JSON Web Key Set Client's JSON Web Key Set [JWK] document, passed by value. The semantics of the jwks parameter are the same as the jwks_uri parameter, other than that the JWK Set is passed by value, rather than by reference. This parameter is intended only to be used by Clients that, for some reason, are unable to use the jwks_uri parameter, for instance, by native applications that might not have a location to host the contents of the JWK Set. If a Client can use jwks_uri, it MUST NOT use jwks. One significant downside of jwks is that it does not enable key rotation (which jwks_uri does, as described in Section 10 of OpenID Connect Core 1.0 [OpenID.Core]). The jwks_uri and jwks parameters MUST NOT be used together. | [optional] +**Jwks** | Pointer to [**JsonWebKeySet**](JsonWebKeySet.md) | | [optional] **JwksUri** | Pointer to **string** | OAuth 2.0 Client JSON Web Key Set URL URL for the Client's JSON Web Key Set [JWK] document. If the Client signs requests to the Server, it contains the signing key(s) the Server uses to validate signatures from the Client. The JWK Set MAY also contain the Client's encryption keys(s), which are used by the Server to encrypt responses to the Client. When both signing and encryption keys are made available, a use (Key Use) parameter value is REQUIRED for all keys in the referenced JWK Set to indicate each key's intended usage. Although some algorithms allow the same key to be used for both signatures and encryption, doing so is NOT RECOMMENDED, as it is less secure. The JWK x5c parameter MAY be used to provide X.509 representations of keys provided. When used, the bare key values MUST still be present and MUST match those in the certificate. | [optional] **JwtBearerGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. 
| [optional] **LogoUri** | Pointer to **string** | OAuth 2.0 Client Logo URI A URL string referencing the client's logo. | [optional] @@ -43,8 +47,10 @@ Name | Type | Description | Notes **ResponseTypes** | Pointer to **[]string** | | [optional] **Scope** | Pointer to **string** | OAuth 2.0 Client Scope Scope is a string containing a space-separated list of scope values (as described in Section 3.3 of OAuth 2.0 [RFC6749]) that the client can use when requesting access tokens. | [optional] **SectorIdentifierUri** | Pointer to **string** | OpenID Connect Sector Identifier URI URL using the https scheme to be used in calculating Pseudonymous Identifiers by the OP. The URL references a file with a single JSON array of redirect_uri values. | [optional] +**SkipConsent** | Pointer to **bool** | SkipConsent skips the consent screen for this client. This field can only be set from the admin API. | [optional] +**SkipLogoutConsent** | Pointer to **bool** | SkipLogoutConsent skips the logout consent screen for this client. This field can only be set from the admin API. | [optional] **SubjectType** | Pointer to **string** | OpenID Connect Subject Type The `subject_types_supported` Discovery parameter contains a list of the supported subject_type values for this server. Valid types include `pairwise` and `public`. | [optional] -**TokenEndpointAuthMethod** | Pointer to **string** | OAuth 2.0 Token Endpoint Authentication Method Requested Client Authentication method for the Token Endpoint. The options are: `client_secret_post`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. `client_secret_basic`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. `private_key_jwt`: Use JSON Web Tokens to authenticate the client. `none`: Used for public clients (native apps, mobile apps) which can not have secrets. | [optional] +**TokenEndpointAuthMethod** | Pointer to **string** | OAuth 2.0 Token Endpoint Authentication Method Requested Client Authentication method for the Token Endpoint. The options are: `client_secret_basic`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. `client_secret_post`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. `private_key_jwt`: Use JSON Web Tokens to authenticate the client. `none`: Used for public clients (native apps, mobile apps) which can not have secrets. | [optional] [default to "client_secret_basic"] **TokenEndpointAuthSigningAlg** | Pointer to **string** | OAuth 2.0 Token Endpoint Signing Algorithm Requested Client Authentication signing algorithm for the Token Endpoint. | [optional] **TosUri** | Pointer to **string** | OAuth 2.0 Client Terms of Service URI A URL string pointing to a human-readable terms of service document for the client that describes a contractual relationship between the end-user and the client that the end-user accepts when authorizing the client. | [optional] **UpdatedAt** | Pointer to **time.Time** | OAuth 2.0 Client Last Update Date UpdatedAt returns the timestamp of the last update. 
| [optional] @@ -69,6 +75,31 @@ NewOAuth2ClientWithDefaults instantiates a new OAuth2Client object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set +### GetAccessTokenStrategy + +`func (o *OAuth2Client) GetAccessTokenStrategy() string` + +GetAccessTokenStrategy returns the AccessTokenStrategy field if non-nil, zero value otherwise. + +### GetAccessTokenStrategyOk + +`func (o *OAuth2Client) GetAccessTokenStrategyOk() (*string, bool)` + +GetAccessTokenStrategyOk returns a tuple with the AccessTokenStrategy field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetAccessTokenStrategy + +`func (o *OAuth2Client) SetAccessTokenStrategy(v string)` + +SetAccessTokenStrategy sets AccessTokenStrategy field to given value. + +### HasAccessTokenStrategy + +`func (o *OAuth2Client) HasAccessTokenStrategy() bool` + +HasAccessTokenStrategy returns a boolean if a field has been set. + ### GetAllowedCorsOrigins `func (o *OAuth2Client) GetAllowedCorsOrigins() []string` @@ -444,6 +475,81 @@ SetCreatedAt sets CreatedAt field to given value. HasCreatedAt returns a boolean if a field has been set. +### GetDeviceAuthorizationGrantAccessTokenLifespan + +`func (o *OAuth2Client) GetDeviceAuthorizationGrantAccessTokenLifespan() string` + +GetDeviceAuthorizationGrantAccessTokenLifespan returns the DeviceAuthorizationGrantAccessTokenLifespan field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationGrantAccessTokenLifespanOk + +`func (o *OAuth2Client) GetDeviceAuthorizationGrantAccessTokenLifespanOk() (*string, bool)` + +GetDeviceAuthorizationGrantAccessTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantAccessTokenLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceAuthorizationGrantAccessTokenLifespan + +`func (o *OAuth2Client) SetDeviceAuthorizationGrantAccessTokenLifespan(v string)` + +SetDeviceAuthorizationGrantAccessTokenLifespan sets DeviceAuthorizationGrantAccessTokenLifespan field to given value. + +### HasDeviceAuthorizationGrantAccessTokenLifespan + +`func (o *OAuth2Client) HasDeviceAuthorizationGrantAccessTokenLifespan() bool` + +HasDeviceAuthorizationGrantAccessTokenLifespan returns a boolean if a field has been set. + +### GetDeviceAuthorizationGrantIdTokenLifespan + +`func (o *OAuth2Client) GetDeviceAuthorizationGrantIdTokenLifespan() string` + +GetDeviceAuthorizationGrantIdTokenLifespan returns the DeviceAuthorizationGrantIdTokenLifespan field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationGrantIdTokenLifespanOk + +`func (o *OAuth2Client) GetDeviceAuthorizationGrantIdTokenLifespanOk() (*string, bool)` + +GetDeviceAuthorizationGrantIdTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantIdTokenLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceAuthorizationGrantIdTokenLifespan + +`func (o *OAuth2Client) SetDeviceAuthorizationGrantIdTokenLifespan(v string)` + +SetDeviceAuthorizationGrantIdTokenLifespan sets DeviceAuthorizationGrantIdTokenLifespan field to given value. + +### HasDeviceAuthorizationGrantIdTokenLifespan + +`func (o *OAuth2Client) HasDeviceAuthorizationGrantIdTokenLifespan() bool` + +HasDeviceAuthorizationGrantIdTokenLifespan returns a boolean if a field has been set. 
+ +### GetDeviceAuthorizationGrantRefreshTokenLifespan + +`func (o *OAuth2Client) GetDeviceAuthorizationGrantRefreshTokenLifespan() string` + +GetDeviceAuthorizationGrantRefreshTokenLifespan returns the DeviceAuthorizationGrantRefreshTokenLifespan field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationGrantRefreshTokenLifespanOk + +`func (o *OAuth2Client) GetDeviceAuthorizationGrantRefreshTokenLifespanOk() (*string, bool)` + +GetDeviceAuthorizationGrantRefreshTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantRefreshTokenLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceAuthorizationGrantRefreshTokenLifespan + +`func (o *OAuth2Client) SetDeviceAuthorizationGrantRefreshTokenLifespan(v string)` + +SetDeviceAuthorizationGrantRefreshTokenLifespan sets DeviceAuthorizationGrantRefreshTokenLifespan field to given value. + +### HasDeviceAuthorizationGrantRefreshTokenLifespan + +`func (o *OAuth2Client) HasDeviceAuthorizationGrantRefreshTokenLifespan() bool` + +HasDeviceAuthorizationGrantRefreshTokenLifespan returns a boolean if a field has been set. + ### GetFrontchannelLogoutSessionRequired `func (o *OAuth2Client) GetFrontchannelLogoutSessionRequired() bool` @@ -571,20 +677,20 @@ HasImplicitGrantIdTokenLifespan returns a boolean if a field has been set. ### GetJwks -`func (o *OAuth2Client) GetJwks() interface{}` +`func (o *OAuth2Client) GetJwks() JsonWebKeySet` GetJwks returns the Jwks field if non-nil, zero value otherwise. ### GetJwksOk -`func (o *OAuth2Client) GetJwksOk() (*interface{}, bool)` +`func (o *OAuth2Client) GetJwksOk() (*JsonWebKeySet, bool)` GetJwksOk returns a tuple with the Jwks field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetJwks -`func (o *OAuth2Client) SetJwks(v interface{})` +`func (o *OAuth2Client) SetJwks(v JsonWebKeySet)` SetJwks sets Jwks field to given value. @@ -594,16 +700,6 @@ SetJwks sets Jwks field to given value. HasJwks returns a boolean if a field has been set. -### SetJwksNil - -`func (o *OAuth2Client) SetJwksNil(b bool)` - - SetJwksNil sets the value for Jwks to be an explicit nil - -### UnsetJwks -`func (o *OAuth2Client) UnsetJwks()` - -UnsetJwks ensures that no value is present for Jwks, not even an explicit nil ### GetJwksUri `func (o *OAuth2Client) GetJwksUri() string` @@ -1064,6 +1160,56 @@ SetSectorIdentifierUri sets SectorIdentifierUri field to given value. HasSectorIdentifierUri returns a boolean if a field has been set. +### GetSkipConsent + +`func (o *OAuth2Client) GetSkipConsent() bool` + +GetSkipConsent returns the SkipConsent field if non-nil, zero value otherwise. + +### GetSkipConsentOk + +`func (o *OAuth2Client) GetSkipConsentOk() (*bool, bool)` + +GetSkipConsentOk returns a tuple with the SkipConsent field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSkipConsent + +`func (o *OAuth2Client) SetSkipConsent(v bool)` + +SetSkipConsent sets SkipConsent field to given value. + +### HasSkipConsent + +`func (o *OAuth2Client) HasSkipConsent() bool` + +HasSkipConsent returns a boolean if a field has been set. + +### GetSkipLogoutConsent + +`func (o *OAuth2Client) GetSkipLogoutConsent() bool` + +GetSkipLogoutConsent returns the SkipLogoutConsent field if non-nil, zero value otherwise. 
+ +### GetSkipLogoutConsentOk + +`func (o *OAuth2Client) GetSkipLogoutConsentOk() (*bool, bool)` + +GetSkipLogoutConsentOk returns a tuple with the SkipLogoutConsent field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSkipLogoutConsent + +`func (o *OAuth2Client) SetSkipLogoutConsent(v bool)` + +SetSkipLogoutConsent sets SkipLogoutConsent field to given value. + +### HasSkipLogoutConsent + +`func (o *OAuth2Client) HasSkipLogoutConsent() bool` + +HasSkipLogoutConsent returns a boolean if a field has been set. + ### GetSubjectType `func (o *OAuth2Client) GetSubjectType() string` diff --git a/internal/httpclient/docs/OAuth2ClientTokenLifespans.md b/internal/httpclient/docs/OAuth2ClientTokenLifespans.md index cda6ca600ca..b38aef35d74 100644 --- a/internal/httpclient/docs/OAuth2ClientTokenLifespans.md +++ b/internal/httpclient/docs/OAuth2ClientTokenLifespans.md @@ -8,6 +8,9 @@ Name | Type | Description | Notes **AuthorizationCodeGrantIdTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **AuthorizationCodeGrantRefreshTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **ClientCredentialsGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] +**DeviceAuthorizationGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] +**DeviceAuthorizationGrantIdTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] +**DeviceAuthorizationGrantRefreshTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **ImplicitGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **ImplicitGrantIdTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] **JwtBearerGrantAccessTokenLifespan** | Pointer to **string** | Specify a time duration in milliseconds, seconds, minutes, hours. | [optional] @@ -134,6 +137,81 @@ SetClientCredentialsGrantAccessTokenLifespan sets ClientCredentialsGrantAccessTo HasClientCredentialsGrantAccessTokenLifespan returns a boolean if a field has been set. +### GetDeviceAuthorizationGrantAccessTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantAccessTokenLifespan() string` + +GetDeviceAuthorizationGrantAccessTokenLifespan returns the DeviceAuthorizationGrantAccessTokenLifespan field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationGrantAccessTokenLifespanOk + +`func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantAccessTokenLifespanOk() (*string, bool)` + +GetDeviceAuthorizationGrantAccessTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantAccessTokenLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceAuthorizationGrantAccessTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) SetDeviceAuthorizationGrantAccessTokenLifespan(v string)` + +SetDeviceAuthorizationGrantAccessTokenLifespan sets DeviceAuthorizationGrantAccessTokenLifespan field to given value. 
+ +### HasDeviceAuthorizationGrantAccessTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) HasDeviceAuthorizationGrantAccessTokenLifespan() bool` + +HasDeviceAuthorizationGrantAccessTokenLifespan returns a boolean if a field has been set. + +### GetDeviceAuthorizationGrantIdTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantIdTokenLifespan() string` + +GetDeviceAuthorizationGrantIdTokenLifespan returns the DeviceAuthorizationGrantIdTokenLifespan field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationGrantIdTokenLifespanOk + +`func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantIdTokenLifespanOk() (*string, bool)` + +GetDeviceAuthorizationGrantIdTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantIdTokenLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceAuthorizationGrantIdTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) SetDeviceAuthorizationGrantIdTokenLifespan(v string)` + +SetDeviceAuthorizationGrantIdTokenLifespan sets DeviceAuthorizationGrantIdTokenLifespan field to given value. + +### HasDeviceAuthorizationGrantIdTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) HasDeviceAuthorizationGrantIdTokenLifespan() bool` + +HasDeviceAuthorizationGrantIdTokenLifespan returns a boolean if a field has been set. + +### GetDeviceAuthorizationGrantRefreshTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantRefreshTokenLifespan() string` + +GetDeviceAuthorizationGrantRefreshTokenLifespan returns the DeviceAuthorizationGrantRefreshTokenLifespan field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationGrantRefreshTokenLifespanOk + +`func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantRefreshTokenLifespanOk() (*string, bool)` + +GetDeviceAuthorizationGrantRefreshTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantRefreshTokenLifespan field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceAuthorizationGrantRefreshTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) SetDeviceAuthorizationGrantRefreshTokenLifespan(v string)` + +SetDeviceAuthorizationGrantRefreshTokenLifespan sets DeviceAuthorizationGrantRefreshTokenLifespan field to given value. + +### HasDeviceAuthorizationGrantRefreshTokenLifespan + +`func (o *OAuth2ClientTokenLifespans) HasDeviceAuthorizationGrantRefreshTokenLifespan() bool` + +HasDeviceAuthorizationGrantRefreshTokenLifespan returns a boolean if a field has been set. + ### GetImplicitGrantAccessTokenLifespan `func (o *OAuth2ClientTokenLifespans) GetImplicitGrantAccessTokenLifespan() string` diff --git a/internal/httpclient/docs/OAuth2ConsentRequest.md b/internal/httpclient/docs/OAuth2ConsentRequest.md index f01dc3f79f9..1197410e106 100644 --- a/internal/httpclient/docs/OAuth2ConsentRequest.md +++ b/internal/httpclient/docs/OAuth2ConsentRequest.md @@ -6,8 +6,9 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Acr** | Pointer to **string** | ACR represents the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two factor authentication. | [optional] **Amr** | Pointer to **[]string** | | [optional] -**Challenge** | **string** | ID is the identifier (\"authorization challenge\") of the consent authorization request. It is used to identify the session. 
| +**Challenge** | **string** | Challenge is used to retrieve/accept/deny the consent request. | **Client** | Pointer to [**OAuth2Client**](OAuth2Client.md) | | [optional] +**ConsentRequestId** | Pointer to **string** | ConsentRequestID is the ID of the consent request. | [optional] **Context** | Pointer to **interface{}** | | [optional] **LoginChallenge** | Pointer to **string** | LoginChallenge is the login challenge this consent challenge belongs to. It can be used to associate a login and consent request in the login & consent app. | [optional] **LoginSessionId** | Pointer to **string** | LoginSessionID is the login session ID. If the user-agent reuses a login session (via cookie / remember flag) this ID will remain the same. If the user-agent did not have an existing authentication session (e.g. remember is false) this will be a new random value. This value is used as the \"sid\" parameter in the ID Token and in OIDC Front-/Back- channel logout. It's value can generally be used to associate consecutive login requests by a certain user. | [optional] @@ -132,6 +133,31 @@ SetClient sets Client field to given value. HasClient returns a boolean if a field has been set. +### GetConsentRequestId + +`func (o *OAuth2ConsentRequest) GetConsentRequestId() string` + +GetConsentRequestId returns the ConsentRequestId field if non-nil, zero value otherwise. + +### GetConsentRequestIdOk + +`func (o *OAuth2ConsentRequest) GetConsentRequestIdOk() (*string, bool)` + +GetConsentRequestIdOk returns a tuple with the ConsentRequestId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetConsentRequestId + +`func (o *OAuth2ConsentRequest) SetConsentRequestId(v string)` + +SetConsentRequestId sets ConsentRequestId field to given value. + +### HasConsentRequestId + +`func (o *OAuth2ConsentRequest) HasConsentRequestId() bool` + +HasConsentRequestId returns a boolean if a field has been set. + ### GetContext `func (o *OAuth2ConsentRequest) GetContext() interface{}` diff --git a/internal/httpclient/docs/OAuth2ConsentSession.md b/internal/httpclient/docs/OAuth2ConsentSession.md index 732ecca2a3f..217ac63a56e 100644 --- a/internal/httpclient/docs/OAuth2ConsentSession.md +++ b/internal/httpclient/docs/OAuth2ConsentSession.md @@ -5,7 +5,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **ConsentRequest** | Pointer to [**OAuth2ConsentRequest**](OAuth2ConsentRequest.md) | | [optional] -**ExpiresAt** | Pointer to [**OAuth2ConsentSessionExpiresAt**](OAuth2ConsentSessionExpiresAt.md) | | [optional] +**ConsentRequestId** | Pointer to **string** | ConsentRequestID is the identifier of the consent request that initiated this consent session. | [optional] +**Context** | Pointer to **interface{}** | | [optional] **GrantAccessTokenAudience** | Pointer to **[]string** | | [optional] **GrantScope** | Pointer to **[]string** | | [optional] **HandledAt** | Pointer to **time.Time** | | [optional] @@ -57,31 +58,66 @@ SetConsentRequest sets ConsentRequest field to given value. HasConsentRequest returns a boolean if a field has been set. -### GetExpiresAt +### GetConsentRequestId -`func (o *OAuth2ConsentSession) GetExpiresAt() OAuth2ConsentSessionExpiresAt` +`func (o *OAuth2ConsentSession) GetConsentRequestId() string` -GetExpiresAt returns the ExpiresAt field if non-nil, zero value otherwise. +GetConsentRequestId returns the ConsentRequestId field if non-nil, zero value otherwise. 
-### GetExpiresAtOk +### GetConsentRequestIdOk -`func (o *OAuth2ConsentSession) GetExpiresAtOk() (*OAuth2ConsentSessionExpiresAt, bool)` +`func (o *OAuth2ConsentSession) GetConsentRequestIdOk() (*string, bool)` -GetExpiresAtOk returns a tuple with the ExpiresAt field if it's non-nil, zero value otherwise +GetConsentRequestIdOk returns a tuple with the ConsentRequestId field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. -### SetExpiresAt +### SetConsentRequestId -`func (o *OAuth2ConsentSession) SetExpiresAt(v OAuth2ConsentSessionExpiresAt)` +`func (o *OAuth2ConsentSession) SetConsentRequestId(v string)` -SetExpiresAt sets ExpiresAt field to given value. +SetConsentRequestId sets ConsentRequestId field to given value. -### HasExpiresAt +### HasConsentRequestId -`func (o *OAuth2ConsentSession) HasExpiresAt() bool` +`func (o *OAuth2ConsentSession) HasConsentRequestId() bool` -HasExpiresAt returns a boolean if a field has been set. +HasConsentRequestId returns a boolean if a field has been set. +### GetContext + +`func (o *OAuth2ConsentSession) GetContext() interface{}` + +GetContext returns the Context field if non-nil, zero value otherwise. + +### GetContextOk + +`func (o *OAuth2ConsentSession) GetContextOk() (*interface{}, bool)` + +GetContextOk returns a tuple with the Context field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetContext + +`func (o *OAuth2ConsentSession) SetContext(v interface{})` + +SetContext sets Context field to given value. + +### HasContext + +`func (o *OAuth2ConsentSession) HasContext() bool` + +HasContext returns a boolean if a field has been set. + +### SetContextNil + +`func (o *OAuth2ConsentSession) SetContextNil(b bool)` + + SetContextNil sets the value for Context to be an explicit nil + +### UnsetContext +`func (o *OAuth2ConsentSession) UnsetContext()` + +UnsetContext ensures that no value is present for Context, not even an explicit nil ### GetGrantAccessTokenAudience `func (o *OAuth2ConsentSession) GetGrantAccessTokenAudience() []string` diff --git a/internal/httpclient/docs/OAuth2ConsentSessionExpiresAt.md b/internal/httpclient/docs/OAuth2ConsentSessionExpiresAt.md deleted file mode 100644 index f49c3aeb6b7..00000000000 --- a/internal/httpclient/docs/OAuth2ConsentSessionExpiresAt.md +++ /dev/null @@ -1,160 +0,0 @@ -# OAuth2ConsentSessionExpiresAt - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**AccessToken** | Pointer to **time.Time** | | [optional] -**AuthorizeCode** | Pointer to **time.Time** | | [optional] -**IdToken** | Pointer to **time.Time** | | [optional] -**ParContext** | Pointer to **time.Time** | | [optional] -**RefreshToken** | Pointer to **time.Time** | | [optional] - -## Methods - -### NewOAuth2ConsentSessionExpiresAt - -`func NewOAuth2ConsentSessionExpiresAt() *OAuth2ConsentSessionExpiresAt` - -NewOAuth2ConsentSessionExpiresAt instantiates a new OAuth2ConsentSessionExpiresAt object -This constructor will assign default values to properties that have it defined, -and makes sure properties required by API are set, but the set of arguments -will change when the set of required properties is changed - -### NewOAuth2ConsentSessionExpiresAtWithDefaults - -`func NewOAuth2ConsentSessionExpiresAtWithDefaults() *OAuth2ConsentSessionExpiresAt` - -NewOAuth2ConsentSessionExpiresAtWithDefaults instantiates a new OAuth2ConsentSessionExpiresAt object -This constructor will only assign default 
values to properties that have it defined, -but it doesn't guarantee that properties required by API are set - -### GetAccessToken - -`func (o *OAuth2ConsentSessionExpiresAt) GetAccessToken() time.Time` - -GetAccessToken returns the AccessToken field if non-nil, zero value otherwise. - -### GetAccessTokenOk - -`func (o *OAuth2ConsentSessionExpiresAt) GetAccessTokenOk() (*time.Time, bool)` - -GetAccessTokenOk returns a tuple with the AccessToken field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetAccessToken - -`func (o *OAuth2ConsentSessionExpiresAt) SetAccessToken(v time.Time)` - -SetAccessToken sets AccessToken field to given value. - -### HasAccessToken - -`func (o *OAuth2ConsentSessionExpiresAt) HasAccessToken() bool` - -HasAccessToken returns a boolean if a field has been set. - -### GetAuthorizeCode - -`func (o *OAuth2ConsentSessionExpiresAt) GetAuthorizeCode() time.Time` - -GetAuthorizeCode returns the AuthorizeCode field if non-nil, zero value otherwise. - -### GetAuthorizeCodeOk - -`func (o *OAuth2ConsentSessionExpiresAt) GetAuthorizeCodeOk() (*time.Time, bool)` - -GetAuthorizeCodeOk returns a tuple with the AuthorizeCode field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetAuthorizeCode - -`func (o *OAuth2ConsentSessionExpiresAt) SetAuthorizeCode(v time.Time)` - -SetAuthorizeCode sets AuthorizeCode field to given value. - -### HasAuthorizeCode - -`func (o *OAuth2ConsentSessionExpiresAt) HasAuthorizeCode() bool` - -HasAuthorizeCode returns a boolean if a field has been set. - -### GetIdToken - -`func (o *OAuth2ConsentSessionExpiresAt) GetIdToken() time.Time` - -GetIdToken returns the IdToken field if non-nil, zero value otherwise. - -### GetIdTokenOk - -`func (o *OAuth2ConsentSessionExpiresAt) GetIdTokenOk() (*time.Time, bool)` - -GetIdTokenOk returns a tuple with the IdToken field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetIdToken - -`func (o *OAuth2ConsentSessionExpiresAt) SetIdToken(v time.Time)` - -SetIdToken sets IdToken field to given value. - -### HasIdToken - -`func (o *OAuth2ConsentSessionExpiresAt) HasIdToken() bool` - -HasIdToken returns a boolean if a field has been set. - -### GetParContext - -`func (o *OAuth2ConsentSessionExpiresAt) GetParContext() time.Time` - -GetParContext returns the ParContext field if non-nil, zero value otherwise. - -### GetParContextOk - -`func (o *OAuth2ConsentSessionExpiresAt) GetParContextOk() (*time.Time, bool)` - -GetParContextOk returns a tuple with the ParContext field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetParContext - -`func (o *OAuth2ConsentSessionExpiresAt) SetParContext(v time.Time)` - -SetParContext sets ParContext field to given value. - -### HasParContext - -`func (o *OAuth2ConsentSessionExpiresAt) HasParContext() bool` - -HasParContext returns a boolean if a field has been set. - -### GetRefreshToken - -`func (o *OAuth2ConsentSessionExpiresAt) GetRefreshToken() time.Time` - -GetRefreshToken returns the RefreshToken field if non-nil, zero value otherwise. - -### GetRefreshTokenOk - -`func (o *OAuth2ConsentSessionExpiresAt) GetRefreshTokenOk() (*time.Time, bool)` - -GetRefreshTokenOk returns a tuple with the RefreshToken field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. 
- -### SetRefreshToken - -`func (o *OAuth2ConsentSessionExpiresAt) SetRefreshToken(v time.Time)` - -SetRefreshToken sets RefreshToken field to given value. - -### HasRefreshToken - -`func (o *OAuth2ConsentSessionExpiresAt) HasRefreshToken() bool` - -HasRefreshToken returns a boolean if a field has been set. - - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/internal/httpclient/docs/OAuth2LoginRequest.md b/internal/httpclient/docs/OAuth2LoginRequest.md index aa34f248272..34037195301 100644 --- a/internal/httpclient/docs/OAuth2LoginRequest.md +++ b/internal/httpclient/docs/OAuth2LoginRequest.md @@ -4,12 +4,12 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**Challenge** | **string** | ID is the identifier (\"login challenge\") of the login request. It is used to identify the session. | +**Challenge** | **string** | ID is the identifier of the login request. | **Client** | [**OAuth2Client**](OAuth2Client.md) | | **OidcContext** | Pointer to [**OAuth2ConsentRequestOpenIDConnectContext**](OAuth2ConsentRequestOpenIDConnectContext.md) | | [optional] **RequestUrl** | **string** | RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. It is the URL which initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but might come in handy if you want to deal with additional request parameters. | -**RequestedAccessTokenAudience** | **[]string** | | -**RequestedScope** | **[]string** | | +**RequestedAccessTokenAudience** | Pointer to **[]string** | | [optional] +**RequestedScope** | Pointer to **[]string** | | [optional] **SessionId** | Pointer to **string** | SessionID is the login session ID. If the user-agent reuses a login session (via cookie / remember flag) this ID will remain the same. If the user-agent did not have an existing authentication session (e.g. remember is false) this will be a new random value. This value is used as the \"sid\" parameter in the ID Token and in OIDC Front-/Back- channel logout. It's value can generally be used to associate consecutive login requests by a certain user. | [optional] **Skip** | **bool** | Skip, if true, implies that the client has requested the same scopes from the same user previously. If true, you can skip asking the user to grant the requested scopes, and simply forward the user to the redirect URL. This feature allows you to update / set session information. | **Subject** | **string** | Subject is the user ID of the end-user that authenticated. Now, that end user needs to grant or deny the scope requested by the OAuth 2.0 client. If this value is set and `skip` is true, you MUST include this subject type when accepting the login request, or the request will fail. | @@ -18,7 +18,7 @@ Name | Type | Description | Notes ### NewOAuth2LoginRequest -`func NewOAuth2LoginRequest(challenge string, client OAuth2Client, requestUrl string, requestedAccessTokenAudience []string, requestedScope []string, skip bool, subject string, ) *OAuth2LoginRequest` +`func NewOAuth2LoginRequest(challenge string, client OAuth2Client, requestUrl string, skip bool, subject string, ) *OAuth2LoginRequest` NewOAuth2LoginRequest instantiates a new OAuth2LoginRequest object This constructor will assign default values to properties that have it defined, @@ -137,6 +137,11 @@ and a boolean to check if the value has been set. 
SetRequestedAccessTokenAudience sets RequestedAccessTokenAudience field to given value. +### HasRequestedAccessTokenAudience + +`func (o *OAuth2LoginRequest) HasRequestedAccessTokenAudience() bool` + +HasRequestedAccessTokenAudience returns a boolean if a field has been set. ### GetRequestedScope @@ -157,6 +162,11 @@ and a boolean to check if the value has been set. SetRequestedScope sets RequestedScope field to given value. +### HasRequestedScope + +`func (o *OAuth2LoginRequest) HasRequestedScope() bool` + +HasRequestedScope returns a boolean if a field has been set. ### GetSessionId diff --git a/internal/httpclient/docs/OAuth2LogoutRequest.md b/internal/httpclient/docs/OAuth2LogoutRequest.md index cd620202bf8..81da891ef54 100644 --- a/internal/httpclient/docs/OAuth2LogoutRequest.md +++ b/internal/httpclient/docs/OAuth2LogoutRequest.md @@ -4,9 +4,11 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**Challenge** | Pointer to **string** | Challenge is the identifier (\"logout challenge\") of the logout authentication request. It is used to identify the session. | [optional] +**Challenge** | Pointer to **string** | Challenge is the identifier of the logout authentication request. | [optional] **Client** | Pointer to [**OAuth2Client**](OAuth2Client.md) | | [optional] +**ExpiresAt** | Pointer to **time.Time** | | [optional] **RequestUrl** | Pointer to **string** | RequestURL is the original Logout URL requested. | [optional] +**RequestedAt** | Pointer to **time.Time** | | [optional] **RpInitiated** | Pointer to **bool** | RPInitiated is set to true if the request was initiated by a Relying Party (RP), also known as an OAuth 2.0 Client. | [optional] **Sid** | Pointer to **string** | SessionID is the login session ID that was requested to log out. | [optional] **Subject** | Pointer to **string** | Subject is the user for whom the logout was request. | [optional] @@ -80,6 +82,31 @@ SetClient sets Client field to given value. HasClient returns a boolean if a field has been set. +### GetExpiresAt + +`func (o *OAuth2LogoutRequest) GetExpiresAt() time.Time` + +GetExpiresAt returns the ExpiresAt field if non-nil, zero value otherwise. + +### GetExpiresAtOk + +`func (o *OAuth2LogoutRequest) GetExpiresAtOk() (*time.Time, bool)` + +GetExpiresAtOk returns a tuple with the ExpiresAt field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetExpiresAt + +`func (o *OAuth2LogoutRequest) SetExpiresAt(v time.Time)` + +SetExpiresAt sets ExpiresAt field to given value. + +### HasExpiresAt + +`func (o *OAuth2LogoutRequest) HasExpiresAt() bool` + +HasExpiresAt returns a boolean if a field has been set. + ### GetRequestUrl `func (o *OAuth2LogoutRequest) GetRequestUrl() string` @@ -105,6 +132,31 @@ SetRequestUrl sets RequestUrl field to given value. HasRequestUrl returns a boolean if a field has been set. +### GetRequestedAt + +`func (o *OAuth2LogoutRequest) GetRequestedAt() time.Time` + +GetRequestedAt returns the RequestedAt field if non-nil, zero value otherwise. + +### GetRequestedAtOk + +`func (o *OAuth2LogoutRequest) GetRequestedAtOk() (*time.Time, bool)` + +GetRequestedAtOk returns a tuple with the RequestedAt field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestedAt + +`func (o *OAuth2LogoutRequest) SetRequestedAt(v time.Time)` + +SetRequestedAt sets RequestedAt field to given value. 
+ +### HasRequestedAt + +`func (o *OAuth2LogoutRequest) HasRequestedAt() bool` + +HasRequestedAt returns a boolean if a field has been set. + ### GetRpInitiated `func (o *OAuth2LogoutRequest) GetRpInitiated() bool` diff --git a/internal/httpclient/docs/OAuth2TokenExchange.md b/internal/httpclient/docs/OAuth2TokenExchange.md index 8976c6ff0a6..cae2673e099 100644 --- a/internal/httpclient/docs/OAuth2TokenExchange.md +++ b/internal/httpclient/docs/OAuth2TokenExchange.md @@ -6,7 +6,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **AccessToken** | Pointer to **string** | The access token issued by the authorization server. | [optional] **ExpiresIn** | Pointer to **int64** | The lifetime in seconds of the access token. For example, the value \"3600\" denotes that the access token will expire in one hour from the time the response was generated. | [optional] -**IdToken** | Pointer to **int64** | To retrieve a refresh token request the id_token scope. | [optional] +**IdToken** | Pointer to **string** | To retrieve a refresh token request the id_token scope. | [optional] **RefreshToken** | Pointer to **string** | The refresh token, which can be used to obtain new access tokens. To retrieve it add the scope \"offline\" to your access token request. | [optional] **Scope** | Pointer to **string** | The scope of the access token | [optional] **TokenType** | Pointer to **string** | The type of the token issued | [optional] @@ -82,20 +82,20 @@ HasExpiresIn returns a boolean if a field has been set. ### GetIdToken -`func (o *OAuth2TokenExchange) GetIdToken() int64` +`func (o *OAuth2TokenExchange) GetIdToken() string` GetIdToken returns the IdToken field if non-nil, zero value otherwise. ### GetIdTokenOk -`func (o *OAuth2TokenExchange) GetIdTokenOk() (*int64, bool)` +`func (o *OAuth2TokenExchange) GetIdTokenOk() (*string, bool)` GetIdTokenOk returns a tuple with the IdToken field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetIdToken -`func (o *OAuth2TokenExchange) SetIdToken(v int64)` +`func (o *OAuth2TokenExchange) SetIdToken(v string)` SetIdToken sets IdToken field to given value. 
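
`OAuth2TokenExchange.IdToken` changes type from `int64` to `string`, so the generated accessors now hand back the ID Token as a string. A minimal sketch of reading the field through the corrected accessors follows; it assumes the usual generated `NewOAuth2TokenExchange` constructor, and the token value is a placeholder — in practice the struct is returned by the token endpoint call.

```go
package main

import (
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Placeholder value; normally this struct comes back from the token endpoint.
	exchange := *openapiclient.NewOAuth2TokenExchange()
	exchange.SetIdToken("<id-token-jwt>") // now a string, no longer an int64

	if idToken, ok := exchange.GetIdTokenOk(); ok {
		fmt.Println("id_token:", *idToken)
	}
}
```
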
diff --git a/internal/httpclient/docs/OidcApi.md b/internal/httpclient/docs/OidcAPI.md similarity index 52% rename from internal/httpclient/docs/OidcApi.md rename to internal/httpclient/docs/OidcAPI.md index 8087d0565df..7ec0f5b17a0 100644 --- a/internal/httpclient/docs/OidcApi.md +++ b/internal/httpclient/docs/OidcAPI.md @@ -1,16 +1,17 @@ -# \OidcApi +# \OidcAPI All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- -[**CreateOidcDynamicClient**](OidcApi.md#CreateOidcDynamicClient) | **Post** /oauth2/register | Register OAuth2 Client using OpenID Dynamic Client Registration -[**DeleteOidcDynamicClient**](OidcApi.md#DeleteOidcDynamicClient) | **Delete** /oauth2/register/{id} | Delete OAuth 2.0 Client using the OpenID Dynamic Client Registration Management Protocol -[**DiscoverOidcConfiguration**](OidcApi.md#DiscoverOidcConfiguration) | **Get** /.well-known/openid-configuration | OpenID Connect Discovery -[**GetOidcDynamicClient**](OidcApi.md#GetOidcDynamicClient) | **Get** /oauth2/register/{id} | Get OAuth2 Client using OpenID Dynamic Client Registration -[**GetOidcUserInfo**](OidcApi.md#GetOidcUserInfo) | **Get** /userinfo | OpenID Connect Userinfo -[**RevokeOidcSession**](OidcApi.md#RevokeOidcSession) | **Get** /oauth2/sessions/logout | OpenID Connect Front- and Back-channel Enabled Logout -[**SetOidcDynamicClient**](OidcApi.md#SetOidcDynamicClient) | **Put** /oauth2/register/{id} | Set OAuth2 Client using OpenID Dynamic Client Registration +[**CreateOidcDynamicClient**](OidcAPI.md#CreateOidcDynamicClient) | **Post** /oauth2/register | Register OAuth2 Client using OpenID Dynamic Client Registration +[**CreateVerifiableCredential**](OidcAPI.md#CreateVerifiableCredential) | **Post** /credentials | Issues a Verifiable Credential +[**DeleteOidcDynamicClient**](OidcAPI.md#DeleteOidcDynamicClient) | **Delete** /oauth2/register/{id} | Delete OAuth 2.0 Client using the OpenID Dynamic Client Registration Management Protocol +[**DiscoverOidcConfiguration**](OidcAPI.md#DiscoverOidcConfiguration) | **Get** /.well-known/openid-configuration | OpenID Connect Discovery +[**GetOidcDynamicClient**](OidcAPI.md#GetOidcDynamicClient) | **Get** /oauth2/register/{id} | Get OAuth2 Client using OpenID Dynamic Client Registration +[**GetOidcUserInfo**](OidcAPI.md#GetOidcUserInfo) | **Get** /userinfo | OpenID Connect Userinfo +[**RevokeOidcSession**](OidcAPI.md#RevokeOidcSession) | **Get** /oauth2/sessions/logout | OpenID Connect Front- and Back-channel Enabled Logout +[**SetOidcDynamicClient**](OidcAPI.md#SetOidcDynamicClient) | **Put** /oauth2/register/{id} | Set OAuth2 Client using OpenID Dynamic Client Registration @@ -28,24 +29,24 @@ Register OAuth2 Client using OpenID Dynamic Client Registration package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | Dynamic Client Registration Request Body - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.CreateOidcDynamicClient(context.Background()).OAuth2Client(oAuth2Client).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.CreateOidcDynamicClient``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `CreateOidcDynamicClient`: OAuth2Client 
- fmt.Fprintf(os.Stdout, "Response from `OidcApi.CreateOidcDynamicClient`: %v\n", resp) + oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | Dynamic Client Registration Request Body + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OidcAPI.CreateOidcDynamicClient(context.Background()).OAuth2Client(oAuth2Client).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.CreateOidcDynamicClient``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `CreateOidcDynamicClient`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OidcAPI.CreateOidcDynamicClient`: %v\n", resp) } ``` @@ -80,6 +81,72 @@ No authorization required [[Back to README]](../README.md) +## CreateVerifiableCredential + +> VerifiableCredentialResponse CreateVerifiableCredential(ctx).CreateVerifiableCredentialRequestBody(createVerifiableCredentialRequestBody).Execute() + +Issues a Verifiable Credential + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" +) + +func main() { + createVerifiableCredentialRequestBody := *openapiclient.NewCreateVerifiableCredentialRequestBody() // CreateVerifiableCredentialRequestBody | (optional) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OidcAPI.CreateVerifiableCredential(context.Background()).CreateVerifiableCredentialRequestBody(createVerifiableCredentialRequestBody).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.CreateVerifiableCredential``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `CreateVerifiableCredential`: VerifiableCredentialResponse + fmt.Fprintf(os.Stdout, "Response from `OidcAPI.CreateVerifiableCredential`: %v\n", resp) +} +``` + +### Path Parameters + + + +### Other Parameters + +Other parameters are passed through a pointer to a apiCreateVerifiableCredentialRequest struct via the builder pattern + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **createVerifiableCredentialRequestBody** | [**CreateVerifiableCredentialRequestBody**](CreateVerifiableCredentialRequestBody.md) | | + +### Return type + +[**VerifiableCredentialResponse**](VerifiableCredentialResponse.md) + +### Authorization + +No authorization required + +### HTTP request headers + +- **Content-Type**: application/json +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## DeleteOidcDynamicClient > DeleteOidcDynamicClient(ctx, id).Execute() @@ -94,22 +161,22 @@ Delete OAuth 2.0 Client using the OpenID Dynamic Client Registration Management package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the OAuth 2.0 Client. 
- - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.DeleteOidcDynamicClient(context.Background(), id).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.DeleteOidcDynamicClient``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + id := "id_example" // string | The id of the OAuth 2.0 Client. + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OidcAPI.DeleteOidcDynamicClient(context.Background(), id).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.DeleteOidcDynamicClient``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -162,23 +229,23 @@ OpenID Connect Discovery package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.DiscoverOidcConfiguration(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.DiscoverOidcConfiguration``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `DiscoverOidcConfiguration`: OidcConfiguration - fmt.Fprintf(os.Stdout, "Response from `OidcApi.DiscoverOidcConfiguration`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OidcAPI.DiscoverOidcConfiguration(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.DiscoverOidcConfiguration``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `DiscoverOidcConfiguration`: OidcConfiguration + fmt.Fprintf(os.Stdout, "Response from `OidcAPI.DiscoverOidcConfiguration`: %v\n", resp) } ``` @@ -223,24 +290,24 @@ Get OAuth2 Client using OpenID Dynamic Client Registration package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | The id of the OAuth 2.0 Client. - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.GetOidcDynamicClient(context.Background(), id).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.GetOidcDynamicClient``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetOidcDynamicClient`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OidcApi.GetOidcDynamicClient`: %v\n", resp) + id := "id_example" // string | The id of the OAuth 2.0 Client. 
+ + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OidcAPI.GetOidcDynamicClient(context.Background(), id).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.GetOidcDynamicClient``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetOidcDynamicClient`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OidcAPI.GetOidcDynamicClient`: %v\n", resp) } ``` @@ -293,23 +360,23 @@ OpenID Connect Userinfo package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.GetOidcUserInfo(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.GetOidcUserInfo``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `GetOidcUserInfo`: OidcUserInfo - fmt.Fprintf(os.Stdout, "Response from `OidcApi.GetOidcUserInfo`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OidcAPI.GetOidcUserInfo(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.GetOidcUserInfo``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetOidcUserInfo`: OidcUserInfo + fmt.Fprintf(os.Stdout, "Response from `OidcAPI.GetOidcUserInfo`: %v\n", resp) } ``` @@ -354,21 +421,21 @@ OpenID Connect Front- and Back-channel Enabled Logout package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.RevokeOidcSession(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.RevokeOidcSession``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + r, err := apiClient.OidcAPI.RevokeOidcSession(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.RevokeOidcSession``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } } ``` @@ -413,25 +480,25 @@ Set OAuth2 Client using OpenID Dynamic Client Registration package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - id := "id_example" // string | OAuth 2.0 Client ID - oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | OAuth 2.0 Client Request Body - - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.OidcApi.SetOidcDynamicClient(context.Background(), id).OAuth2Client(oAuth2Client).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `OidcApi.SetOidcDynamicClient``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from 
`SetOidcDynamicClient`: OAuth2Client - fmt.Fprintf(os.Stdout, "Response from `OidcApi.SetOidcDynamicClient`: %v\n", resp) + id := "id_example" // string | OAuth 2.0 Client ID + oAuth2Client := *openapiclient.NewOAuth2Client() // OAuth2Client | OAuth 2.0 Client Request Body + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.OidcAPI.SetOidcDynamicClient(context.Background(), id).OAuth2Client(oAuth2Client).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `OidcAPI.SetOidcDynamicClient``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `SetOidcDynamicClient`: OAuth2Client + fmt.Fprintf(os.Stdout, "Response from `OidcAPI.SetOidcDynamicClient`: %v\n", resp) } ``` diff --git a/internal/httpclient/docs/OidcConfiguration.md b/internal/httpclient/docs/OidcConfiguration.md index 723bd6a8b47..27f0134440c 100644 --- a/internal/httpclient/docs/OidcConfiguration.md +++ b/internal/httpclient/docs/OidcConfiguration.md @@ -10,6 +10,9 @@ Name | Type | Description | Notes **ClaimsParameterSupported** | Pointer to **bool** | OpenID Connect Claims Parameter Parameter Supported Boolean value specifying whether the OP supports use of the claims parameter, with true indicating support. | [optional] **ClaimsSupported** | Pointer to **[]string** | OpenID Connect Supported Claims JSON array containing a list of the Claim Names of the Claims that the OpenID Provider MAY be able to supply values for. Note that for privacy or other reasons, this might not be an exhaustive list. | [optional] **CodeChallengeMethodsSupported** | Pointer to **[]string** | OAuth 2.0 PKCE Supported Code Challenge Methods JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by this authorization server. | [optional] +**CredentialsEndpointDraft00** | Pointer to **string** | OpenID Connect Verifiable Credentials Endpoint Contains the URL of the Verifiable Credentials Endpoint. | [optional] +**CredentialsSupportedDraft00** | Pointer to [**[]CredentialSupportedDraft00**](CredentialSupportedDraft00.md) | OpenID Connect Verifiable Credentials Supported JSON array containing a list of the Verifiable Credentials supported by this authorization server. | [optional] +**DeviceAuthorizationEndpoint** | **string** | OAuth 2.0 Device Authorization Endpoint URL | **EndSessionEndpoint** | Pointer to **string** | OpenID Connect End-Session Endpoint URL at the OP to which an RP can perform a redirect to request that the End-User be logged out at the OP. | [optional] **FrontchannelLogoutSessionSupported** | Pointer to **bool** | OpenID Connect Front-Channel Logout Session Required Boolean value specifying whether the OP can pass iss (issuer) and sid (session ID) query parameters to identify the RP session with the OP when the frontchannel_logout_uri is used. If supported, the sid Claim is also included in ID Tokens issued by the OP. | [optional] **FrontchannelLogoutSupported** | Pointer to **bool** | OpenID Connect Front-Channel Logout Supported Boolean value specifying whether the OP supports HTTP-based logout, with true indicating support. 
| [optional] @@ -38,7 +41,7 @@ Name | Type | Description | Notes ### NewOidcConfiguration -`func NewOidcConfiguration(authorizationEndpoint string, idTokenSignedResponseAlg []string, idTokenSigningAlgValuesSupported []string, issuer string, jwksUri string, responseTypesSupported []string, subjectTypesSupported []string, tokenEndpoint string, userinfoSignedResponseAlg []string, ) *OidcConfiguration` +`func NewOidcConfiguration(authorizationEndpoint string, deviceAuthorizationEndpoint string, idTokenSignedResponseAlg []string, idTokenSigningAlgValuesSupported []string, issuer string, jwksUri string, responseTypesSupported []string, subjectTypesSupported []string, tokenEndpoint string, userinfoSignedResponseAlg []string, ) *OidcConfiguration` NewOidcConfiguration instantiates a new OidcConfiguration object This constructor will assign default values to properties that have it defined, @@ -198,6 +201,76 @@ SetCodeChallengeMethodsSupported sets CodeChallengeMethodsSupported field to giv HasCodeChallengeMethodsSupported returns a boolean if a field has been set. +### GetCredentialsEndpointDraft00 + +`func (o *OidcConfiguration) GetCredentialsEndpointDraft00() string` + +GetCredentialsEndpointDraft00 returns the CredentialsEndpointDraft00 field if non-nil, zero value otherwise. + +### GetCredentialsEndpointDraft00Ok + +`func (o *OidcConfiguration) GetCredentialsEndpointDraft00Ok() (*string, bool)` + +GetCredentialsEndpointDraft00Ok returns a tuple with the CredentialsEndpointDraft00 field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCredentialsEndpointDraft00 + +`func (o *OidcConfiguration) SetCredentialsEndpointDraft00(v string)` + +SetCredentialsEndpointDraft00 sets CredentialsEndpointDraft00 field to given value. + +### HasCredentialsEndpointDraft00 + +`func (o *OidcConfiguration) HasCredentialsEndpointDraft00() bool` + +HasCredentialsEndpointDraft00 returns a boolean if a field has been set. + +### GetCredentialsSupportedDraft00 + +`func (o *OidcConfiguration) GetCredentialsSupportedDraft00() []CredentialSupportedDraft00` + +GetCredentialsSupportedDraft00 returns the CredentialsSupportedDraft00 field if non-nil, zero value otherwise. + +### GetCredentialsSupportedDraft00Ok + +`func (o *OidcConfiguration) GetCredentialsSupportedDraft00Ok() (*[]CredentialSupportedDraft00, bool)` + +GetCredentialsSupportedDraft00Ok returns a tuple with the CredentialsSupportedDraft00 field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCredentialsSupportedDraft00 + +`func (o *OidcConfiguration) SetCredentialsSupportedDraft00(v []CredentialSupportedDraft00)` + +SetCredentialsSupportedDraft00 sets CredentialsSupportedDraft00 field to given value. + +### HasCredentialsSupportedDraft00 + +`func (o *OidcConfiguration) HasCredentialsSupportedDraft00() bool` + +HasCredentialsSupportedDraft00 returns a boolean if a field has been set. + +### GetDeviceAuthorizationEndpoint + +`func (o *OidcConfiguration) GetDeviceAuthorizationEndpoint() string` + +GetDeviceAuthorizationEndpoint returns the DeviceAuthorizationEndpoint field if non-nil, zero value otherwise. + +### GetDeviceAuthorizationEndpointOk + +`func (o *OidcConfiguration) GetDeviceAuthorizationEndpointOk() (*string, bool)` + +GetDeviceAuthorizationEndpointOk returns a tuple with the DeviceAuthorizationEndpoint field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetDeviceAuthorizationEndpoint + +`func (o *OidcConfiguration) SetDeviceAuthorizationEndpoint(v string)` + +SetDeviceAuthorizationEndpoint sets DeviceAuthorizationEndpoint field to given value. + + ### GetEndSessionEndpoint `func (o *OidcConfiguration) GetEndSessionEndpoint() string` diff --git a/internal/httpclient/docs/PaginationHeaders.md b/internal/httpclient/docs/PaginationHeaders.md deleted file mode 100644 index 78f93cc4b16..00000000000 --- a/internal/httpclient/docs/PaginationHeaders.md +++ /dev/null @@ -1,82 +0,0 @@ -# PaginationHeaders - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**Link** | Pointer to **string** | The link header contains pagination links. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). in: header | [optional] -**XTotalCount** | Pointer to **string** | The total number of clients. in: header | [optional] - -## Methods - -### NewPaginationHeaders - -`func NewPaginationHeaders() *PaginationHeaders` - -NewPaginationHeaders instantiates a new PaginationHeaders object -This constructor will assign default values to properties that have it defined, -and makes sure properties required by API are set, but the set of arguments -will change when the set of required properties is changed - -### NewPaginationHeadersWithDefaults - -`func NewPaginationHeadersWithDefaults() *PaginationHeaders` - -NewPaginationHeadersWithDefaults instantiates a new PaginationHeaders object -This constructor will only assign default values to properties that have it defined, -but it doesn't guarantee that properties required by API are set - -### GetLink - -`func (o *PaginationHeaders) GetLink() string` - -GetLink returns the Link field if non-nil, zero value otherwise. - -### GetLinkOk - -`func (o *PaginationHeaders) GetLinkOk() (*string, bool)` - -GetLinkOk returns a tuple with the Link field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetLink - -`func (o *PaginationHeaders) SetLink(v string)` - -SetLink sets Link field to given value. - -### HasLink - -`func (o *PaginationHeaders) HasLink() bool` - -HasLink returns a boolean if a field has been set. - -### GetXTotalCount - -`func (o *PaginationHeaders) GetXTotalCount() string` - -GetXTotalCount returns the XTotalCount field if non-nil, zero value otherwise. - -### GetXTotalCountOk - -`func (o *PaginationHeaders) GetXTotalCountOk() (*string, bool)` - -GetXTotalCountOk returns a tuple with the XTotalCount field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetXTotalCount - -`func (o *PaginationHeaders) SetXTotalCount(v string)` - -SetXTotalCount sets XTotalCount field to given value. - -### HasXTotalCount - -`func (o *PaginationHeaders) HasXTotalCount() bool` - -HasXTotalCount returns a boolean if a field has been set. 
- - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/internal/httpclient/docs/RFC6749ErrorJson.md b/internal/httpclient/docs/RFC6749ErrorJson.md new file mode 100644 index 00000000000..570e38c5a91 --- /dev/null +++ b/internal/httpclient/docs/RFC6749ErrorJson.md @@ -0,0 +1,160 @@ +# RFC6749ErrorJson + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Error** | Pointer to **string** | | [optional] +**ErrorDebug** | Pointer to **string** | | [optional] +**ErrorDescription** | Pointer to **string** | | [optional] +**ErrorHint** | Pointer to **string** | | [optional] +**StatusCode** | Pointer to **int64** | | [optional] + +## Methods + +### NewRFC6749ErrorJson + +`func NewRFC6749ErrorJson() *RFC6749ErrorJson` + +NewRFC6749ErrorJson instantiates a new RFC6749ErrorJson object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewRFC6749ErrorJsonWithDefaults + +`func NewRFC6749ErrorJsonWithDefaults() *RFC6749ErrorJson` + +NewRFC6749ErrorJsonWithDefaults instantiates a new RFC6749ErrorJson object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetError + +`func (o *RFC6749ErrorJson) GetError() string` + +GetError returns the Error field if non-nil, zero value otherwise. + +### GetErrorOk + +`func (o *RFC6749ErrorJson) GetErrorOk() (*string, bool)` + +GetErrorOk returns a tuple with the Error field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetError + +`func (o *RFC6749ErrorJson) SetError(v string)` + +SetError sets Error field to given value. + +### HasError + +`func (o *RFC6749ErrorJson) HasError() bool` + +HasError returns a boolean if a field has been set. + +### GetErrorDebug + +`func (o *RFC6749ErrorJson) GetErrorDebug() string` + +GetErrorDebug returns the ErrorDebug field if non-nil, zero value otherwise. + +### GetErrorDebugOk + +`func (o *RFC6749ErrorJson) GetErrorDebugOk() (*string, bool)` + +GetErrorDebugOk returns a tuple with the ErrorDebug field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetErrorDebug + +`func (o *RFC6749ErrorJson) SetErrorDebug(v string)` + +SetErrorDebug sets ErrorDebug field to given value. + +### HasErrorDebug + +`func (o *RFC6749ErrorJson) HasErrorDebug() bool` + +HasErrorDebug returns a boolean if a field has been set. + +### GetErrorDescription + +`func (o *RFC6749ErrorJson) GetErrorDescription() string` + +GetErrorDescription returns the ErrorDescription field if non-nil, zero value otherwise. + +### GetErrorDescriptionOk + +`func (o *RFC6749ErrorJson) GetErrorDescriptionOk() (*string, bool)` + +GetErrorDescriptionOk returns a tuple with the ErrorDescription field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetErrorDescription + +`func (o *RFC6749ErrorJson) SetErrorDescription(v string)` + +SetErrorDescription sets ErrorDescription field to given value. + +### HasErrorDescription + +`func (o *RFC6749ErrorJson) HasErrorDescription() bool` + +HasErrorDescription returns a boolean if a field has been set. 
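The new `RFC6749ErrorJson` model follows the same Get/GetOk/Set/Has accessor pattern as the other generated models. A short sketch with hypothetical error values, using the same `hydra-client-go/v2` import as the examples above:

```go
package main

import (
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Hypothetical error values for illustration only.
	e := openapiclient.NewRFC6749ErrorJson()
	e.SetError("invalid_request")
	e.SetErrorDescription("The request is missing a required parameter.")
	e.SetStatusCode(400)

	// GetErrorOk reports whether the field was set alongside its value.
	if v, ok := e.GetErrorOk(); ok {
		fmt.Println("error:", *v)
	}
	if !e.HasErrorDebug() {
		fmt.Println("no debug information attached")
	}
}
```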
+ +### GetErrorHint + +`func (o *RFC6749ErrorJson) GetErrorHint() string` + +GetErrorHint returns the ErrorHint field if non-nil, zero value otherwise. + +### GetErrorHintOk + +`func (o *RFC6749ErrorJson) GetErrorHintOk() (*string, bool)` + +GetErrorHintOk returns a tuple with the ErrorHint field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetErrorHint + +`func (o *RFC6749ErrorJson) SetErrorHint(v string)` + +SetErrorHint sets ErrorHint field to given value. + +### HasErrorHint + +`func (o *RFC6749ErrorJson) HasErrorHint() bool` + +HasErrorHint returns a boolean if a field has been set. + +### GetStatusCode + +`func (o *RFC6749ErrorJson) GetStatusCode() int64` + +GetStatusCode returns the StatusCode field if non-nil, zero value otherwise. + +### GetStatusCodeOk + +`func (o *RFC6749ErrorJson) GetStatusCodeOk() (*int64, bool)` + +GetStatusCodeOk returns a tuple with the StatusCode field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetStatusCode + +`func (o *RFC6749ErrorJson) SetStatusCode(v int64)` + +SetStatusCode sets StatusCode field to given value. + +### HasStatusCode + +`func (o *RFC6749ErrorJson) HasStatusCode() bool` + +HasStatusCode returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/VerifiableCredentialPrimingResponse.md b/internal/httpclient/docs/VerifiableCredentialPrimingResponse.md new file mode 100644 index 00000000000..5668ebf5a0b --- /dev/null +++ b/internal/httpclient/docs/VerifiableCredentialPrimingResponse.md @@ -0,0 +1,238 @@ +# VerifiableCredentialPrimingResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CNonce** | Pointer to **string** | | [optional] +**CNonceExpiresIn** | Pointer to **int64** | | [optional] +**Error** | Pointer to **string** | | [optional] +**ErrorDebug** | Pointer to **string** | | [optional] +**ErrorDescription** | Pointer to **string** | | [optional] +**ErrorHint** | Pointer to **string** | | [optional] +**Format** | Pointer to **string** | | [optional] +**StatusCode** | Pointer to **int64** | | [optional] + +## Methods + +### NewVerifiableCredentialPrimingResponse + +`func NewVerifiableCredentialPrimingResponse() *VerifiableCredentialPrimingResponse` + +NewVerifiableCredentialPrimingResponse instantiates a new VerifiableCredentialPrimingResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewVerifiableCredentialPrimingResponseWithDefaults + +`func NewVerifiableCredentialPrimingResponseWithDefaults() *VerifiableCredentialPrimingResponse` + +NewVerifiableCredentialPrimingResponseWithDefaults instantiates a new VerifiableCredentialPrimingResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetCNonce + +`func (o *VerifiableCredentialPrimingResponse) GetCNonce() string` + +GetCNonce returns the CNonce field if non-nil, zero value otherwise. 
+ +### GetCNonceOk + +`func (o *VerifiableCredentialPrimingResponse) GetCNonceOk() (*string, bool)` + +GetCNonceOk returns a tuple with the CNonce field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCNonce + +`func (o *VerifiableCredentialPrimingResponse) SetCNonce(v string)` + +SetCNonce sets CNonce field to given value. + +### HasCNonce + +`func (o *VerifiableCredentialPrimingResponse) HasCNonce() bool` + +HasCNonce returns a boolean if a field has been set. + +### GetCNonceExpiresIn + +`func (o *VerifiableCredentialPrimingResponse) GetCNonceExpiresIn() int64` + +GetCNonceExpiresIn returns the CNonceExpiresIn field if non-nil, zero value otherwise. + +### GetCNonceExpiresInOk + +`func (o *VerifiableCredentialPrimingResponse) GetCNonceExpiresInOk() (*int64, bool)` + +GetCNonceExpiresInOk returns a tuple with the CNonceExpiresIn field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCNonceExpiresIn + +`func (o *VerifiableCredentialPrimingResponse) SetCNonceExpiresIn(v int64)` + +SetCNonceExpiresIn sets CNonceExpiresIn field to given value. + +### HasCNonceExpiresIn + +`func (o *VerifiableCredentialPrimingResponse) HasCNonceExpiresIn() bool` + +HasCNonceExpiresIn returns a boolean if a field has been set. + +### GetError + +`func (o *VerifiableCredentialPrimingResponse) GetError() string` + +GetError returns the Error field if non-nil, zero value otherwise. + +### GetErrorOk + +`func (o *VerifiableCredentialPrimingResponse) GetErrorOk() (*string, bool)` + +GetErrorOk returns a tuple with the Error field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetError + +`func (o *VerifiableCredentialPrimingResponse) SetError(v string)` + +SetError sets Error field to given value. + +### HasError + +`func (o *VerifiableCredentialPrimingResponse) HasError() bool` + +HasError returns a boolean if a field has been set. + +### GetErrorDebug + +`func (o *VerifiableCredentialPrimingResponse) GetErrorDebug() string` + +GetErrorDebug returns the ErrorDebug field if non-nil, zero value otherwise. + +### GetErrorDebugOk + +`func (o *VerifiableCredentialPrimingResponse) GetErrorDebugOk() (*string, bool)` + +GetErrorDebugOk returns a tuple with the ErrorDebug field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetErrorDebug + +`func (o *VerifiableCredentialPrimingResponse) SetErrorDebug(v string)` + +SetErrorDebug sets ErrorDebug field to given value. + +### HasErrorDebug + +`func (o *VerifiableCredentialPrimingResponse) HasErrorDebug() bool` + +HasErrorDebug returns a boolean if a field has been set. + +### GetErrorDescription + +`func (o *VerifiableCredentialPrimingResponse) GetErrorDescription() string` + +GetErrorDescription returns the ErrorDescription field if non-nil, zero value otherwise. + +### GetErrorDescriptionOk + +`func (o *VerifiableCredentialPrimingResponse) GetErrorDescriptionOk() (*string, bool)` + +GetErrorDescriptionOk returns a tuple with the ErrorDescription field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetErrorDescription + +`func (o *VerifiableCredentialPrimingResponse) SetErrorDescription(v string)` + +SetErrorDescription sets ErrorDescription field to given value. + +### HasErrorDescription + +`func (o *VerifiableCredentialPrimingResponse) HasErrorDescription() bool` + +HasErrorDescription returns a boolean if a field has been set. 
+ +### GetErrorHint + +`func (o *VerifiableCredentialPrimingResponse) GetErrorHint() string` + +GetErrorHint returns the ErrorHint field if non-nil, zero value otherwise. + +### GetErrorHintOk + +`func (o *VerifiableCredentialPrimingResponse) GetErrorHintOk() (*string, bool)` + +GetErrorHintOk returns a tuple with the ErrorHint field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetErrorHint + +`func (o *VerifiableCredentialPrimingResponse) SetErrorHint(v string)` + +SetErrorHint sets ErrorHint field to given value. + +### HasErrorHint + +`func (o *VerifiableCredentialPrimingResponse) HasErrorHint() bool` + +HasErrorHint returns a boolean if a field has been set. + +### GetFormat + +`func (o *VerifiableCredentialPrimingResponse) GetFormat() string` + +GetFormat returns the Format field if non-nil, zero value otherwise. + +### GetFormatOk + +`func (o *VerifiableCredentialPrimingResponse) GetFormatOk() (*string, bool)` + +GetFormatOk returns a tuple with the Format field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFormat + +`func (o *VerifiableCredentialPrimingResponse) SetFormat(v string)` + +SetFormat sets Format field to given value. + +### HasFormat + +`func (o *VerifiableCredentialPrimingResponse) HasFormat() bool` + +HasFormat returns a boolean if a field has been set. + +### GetStatusCode + +`func (o *VerifiableCredentialPrimingResponse) GetStatusCode() int64` + +GetStatusCode returns the StatusCode field if non-nil, zero value otherwise. + +### GetStatusCodeOk + +`func (o *VerifiableCredentialPrimingResponse) GetStatusCodeOk() (*int64, bool)` + +GetStatusCodeOk returns a tuple with the StatusCode field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetStatusCode + +`func (o *VerifiableCredentialPrimingResponse) SetStatusCode(v int64)` + +SetStatusCode sets StatusCode field to given value. + +### HasStatusCode + +`func (o *VerifiableCredentialPrimingResponse) HasStatusCode() bool` + +HasStatusCode returns a boolean if a field has been set. 
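A sketch of how a caller might inspect a `VerifiableCredentialPrimingResponse` using the accessors above. The values, including the format string, are placeholders; a real response would come from the credentials endpoint rather than being built locally:

```go
package main

import (
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Hypothetical priming response for illustration only.
	resp := openapiclient.NewVerifiableCredentialPrimingResponse()
	resp.SetFormat("jwt_vc_json") // placeholder format string
	resp.SetCNonce("nonce-value")
	resp.SetCNonceExpiresIn(3600)

	// Error fields and the c_nonce are both optional, so check before use.
	if resp.HasError() {
		fmt.Println("priming failed:", resp.GetError())
		return
	}
	if resp.HasCNonce() {
		fmt.Printf("use c_nonce %q for the next %d seconds\n", resp.GetCNonce(), resp.GetCNonceExpiresIn())
	}
}
```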
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/VerifiableCredentialProof.md b/internal/httpclient/docs/VerifiableCredentialProof.md new file mode 100644 index 00000000000..9412036cbb2 --- /dev/null +++ b/internal/httpclient/docs/VerifiableCredentialProof.md @@ -0,0 +1,82 @@ +# VerifiableCredentialProof + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Jwt** | Pointer to **string** | | [optional] +**ProofType** | Pointer to **string** | | [optional] + +## Methods + +### NewVerifiableCredentialProof + +`func NewVerifiableCredentialProof() *VerifiableCredentialProof` + +NewVerifiableCredentialProof instantiates a new VerifiableCredentialProof object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewVerifiableCredentialProofWithDefaults + +`func NewVerifiableCredentialProofWithDefaults() *VerifiableCredentialProof` + +NewVerifiableCredentialProofWithDefaults instantiates a new VerifiableCredentialProof object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetJwt + +`func (o *VerifiableCredentialProof) GetJwt() string` + +GetJwt returns the Jwt field if non-nil, zero value otherwise. + +### GetJwtOk + +`func (o *VerifiableCredentialProof) GetJwtOk() (*string, bool)` + +GetJwtOk returns a tuple with the Jwt field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetJwt + +`func (o *VerifiableCredentialProof) SetJwt(v string)` + +SetJwt sets Jwt field to given value. + +### HasJwt + +`func (o *VerifiableCredentialProof) HasJwt() bool` + +HasJwt returns a boolean if a field has been set. + +### GetProofType + +`func (o *VerifiableCredentialProof) GetProofType() string` + +GetProofType returns the ProofType field if non-nil, zero value otherwise. + +### GetProofTypeOk + +`func (o *VerifiableCredentialProof) GetProofTypeOk() (*string, bool)` + +GetProofTypeOk returns a tuple with the ProofType field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetProofType + +`func (o *VerifiableCredentialProof) SetProofType(v string)` + +SetProofType sets ProofType field to given value. + +### HasProofType + +`func (o *VerifiableCredentialProof) HasProofType() bool` + +HasProofType returns a boolean if a field has been set. 
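A sketch showing how a `VerifiableCredentialProof` could be assembled with the setters above and serialized through the generated `MarshalJSON`. The JWT is a placeholder and would normally be produced by the wallet's signing key:

```go
package main

import (
	"encoding/json"
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Hypothetical proof; the JWT below is not a valid signature.
	proof := openapiclient.NewVerifiableCredentialProof()
	proof.SetProofType("jwt")
	proof.SetJwt("eyJhbGciOiJFUzI1NiJ9.e30.signature-placeholder")

	// The generated models only serialize fields that have been set.
	out, err := json.Marshal(proof)
	if err != nil {
		fmt.Println("marshal failed:", err)
		return
	}
	fmt.Println(string(out))
}
```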
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/VerifiableCredentialResponse.md b/internal/httpclient/docs/VerifiableCredentialResponse.md new file mode 100644 index 00000000000..aa594541c40 --- /dev/null +++ b/internal/httpclient/docs/VerifiableCredentialResponse.md @@ -0,0 +1,82 @@ +# VerifiableCredentialResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**CredentialDraft00** | Pointer to **string** | | [optional] +**Format** | Pointer to **string** | | [optional] + +## Methods + +### NewVerifiableCredentialResponse + +`func NewVerifiableCredentialResponse() *VerifiableCredentialResponse` + +NewVerifiableCredentialResponse instantiates a new VerifiableCredentialResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewVerifiableCredentialResponseWithDefaults + +`func NewVerifiableCredentialResponseWithDefaults() *VerifiableCredentialResponse` + +NewVerifiableCredentialResponseWithDefaults instantiates a new VerifiableCredentialResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetCredentialDraft00 + +`func (o *VerifiableCredentialResponse) GetCredentialDraft00() string` + +GetCredentialDraft00 returns the CredentialDraft00 field if non-nil, zero value otherwise. + +### GetCredentialDraft00Ok + +`func (o *VerifiableCredentialResponse) GetCredentialDraft00Ok() (*string, bool)` + +GetCredentialDraft00Ok returns a tuple with the CredentialDraft00 field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCredentialDraft00 + +`func (o *VerifiableCredentialResponse) SetCredentialDraft00(v string)` + +SetCredentialDraft00 sets CredentialDraft00 field to given value. + +### HasCredentialDraft00 + +`func (o *VerifiableCredentialResponse) HasCredentialDraft00() bool` + +HasCredentialDraft00 returns a boolean if a field has been set. + +### GetFormat + +`func (o *VerifiableCredentialResponse) GetFormat() string` + +GetFormat returns the Format field if non-nil, zero value otherwise. + +### GetFormatOk + +`func (o *VerifiableCredentialResponse) GetFormatOk() (*string, bool)` + +GetFormatOk returns a tuple with the Format field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFormat + +`func (o *VerifiableCredentialResponse) SetFormat(v string)` + +SetFormat sets Format field to given value. + +### HasFormat + +`func (o *VerifiableCredentialResponse) HasFormat() bool` + +HasFormat returns a boolean if a field has been set. 
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/VerifyUserCodeRequest.md b/internal/httpclient/docs/VerifyUserCodeRequest.md new file mode 100644 index 00000000000..746a7bbb942 --- /dev/null +++ b/internal/httpclient/docs/VerifyUserCodeRequest.md @@ -0,0 +1,160 @@ +# VerifyUserCodeRequest + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Client** | Pointer to [**OAuth2Client**](OAuth2Client.md) | | [optional] +**DeviceCodeRequestId** | Pointer to **string** | | [optional] +**RequestUrl** | Pointer to **string** | RequestURL is the original Device Authorization URL requested. | [optional] +**RequestedAccessTokenAudience** | Pointer to **[]string** | | [optional] +**RequestedScope** | Pointer to **[]string** | | [optional] + +## Methods + +### NewVerifyUserCodeRequest + +`func NewVerifyUserCodeRequest() *VerifyUserCodeRequest` + +NewVerifyUserCodeRequest instantiates a new VerifyUserCodeRequest object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewVerifyUserCodeRequestWithDefaults + +`func NewVerifyUserCodeRequestWithDefaults() *VerifyUserCodeRequest` + +NewVerifyUserCodeRequestWithDefaults instantiates a new VerifyUserCodeRequest object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetClient + +`func (o *VerifyUserCodeRequest) GetClient() OAuth2Client` + +GetClient returns the Client field if non-nil, zero value otherwise. + +### GetClientOk + +`func (o *VerifyUserCodeRequest) GetClientOk() (*OAuth2Client, bool)` + +GetClientOk returns a tuple with the Client field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetClient + +`func (o *VerifyUserCodeRequest) SetClient(v OAuth2Client)` + +SetClient sets Client field to given value. + +### HasClient + +`func (o *VerifyUserCodeRequest) HasClient() bool` + +HasClient returns a boolean if a field has been set. + +### GetDeviceCodeRequestId + +`func (o *VerifyUserCodeRequest) GetDeviceCodeRequestId() string` + +GetDeviceCodeRequestId returns the DeviceCodeRequestId field if non-nil, zero value otherwise. + +### GetDeviceCodeRequestIdOk + +`func (o *VerifyUserCodeRequest) GetDeviceCodeRequestIdOk() (*string, bool)` + +GetDeviceCodeRequestIdOk returns a tuple with the DeviceCodeRequestId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDeviceCodeRequestId + +`func (o *VerifyUserCodeRequest) SetDeviceCodeRequestId(v string)` + +SetDeviceCodeRequestId sets DeviceCodeRequestId field to given value. + +### HasDeviceCodeRequestId + +`func (o *VerifyUserCodeRequest) HasDeviceCodeRequestId() bool` + +HasDeviceCodeRequestId returns a boolean if a field has been set. + +### GetRequestUrl + +`func (o *VerifyUserCodeRequest) GetRequestUrl() string` + +GetRequestUrl returns the RequestUrl field if non-nil, zero value otherwise. 
+ +### GetRequestUrlOk + +`func (o *VerifyUserCodeRequest) GetRequestUrlOk() (*string, bool)` + +GetRequestUrlOk returns a tuple with the RequestUrl field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestUrl + +`func (o *VerifyUserCodeRequest) SetRequestUrl(v string)` + +SetRequestUrl sets RequestUrl field to given value. + +### HasRequestUrl + +`func (o *VerifyUserCodeRequest) HasRequestUrl() bool` + +HasRequestUrl returns a boolean if a field has been set. + +### GetRequestedAccessTokenAudience + +`func (o *VerifyUserCodeRequest) GetRequestedAccessTokenAudience() []string` + +GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field if non-nil, zero value otherwise. + +### GetRequestedAccessTokenAudienceOk + +`func (o *VerifyUserCodeRequest) GetRequestedAccessTokenAudienceOk() (*[]string, bool)` + +GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestedAccessTokenAudience + +`func (o *VerifyUserCodeRequest) SetRequestedAccessTokenAudience(v []string)` + +SetRequestedAccessTokenAudience sets RequestedAccessTokenAudience field to given value. + +### HasRequestedAccessTokenAudience + +`func (o *VerifyUserCodeRequest) HasRequestedAccessTokenAudience() bool` + +HasRequestedAccessTokenAudience returns a boolean if a field has been set. + +### GetRequestedScope + +`func (o *VerifyUserCodeRequest) GetRequestedScope() []string` + +GetRequestedScope returns the RequestedScope field if non-nil, zero value otherwise. + +### GetRequestedScopeOk + +`func (o *VerifyUserCodeRequest) GetRequestedScopeOk() (*[]string, bool)` + +GetRequestedScopeOk returns a tuple with the RequestedScope field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRequestedScope + +`func (o *VerifyUserCodeRequest) SetRequestedScope(v []string)` + +SetRequestedScope sets RequestedScope field to given value. + +### HasRequestedScope + +`func (o *VerifyUserCodeRequest) HasRequestedScope() bool` + +HasRequestedScope returns a boolean if a field has been set. 
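A sketch assembling a `VerifyUserCodeRequest` with the setters documented above; the client is an empty `OAuth2Client` and all identifiers, URLs, and scopes are placeholders:

```go
package main

import (
	"fmt"

	openapiclient "github.com/ory/hydra-client-go/v2"
)

func main() {
	// Hypothetical device-flow verification payload for illustration only.
	req := openapiclient.NewVerifyUserCodeRequest()
	req.SetClient(*openapiclient.NewOAuth2Client())
	req.SetDeviceCodeRequestId("device-request-id-placeholder")
	req.SetRequestUrl("https://hydra.example.org/oauth2/device/verify")
	req.SetRequestedScope([]string{"openid", "offline_access"})

	if req.HasRequestedScope() {
		fmt.Println("requested scope:", req.GetRequestedScope())
	}
}
```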
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/internal/httpclient/docs/WellknownApi.md b/internal/httpclient/docs/WellknownAPI.md similarity index 58% rename from internal/httpclient/docs/WellknownApi.md rename to internal/httpclient/docs/WellknownAPI.md index 5f184777044..f61bbbc5186 100644 --- a/internal/httpclient/docs/WellknownApi.md +++ b/internal/httpclient/docs/WellknownAPI.md @@ -1,10 +1,10 @@ -# \WellknownApi +# \WellknownAPI All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- -[**DiscoverJsonWebKeys**](WellknownApi.md#DiscoverJsonWebKeys) | **Get** /.well-known/jwks.json | Discover Well-Known JSON Web Keys +[**DiscoverJsonWebKeys**](WellknownAPI.md#DiscoverJsonWebKeys) | **Get** /.well-known/jwks.json | Discover Well-Known JSON Web Keys @@ -22,23 +22,23 @@ Discover Well-Known JSON Web Keys package main import ( - "context" - "fmt" - "os" - openapiclient "./openapi" + "context" + "fmt" + "os" + openapiclient "github.com/ory/hydra-client-go/v2" ) func main() { - configuration := openapiclient.NewConfiguration() - apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.WellknownApi.DiscoverJsonWebKeys(context.Background()).Execute() - if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `WellknownApi.DiscoverJsonWebKeys``: %v\n", err) - fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) - } - // response from `DiscoverJsonWebKeys`: JsonWebKeySet - fmt.Fprintf(os.Stdout, "Response from `WellknownApi.DiscoverJsonWebKeys`: %v\n", resp) + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.WellknownAPI.DiscoverJsonWebKeys(context.Background()).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `WellknownAPI.DiscoverJsonWebKeys``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `DiscoverJsonWebKeys`: JsonWebKeySet + fmt.Fprintf(os.Stdout, "Response from `WellknownAPI.DiscoverJsonWebKeys`: %v\n", resp) } ``` diff --git a/internal/httpclient/git_push.sh b/internal/httpclient/git_push.sh index cb3fc304a3a..c25540340a5 100644 --- a/internal/httpclient/git_push.sh +++ b/internal/httpclient/git_push.sh @@ -19,7 +19,7 @@ if [ "$git_user_id" = "" ]; then fi if [ "$git_repo_id" = "" ]; then - git_repo_id="hydra-client-go" + git_repo_id="hydra-client-go/v2" echo "[INFO] No command line input provided. 
Set \$git_repo_id to $git_repo_id" fi diff --git a/internal/httpclient/go.mod b/internal/httpclient/go.mod index b4d363e00c8..52600d4a747 100644 --- a/internal/httpclient/go.mod +++ b/internal/httpclient/go.mod @@ -1,7 +1,5 @@ -module github.com/ory/hydra-client-go +module github.com/ory/hydra-client-go/v2 -go 1.13 +go 1.24.0 -require ( - golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558 -) +require golang.org/x/oauth2 v0.34.0 diff --git a/internal/httpclient/go.sum b/internal/httpclient/go.sum index 734252e6815..d93f6621519 100644 --- a/internal/httpclient/go.sum +++ b/internal/httpclient/go.sum @@ -1,13 +1,2 @@ -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e h1:bRhVy7zSSasaqNksaRZiA5EEI+Ei4I1nO5Jh72wfHlg= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= +golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= diff --git a/internal/httpclient/model_accept_device_user_code_request.go b/internal/httpclient/model_accept_device_user_code_request.go new file mode 100644 index 00000000000..c34d1cd5045 --- /dev/null +++ b/internal/httpclient/model_accept_device_user_code_request.go @@ -0,0 +1,125 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package openapi + +import ( + "encoding/json" +) + +// checks if the AcceptDeviceUserCodeRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &AcceptDeviceUserCodeRequest{} + +// AcceptDeviceUserCodeRequest Contains information on an device verification +type AcceptDeviceUserCodeRequest struct { + UserCode *string `json:"user_code,omitempty"` +} + +// NewAcceptDeviceUserCodeRequest instantiates a new AcceptDeviceUserCodeRequest object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewAcceptDeviceUserCodeRequest() *AcceptDeviceUserCodeRequest { + this := AcceptDeviceUserCodeRequest{} + return &this +} + +// NewAcceptDeviceUserCodeRequestWithDefaults instantiates a new AcceptDeviceUserCodeRequest object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewAcceptDeviceUserCodeRequestWithDefaults() *AcceptDeviceUserCodeRequest { + this := AcceptDeviceUserCodeRequest{} + return &this +} + +// GetUserCode returns the UserCode field value if set, zero value otherwise. +func (o *AcceptDeviceUserCodeRequest) GetUserCode() string { + if o == nil || IsNil(o.UserCode) { + var ret string + return ret + } + return *o.UserCode +} + +// GetUserCodeOk returns a tuple with the UserCode field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *AcceptDeviceUserCodeRequest) GetUserCodeOk() (*string, bool) { + if o == nil || IsNil(o.UserCode) { + return nil, false + } + return o.UserCode, true +} + +// HasUserCode returns a boolean if a field has been set. +func (o *AcceptDeviceUserCodeRequest) HasUserCode() bool { + if o != nil && !IsNil(o.UserCode) { + return true + } + + return false +} + +// SetUserCode gets a reference to the given string and assigns it to the UserCode field. 
+func (o *AcceptDeviceUserCodeRequest) SetUserCode(v string) { + o.UserCode = &v +} + +func (o AcceptDeviceUserCodeRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o AcceptDeviceUserCodeRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.UserCode) { + toSerialize["user_code"] = o.UserCode + } + return toSerialize, nil +} + +type NullableAcceptDeviceUserCodeRequest struct { + value *AcceptDeviceUserCodeRequest + isSet bool +} + +func (v NullableAcceptDeviceUserCodeRequest) Get() *AcceptDeviceUserCodeRequest { + return v.value +} + +func (v *NullableAcceptDeviceUserCodeRequest) Set(val *AcceptDeviceUserCodeRequest) { + v.value = val + v.isSet = true +} + +func (v NullableAcceptDeviceUserCodeRequest) IsSet() bool { + return v.isSet +} + +func (v *NullableAcceptDeviceUserCodeRequest) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableAcceptDeviceUserCodeRequest(val *AcceptDeviceUserCodeRequest) *NullableAcceptDeviceUserCodeRequest { + return &NullableAcceptDeviceUserCodeRequest{value: val, isSet: true} +} + +func (v NullableAcceptDeviceUserCodeRequest) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableAcceptDeviceUserCodeRequest) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_accept_o_auth2_consent_request.go b/internal/httpclient/model_accept_o_auth2_consent_request.go index 11d6459acfd..d3e68cfc807 100644 --- a/internal/httpclient/model_accept_o_auth2_consent_request.go +++ b/internal/httpclient/model_accept_o_auth2_consent_request.go @@ -13,14 +13,16 @@ package openapi import ( "encoding/json" - "time" ) +// checks if the AcceptOAuth2ConsentRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &AcceptOAuth2ConsentRequest{} + // AcceptOAuth2ConsentRequest struct for AcceptOAuth2ConsentRequest type AcceptOAuth2ConsentRequest struct { - GrantAccessTokenAudience []string `json:"grant_access_token_audience,omitempty"` - GrantScope []string `json:"grant_scope,omitempty"` - HandledAt *time.Time `json:"handled_at,omitempty"` + Context interface{} `json:"context,omitempty"` + GrantAccessTokenAudience []string `json:"grant_access_token_audience,omitempty"` + GrantScope []string `json:"grant_scope,omitempty"` // Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same client asks the same user for the same, or a subset of, scope. Remember *bool `json:"remember,omitempty"` // RememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the authorization will be remembered indefinitely. @@ -45,9 +47,42 @@ func NewAcceptOAuth2ConsentRequestWithDefaults() *AcceptOAuth2ConsentRequest { return &this } +// GetContext returns the Context field value if set, zero value otherwise (both if not set or set to explicit null). +func (o *AcceptOAuth2ConsentRequest) GetContext() interface{} { + if o == nil { + var ret interface{} + return ret + } + return o.Context +} + +// GetContextOk returns a tuple with the Context field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+// NOTE: If the value is an explicit nil, `nil, true` will be returned +func (o *AcceptOAuth2ConsentRequest) GetContextOk() (*interface{}, bool) { + if o == nil || IsNil(o.Context) { + return nil, false + } + return &o.Context, true +} + +// HasContext returns a boolean if a field has been set. +func (o *AcceptOAuth2ConsentRequest) HasContext() bool { + if o != nil && !IsNil(o.Context) { + return true + } + + return false +} + +// SetContext gets a reference to the given interface{} and assigns it to the Context field. +func (o *AcceptOAuth2ConsentRequest) SetContext(v interface{}) { + o.Context = v +} + // GetGrantAccessTokenAudience returns the GrantAccessTokenAudience field value if set, zero value otherwise. func (o *AcceptOAuth2ConsentRequest) GetGrantAccessTokenAudience() []string { - if o == nil || o.GrantAccessTokenAudience == nil { + if o == nil || IsNil(o.GrantAccessTokenAudience) { var ret []string return ret } @@ -57,7 +92,7 @@ func (o *AcceptOAuth2ConsentRequest) GetGrantAccessTokenAudience() []string { // GetGrantAccessTokenAudienceOk returns a tuple with the GrantAccessTokenAudience field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2ConsentRequest) GetGrantAccessTokenAudienceOk() ([]string, bool) { - if o == nil || o.GrantAccessTokenAudience == nil { + if o == nil || IsNil(o.GrantAccessTokenAudience) { return nil, false } return o.GrantAccessTokenAudience, true @@ -65,7 +100,7 @@ func (o *AcceptOAuth2ConsentRequest) GetGrantAccessTokenAudienceOk() ([]string, // HasGrantAccessTokenAudience returns a boolean if a field has been set. func (o *AcceptOAuth2ConsentRequest) HasGrantAccessTokenAudience() bool { - if o != nil && o.GrantAccessTokenAudience != nil { + if o != nil && !IsNil(o.GrantAccessTokenAudience) { return true } @@ -79,7 +114,7 @@ func (o *AcceptOAuth2ConsentRequest) SetGrantAccessTokenAudience(v []string) { // GetGrantScope returns the GrantScope field value if set, zero value otherwise. func (o *AcceptOAuth2ConsentRequest) GetGrantScope() []string { - if o == nil || o.GrantScope == nil { + if o == nil || IsNil(o.GrantScope) { var ret []string return ret } @@ -89,7 +124,7 @@ func (o *AcceptOAuth2ConsentRequest) GetGrantScope() []string { // GetGrantScopeOk returns a tuple with the GrantScope field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2ConsentRequest) GetGrantScopeOk() ([]string, bool) { - if o == nil || o.GrantScope == nil { + if o == nil || IsNil(o.GrantScope) { return nil, false } return o.GrantScope, true @@ -97,7 +132,7 @@ func (o *AcceptOAuth2ConsentRequest) GetGrantScopeOk() ([]string, bool) { // HasGrantScope returns a boolean if a field has been set. func (o *AcceptOAuth2ConsentRequest) HasGrantScope() bool { - if o != nil && o.GrantScope != nil { + if o != nil && !IsNil(o.GrantScope) { return true } @@ -109,41 +144,9 @@ func (o *AcceptOAuth2ConsentRequest) SetGrantScope(v []string) { o.GrantScope = v } -// GetHandledAt returns the HandledAt field value if set, zero value otherwise. -func (o *AcceptOAuth2ConsentRequest) GetHandledAt() time.Time { - if o == nil || o.HandledAt == nil { - var ret time.Time - return ret - } - return *o.HandledAt -} - -// GetHandledAtOk returns a tuple with the HandledAt field value if set, nil otherwise -// and a boolean to check if the value has been set. 
-func (o *AcceptOAuth2ConsentRequest) GetHandledAtOk() (*time.Time, bool) { - if o == nil || o.HandledAt == nil { - return nil, false - } - return o.HandledAt, true -} - -// HasHandledAt returns a boolean if a field has been set. -func (o *AcceptOAuth2ConsentRequest) HasHandledAt() bool { - if o != nil && o.HandledAt != nil { - return true - } - - return false -} - -// SetHandledAt gets a reference to the given time.Time and assigns it to the HandledAt field. -func (o *AcceptOAuth2ConsentRequest) SetHandledAt(v time.Time) { - o.HandledAt = &v -} - // GetRemember returns the Remember field value if set, zero value otherwise. func (o *AcceptOAuth2ConsentRequest) GetRemember() bool { - if o == nil || o.Remember == nil { + if o == nil || IsNil(o.Remember) { var ret bool return ret } @@ -153,7 +156,7 @@ func (o *AcceptOAuth2ConsentRequest) GetRemember() bool { // GetRememberOk returns a tuple with the Remember field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2ConsentRequest) GetRememberOk() (*bool, bool) { - if o == nil || o.Remember == nil { + if o == nil || IsNil(o.Remember) { return nil, false } return o.Remember, true @@ -161,7 +164,7 @@ func (o *AcceptOAuth2ConsentRequest) GetRememberOk() (*bool, bool) { // HasRemember returns a boolean if a field has been set. func (o *AcceptOAuth2ConsentRequest) HasRemember() bool { - if o != nil && o.Remember != nil { + if o != nil && !IsNil(o.Remember) { return true } @@ -175,7 +178,7 @@ func (o *AcceptOAuth2ConsentRequest) SetRemember(v bool) { // GetRememberFor returns the RememberFor field value if set, zero value otherwise. func (o *AcceptOAuth2ConsentRequest) GetRememberFor() int64 { - if o == nil || o.RememberFor == nil { + if o == nil || IsNil(o.RememberFor) { var ret int64 return ret } @@ -185,7 +188,7 @@ func (o *AcceptOAuth2ConsentRequest) GetRememberFor() int64 { // GetRememberForOk returns a tuple with the RememberFor field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2ConsentRequest) GetRememberForOk() (*int64, bool) { - if o == nil || o.RememberFor == nil { + if o == nil || IsNil(o.RememberFor) { return nil, false } return o.RememberFor, true @@ -193,7 +196,7 @@ func (o *AcceptOAuth2ConsentRequest) GetRememberForOk() (*int64, bool) { // HasRememberFor returns a boolean if a field has been set. func (o *AcceptOAuth2ConsentRequest) HasRememberFor() bool { - if o != nil && o.RememberFor != nil { + if o != nil && !IsNil(o.RememberFor) { return true } @@ -207,7 +210,7 @@ func (o *AcceptOAuth2ConsentRequest) SetRememberFor(v int64) { // GetSession returns the Session field value if set, zero value otherwise. func (o *AcceptOAuth2ConsentRequest) GetSession() AcceptOAuth2ConsentRequestSession { - if o == nil || o.Session == nil { + if o == nil || IsNil(o.Session) { var ret AcceptOAuth2ConsentRequestSession return ret } @@ -217,7 +220,7 @@ func (o *AcceptOAuth2ConsentRequest) GetSession() AcceptOAuth2ConsentRequestSess // GetSessionOk returns a tuple with the Session field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2ConsentRequest) GetSessionOk() (*AcceptOAuth2ConsentRequestSession, bool) { - if o == nil || o.Session == nil { + if o == nil || IsNil(o.Session) { return nil, false } return o.Session, true @@ -225,7 +228,7 @@ func (o *AcceptOAuth2ConsentRequest) GetSessionOk() (*AcceptOAuth2ConsentRequest // HasSession returns a boolean if a field has been set. 
func (o *AcceptOAuth2ConsentRequest) HasSession() bool { - if o != nil && o.Session != nil { + if o != nil && !IsNil(o.Session) { return true } @@ -238,26 +241,34 @@ func (o *AcceptOAuth2ConsentRequest) SetSession(v AcceptOAuth2ConsentRequestSess } func (o AcceptOAuth2ConsentRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o AcceptOAuth2ConsentRequest) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.GrantAccessTokenAudience != nil { + if o.Context != nil { + toSerialize["context"] = o.Context + } + if !IsNil(o.GrantAccessTokenAudience) { toSerialize["grant_access_token_audience"] = o.GrantAccessTokenAudience } - if o.GrantScope != nil { + if !IsNil(o.GrantScope) { toSerialize["grant_scope"] = o.GrantScope } - if o.HandledAt != nil { - toSerialize["handled_at"] = o.HandledAt - } - if o.Remember != nil { + if !IsNil(o.Remember) { toSerialize["remember"] = o.Remember } - if o.RememberFor != nil { + if !IsNil(o.RememberFor) { toSerialize["remember_for"] = o.RememberFor } - if o.Session != nil { + if !IsNil(o.Session) { toSerialize["session"] = o.Session } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableAcceptOAuth2ConsentRequest struct { diff --git a/internal/httpclient/model_accept_o_auth2_consent_request_session.go b/internal/httpclient/model_accept_o_auth2_consent_request_session.go index 33f78991a43..19300cebd09 100644 --- a/internal/httpclient/model_accept_o_auth2_consent_request_session.go +++ b/internal/httpclient/model_accept_o_auth2_consent_request_session.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the AcceptOAuth2ConsentRequestSession type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &AcceptOAuth2ConsentRequestSession{} + // AcceptOAuth2ConsentRequestSession struct for AcceptOAuth2ConsentRequestSession type AcceptOAuth2ConsentRequestSession struct { // AccessToken sets session data for the access and refresh token, as well as any future tokens issued by the refresh grant. Keep in mind that this data will be available to anyone performing OAuth 2.0 Challenge Introspection. If only your services can perform OAuth 2.0 Challenge Introspection, this is usually fine. But if third parties can access that endpoint as well, sensitive data from the session might be exposed to them. Use with care! @@ -53,7 +56,7 @@ func (o *AcceptOAuth2ConsentRequestSession) GetAccessToken() interface{} { // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *AcceptOAuth2ConsentRequestSession) GetAccessTokenOk() (*interface{}, bool) { - if o == nil || o.AccessToken == nil { + if o == nil || IsNil(o.AccessToken) { return nil, false } return &o.AccessToken, true @@ -61,7 +64,7 @@ func (o *AcceptOAuth2ConsentRequestSession) GetAccessTokenOk() (*interface{}, bo // HasAccessToken returns a boolean if a field has been set. func (o *AcceptOAuth2ConsentRequestSession) HasAccessToken() bool { - if o != nil && o.AccessToken != nil { + if o != nil && !IsNil(o.AccessToken) { return true } @@ -86,7 +89,7 @@ func (o *AcceptOAuth2ConsentRequestSession) GetIdToken() interface{} { // and a boolean to check if the value has been set. 
// NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *AcceptOAuth2ConsentRequestSession) GetIdTokenOk() (*interface{}, bool) { - if o == nil || o.IdToken == nil { + if o == nil || IsNil(o.IdToken) { return nil, false } return &o.IdToken, true @@ -94,7 +97,7 @@ func (o *AcceptOAuth2ConsentRequestSession) GetIdTokenOk() (*interface{}, bool) // HasIdToken returns a boolean if a field has been set. func (o *AcceptOAuth2ConsentRequestSession) HasIdToken() bool { - if o != nil && o.IdToken != nil { + if o != nil && !IsNil(o.IdToken) { return true } @@ -107,6 +110,14 @@ func (o *AcceptOAuth2ConsentRequestSession) SetIdToken(v interface{}) { } func (o AcceptOAuth2ConsentRequestSession) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o AcceptOAuth2ConsentRequestSession) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} if o.AccessToken != nil { toSerialize["access_token"] = o.AccessToken @@ -114,7 +125,7 @@ func (o AcceptOAuth2ConsentRequestSession) MarshalJSON() ([]byte, error) { if o.IdToken != nil { toSerialize["id_token"] = o.IdToken } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableAcceptOAuth2ConsentRequestSession struct { diff --git a/internal/httpclient/model_accept_o_auth2_login_request.go b/internal/httpclient/model_accept_o_auth2_login_request.go index 5ae383ad040..373c44f5b6a 100644 --- a/internal/httpclient/model_accept_o_auth2_login_request.go +++ b/internal/httpclient/model_accept_o_auth2_login_request.go @@ -12,18 +12,27 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the AcceptOAuth2LoginRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &AcceptOAuth2LoginRequest{} + // AcceptOAuth2LoginRequest struct for AcceptOAuth2LoginRequest type AcceptOAuth2LoginRequest struct { - // ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two factor authentication. + // ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two-factor authentication. Acr *string `json:"acr,omitempty"` Amr []string `json:"amr,omitempty"` Context interface{} `json:"context,omitempty"` + // Extend OAuth2 authentication session lifespan If set to `true`, the OAuth2 authentication cookie lifespan is extended. This is for example useful if you want the user to be able to use `prompt=none` continuously. This value can only be set to `true` if the user has an authentication, which is the case if the `skip` value is `true`. + ExtendSessionLifespan *bool `json:"extend_session_lifespan,omitempty"` // ForceSubjectIdentifier forces the \"pairwise\" user ID of the end-user that authenticated. The \"pairwise\" user ID refers to the (Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID Connect specification. It allows you to set an obfuscated subject (\"user\") identifier that is unique to the client. Please note that this changes the user ID on endpoint /userinfo and sub claim of the ID Token. It does not change the sub claim in the OAuth 2.0 Introspection. Per default, ORY Hydra handles this value with its own algorithm. 
In case you want to set this yourself you can use this field. Please note that setting this field has no effect if `pairwise` is not configured in ORY Hydra or the OAuth 2.0 Client does not expect a pairwise identifier (set via `subject_type` key in the client's configuration). Please also be aware that ORY Hydra is unable to properly compute this value during authentication. This implies that you have to compute this value on every authentication process (probably depending on the client ID or some other unique value). If you fail to compute the proper value, then authentication processes which have id_token_hint set might fail. ForceSubjectIdentifier *string `json:"force_subject_identifier,omitempty"` - // Remember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she will not be asked to log in again. + // IdentityProviderSessionID is the session ID of the end-user that authenticated. If specified, we will use this value to propagate the logout. + IdentityProviderSessionId *string `json:"identity_provider_session_id,omitempty"` + // Remember, if set to true, tells Ory Hydra to remember this user by telling the user agent (browser) to store a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, they will not be asked to log in again. Remember *bool `json:"remember,omitempty"` // RememberFor sets how long the authentication should be remembered for in seconds. If set to `0`, the authorization will be remembered for the duration of the browser session (using a session cookie). RememberFor *int64 `json:"remember_for,omitempty"` @@ -31,6 +40,8 @@ type AcceptOAuth2LoginRequest struct { Subject string `json:"subject"` } +type _AcceptOAuth2LoginRequest AcceptOAuth2LoginRequest + // NewAcceptOAuth2LoginRequest instantiates a new AcceptOAuth2LoginRequest object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -51,7 +62,7 @@ func NewAcceptOAuth2LoginRequestWithDefaults() *AcceptOAuth2LoginRequest { // GetAcr returns the Acr field value if set, zero value otherwise. func (o *AcceptOAuth2LoginRequest) GetAcr() string { - if o == nil || o.Acr == nil { + if o == nil || IsNil(o.Acr) { var ret string return ret } @@ -61,7 +72,7 @@ func (o *AcceptOAuth2LoginRequest) GetAcr() string { // GetAcrOk returns a tuple with the Acr field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2LoginRequest) GetAcrOk() (*string, bool) { - if o == nil || o.Acr == nil { + if o == nil || IsNil(o.Acr) { return nil, false } return o.Acr, true @@ -69,7 +80,7 @@ func (o *AcceptOAuth2LoginRequest) GetAcrOk() (*string, bool) { // HasAcr returns a boolean if a field has been set. func (o *AcceptOAuth2LoginRequest) HasAcr() bool { - if o != nil && o.Acr != nil { + if o != nil && !IsNil(o.Acr) { return true } @@ -83,7 +94,7 @@ func (o *AcceptOAuth2LoginRequest) SetAcr(v string) { // GetAmr returns the Amr field value if set, zero value otherwise. 
func (o *AcceptOAuth2LoginRequest) GetAmr() []string { - if o == nil || o.Amr == nil { + if o == nil || IsNil(o.Amr) { var ret []string return ret } @@ -93,7 +104,7 @@ func (o *AcceptOAuth2LoginRequest) GetAmr() []string { // GetAmrOk returns a tuple with the Amr field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2LoginRequest) GetAmrOk() ([]string, bool) { - if o == nil || o.Amr == nil { + if o == nil || IsNil(o.Amr) { return nil, false } return o.Amr, true @@ -101,7 +112,7 @@ func (o *AcceptOAuth2LoginRequest) GetAmrOk() ([]string, bool) { // HasAmr returns a boolean if a field has been set. func (o *AcceptOAuth2LoginRequest) HasAmr() bool { - if o != nil && o.Amr != nil { + if o != nil && !IsNil(o.Amr) { return true } @@ -126,7 +137,7 @@ func (o *AcceptOAuth2LoginRequest) GetContext() interface{} { // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *AcceptOAuth2LoginRequest) GetContextOk() (*interface{}, bool) { - if o == nil || o.Context == nil { + if o == nil || IsNil(o.Context) { return nil, false } return &o.Context, true @@ -134,7 +145,7 @@ func (o *AcceptOAuth2LoginRequest) GetContextOk() (*interface{}, bool) { // HasContext returns a boolean if a field has been set. func (o *AcceptOAuth2LoginRequest) HasContext() bool { - if o != nil && o.Context != nil { + if o != nil && !IsNil(o.Context) { return true } @@ -146,9 +157,41 @@ func (o *AcceptOAuth2LoginRequest) SetContext(v interface{}) { o.Context = v } +// GetExtendSessionLifespan returns the ExtendSessionLifespan field value if set, zero value otherwise. +func (o *AcceptOAuth2LoginRequest) GetExtendSessionLifespan() bool { + if o == nil || IsNil(o.ExtendSessionLifespan) { + var ret bool + return ret + } + return *o.ExtendSessionLifespan +} + +// GetExtendSessionLifespanOk returns a tuple with the ExtendSessionLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *AcceptOAuth2LoginRequest) GetExtendSessionLifespanOk() (*bool, bool) { + if o == nil || IsNil(o.ExtendSessionLifespan) { + return nil, false + } + return o.ExtendSessionLifespan, true +} + +// HasExtendSessionLifespan returns a boolean if a field has been set. +func (o *AcceptOAuth2LoginRequest) HasExtendSessionLifespan() bool { + if o != nil && !IsNil(o.ExtendSessionLifespan) { + return true + } + + return false +} + +// SetExtendSessionLifespan gets a reference to the given bool and assigns it to the ExtendSessionLifespan field. +func (o *AcceptOAuth2LoginRequest) SetExtendSessionLifespan(v bool) { + o.ExtendSessionLifespan = &v +} + // GetForceSubjectIdentifier returns the ForceSubjectIdentifier field value if set, zero value otherwise. func (o *AcceptOAuth2LoginRequest) GetForceSubjectIdentifier() string { - if o == nil || o.ForceSubjectIdentifier == nil { + if o == nil || IsNil(o.ForceSubjectIdentifier) { var ret string return ret } @@ -158,7 +201,7 @@ func (o *AcceptOAuth2LoginRequest) GetForceSubjectIdentifier() string { // GetForceSubjectIdentifierOk returns a tuple with the ForceSubjectIdentifier field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *AcceptOAuth2LoginRequest) GetForceSubjectIdentifierOk() (*string, bool) { - if o == nil || o.ForceSubjectIdentifier == nil { + if o == nil || IsNil(o.ForceSubjectIdentifier) { return nil, false } return o.ForceSubjectIdentifier, true @@ -166,7 +209,7 @@ func (o *AcceptOAuth2LoginRequest) GetForceSubjectIdentifierOk() (*string, bool) // HasForceSubjectIdentifier returns a boolean if a field has been set. func (o *AcceptOAuth2LoginRequest) HasForceSubjectIdentifier() bool { - if o != nil && o.ForceSubjectIdentifier != nil { + if o != nil && !IsNil(o.ForceSubjectIdentifier) { return true } @@ -178,9 +221,41 @@ func (o *AcceptOAuth2LoginRequest) SetForceSubjectIdentifier(v string) { o.ForceSubjectIdentifier = &v } +// GetIdentityProviderSessionId returns the IdentityProviderSessionId field value if set, zero value otherwise. +func (o *AcceptOAuth2LoginRequest) GetIdentityProviderSessionId() string { + if o == nil || IsNil(o.IdentityProviderSessionId) { + var ret string + return ret + } + return *o.IdentityProviderSessionId +} + +// GetIdentityProviderSessionIdOk returns a tuple with the IdentityProviderSessionId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *AcceptOAuth2LoginRequest) GetIdentityProviderSessionIdOk() (*string, bool) { + if o == nil || IsNil(o.IdentityProviderSessionId) { + return nil, false + } + return o.IdentityProviderSessionId, true +} + +// HasIdentityProviderSessionId returns a boolean if a field has been set. +func (o *AcceptOAuth2LoginRequest) HasIdentityProviderSessionId() bool { + if o != nil && !IsNil(o.IdentityProviderSessionId) { + return true + } + + return false +} + +// SetIdentityProviderSessionId gets a reference to the given string and assigns it to the IdentityProviderSessionId field. +func (o *AcceptOAuth2LoginRequest) SetIdentityProviderSessionId(v string) { + o.IdentityProviderSessionId = &v +} + // GetRemember returns the Remember field value if set, zero value otherwise. func (o *AcceptOAuth2LoginRequest) GetRemember() bool { - if o == nil || o.Remember == nil { + if o == nil || IsNil(o.Remember) { var ret bool return ret } @@ -190,7 +265,7 @@ func (o *AcceptOAuth2LoginRequest) GetRemember() bool { // GetRememberOk returns a tuple with the Remember field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AcceptOAuth2LoginRequest) GetRememberOk() (*bool, bool) { - if o == nil || o.Remember == nil { + if o == nil || IsNil(o.Remember) { return nil, false } return o.Remember, true @@ -198,7 +273,7 @@ func (o *AcceptOAuth2LoginRequest) GetRememberOk() (*bool, bool) { // HasRemember returns a boolean if a field has been set. func (o *AcceptOAuth2LoginRequest) HasRemember() bool { - if o != nil && o.Remember != nil { + if o != nil && !IsNil(o.Remember) { return true } @@ -212,7 +287,7 @@ func (o *AcceptOAuth2LoginRequest) SetRemember(v bool) { // GetRememberFor returns the RememberFor field value if set, zero value otherwise. func (o *AcceptOAuth2LoginRequest) GetRememberFor() int64 { - if o == nil || o.RememberFor == nil { + if o == nil || IsNil(o.RememberFor) { var ret int64 return ret } @@ -222,7 +297,7 @@ func (o *AcceptOAuth2LoginRequest) GetRememberFor() int64 { // GetRememberForOk returns a tuple with the RememberFor field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *AcceptOAuth2LoginRequest) GetRememberForOk() (*int64, bool) { - if o == nil || o.RememberFor == nil { + if o == nil || IsNil(o.RememberFor) { return nil, false } return o.RememberFor, true @@ -230,7 +305,7 @@ func (o *AcceptOAuth2LoginRequest) GetRememberForOk() (*int64, bool) { // HasRememberFor returns a boolean if a field has been set. func (o *AcceptOAuth2LoginRequest) HasRememberFor() bool { - if o != nil && o.RememberFor != nil { + if o != nil && !IsNil(o.RememberFor) { return true } @@ -267,29 +342,78 @@ func (o *AcceptOAuth2LoginRequest) SetSubject(v string) { } func (o AcceptOAuth2LoginRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o AcceptOAuth2LoginRequest) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Acr != nil { + if !IsNil(o.Acr) { toSerialize["acr"] = o.Acr } - if o.Amr != nil { + if !IsNil(o.Amr) { toSerialize["amr"] = o.Amr } if o.Context != nil { toSerialize["context"] = o.Context } - if o.ForceSubjectIdentifier != nil { + if !IsNil(o.ExtendSessionLifespan) { + toSerialize["extend_session_lifespan"] = o.ExtendSessionLifespan + } + if !IsNil(o.ForceSubjectIdentifier) { toSerialize["force_subject_identifier"] = o.ForceSubjectIdentifier } - if o.Remember != nil { + if !IsNil(o.IdentityProviderSessionId) { + toSerialize["identity_provider_session_id"] = o.IdentityProviderSessionId + } + if !IsNil(o.Remember) { toSerialize["remember"] = o.Remember } - if o.RememberFor != nil { + if !IsNil(o.RememberFor) { toSerialize["remember_for"] = o.RememberFor } - if true { - toSerialize["subject"] = o.Subject + toSerialize["subject"] = o.Subject + return toSerialize, nil +} + +func (o *AcceptOAuth2LoginRequest) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. 
+ requiredProperties := []string{ + "subject", } - return json.Marshal(toSerialize) + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varAcceptOAuth2LoginRequest := _AcceptOAuth2LoginRequest{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varAcceptOAuth2LoginRequest) + + if err != nil { + return err + } + + *o = AcceptOAuth2LoginRequest(varAcceptOAuth2LoginRequest) + + return err } type NullableAcceptOAuth2LoginRequest struct { diff --git a/internal/httpclient/model_create_json_web_key_set.go b/internal/httpclient/model_create_json_web_key_set.go index 3c0f429b21b..4170214875e 100644 --- a/internal/httpclient/model_create_json_web_key_set.go +++ b/internal/httpclient/model_create_json_web_key_set.go @@ -12,9 +12,14 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the CreateJsonWebKeySet type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CreateJsonWebKeySet{} + // CreateJsonWebKeySet Create JSON Web Key Set Request Body type CreateJsonWebKeySet struct { // JSON Web Key Algorithm The algorithm to be used for creating the key. Supports `RS256`, `ES256`, `ES512`, `HS512`, and `HS256`. @@ -25,6 +30,8 @@ type CreateJsonWebKeySet struct { Use string `json:"use"` } +type _CreateJsonWebKeySet CreateJsonWebKeySet + // NewCreateJsonWebKeySet instantiates a new CreateJsonWebKeySet object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -118,17 +125,58 @@ func (o *CreateJsonWebKeySet) SetUse(v string) { } func (o CreateJsonWebKeySet) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o CreateJsonWebKeySet) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if true { - toSerialize["alg"] = o.Alg + toSerialize["alg"] = o.Alg + toSerialize["kid"] = o.Kid + toSerialize["use"] = o.Use + return toSerialize, nil +} + +func (o *CreateJsonWebKeySet) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. 
+ requiredProperties := []string{ + "alg", + "kid", + "use", } - if true { - toSerialize["kid"] = o.Kid + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err } - if true { - toSerialize["use"] = o.Use + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } } - return json.Marshal(toSerialize) + + varCreateJsonWebKeySet := _CreateJsonWebKeySet{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varCreateJsonWebKeySet) + + if err != nil { + return err + } + + *o = CreateJsonWebKeySet(varCreateJsonWebKeySet) + + return err } type NullableCreateJsonWebKeySet struct { diff --git a/internal/httpclient/model_create_verifiable_credential_request_body.go b/internal/httpclient/model_create_verifiable_credential_request_body.go new file mode 100644 index 00000000000..463e4ac6501 --- /dev/null +++ b/internal/httpclient/model_create_verifiable_credential_request_body.go @@ -0,0 +1,197 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the CreateVerifiableCredentialRequestBody type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CreateVerifiableCredentialRequestBody{} + +// CreateVerifiableCredentialRequestBody struct for CreateVerifiableCredentialRequestBody +type CreateVerifiableCredentialRequestBody struct { + Format *string `json:"format,omitempty"` + Proof *VerifiableCredentialProof `json:"proof,omitempty"` + Types []string `json:"types,omitempty"` +} + +// NewCreateVerifiableCredentialRequestBody instantiates a new CreateVerifiableCredentialRequestBody object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewCreateVerifiableCredentialRequestBody() *CreateVerifiableCredentialRequestBody { + this := CreateVerifiableCredentialRequestBody{} + return &this +} + +// NewCreateVerifiableCredentialRequestBodyWithDefaults instantiates a new CreateVerifiableCredentialRequestBody object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewCreateVerifiableCredentialRequestBodyWithDefaults() *CreateVerifiableCredentialRequestBody { + this := CreateVerifiableCredentialRequestBody{} + return &this +} + +// GetFormat returns the Format field value if set, zero value otherwise. +func (o *CreateVerifiableCredentialRequestBody) GetFormat() string { + if o == nil || IsNil(o.Format) { + var ret string + return ret + } + return *o.Format +} + +// GetFormatOk returns a tuple with the Format field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CreateVerifiableCredentialRequestBody) GetFormatOk() (*string, bool) { + if o == nil || IsNil(o.Format) { + return nil, false + } + return o.Format, true +} + +// HasFormat returns a boolean if a field has been set. 
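The regenerated models now validate required properties and reject unknown keys during unmarshalling (a `json.Decoder` with `DisallowUnknownFields` plus the `requiredProperties` check). A small sketch of what that means for callers, assuming it sits in the same package as the generated code; the payloads are made up.

```go
package openapi

import "fmt"

// Hypothetical illustration of the stricter UnmarshalJSON behaviour added in
// this change: a missing required property and an unknown key both fail.
func demoStrictDecoding() {
	var ks CreateJsonWebKeySet

	// "use" is required but missing -> "no value given for required property use".
	err := ks.UnmarshalJSON([]byte(`{"alg":"RS256","kid":"k1"}`))
	fmt.Println(err)

	// Unknown keys are rejected because the decoder disallows unknown fields.
	err = ks.UnmarshalJSON([]byte(`{"alg":"RS256","kid":"k1","use":"sig","extra":true}`))
	fmt.Println(err)

	// A well-formed payload decodes into the struct fields.
	err = ks.UnmarshalJSON([]byte(`{"alg":"RS256","kid":"k1","use":"sig"}`))
	fmt.Println(err, ks.Alg, ks.Kid, ks.Use)
}
```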
+func (o *CreateVerifiableCredentialRequestBody) HasFormat() bool { + if o != nil && !IsNil(o.Format) { + return true + } + + return false +} + +// SetFormat gets a reference to the given string and assigns it to the Format field. +func (o *CreateVerifiableCredentialRequestBody) SetFormat(v string) { + o.Format = &v +} + +// GetProof returns the Proof field value if set, zero value otherwise. +func (o *CreateVerifiableCredentialRequestBody) GetProof() VerifiableCredentialProof { + if o == nil || IsNil(o.Proof) { + var ret VerifiableCredentialProof + return ret + } + return *o.Proof +} + +// GetProofOk returns a tuple with the Proof field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CreateVerifiableCredentialRequestBody) GetProofOk() (*VerifiableCredentialProof, bool) { + if o == nil || IsNil(o.Proof) { + return nil, false + } + return o.Proof, true +} + +// HasProof returns a boolean if a field has been set. +func (o *CreateVerifiableCredentialRequestBody) HasProof() bool { + if o != nil && !IsNil(o.Proof) { + return true + } + + return false +} + +// SetProof gets a reference to the given VerifiableCredentialProof and assigns it to the Proof field. +func (o *CreateVerifiableCredentialRequestBody) SetProof(v VerifiableCredentialProof) { + o.Proof = &v +} + +// GetTypes returns the Types field value if set, zero value otherwise. +func (o *CreateVerifiableCredentialRequestBody) GetTypes() []string { + if o == nil || IsNil(o.Types) { + var ret []string + return ret + } + return o.Types +} + +// GetTypesOk returns a tuple with the Types field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CreateVerifiableCredentialRequestBody) GetTypesOk() ([]string, bool) { + if o == nil || IsNil(o.Types) { + return nil, false + } + return o.Types, true +} + +// HasTypes returns a boolean if a field has been set. +func (o *CreateVerifiableCredentialRequestBody) HasTypes() bool { + if o != nil && !IsNil(o.Types) { + return true + } + + return false +} + +// SetTypes gets a reference to the given []string and assigns it to the Types field. 
+func (o *CreateVerifiableCredentialRequestBody) SetTypes(v []string) { + o.Types = v +} + +func (o CreateVerifiableCredentialRequestBody) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o CreateVerifiableCredentialRequestBody) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Format) { + toSerialize["format"] = o.Format + } + if !IsNil(o.Proof) { + toSerialize["proof"] = o.Proof + } + if !IsNil(o.Types) { + toSerialize["types"] = o.Types + } + return toSerialize, nil +} + +type NullableCreateVerifiableCredentialRequestBody struct { + value *CreateVerifiableCredentialRequestBody + isSet bool +} + +func (v NullableCreateVerifiableCredentialRequestBody) Get() *CreateVerifiableCredentialRequestBody { + return v.value +} + +func (v *NullableCreateVerifiableCredentialRequestBody) Set(val *CreateVerifiableCredentialRequestBody) { + v.value = val + v.isSet = true +} + +func (v NullableCreateVerifiableCredentialRequestBody) IsSet() bool { + return v.isSet +} + +func (v *NullableCreateVerifiableCredentialRequestBody) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableCreateVerifiableCredentialRequestBody(val *CreateVerifiableCredentialRequestBody) *NullableCreateVerifiableCredentialRequestBody { + return &NullableCreateVerifiableCredentialRequestBody{value: val, isSet: true} +} + +func (v NullableCreateVerifiableCredentialRequestBody) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableCreateVerifiableCredentialRequestBody) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_credential_supported_draft00.go b/internal/httpclient/model_credential_supported_draft00.go new file mode 100644 index 00000000000..47d1c5198e7 --- /dev/null +++ b/internal/httpclient/model_credential_supported_draft00.go @@ -0,0 +1,237 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the CredentialSupportedDraft00 type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &CredentialSupportedDraft00{} + +// CredentialSupportedDraft00 Includes information about the supported verifiable credentials. +type CredentialSupportedDraft00 struct { + // OpenID Connect Verifiable Credentials Cryptographic Binding Methods Supported Contains a list of cryptographic binding methods supported for signing the proof. + CryptographicBindingMethodsSupported []string `json:"cryptographic_binding_methods_supported,omitempty"` + // OpenID Connect Verifiable Credentials Cryptographic Suites Supported Contains a list of cryptographic suites methods supported for signing the proof. + CryptographicSuitesSupported []string `json:"cryptographic_suites_supported,omitempty"` + // OpenID Connect Verifiable Credentials Format Contains the format that is supported by this authorization server. + Format *string `json:"format,omitempty"` + // OpenID Connect Verifiable Credentials Types Contains the types of verifiable credentials supported. 
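Each model also gets a Nullable* companion (shown above for CreateVerifiableCredentialRequestBody) that records whether a value was assigned at all, so callers can distinguish "unset" from "explicitly null". A short sketch of the mechanics, under the assumption that it lives in the same package as the generated code; the format value is made up.

```go
package openapi

import (
	"encoding/json"
	"fmt"
)

// Hypothetical walkthrough of the Nullable wrapper: IsSet reports whether a
// value (including an explicit JSON null) was assigned, and Unset clears both.
func demoNullableWrapper() {
	var n NullableCreateVerifiableCredentialRequestBody
	fmt.Println(n.IsSet()) // false: nothing assigned yet

	body := NewCreateVerifiableCredentialRequestBodyWithDefaults()
	body.SetFormat("jwt_vc_json") // assumed format value
	n.Set(body)
	fmt.Println(n.IsSet(), n.Get().GetFormat()) // true jwt_vc_json

	// An explicit JSON null is still "set": the value becomes nil but isSet stays true.
	_ = json.Unmarshal([]byte(`null`), &n)
	fmt.Println(n.IsSet(), n.Get() == nil) // true true

	n.Unset()
	fmt.Println(n.IsSet()) // false again
}
```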
+ Types []string `json:"types,omitempty"` +} + +// NewCredentialSupportedDraft00 instantiates a new CredentialSupportedDraft00 object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewCredentialSupportedDraft00() *CredentialSupportedDraft00 { + this := CredentialSupportedDraft00{} + return &this +} + +// NewCredentialSupportedDraft00WithDefaults instantiates a new CredentialSupportedDraft00 object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewCredentialSupportedDraft00WithDefaults() *CredentialSupportedDraft00 { + this := CredentialSupportedDraft00{} + return &this +} + +// GetCryptographicBindingMethodsSupported returns the CryptographicBindingMethodsSupported field value if set, zero value otherwise. +func (o *CredentialSupportedDraft00) GetCryptographicBindingMethodsSupported() []string { + if o == nil || IsNil(o.CryptographicBindingMethodsSupported) { + var ret []string + return ret + } + return o.CryptographicBindingMethodsSupported +} + +// GetCryptographicBindingMethodsSupportedOk returns a tuple with the CryptographicBindingMethodsSupported field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CredentialSupportedDraft00) GetCryptographicBindingMethodsSupportedOk() ([]string, bool) { + if o == nil || IsNil(o.CryptographicBindingMethodsSupported) { + return nil, false + } + return o.CryptographicBindingMethodsSupported, true +} + +// HasCryptographicBindingMethodsSupported returns a boolean if a field has been set. +func (o *CredentialSupportedDraft00) HasCryptographicBindingMethodsSupported() bool { + if o != nil && !IsNil(o.CryptographicBindingMethodsSupported) { + return true + } + + return false +} + +// SetCryptographicBindingMethodsSupported gets a reference to the given []string and assigns it to the CryptographicBindingMethodsSupported field. +func (o *CredentialSupportedDraft00) SetCryptographicBindingMethodsSupported(v []string) { + o.CryptographicBindingMethodsSupported = v +} + +// GetCryptographicSuitesSupported returns the CryptographicSuitesSupported field value if set, zero value otherwise. +func (o *CredentialSupportedDraft00) GetCryptographicSuitesSupported() []string { + if o == nil || IsNil(o.CryptographicSuitesSupported) { + var ret []string + return ret + } + return o.CryptographicSuitesSupported +} + +// GetCryptographicSuitesSupportedOk returns a tuple with the CryptographicSuitesSupported field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CredentialSupportedDraft00) GetCryptographicSuitesSupportedOk() ([]string, bool) { + if o == nil || IsNil(o.CryptographicSuitesSupported) { + return nil, false + } + return o.CryptographicSuitesSupported, true +} + +// HasCryptographicSuitesSupported returns a boolean if a field has been set. +func (o *CredentialSupportedDraft00) HasCryptographicSuitesSupported() bool { + if o != nil && !IsNil(o.CryptographicSuitesSupported) { + return true + } + + return false +} + +// SetCryptographicSuitesSupported gets a reference to the given []string and assigns it to the CryptographicSuitesSupported field. 
+func (o *CredentialSupportedDraft00) SetCryptographicSuitesSupported(v []string) { + o.CryptographicSuitesSupported = v +} + +// GetFormat returns the Format field value if set, zero value otherwise. +func (o *CredentialSupportedDraft00) GetFormat() string { + if o == nil || IsNil(o.Format) { + var ret string + return ret + } + return *o.Format +} + +// GetFormatOk returns a tuple with the Format field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CredentialSupportedDraft00) GetFormatOk() (*string, bool) { + if o == nil || IsNil(o.Format) { + return nil, false + } + return o.Format, true +} + +// HasFormat returns a boolean if a field has been set. +func (o *CredentialSupportedDraft00) HasFormat() bool { + if o != nil && !IsNil(o.Format) { + return true + } + + return false +} + +// SetFormat gets a reference to the given string and assigns it to the Format field. +func (o *CredentialSupportedDraft00) SetFormat(v string) { + o.Format = &v +} + +// GetTypes returns the Types field value if set, zero value otherwise. +func (o *CredentialSupportedDraft00) GetTypes() []string { + if o == nil || IsNil(o.Types) { + var ret []string + return ret + } + return o.Types +} + +// GetTypesOk returns a tuple with the Types field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *CredentialSupportedDraft00) GetTypesOk() ([]string, bool) { + if o == nil || IsNil(o.Types) { + return nil, false + } + return o.Types, true +} + +// HasTypes returns a boolean if a field has been set. +func (o *CredentialSupportedDraft00) HasTypes() bool { + if o != nil && !IsNil(o.Types) { + return true + } + + return false +} + +// SetTypes gets a reference to the given []string and assigns it to the Types field. 
+func (o *CredentialSupportedDraft00) SetTypes(v []string) { + o.Types = v +} + +func (o CredentialSupportedDraft00) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o CredentialSupportedDraft00) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.CryptographicBindingMethodsSupported) { + toSerialize["cryptographic_binding_methods_supported"] = o.CryptographicBindingMethodsSupported + } + if !IsNil(o.CryptographicSuitesSupported) { + toSerialize["cryptographic_suites_supported"] = o.CryptographicSuitesSupported + } + if !IsNil(o.Format) { + toSerialize["format"] = o.Format + } + if !IsNil(o.Types) { + toSerialize["types"] = o.Types + } + return toSerialize, nil +} + +type NullableCredentialSupportedDraft00 struct { + value *CredentialSupportedDraft00 + isSet bool +} + +func (v NullableCredentialSupportedDraft00) Get() *CredentialSupportedDraft00 { + return v.value +} + +func (v *NullableCredentialSupportedDraft00) Set(val *CredentialSupportedDraft00) { + v.value = val + v.isSet = true +} + +func (v NullableCredentialSupportedDraft00) IsSet() bool { + return v.isSet +} + +func (v *NullableCredentialSupportedDraft00) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableCredentialSupportedDraft00(val *CredentialSupportedDraft00) *NullableCredentialSupportedDraft00 { + return &NullableCredentialSupportedDraft00{value: val, isSet: true} +} + +func (v NullableCredentialSupportedDraft00) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableCredentialSupportedDraft00) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_device_authorization.go b/internal/httpclient/model_device_authorization.go new file mode 100644 index 00000000000..975972a8532 --- /dev/null +++ b/internal/httpclient/model_device_authorization.go @@ -0,0 +1,311 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the DeviceAuthorization type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &DeviceAuthorization{} + +// DeviceAuthorization # Ory's OAuth 2.0 Device Authorization API +type DeviceAuthorization struct { + // The device verification code. + DeviceCode *string `json:"device_code,omitempty"` + // The lifetime in seconds of the \"device_code\" and \"user_code\". + ExpiresIn *int64 `json:"expires_in,omitempty"` + // The minimum amount of time in seconds that the client SHOULD wait between polling requests to the token endpoint. If no value is provided, clients MUST use 5 as the default. + Interval *int64 `json:"interval,omitempty"` + // The end-user verification code. + UserCode *string `json:"user_code,omitempty"` + // The end-user verification URI on the authorization server. The URI should be short and easy to remember as end users will be asked to manually type it into their user agent. + VerificationUri *string `json:"verification_uri,omitempty"` + // A verification URI that includes the \"user_code\" (or other information with the same function as the \"user_code\"), which is designed for non-textual transmission. 
+ VerificationUriComplete *string `json:"verification_uri_complete,omitempty"` +} + +// NewDeviceAuthorization instantiates a new DeviceAuthorization object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewDeviceAuthorization() *DeviceAuthorization { + this := DeviceAuthorization{} + return &this +} + +// NewDeviceAuthorizationWithDefaults instantiates a new DeviceAuthorization object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewDeviceAuthorizationWithDefaults() *DeviceAuthorization { + this := DeviceAuthorization{} + return &this +} + +// GetDeviceCode returns the DeviceCode field value if set, zero value otherwise. +func (o *DeviceAuthorization) GetDeviceCode() string { + if o == nil || IsNil(o.DeviceCode) { + var ret string + return ret + } + return *o.DeviceCode +} + +// GetDeviceCodeOk returns a tuple with the DeviceCode field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceAuthorization) GetDeviceCodeOk() (*string, bool) { + if o == nil || IsNil(o.DeviceCode) { + return nil, false + } + return o.DeviceCode, true +} + +// HasDeviceCode returns a boolean if a field has been set. +func (o *DeviceAuthorization) HasDeviceCode() bool { + if o != nil && !IsNil(o.DeviceCode) { + return true + } + + return false +} + +// SetDeviceCode gets a reference to the given string and assigns it to the DeviceCode field. +func (o *DeviceAuthorization) SetDeviceCode(v string) { + o.DeviceCode = &v +} + +// GetExpiresIn returns the ExpiresIn field value if set, zero value otherwise. +func (o *DeviceAuthorization) GetExpiresIn() int64 { + if o == nil || IsNil(o.ExpiresIn) { + var ret int64 + return ret + } + return *o.ExpiresIn +} + +// GetExpiresInOk returns a tuple with the ExpiresIn field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceAuthorization) GetExpiresInOk() (*int64, bool) { + if o == nil || IsNil(o.ExpiresIn) { + return nil, false + } + return o.ExpiresIn, true +} + +// HasExpiresIn returns a boolean if a field has been set. +func (o *DeviceAuthorization) HasExpiresIn() bool { + if o != nil && !IsNil(o.ExpiresIn) { + return true + } + + return false +} + +// SetExpiresIn gets a reference to the given int64 and assigns it to the ExpiresIn field. +func (o *DeviceAuthorization) SetExpiresIn(v int64) { + o.ExpiresIn = &v +} + +// GetInterval returns the Interval field value if set, zero value otherwise. +func (o *DeviceAuthorization) GetInterval() int64 { + if o == nil || IsNil(o.Interval) { + var ret int64 + return ret + } + return *o.Interval +} + +// GetIntervalOk returns a tuple with the Interval field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceAuthorization) GetIntervalOk() (*int64, bool) { + if o == nil || IsNil(o.Interval) { + return nil, false + } + return o.Interval, true +} + +// HasInterval returns a boolean if a field has been set. +func (o *DeviceAuthorization) HasInterval() bool { + if o != nil && !IsNil(o.Interval) { + return true + } + + return false +} + +// SetInterval gets a reference to the given int64 and assigns it to the Interval field. 
+func (o *DeviceAuthorization) SetInterval(v int64) { + o.Interval = &v +} + +// GetUserCode returns the UserCode field value if set, zero value otherwise. +func (o *DeviceAuthorization) GetUserCode() string { + if o == nil || IsNil(o.UserCode) { + var ret string + return ret + } + return *o.UserCode +} + +// GetUserCodeOk returns a tuple with the UserCode field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceAuthorization) GetUserCodeOk() (*string, bool) { + if o == nil || IsNil(o.UserCode) { + return nil, false + } + return o.UserCode, true +} + +// HasUserCode returns a boolean if a field has been set. +func (o *DeviceAuthorization) HasUserCode() bool { + if o != nil && !IsNil(o.UserCode) { + return true + } + + return false +} + +// SetUserCode gets a reference to the given string and assigns it to the UserCode field. +func (o *DeviceAuthorization) SetUserCode(v string) { + o.UserCode = &v +} + +// GetVerificationUri returns the VerificationUri field value if set, zero value otherwise. +func (o *DeviceAuthorization) GetVerificationUri() string { + if o == nil || IsNil(o.VerificationUri) { + var ret string + return ret + } + return *o.VerificationUri +} + +// GetVerificationUriOk returns a tuple with the VerificationUri field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceAuthorization) GetVerificationUriOk() (*string, bool) { + if o == nil || IsNil(o.VerificationUri) { + return nil, false + } + return o.VerificationUri, true +} + +// HasVerificationUri returns a boolean if a field has been set. +func (o *DeviceAuthorization) HasVerificationUri() bool { + if o != nil && !IsNil(o.VerificationUri) { + return true + } + + return false +} + +// SetVerificationUri gets a reference to the given string and assigns it to the VerificationUri field. +func (o *DeviceAuthorization) SetVerificationUri(v string) { + o.VerificationUri = &v +} + +// GetVerificationUriComplete returns the VerificationUriComplete field value if set, zero value otherwise. +func (o *DeviceAuthorization) GetVerificationUriComplete() string { + if o == nil || IsNil(o.VerificationUriComplete) { + var ret string + return ret + } + return *o.VerificationUriComplete +} + +// GetVerificationUriCompleteOk returns a tuple with the VerificationUriComplete field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceAuthorization) GetVerificationUriCompleteOk() (*string, bool) { + if o == nil || IsNil(o.VerificationUriComplete) { + return nil, false + } + return o.VerificationUriComplete, true +} + +// HasVerificationUriComplete returns a boolean if a field has been set. +func (o *DeviceAuthorization) HasVerificationUriComplete() bool { + if o != nil && !IsNil(o.VerificationUriComplete) { + return true + } + + return false +} + +// SetVerificationUriComplete gets a reference to the given string and assigns it to the VerificationUriComplete field. 
+func (o *DeviceAuthorization) SetVerificationUriComplete(v string) { + o.VerificationUriComplete = &v +} + +func (o DeviceAuthorization) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o DeviceAuthorization) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.DeviceCode) { + toSerialize["device_code"] = o.DeviceCode + } + if !IsNil(o.ExpiresIn) { + toSerialize["expires_in"] = o.ExpiresIn + } + if !IsNil(o.Interval) { + toSerialize["interval"] = o.Interval + } + if !IsNil(o.UserCode) { + toSerialize["user_code"] = o.UserCode + } + if !IsNil(o.VerificationUri) { + toSerialize["verification_uri"] = o.VerificationUri + } + if !IsNil(o.VerificationUriComplete) { + toSerialize["verification_uri_complete"] = o.VerificationUriComplete + } + return toSerialize, nil +} + +type NullableDeviceAuthorization struct { + value *DeviceAuthorization + isSet bool +} + +func (v NullableDeviceAuthorization) Get() *DeviceAuthorization { + return v.value +} + +func (v *NullableDeviceAuthorization) Set(val *DeviceAuthorization) { + v.value = val + v.isSet = true +} + +func (v NullableDeviceAuthorization) IsSet() bool { + return v.isSet +} + +func (v *NullableDeviceAuthorization) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableDeviceAuthorization(val *DeviceAuthorization) *NullableDeviceAuthorization { + return &NullableDeviceAuthorization{value: val, isSet: true} +} + +func (v NullableDeviceAuthorization) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableDeviceAuthorization) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_device_user_auth_request.go b/internal/httpclient/model_device_user_auth_request.go new file mode 100644 index 00000000000..a101144b4a1 --- /dev/null +++ b/internal/httpclient/model_device_user_auth_request.go @@ -0,0 +1,340 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "bytes" + "encoding/json" + "fmt" + "time" +) + +// checks if the DeviceUserAuthRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &DeviceUserAuthRequest{} + +// DeviceUserAuthRequest struct for DeviceUserAuthRequest +type DeviceUserAuthRequest struct { + // ID is the identifier (\"device challenge\") of the device grant request. It is used to identify the session. + Challenge string `json:"challenge"` + Client *OAuth2Client `json:"client,omitempty"` + HandledAt *time.Time `json:"handled_at,omitempty"` + // RequestURL is the original Device Authorization URL requested. 
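The new DeviceAuthorization model above mirrors the RFC 8628 device authorization response; in particular, the `interval` field documents that clients must fall back to 5 seconds when the server sends no value. A minimal sketch of how a caller might read the response with the generated accessors; issuing the request and polling the token endpoint are out of scope and assumed to happen elsewhere.

```go
package openapi

import (
	"fmt"
	"time"
)

// Hypothetical helper showing how a device-flow client could consume the
// DeviceAuthorization payload using only the generated getters.
func describeDeviceAuthorization(res *DeviceAuthorization) (pollEvery time.Duration) {
	// Per the field docs, clients MUST default to 5 seconds when no interval is sent.
	interval := int64(5)
	if res.HasInterval() {
		interval = res.GetInterval()
	}

	fmt.Printf("Visit %s and enter code %s (expires in %ds)\n",
		res.GetVerificationUri(), res.GetUserCode(), res.GetExpiresIn())
	if res.HasVerificationUriComplete() {
		fmt.Println("Or open:", res.GetVerificationUriComplete())
	}

	return time.Duration(interval) * time.Second
}
```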
+ RequestUrl *string `json:"request_url,omitempty"` + RequestedAccessTokenAudience []string `json:"requested_access_token_audience,omitempty"` + RequestedScope []string `json:"requested_scope,omitempty"` +} + +type _DeviceUserAuthRequest DeviceUserAuthRequest + +// NewDeviceUserAuthRequest instantiates a new DeviceUserAuthRequest object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewDeviceUserAuthRequest(challenge string) *DeviceUserAuthRequest { + this := DeviceUserAuthRequest{} + this.Challenge = challenge + return &this +} + +// NewDeviceUserAuthRequestWithDefaults instantiates a new DeviceUserAuthRequest object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewDeviceUserAuthRequestWithDefaults() *DeviceUserAuthRequest { + this := DeviceUserAuthRequest{} + return &this +} + +// GetChallenge returns the Challenge field value +func (o *DeviceUserAuthRequest) GetChallenge() string { + if o == nil { + var ret string + return ret + } + + return o.Challenge +} + +// GetChallengeOk returns a tuple with the Challenge field value +// and a boolean to check if the value has been set. +func (o *DeviceUserAuthRequest) GetChallengeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Challenge, true +} + +// SetChallenge sets field value +func (o *DeviceUserAuthRequest) SetChallenge(v string) { + o.Challenge = v +} + +// GetClient returns the Client field value if set, zero value otherwise. +func (o *DeviceUserAuthRequest) GetClient() OAuth2Client { + if o == nil || IsNil(o.Client) { + var ret OAuth2Client + return ret + } + return *o.Client +} + +// GetClientOk returns a tuple with the Client field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceUserAuthRequest) GetClientOk() (*OAuth2Client, bool) { + if o == nil || IsNil(o.Client) { + return nil, false + } + return o.Client, true +} + +// HasClient returns a boolean if a field has been set. +func (o *DeviceUserAuthRequest) HasClient() bool { + if o != nil && !IsNil(o.Client) { + return true + } + + return false +} + +// SetClient gets a reference to the given OAuth2Client and assigns it to the Client field. +func (o *DeviceUserAuthRequest) SetClient(v OAuth2Client) { + o.Client = &v +} + +// GetHandledAt returns the HandledAt field value if set, zero value otherwise. +func (o *DeviceUserAuthRequest) GetHandledAt() time.Time { + if o == nil || IsNil(o.HandledAt) { + var ret time.Time + return ret + } + return *o.HandledAt +} + +// GetHandledAtOk returns a tuple with the HandledAt field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceUserAuthRequest) GetHandledAtOk() (*time.Time, bool) { + if o == nil || IsNil(o.HandledAt) { + return nil, false + } + return o.HandledAt, true +} + +// HasHandledAt returns a boolean if a field has been set. +func (o *DeviceUserAuthRequest) HasHandledAt() bool { + if o != nil && !IsNil(o.HandledAt) { + return true + } + + return false +} + +// SetHandledAt gets a reference to the given time.Time and assigns it to the HandledAt field. +func (o *DeviceUserAuthRequest) SetHandledAt(v time.Time) { + o.HandledAt = &v +} + +// GetRequestUrl returns the RequestUrl field value if set, zero value otherwise. 
+func (o *DeviceUserAuthRequest) GetRequestUrl() string { + if o == nil || IsNil(o.RequestUrl) { + var ret string + return ret + } + return *o.RequestUrl +} + +// GetRequestUrlOk returns a tuple with the RequestUrl field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceUserAuthRequest) GetRequestUrlOk() (*string, bool) { + if o == nil || IsNil(o.RequestUrl) { + return nil, false + } + return o.RequestUrl, true +} + +// HasRequestUrl returns a boolean if a field has been set. +func (o *DeviceUserAuthRequest) HasRequestUrl() bool { + if o != nil && !IsNil(o.RequestUrl) { + return true + } + + return false +} + +// SetRequestUrl gets a reference to the given string and assigns it to the RequestUrl field. +func (o *DeviceUserAuthRequest) SetRequestUrl(v string) { + o.RequestUrl = &v +} + +// GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field value if set, zero value otherwise. +func (o *DeviceUserAuthRequest) GetRequestedAccessTokenAudience() []string { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { + var ret []string + return ret + } + return o.RequestedAccessTokenAudience +} + +// GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceUserAuthRequest) GetRequestedAccessTokenAudienceOk() ([]string, bool) { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { + return nil, false + } + return o.RequestedAccessTokenAudience, true +} + +// HasRequestedAccessTokenAudience returns a boolean if a field has been set. +func (o *DeviceUserAuthRequest) HasRequestedAccessTokenAudience() bool { + if o != nil && !IsNil(o.RequestedAccessTokenAudience) { + return true + } + + return false +} + +// SetRequestedAccessTokenAudience gets a reference to the given []string and assigns it to the RequestedAccessTokenAudience field. +func (o *DeviceUserAuthRequest) SetRequestedAccessTokenAudience(v []string) { + o.RequestedAccessTokenAudience = v +} + +// GetRequestedScope returns the RequestedScope field value if set, zero value otherwise. +func (o *DeviceUserAuthRequest) GetRequestedScope() []string { + if o == nil || IsNil(o.RequestedScope) { + var ret []string + return ret + } + return o.RequestedScope +} + +// GetRequestedScopeOk returns a tuple with the RequestedScope field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *DeviceUserAuthRequest) GetRequestedScopeOk() ([]string, bool) { + if o == nil || IsNil(o.RequestedScope) { + return nil, false + } + return o.RequestedScope, true +} + +// HasRequestedScope returns a boolean if a field has been set. +func (o *DeviceUserAuthRequest) HasRequestedScope() bool { + if o != nil && !IsNil(o.RequestedScope) { + return true + } + + return false +} + +// SetRequestedScope gets a reference to the given []string and assigns it to the RequestedScope field. 
+func (o *DeviceUserAuthRequest) SetRequestedScope(v []string) { + o.RequestedScope = v +} + +func (o DeviceUserAuthRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o DeviceUserAuthRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["challenge"] = o.Challenge + if !IsNil(o.Client) { + toSerialize["client"] = o.Client + } + if !IsNil(o.HandledAt) { + toSerialize["handled_at"] = o.HandledAt + } + if !IsNil(o.RequestUrl) { + toSerialize["request_url"] = o.RequestUrl + } + if !IsNil(o.RequestedAccessTokenAudience) { + toSerialize["requested_access_token_audience"] = o.RequestedAccessTokenAudience + } + if !IsNil(o.RequestedScope) { + toSerialize["requested_scope"] = o.RequestedScope + } + return toSerialize, nil +} + +func (o *DeviceUserAuthRequest) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. + requiredProperties := []string{ + "challenge", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varDeviceUserAuthRequest := _DeviceUserAuthRequest{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varDeviceUserAuthRequest) + + if err != nil { + return err + } + + *o = DeviceUserAuthRequest(varDeviceUserAuthRequest) + + return err +} + +type NullableDeviceUserAuthRequest struct { + value *DeviceUserAuthRequest + isSet bool +} + +func (v NullableDeviceUserAuthRequest) Get() *DeviceUserAuthRequest { + return v.value +} + +func (v *NullableDeviceUserAuthRequest) Set(val *DeviceUserAuthRequest) { + v.value = val + v.isSet = true +} + +func (v NullableDeviceUserAuthRequest) IsSet() bool { + return v.isSet +} + +func (v *NullableDeviceUserAuthRequest) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableDeviceUserAuthRequest(val *DeviceUserAuthRequest) *NullableDeviceUserAuthRequest { + return &NullableDeviceUserAuthRequest{value: val, isSet: true} +} + +func (v NullableDeviceUserAuthRequest) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableDeviceUserAuthRequest) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_error_o_auth2.go b/internal/httpclient/model_error_o_auth2.go index 47b81b55881..f8be4fca180 100644 --- a/internal/httpclient/model_error_o_auth2.go +++ b/internal/httpclient/model_error_o_auth2.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the ErrorOAuth2 type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &ErrorOAuth2{} + // ErrorOAuth2 Error type ErrorOAuth2 struct { // Error @@ -48,7 +51,7 @@ func NewErrorOAuth2WithDefaults() *ErrorOAuth2 { // GetError returns the Error field value if set, zero value otherwise. 
func (o *ErrorOAuth2) GetError() string { - if o == nil || o.Error == nil { + if o == nil || IsNil(o.Error) { var ret string return ret } @@ -58,7 +61,7 @@ func (o *ErrorOAuth2) GetError() string { // GetErrorOk returns a tuple with the Error field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *ErrorOAuth2) GetErrorOk() (*string, bool) { - if o == nil || o.Error == nil { + if o == nil || IsNil(o.Error) { return nil, false } return o.Error, true @@ -66,7 +69,7 @@ func (o *ErrorOAuth2) GetErrorOk() (*string, bool) { // HasError returns a boolean if a field has been set. func (o *ErrorOAuth2) HasError() bool { - if o != nil && o.Error != nil { + if o != nil && !IsNil(o.Error) { return true } @@ -80,7 +83,7 @@ func (o *ErrorOAuth2) SetError(v string) { // GetErrorDebug returns the ErrorDebug field value if set, zero value otherwise. func (o *ErrorOAuth2) GetErrorDebug() string { - if o == nil || o.ErrorDebug == nil { + if o == nil || IsNil(o.ErrorDebug) { var ret string return ret } @@ -90,7 +93,7 @@ func (o *ErrorOAuth2) GetErrorDebug() string { // GetErrorDebugOk returns a tuple with the ErrorDebug field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *ErrorOAuth2) GetErrorDebugOk() (*string, bool) { - if o == nil || o.ErrorDebug == nil { + if o == nil || IsNil(o.ErrorDebug) { return nil, false } return o.ErrorDebug, true @@ -98,7 +101,7 @@ func (o *ErrorOAuth2) GetErrorDebugOk() (*string, bool) { // HasErrorDebug returns a boolean if a field has been set. func (o *ErrorOAuth2) HasErrorDebug() bool { - if o != nil && o.ErrorDebug != nil { + if o != nil && !IsNil(o.ErrorDebug) { return true } @@ -112,7 +115,7 @@ func (o *ErrorOAuth2) SetErrorDebug(v string) { // GetErrorDescription returns the ErrorDescription field value if set, zero value otherwise. func (o *ErrorOAuth2) GetErrorDescription() string { - if o == nil || o.ErrorDescription == nil { + if o == nil || IsNil(o.ErrorDescription) { var ret string return ret } @@ -122,7 +125,7 @@ func (o *ErrorOAuth2) GetErrorDescription() string { // GetErrorDescriptionOk returns a tuple with the ErrorDescription field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *ErrorOAuth2) GetErrorDescriptionOk() (*string, bool) { - if o == nil || o.ErrorDescription == nil { + if o == nil || IsNil(o.ErrorDescription) { return nil, false } return o.ErrorDescription, true @@ -130,7 +133,7 @@ func (o *ErrorOAuth2) GetErrorDescriptionOk() (*string, bool) { // HasErrorDescription returns a boolean if a field has been set. func (o *ErrorOAuth2) HasErrorDescription() bool { - if o != nil && o.ErrorDescription != nil { + if o != nil && !IsNil(o.ErrorDescription) { return true } @@ -144,7 +147,7 @@ func (o *ErrorOAuth2) SetErrorDescription(v string) { // GetErrorHint returns the ErrorHint field value if set, zero value otherwise. func (o *ErrorOAuth2) GetErrorHint() string { - if o == nil || o.ErrorHint == nil { + if o == nil || IsNil(o.ErrorHint) { var ret string return ret } @@ -154,7 +157,7 @@ func (o *ErrorOAuth2) GetErrorHint() string { // GetErrorHintOk returns a tuple with the ErrorHint field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *ErrorOAuth2) GetErrorHintOk() (*string, bool) { - if o == nil || o.ErrorHint == nil { + if o == nil || IsNil(o.ErrorHint) { return nil, false } return o.ErrorHint, true @@ -162,7 +165,7 @@ func (o *ErrorOAuth2) GetErrorHintOk() (*string, bool) { // HasErrorHint returns a boolean if a field has been set. func (o *ErrorOAuth2) HasErrorHint() bool { - if o != nil && o.ErrorHint != nil { + if o != nil && !IsNil(o.ErrorHint) { return true } @@ -176,7 +179,7 @@ func (o *ErrorOAuth2) SetErrorHint(v string) { // GetStatusCode returns the StatusCode field value if set, zero value otherwise. func (o *ErrorOAuth2) GetStatusCode() int64 { - if o == nil || o.StatusCode == nil { + if o == nil || IsNil(o.StatusCode) { var ret int64 return ret } @@ -186,7 +189,7 @@ func (o *ErrorOAuth2) GetStatusCode() int64 { // GetStatusCodeOk returns a tuple with the StatusCode field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *ErrorOAuth2) GetStatusCodeOk() (*int64, bool) { - if o == nil || o.StatusCode == nil { + if o == nil || IsNil(o.StatusCode) { return nil, false } return o.StatusCode, true @@ -194,7 +197,7 @@ func (o *ErrorOAuth2) GetStatusCodeOk() (*int64, bool) { // HasStatusCode returns a boolean if a field has been set. func (o *ErrorOAuth2) HasStatusCode() bool { - if o != nil && o.StatusCode != nil { + if o != nil && !IsNil(o.StatusCode) { return true } @@ -207,23 +210,31 @@ func (o *ErrorOAuth2) SetStatusCode(v int64) { } func (o ErrorOAuth2) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o ErrorOAuth2) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Error != nil { + if !IsNil(o.Error) { toSerialize["error"] = o.Error } - if o.ErrorDebug != nil { + if !IsNil(o.ErrorDebug) { toSerialize["error_debug"] = o.ErrorDebug } - if o.ErrorDescription != nil { + if !IsNil(o.ErrorDescription) { toSerialize["error_description"] = o.ErrorDescription } - if o.ErrorHint != nil { + if !IsNil(o.ErrorHint) { toSerialize["error_hint"] = o.ErrorHint } - if o.StatusCode != nil { + if !IsNil(o.StatusCode) { toSerialize["status_code"] = o.StatusCode } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableErrorOAuth2 struct { diff --git a/internal/httpclient/model_generic_error.go b/internal/httpclient/model_generic_error.go index ad78dc23583..3cdb2816cbb 100644 --- a/internal/httpclient/model_generic_error.go +++ b/internal/httpclient/model_generic_error.go @@ -12,9 +12,14 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the GenericError type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &GenericError{} + // GenericError struct for GenericError type GenericError struct { // The status code @@ -35,6 +40,8 @@ type GenericError struct { Status *string `json:"status,omitempty"` } +type _GenericError GenericError + // NewGenericError instantiates a new GenericError object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -55,7 +62,7 @@ func NewGenericErrorWithDefaults() *GenericError { // GetCode returns the Code field value if set, zero value otherwise. 
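The accessors across all models now route through `IsNil` instead of comparing pointers to nil directly, but the caller-facing contract is unchanged: `Has*` reports whether a field was present and `Get*` returns the zero value otherwise. A brief sketch against the ErrorOAuth2 hunks above; the payload handed in is made up.

```go
package openapi

import (
	"encoding/json"
	"fmt"
)

// Hypothetical example: reading an OAuth2 error payload through the generated
// Has*/Get* accessors. Optional fields that were not sent report false/zero.
func describeOAuth2Error(raw []byte) {
	var e ErrorOAuth2
	if err := json.Unmarshal(raw, &e); err != nil {
		fmt.Println("decode failed:", err)
		return
	}

	fmt.Println("error:", e.GetError()) // "" when the field is absent
	if e.HasErrorDescription() {
		fmt.Println("description:", e.GetErrorDescription())
	}
	if e.HasErrorHint() {
		fmt.Println("hint:", e.GetErrorHint())
	}
	if e.HasStatusCode() {
		fmt.Println("status code:", e.GetStatusCode())
	}
}
```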
func (o *GenericError) GetCode() int64 { - if o == nil || o.Code == nil { + if o == nil || IsNil(o.Code) { var ret int64 return ret } @@ -65,7 +72,7 @@ func (o *GenericError) GetCode() int64 { // GetCodeOk returns a tuple with the Code field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GenericError) GetCodeOk() (*int64, bool) { - if o == nil || o.Code == nil { + if o == nil || IsNil(o.Code) { return nil, false } return o.Code, true @@ -73,7 +80,7 @@ func (o *GenericError) GetCodeOk() (*int64, bool) { // HasCode returns a boolean if a field has been set. func (o *GenericError) HasCode() bool { - if o != nil && o.Code != nil { + if o != nil && !IsNil(o.Code) { return true } @@ -87,7 +94,7 @@ func (o *GenericError) SetCode(v int64) { // GetDebug returns the Debug field value if set, zero value otherwise. func (o *GenericError) GetDebug() string { - if o == nil || o.Debug == nil { + if o == nil || IsNil(o.Debug) { var ret string return ret } @@ -97,7 +104,7 @@ func (o *GenericError) GetDebug() string { // GetDebugOk returns a tuple with the Debug field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GenericError) GetDebugOk() (*string, bool) { - if o == nil || o.Debug == nil { + if o == nil || IsNil(o.Debug) { return nil, false } return o.Debug, true @@ -105,7 +112,7 @@ func (o *GenericError) GetDebugOk() (*string, bool) { // HasDebug returns a boolean if a field has been set. func (o *GenericError) HasDebug() bool { - if o != nil && o.Debug != nil { + if o != nil && !IsNil(o.Debug) { return true } @@ -130,7 +137,7 @@ func (o *GenericError) GetDetails() interface{} { // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *GenericError) GetDetailsOk() (*interface{}, bool) { - if o == nil || o.Details == nil { + if o == nil || IsNil(o.Details) { return nil, false } return &o.Details, true @@ -138,7 +145,7 @@ func (o *GenericError) GetDetailsOk() (*interface{}, bool) { // HasDetails returns a boolean if a field has been set. func (o *GenericError) HasDetails() bool { - if o != nil && o.Details != nil { + if o != nil && !IsNil(o.Details) { return true } @@ -152,7 +159,7 @@ func (o *GenericError) SetDetails(v interface{}) { // GetId returns the Id field value if set, zero value otherwise. func (o *GenericError) GetId() string { - if o == nil || o.Id == nil { + if o == nil || IsNil(o.Id) { var ret string return ret } @@ -162,7 +169,7 @@ func (o *GenericError) GetId() string { // GetIdOk returns a tuple with the Id field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GenericError) GetIdOk() (*string, bool) { - if o == nil || o.Id == nil { + if o == nil || IsNil(o.Id) { return nil, false } return o.Id, true @@ -170,7 +177,7 @@ func (o *GenericError) GetIdOk() (*string, bool) { // HasId returns a boolean if a field has been set. func (o *GenericError) HasId() bool { - if o != nil && o.Id != nil { + if o != nil && !IsNil(o.Id) { return true } @@ -208,7 +215,7 @@ func (o *GenericError) SetMessage(v string) { // GetReason returns the Reason field value if set, zero value otherwise. 
func (o *GenericError) GetReason() string { - if o == nil || o.Reason == nil { + if o == nil || IsNil(o.Reason) { var ret string return ret } @@ -218,7 +225,7 @@ func (o *GenericError) GetReason() string { // GetReasonOk returns a tuple with the Reason field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GenericError) GetReasonOk() (*string, bool) { - if o == nil || o.Reason == nil { + if o == nil || IsNil(o.Reason) { return nil, false } return o.Reason, true @@ -226,7 +233,7 @@ func (o *GenericError) GetReasonOk() (*string, bool) { // HasReason returns a boolean if a field has been set. func (o *GenericError) HasReason() bool { - if o != nil && o.Reason != nil { + if o != nil && !IsNil(o.Reason) { return true } @@ -240,7 +247,7 @@ func (o *GenericError) SetReason(v string) { // GetRequest returns the Request field value if set, zero value otherwise. func (o *GenericError) GetRequest() string { - if o == nil || o.Request == nil { + if o == nil || IsNil(o.Request) { var ret string return ret } @@ -250,7 +257,7 @@ func (o *GenericError) GetRequest() string { // GetRequestOk returns a tuple with the Request field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GenericError) GetRequestOk() (*string, bool) { - if o == nil || o.Request == nil { + if o == nil || IsNil(o.Request) { return nil, false } return o.Request, true @@ -258,7 +265,7 @@ func (o *GenericError) GetRequestOk() (*string, bool) { // HasRequest returns a boolean if a field has been set. func (o *GenericError) HasRequest() bool { - if o != nil && o.Request != nil { + if o != nil && !IsNil(o.Request) { return true } @@ -272,7 +279,7 @@ func (o *GenericError) SetRequest(v string) { // GetStatus returns the Status field value if set, zero value otherwise. func (o *GenericError) GetStatus() string { - if o == nil || o.Status == nil { + if o == nil || IsNil(o.Status) { var ret string return ret } @@ -282,7 +289,7 @@ func (o *GenericError) GetStatus() string { // GetStatusOk returns a tuple with the Status field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *GenericError) GetStatusOk() (*string, bool) { - if o == nil || o.Status == nil { + if o == nil || IsNil(o.Status) { return nil, false } return o.Status, true @@ -290,7 +297,7 @@ func (o *GenericError) GetStatusOk() (*string, bool) { // HasStatus returns a boolean if a field has been set. 
func (o *GenericError) HasStatus() bool { - if o != nil && o.Status != nil { + if o != nil && !IsNil(o.Status) { return true } @@ -303,32 +310,75 @@ func (o *GenericError) SetStatus(v string) { } func (o GenericError) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o GenericError) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Code != nil { + if !IsNil(o.Code) { toSerialize["code"] = o.Code } - if o.Debug != nil { + if !IsNil(o.Debug) { toSerialize["debug"] = o.Debug } if o.Details != nil { toSerialize["details"] = o.Details } - if o.Id != nil { + if !IsNil(o.Id) { toSerialize["id"] = o.Id } - if true { - toSerialize["message"] = o.Message - } - if o.Reason != nil { + toSerialize["message"] = o.Message + if !IsNil(o.Reason) { toSerialize["reason"] = o.Reason } - if o.Request != nil { + if !IsNil(o.Request) { toSerialize["request"] = o.Request } - if o.Status != nil { + if !IsNil(o.Status) { toSerialize["status"] = o.Status } - return json.Marshal(toSerialize) + return toSerialize, nil +} + +func (o *GenericError) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. + requiredProperties := []string{ + "message", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varGenericError := _GenericError{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varGenericError) + + if err != nil { + return err + } + + *o = GenericError(varGenericError) + + return err } type NullableGenericError struct { diff --git a/internal/httpclient/model_get_version_200_response.go b/internal/httpclient/model_get_version_200_response.go index d53f4a72dcb..d4b885e3011 100644 --- a/internal/httpclient/model_get_version_200_response.go +++ b/internal/httpclient/model_get_version_200_response.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the GetVersion200Response type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &GetVersion200Response{} + // GetVersion200Response struct for GetVersion200Response type GetVersion200Response struct { // The version of Ory Hydra. @@ -40,7 +43,7 @@ func NewGetVersion200ResponseWithDefaults() *GetVersion200Response { // GetVersion returns the Version field value if set, zero value otherwise. func (o *GetVersion200Response) GetVersion() string { - if o == nil || o.Version == nil { + if o == nil || IsNil(o.Version) { var ret string return ret } @@ -50,7 +53,7 @@ func (o *GetVersion200Response) GetVersion() string { // GetVersionOk returns a tuple with the Version field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *GetVersion200Response) GetVersionOk() (*string, bool) { - if o == nil || o.Version == nil { + if o == nil || IsNil(o.Version) { return nil, false } return o.Version, true @@ -58,7 +61,7 @@ func (o *GetVersion200Response) GetVersionOk() (*string, bool) { // HasVersion returns a boolean if a field has been set. func (o *GetVersion200Response) HasVersion() bool { - if o != nil && o.Version != nil { + if o != nil && !IsNil(o.Version) { return true } @@ -71,11 +74,19 @@ func (o *GetVersion200Response) SetVersion(v string) { } func (o GetVersion200Response) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o GetVersion200Response) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Version != nil { + if !IsNil(o.Version) { toSerialize["version"] = o.Version } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableGetVersion200Response struct { diff --git a/internal/httpclient/model_health_not_ready_status.go b/internal/httpclient/model_health_not_ready_status.go index 97ac1e0b8fc..1fcf0b32667 100644 --- a/internal/httpclient/model_health_not_ready_status.go +++ b/internal/httpclient/model_health_not_ready_status.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the HealthNotReadyStatus type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &HealthNotReadyStatus{} + // HealthNotReadyStatus struct for HealthNotReadyStatus type HealthNotReadyStatus struct { // Errors contains a list of errors that caused the not ready status. @@ -40,7 +43,7 @@ func NewHealthNotReadyStatusWithDefaults() *HealthNotReadyStatus { // GetErrors returns the Errors field value if set, zero value otherwise. func (o *HealthNotReadyStatus) GetErrors() map[string]string { - if o == nil || o.Errors == nil { + if o == nil || IsNil(o.Errors) { var ret map[string]string return ret } @@ -50,7 +53,7 @@ func (o *HealthNotReadyStatus) GetErrors() map[string]string { // GetErrorsOk returns a tuple with the Errors field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *HealthNotReadyStatus) GetErrorsOk() (*map[string]string, bool) { - if o == nil || o.Errors == nil { + if o == nil || IsNil(o.Errors) { return nil, false } return o.Errors, true @@ -58,7 +61,7 @@ func (o *HealthNotReadyStatus) GetErrorsOk() (*map[string]string, bool) { // HasErrors returns a boolean if a field has been set. 
func (o *HealthNotReadyStatus) HasErrors() bool { - if o != nil && o.Errors != nil { + if o != nil && !IsNil(o.Errors) { return true } @@ -71,11 +74,19 @@ func (o *HealthNotReadyStatus) SetErrors(v map[string]string) { } func (o HealthNotReadyStatus) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o HealthNotReadyStatus) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Errors != nil { + if !IsNil(o.Errors) { toSerialize["errors"] = o.Errors } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableHealthNotReadyStatus struct { diff --git a/internal/httpclient/model_health_status.go b/internal/httpclient/model_health_status.go index 193dc526174..20d626d914a 100644 --- a/internal/httpclient/model_health_status.go +++ b/internal/httpclient/model_health_status.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the HealthStatus type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &HealthStatus{} + // HealthStatus struct for HealthStatus type HealthStatus struct { // Status always contains \"ok\". @@ -40,7 +43,7 @@ func NewHealthStatusWithDefaults() *HealthStatus { // GetStatus returns the Status field value if set, zero value otherwise. func (o *HealthStatus) GetStatus() string { - if o == nil || o.Status == nil { + if o == nil || IsNil(o.Status) { var ret string return ret } @@ -50,7 +53,7 @@ func (o *HealthStatus) GetStatus() string { // GetStatusOk returns a tuple with the Status field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *HealthStatus) GetStatusOk() (*string, bool) { - if o == nil || o.Status == nil { + if o == nil || IsNil(o.Status) { return nil, false } return o.Status, true @@ -58,7 +61,7 @@ func (o *HealthStatus) GetStatusOk() (*string, bool) { // HasStatus returns a boolean if a field has been set. func (o *HealthStatus) HasStatus() bool { - if o != nil && o.Status != nil { + if o != nil && !IsNil(o.Status) { return true } @@ -71,11 +74,19 @@ func (o *HealthStatus) SetStatus(v string) { } func (o HealthStatus) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o HealthStatus) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Status != nil { + if !IsNil(o.Status) { toSerialize["status"] = o.Status } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableHealthStatus struct { diff --git a/internal/httpclient/model_introspected_o_auth2_token.go b/internal/httpclient/model_introspected_o_auth2_token.go index a7d55aff1c0..9b96258a033 100644 --- a/internal/httpclient/model_introspected_o_auth2_token.go +++ b/internal/httpclient/model_introspected_o_auth2_token.go @@ -12,16 +12,21 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the IntrospectedOAuth2Token type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &IntrospectedOAuth2Token{} + // IntrospectedOAuth2Token Introspection contains an access token's session data as specified by [IETF RFC 7662](https://tools.ietf.org/html/rfc7662) type IntrospectedOAuth2Token struct { // Active is a boolean indicator of whether or not the presented token is currently active. 
The specifics of a token's \"active\" state will vary depending on the implementation of the authorization server and the information it keeps about its tokens, but a \"true\" value return for the \"active\" property will generally indicate that a given token has been issued by this authorization server, has not been revoked by the resource owner, and is within its given time window of validity (e.g., after its issuance time and before its expiration time). Active bool `json:"active"` // Audience contains a list of the token's intended audiences. Aud []string `json:"aud,omitempty"` - // ID is aclient identifier for the OAuth 2.0 client that requested this token. + // ID is a client identifier for the OAuth 2.0 client that requested this token. ClientId *string `json:"client_id,omitempty"` // Expires at is an integer timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token will expire. Exp *int64 `json:"exp,omitempty"` @@ -47,6 +52,8 @@ type IntrospectedOAuth2Token struct { Username *string `json:"username,omitempty"` } +type _IntrospectedOAuth2Token IntrospectedOAuth2Token + // NewIntrospectedOAuth2Token instantiates a new IntrospectedOAuth2Token object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -91,7 +98,7 @@ func (o *IntrospectedOAuth2Token) SetActive(v bool) { // GetAud returns the Aud field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetAud() []string { - if o == nil || o.Aud == nil { + if o == nil || IsNil(o.Aud) { var ret []string return ret } @@ -101,7 +108,7 @@ func (o *IntrospectedOAuth2Token) GetAud() []string { // GetAudOk returns a tuple with the Aud field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetAudOk() ([]string, bool) { - if o == nil || o.Aud == nil { + if o == nil || IsNil(o.Aud) { return nil, false } return o.Aud, true @@ -109,7 +116,7 @@ func (o *IntrospectedOAuth2Token) GetAudOk() ([]string, bool) { // HasAud returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasAud() bool { - if o != nil && o.Aud != nil { + if o != nil && !IsNil(o.Aud) { return true } @@ -123,7 +130,7 @@ func (o *IntrospectedOAuth2Token) SetAud(v []string) { // GetClientId returns the ClientId field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetClientId() string { - if o == nil || o.ClientId == nil { + if o == nil || IsNil(o.ClientId) { var ret string return ret } @@ -133,7 +140,7 @@ func (o *IntrospectedOAuth2Token) GetClientId() string { // GetClientIdOk returns a tuple with the ClientId field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetClientIdOk() (*string, bool) { - if o == nil || o.ClientId == nil { + if o == nil || IsNil(o.ClientId) { return nil, false } return o.ClientId, true @@ -141,7 +148,7 @@ func (o *IntrospectedOAuth2Token) GetClientIdOk() (*string, bool) { // HasClientId returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasClientId() bool { - if o != nil && o.ClientId != nil { + if o != nil && !IsNil(o.ClientId) { return true } @@ -155,7 +162,7 @@ func (o *IntrospectedOAuth2Token) SetClientId(v string) { // GetExp returns the Exp field value if set, zero value otherwise. 
func (o *IntrospectedOAuth2Token) GetExp() int64 { - if o == nil || o.Exp == nil { + if o == nil || IsNil(o.Exp) { var ret int64 return ret } @@ -165,7 +172,7 @@ func (o *IntrospectedOAuth2Token) GetExp() int64 { // GetExpOk returns a tuple with the Exp field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetExpOk() (*int64, bool) { - if o == nil || o.Exp == nil { + if o == nil || IsNil(o.Exp) { return nil, false } return o.Exp, true @@ -173,7 +180,7 @@ func (o *IntrospectedOAuth2Token) GetExpOk() (*int64, bool) { // HasExp returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasExp() bool { - if o != nil && o.Exp != nil { + if o != nil && !IsNil(o.Exp) { return true } @@ -187,7 +194,7 @@ func (o *IntrospectedOAuth2Token) SetExp(v int64) { // GetExt returns the Ext field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetExt() map[string]interface{} { - if o == nil || o.Ext == nil { + if o == nil || IsNil(o.Ext) { var ret map[string]interface{} return ret } @@ -197,15 +204,15 @@ func (o *IntrospectedOAuth2Token) GetExt() map[string]interface{} { // GetExtOk returns a tuple with the Ext field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetExtOk() (map[string]interface{}, bool) { - if o == nil || o.Ext == nil { - return nil, false + if o == nil || IsNil(o.Ext) { + return map[string]interface{}{}, false } return o.Ext, true } // HasExt returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasExt() bool { - if o != nil && o.Ext != nil { + if o != nil && !IsNil(o.Ext) { return true } @@ -219,7 +226,7 @@ func (o *IntrospectedOAuth2Token) SetExt(v map[string]interface{}) { // GetIat returns the Iat field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetIat() int64 { - if o == nil || o.Iat == nil { + if o == nil || IsNil(o.Iat) { var ret int64 return ret } @@ -229,7 +236,7 @@ func (o *IntrospectedOAuth2Token) GetIat() int64 { // GetIatOk returns a tuple with the Iat field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetIatOk() (*int64, bool) { - if o == nil || o.Iat == nil { + if o == nil || IsNil(o.Iat) { return nil, false } return o.Iat, true @@ -237,7 +244,7 @@ func (o *IntrospectedOAuth2Token) GetIatOk() (*int64, bool) { // HasIat returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasIat() bool { - if o != nil && o.Iat != nil { + if o != nil && !IsNil(o.Iat) { return true } @@ -251,7 +258,7 @@ func (o *IntrospectedOAuth2Token) SetIat(v int64) { // GetIss returns the Iss field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetIss() string { - if o == nil || o.Iss == nil { + if o == nil || IsNil(o.Iss) { var ret string return ret } @@ -261,7 +268,7 @@ func (o *IntrospectedOAuth2Token) GetIss() string { // GetIssOk returns a tuple with the Iss field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetIssOk() (*string, bool) { - if o == nil || o.Iss == nil { + if o == nil || IsNil(o.Iss) { return nil, false } return o.Iss, true @@ -269,7 +276,7 @@ func (o *IntrospectedOAuth2Token) GetIssOk() (*string, bool) { // HasIss returns a boolean if a field has been set. 
func (o *IntrospectedOAuth2Token) HasIss() bool { - if o != nil && o.Iss != nil { + if o != nil && !IsNil(o.Iss) { return true } @@ -283,7 +290,7 @@ func (o *IntrospectedOAuth2Token) SetIss(v string) { // GetNbf returns the Nbf field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetNbf() int64 { - if o == nil || o.Nbf == nil { + if o == nil || IsNil(o.Nbf) { var ret int64 return ret } @@ -293,7 +300,7 @@ func (o *IntrospectedOAuth2Token) GetNbf() int64 { // GetNbfOk returns a tuple with the Nbf field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetNbfOk() (*int64, bool) { - if o == nil || o.Nbf == nil { + if o == nil || IsNil(o.Nbf) { return nil, false } return o.Nbf, true @@ -301,7 +308,7 @@ func (o *IntrospectedOAuth2Token) GetNbfOk() (*int64, bool) { // HasNbf returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasNbf() bool { - if o != nil && o.Nbf != nil { + if o != nil && !IsNil(o.Nbf) { return true } @@ -315,7 +322,7 @@ func (o *IntrospectedOAuth2Token) SetNbf(v int64) { // GetObfuscatedSubject returns the ObfuscatedSubject field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetObfuscatedSubject() string { - if o == nil || o.ObfuscatedSubject == nil { + if o == nil || IsNil(o.ObfuscatedSubject) { var ret string return ret } @@ -325,7 +332,7 @@ func (o *IntrospectedOAuth2Token) GetObfuscatedSubject() string { // GetObfuscatedSubjectOk returns a tuple with the ObfuscatedSubject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetObfuscatedSubjectOk() (*string, bool) { - if o == nil || o.ObfuscatedSubject == nil { + if o == nil || IsNil(o.ObfuscatedSubject) { return nil, false } return o.ObfuscatedSubject, true @@ -333,7 +340,7 @@ func (o *IntrospectedOAuth2Token) GetObfuscatedSubjectOk() (*string, bool) { // HasObfuscatedSubject returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasObfuscatedSubject() bool { - if o != nil && o.ObfuscatedSubject != nil { + if o != nil && !IsNil(o.ObfuscatedSubject) { return true } @@ -347,7 +354,7 @@ func (o *IntrospectedOAuth2Token) SetObfuscatedSubject(v string) { // GetScope returns the Scope field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetScope() string { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { var ret string return ret } @@ -357,7 +364,7 @@ func (o *IntrospectedOAuth2Token) GetScope() string { // GetScopeOk returns a tuple with the Scope field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetScopeOk() (*string, bool) { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { return nil, false } return o.Scope, true @@ -365,7 +372,7 @@ func (o *IntrospectedOAuth2Token) GetScopeOk() (*string, bool) { // HasScope returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasScope() bool { - if o != nil && o.Scope != nil { + if o != nil && !IsNil(o.Scope) { return true } @@ -379,7 +386,7 @@ func (o *IntrospectedOAuth2Token) SetScope(v string) { // GetSub returns the Sub field value if set, zero value otherwise. 
func (o *IntrospectedOAuth2Token) GetSub() string { - if o == nil || o.Sub == nil { + if o == nil || IsNil(o.Sub) { var ret string return ret } @@ -389,7 +396,7 @@ func (o *IntrospectedOAuth2Token) GetSub() string { // GetSubOk returns a tuple with the Sub field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetSubOk() (*string, bool) { - if o == nil || o.Sub == nil { + if o == nil || IsNil(o.Sub) { return nil, false } return o.Sub, true @@ -397,7 +404,7 @@ func (o *IntrospectedOAuth2Token) GetSubOk() (*string, bool) { // HasSub returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasSub() bool { - if o != nil && o.Sub != nil { + if o != nil && !IsNil(o.Sub) { return true } @@ -411,7 +418,7 @@ func (o *IntrospectedOAuth2Token) SetSub(v string) { // GetTokenType returns the TokenType field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetTokenType() string { - if o == nil || o.TokenType == nil { + if o == nil || IsNil(o.TokenType) { var ret string return ret } @@ -421,7 +428,7 @@ func (o *IntrospectedOAuth2Token) GetTokenType() string { // GetTokenTypeOk returns a tuple with the TokenType field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetTokenTypeOk() (*string, bool) { - if o == nil || o.TokenType == nil { + if o == nil || IsNil(o.TokenType) { return nil, false } return o.TokenType, true @@ -429,7 +436,7 @@ func (o *IntrospectedOAuth2Token) GetTokenTypeOk() (*string, bool) { // HasTokenType returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasTokenType() bool { - if o != nil && o.TokenType != nil { + if o != nil && !IsNil(o.TokenType) { return true } @@ -443,7 +450,7 @@ func (o *IntrospectedOAuth2Token) SetTokenType(v string) { // GetTokenUse returns the TokenUse field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetTokenUse() string { - if o == nil || o.TokenUse == nil { + if o == nil || IsNil(o.TokenUse) { var ret string return ret } @@ -453,7 +460,7 @@ func (o *IntrospectedOAuth2Token) GetTokenUse() string { // GetTokenUseOk returns a tuple with the TokenUse field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IntrospectedOAuth2Token) GetTokenUseOk() (*string, bool) { - if o == nil || o.TokenUse == nil { + if o == nil || IsNil(o.TokenUse) { return nil, false } return o.TokenUse, true @@ -461,7 +468,7 @@ func (o *IntrospectedOAuth2Token) GetTokenUseOk() (*string, bool) { // HasTokenUse returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasTokenUse() bool { - if o != nil && o.TokenUse != nil { + if o != nil && !IsNil(o.TokenUse) { return true } @@ -475,7 +482,7 @@ func (o *IntrospectedOAuth2Token) SetTokenUse(v string) { // GetUsername returns the Username field value if set, zero value otherwise. func (o *IntrospectedOAuth2Token) GetUsername() string { - if o == nil || o.Username == nil { + if o == nil || IsNil(o.Username) { var ret string return ret } @@ -485,7 +492,7 @@ func (o *IntrospectedOAuth2Token) GetUsername() string { // GetUsernameOk returns a tuple with the Username field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *IntrospectedOAuth2Token) GetUsernameOk() (*string, bool) { - if o == nil || o.Username == nil { + if o == nil || IsNil(o.Username) { return nil, false } return o.Username, true @@ -493,7 +500,7 @@ func (o *IntrospectedOAuth2Token) GetUsernameOk() (*string, bool) { // HasUsername returns a boolean if a field has been set. func (o *IntrospectedOAuth2Token) HasUsername() bool { - if o != nil && o.Username != nil { + if o != nil && !IsNil(o.Username) { return true } @@ -506,50 +513,93 @@ func (o *IntrospectedOAuth2Token) SetUsername(v string) { } func (o IntrospectedOAuth2Token) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if true { - toSerialize["active"] = o.Active + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err } - if o.Aud != nil { + return json.Marshal(toSerialize) +} + +func (o IntrospectedOAuth2Token) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["active"] = o.Active + if !IsNil(o.Aud) { toSerialize["aud"] = o.Aud } - if o.ClientId != nil { + if !IsNil(o.ClientId) { toSerialize["client_id"] = o.ClientId } - if o.Exp != nil { + if !IsNil(o.Exp) { toSerialize["exp"] = o.Exp } - if o.Ext != nil { + if !IsNil(o.Ext) { toSerialize["ext"] = o.Ext } - if o.Iat != nil { + if !IsNil(o.Iat) { toSerialize["iat"] = o.Iat } - if o.Iss != nil { + if !IsNil(o.Iss) { toSerialize["iss"] = o.Iss } - if o.Nbf != nil { + if !IsNil(o.Nbf) { toSerialize["nbf"] = o.Nbf } - if o.ObfuscatedSubject != nil { + if !IsNil(o.ObfuscatedSubject) { toSerialize["obfuscated_subject"] = o.ObfuscatedSubject } - if o.Scope != nil { + if !IsNil(o.Scope) { toSerialize["scope"] = o.Scope } - if o.Sub != nil { + if !IsNil(o.Sub) { toSerialize["sub"] = o.Sub } - if o.TokenType != nil { + if !IsNil(o.TokenType) { toSerialize["token_type"] = o.TokenType } - if o.TokenUse != nil { + if !IsNil(o.TokenUse) { toSerialize["token_use"] = o.TokenUse } - if o.Username != nil { + if !IsNil(o.Username) { toSerialize["username"] = o.Username } - return json.Marshal(toSerialize) + return toSerialize, nil +} + +func (o *IntrospectedOAuth2Token) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. 
+ requiredProperties := []string{ + "active", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varIntrospectedOAuth2Token := _IntrospectedOAuth2Token{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varIntrospectedOAuth2Token) + + if err != nil { + return err + } + + *o = IntrospectedOAuth2Token(varIntrospectedOAuth2Token) + + return err } type NullableIntrospectedOAuth2Token struct { diff --git a/internal/httpclient/model_is_ready_200_response.go b/internal/httpclient/model_is_ready_200_response.go index f7b8957c70f..8a44bacc885 100644 --- a/internal/httpclient/model_is_ready_200_response.go +++ b/internal/httpclient/model_is_ready_200_response.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the IsReady200Response type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &IsReady200Response{} + // IsReady200Response struct for IsReady200Response type IsReady200Response struct { // Always \"ok\". @@ -40,7 +43,7 @@ func NewIsReady200ResponseWithDefaults() *IsReady200Response { // GetStatus returns the Status field value if set, zero value otherwise. func (o *IsReady200Response) GetStatus() string { - if o == nil || o.Status == nil { + if o == nil || IsNil(o.Status) { var ret string return ret } @@ -50,7 +53,7 @@ func (o *IsReady200Response) GetStatus() string { // GetStatusOk returns a tuple with the Status field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IsReady200Response) GetStatusOk() (*string, bool) { - if o == nil || o.Status == nil { + if o == nil || IsNil(o.Status) { return nil, false } return o.Status, true @@ -58,7 +61,7 @@ func (o *IsReady200Response) GetStatusOk() (*string, bool) { // HasStatus returns a boolean if a field has been set. func (o *IsReady200Response) HasStatus() bool { - if o != nil && o.Status != nil { + if o != nil && !IsNil(o.Status) { return true } @@ -71,11 +74,19 @@ func (o *IsReady200Response) SetStatus(v string) { } func (o IsReady200Response) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o IsReady200Response) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Status != nil { + if !IsNil(o.Status) { toSerialize["status"] = o.Status } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableIsReady200Response struct { diff --git a/internal/httpclient/model_is_ready_503_response.go b/internal/httpclient/model_is_ready_503_response.go index 14788440b2b..f0696c01c6f 100644 --- a/internal/httpclient/model_is_ready_503_response.go +++ b/internal/httpclient/model_is_ready_503_response.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the IsReady503Response type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &IsReady503Response{} + // IsReady503Response struct for IsReady503Response type IsReady503Response struct { // Errors contains a list of errors that caused the not ready status. 
@@ -40,7 +43,7 @@ func NewIsReady503ResponseWithDefaults() *IsReady503Response { // GetErrors returns the Errors field value if set, zero value otherwise. func (o *IsReady503Response) GetErrors() map[string]string { - if o == nil || o.Errors == nil { + if o == nil || IsNil(o.Errors) { var ret map[string]string return ret } @@ -50,7 +53,7 @@ func (o *IsReady503Response) GetErrors() map[string]string { // GetErrorsOk returns a tuple with the Errors field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *IsReady503Response) GetErrorsOk() (*map[string]string, bool) { - if o == nil || o.Errors == nil { + if o == nil || IsNil(o.Errors) { return nil, false } return o.Errors, true @@ -58,7 +61,7 @@ func (o *IsReady503Response) GetErrorsOk() (*map[string]string, bool) { // HasErrors returns a boolean if a field has been set. func (o *IsReady503Response) HasErrors() bool { - if o != nil && o.Errors != nil { + if o != nil && !IsNil(o.Errors) { return true } @@ -71,11 +74,19 @@ func (o *IsReady503Response) SetErrors(v map[string]string) { } func (o IsReady503Response) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o IsReady503Response) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Errors != nil { + if !IsNil(o.Errors) { toSerialize["errors"] = o.Errors } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableIsReady503Response struct { diff --git a/internal/httpclient/model_json_patch.go b/internal/httpclient/model_json_patch.go index 4489698fd83..b6a2b6ecc2e 100644 --- a/internal/httpclient/model_json_patch.go +++ b/internal/httpclient/model_json_patch.go @@ -12,9 +12,14 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the JsonPatch type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &JsonPatch{} + // JsonPatch A JSONPatch document as defined by RFC 6902 type JsonPatch struct { // This field is used together with operation \"move\" and uses JSON Pointer notation. Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). @@ -27,6 +32,8 @@ type JsonPatch struct { Value interface{} `json:"value,omitempty"` } +type _JsonPatch JsonPatch + // NewJsonPatch instantiates a new JsonPatch object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -48,7 +55,7 @@ func NewJsonPatchWithDefaults() *JsonPatch { // GetFrom returns the From field value if set, zero value otherwise. func (o *JsonPatch) GetFrom() string { - if o == nil || o.From == nil { + if o == nil || IsNil(o.From) { var ret string return ret } @@ -58,7 +65,7 @@ func (o *JsonPatch) GetFrom() string { // GetFromOk returns a tuple with the From field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonPatch) GetFromOk() (*string, bool) { - if o == nil || o.From == nil { + if o == nil || IsNil(o.From) { return nil, false } return o.From, true @@ -66,7 +73,7 @@ func (o *JsonPatch) GetFromOk() (*string, bool) { // HasFrom returns a boolean if a field has been set. 
func (o *JsonPatch) HasFrom() bool { - if o != nil && o.From != nil { + if o != nil && !IsNil(o.From) { return true } @@ -139,7 +146,7 @@ func (o *JsonPatch) GetValue() interface{} { // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *JsonPatch) GetValueOk() (*interface{}, bool) { - if o == nil || o.Value == nil { + if o == nil || IsNil(o.Value) { return nil, false } return &o.Value, true @@ -147,7 +154,7 @@ func (o *JsonPatch) GetValueOk() (*interface{}, bool) { // HasValue returns a boolean if a field has been set. func (o *JsonPatch) HasValue() bool { - if o != nil && o.Value != nil { + if o != nil && !IsNil(o.Value) { return true } @@ -160,20 +167,62 @@ func (o *JsonPatch) SetValue(v interface{}) { } func (o JsonPatch) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o JsonPatch) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.From != nil { + if !IsNil(o.From) { toSerialize["from"] = o.From } - if true { - toSerialize["op"] = o.Op - } - if true { - toSerialize["path"] = o.Path - } + toSerialize["op"] = o.Op + toSerialize["path"] = o.Path if o.Value != nil { toSerialize["value"] = o.Value } - return json.Marshal(toSerialize) + return toSerialize, nil +} + +func (o *JsonPatch) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. + requiredProperties := []string{ + "op", + "path", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varJsonPatch := _JsonPatch{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varJsonPatch) + + if err != nil { + return err + } + + *o = JsonPatch(varJsonPatch) + + return err } type NullableJsonPatch struct { diff --git a/internal/httpclient/model_json_web_key.go b/internal/httpclient/model_json_web_key.go index a56124b5ec1..c10a80c8fe9 100644 --- a/internal/httpclient/model_json_web_key.go +++ b/internal/httpclient/model_json_web_key.go @@ -12,9 +12,14 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the JsonWebKey type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &JsonWebKey{} + // JsonWebKey struct for JsonWebKey type JsonWebKey struct { // The \"alg\" (algorithm) parameter identifies the algorithm intended for use with the key. The values used should either be registered in the IANA \"JSON Web Signature and Encryption Algorithms\" registry established by [JWA] or be a value that contains a Collision- Resistant Name. 
@@ -41,6 +46,8 @@ type JsonWebKey struct { Y *string `json:"y,omitempty"` } +type _JsonWebKey JsonWebKey + // NewJsonWebKey instantiates a new JsonWebKey object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -88,7 +95,7 @@ func (o *JsonWebKey) SetAlg(v string) { // GetCrv returns the Crv field value if set, zero value otherwise. func (o *JsonWebKey) GetCrv() string { - if o == nil || o.Crv == nil { + if o == nil || IsNil(o.Crv) { var ret string return ret } @@ -98,7 +105,7 @@ func (o *JsonWebKey) GetCrv() string { // GetCrvOk returns a tuple with the Crv field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetCrvOk() (*string, bool) { - if o == nil || o.Crv == nil { + if o == nil || IsNil(o.Crv) { return nil, false } return o.Crv, true @@ -106,7 +113,7 @@ func (o *JsonWebKey) GetCrvOk() (*string, bool) { // HasCrv returns a boolean if a field has been set. func (o *JsonWebKey) HasCrv() bool { - if o != nil && o.Crv != nil { + if o != nil && !IsNil(o.Crv) { return true } @@ -120,7 +127,7 @@ func (o *JsonWebKey) SetCrv(v string) { // GetD returns the D field value if set, zero value otherwise. func (o *JsonWebKey) GetD() string { - if o == nil || o.D == nil { + if o == nil || IsNil(o.D) { var ret string return ret } @@ -130,7 +137,7 @@ func (o *JsonWebKey) GetD() string { // GetDOk returns a tuple with the D field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetDOk() (*string, bool) { - if o == nil || o.D == nil { + if o == nil || IsNil(o.D) { return nil, false } return o.D, true @@ -138,7 +145,7 @@ func (o *JsonWebKey) GetDOk() (*string, bool) { // HasD returns a boolean if a field has been set. func (o *JsonWebKey) HasD() bool { - if o != nil && o.D != nil { + if o != nil && !IsNil(o.D) { return true } @@ -152,7 +159,7 @@ func (o *JsonWebKey) SetD(v string) { // GetDp returns the Dp field value if set, zero value otherwise. func (o *JsonWebKey) GetDp() string { - if o == nil || o.Dp == nil { + if o == nil || IsNil(o.Dp) { var ret string return ret } @@ -162,7 +169,7 @@ func (o *JsonWebKey) GetDp() string { // GetDpOk returns a tuple with the Dp field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetDpOk() (*string, bool) { - if o == nil || o.Dp == nil { + if o == nil || IsNil(o.Dp) { return nil, false } return o.Dp, true @@ -170,7 +177,7 @@ func (o *JsonWebKey) GetDpOk() (*string, bool) { // HasDp returns a boolean if a field has been set. func (o *JsonWebKey) HasDp() bool { - if o != nil && o.Dp != nil { + if o != nil && !IsNil(o.Dp) { return true } @@ -184,7 +191,7 @@ func (o *JsonWebKey) SetDp(v string) { // GetDq returns the Dq field value if set, zero value otherwise. func (o *JsonWebKey) GetDq() string { - if o == nil || o.Dq == nil { + if o == nil || IsNil(o.Dq) { var ret string return ret } @@ -194,7 +201,7 @@ func (o *JsonWebKey) GetDq() string { // GetDqOk returns a tuple with the Dq field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetDqOk() (*string, bool) { - if o == nil || o.Dq == nil { + if o == nil || IsNil(o.Dq) { return nil, false } return o.Dq, true @@ -202,7 +209,7 @@ func (o *JsonWebKey) GetDqOk() (*string, bool) { // HasDq returns a boolean if a field has been set. 
func (o *JsonWebKey) HasDq() bool { - if o != nil && o.Dq != nil { + if o != nil && !IsNil(o.Dq) { return true } @@ -216,7 +223,7 @@ func (o *JsonWebKey) SetDq(v string) { // GetE returns the E field value if set, zero value otherwise. func (o *JsonWebKey) GetE() string { - if o == nil || o.E == nil { + if o == nil || IsNil(o.E) { var ret string return ret } @@ -226,7 +233,7 @@ func (o *JsonWebKey) GetE() string { // GetEOk returns a tuple with the E field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetEOk() (*string, bool) { - if o == nil || o.E == nil { + if o == nil || IsNil(o.E) { return nil, false } return o.E, true @@ -234,7 +241,7 @@ func (o *JsonWebKey) GetEOk() (*string, bool) { // HasE returns a boolean if a field has been set. func (o *JsonWebKey) HasE() bool { - if o != nil && o.E != nil { + if o != nil && !IsNil(o.E) { return true } @@ -248,7 +255,7 @@ func (o *JsonWebKey) SetE(v string) { // GetK returns the K field value if set, zero value otherwise. func (o *JsonWebKey) GetK() string { - if o == nil || o.K == nil { + if o == nil || IsNil(o.K) { var ret string return ret } @@ -258,7 +265,7 @@ func (o *JsonWebKey) GetK() string { // GetKOk returns a tuple with the K field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetKOk() (*string, bool) { - if o == nil || o.K == nil { + if o == nil || IsNil(o.K) { return nil, false } return o.K, true @@ -266,7 +273,7 @@ func (o *JsonWebKey) GetKOk() (*string, bool) { // HasK returns a boolean if a field has been set. func (o *JsonWebKey) HasK() bool { - if o != nil && o.K != nil { + if o != nil && !IsNil(o.K) { return true } @@ -328,7 +335,7 @@ func (o *JsonWebKey) SetKty(v string) { // GetN returns the N field value if set, zero value otherwise. func (o *JsonWebKey) GetN() string { - if o == nil || o.N == nil { + if o == nil || IsNil(o.N) { var ret string return ret } @@ -338,7 +345,7 @@ func (o *JsonWebKey) GetN() string { // GetNOk returns a tuple with the N field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetNOk() (*string, bool) { - if o == nil || o.N == nil { + if o == nil || IsNil(o.N) { return nil, false } return o.N, true @@ -346,7 +353,7 @@ func (o *JsonWebKey) GetNOk() (*string, bool) { // HasN returns a boolean if a field has been set. func (o *JsonWebKey) HasN() bool { - if o != nil && o.N != nil { + if o != nil && !IsNil(o.N) { return true } @@ -360,7 +367,7 @@ func (o *JsonWebKey) SetN(v string) { // GetP returns the P field value if set, zero value otherwise. func (o *JsonWebKey) GetP() string { - if o == nil || o.P == nil { + if o == nil || IsNil(o.P) { var ret string return ret } @@ -370,7 +377,7 @@ func (o *JsonWebKey) GetP() string { // GetPOk returns a tuple with the P field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetPOk() (*string, bool) { - if o == nil || o.P == nil { + if o == nil || IsNil(o.P) { return nil, false } return o.P, true @@ -378,7 +385,7 @@ func (o *JsonWebKey) GetPOk() (*string, bool) { // HasP returns a boolean if a field has been set. func (o *JsonWebKey) HasP() bool { - if o != nil && o.P != nil { + if o != nil && !IsNil(o.P) { return true } @@ -392,7 +399,7 @@ func (o *JsonWebKey) SetP(v string) { // GetQ returns the Q field value if set, zero value otherwise. 
func (o *JsonWebKey) GetQ() string { - if o == nil || o.Q == nil { + if o == nil || IsNil(o.Q) { var ret string return ret } @@ -402,7 +409,7 @@ func (o *JsonWebKey) GetQ() string { // GetQOk returns a tuple with the Q field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetQOk() (*string, bool) { - if o == nil || o.Q == nil { + if o == nil || IsNil(o.Q) { return nil, false } return o.Q, true @@ -410,7 +417,7 @@ func (o *JsonWebKey) GetQOk() (*string, bool) { // HasQ returns a boolean if a field has been set. func (o *JsonWebKey) HasQ() bool { - if o != nil && o.Q != nil { + if o != nil && !IsNil(o.Q) { return true } @@ -424,7 +431,7 @@ func (o *JsonWebKey) SetQ(v string) { // GetQi returns the Qi field value if set, zero value otherwise. func (o *JsonWebKey) GetQi() string { - if o == nil || o.Qi == nil { + if o == nil || IsNil(o.Qi) { var ret string return ret } @@ -434,7 +441,7 @@ func (o *JsonWebKey) GetQi() string { // GetQiOk returns a tuple with the Qi field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetQiOk() (*string, bool) { - if o == nil || o.Qi == nil { + if o == nil || IsNil(o.Qi) { return nil, false } return o.Qi, true @@ -442,7 +449,7 @@ func (o *JsonWebKey) GetQiOk() (*string, bool) { // HasQi returns a boolean if a field has been set. func (o *JsonWebKey) HasQi() bool { - if o != nil && o.Qi != nil { + if o != nil && !IsNil(o.Qi) { return true } @@ -480,7 +487,7 @@ func (o *JsonWebKey) SetUse(v string) { // GetX returns the X field value if set, zero value otherwise. func (o *JsonWebKey) GetX() string { - if o == nil || o.X == nil { + if o == nil || IsNil(o.X) { var ret string return ret } @@ -490,7 +497,7 @@ func (o *JsonWebKey) GetX() string { // GetXOk returns a tuple with the X field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetXOk() (*string, bool) { - if o == nil || o.X == nil { + if o == nil || IsNil(o.X) { return nil, false } return o.X, true @@ -498,7 +505,7 @@ func (o *JsonWebKey) GetXOk() (*string, bool) { // HasX returns a boolean if a field has been set. func (o *JsonWebKey) HasX() bool { - if o != nil && o.X != nil { + if o != nil && !IsNil(o.X) { return true } @@ -512,7 +519,7 @@ func (o *JsonWebKey) SetX(v string) { // GetX5c returns the X5c field value if set, zero value otherwise. func (o *JsonWebKey) GetX5c() []string { - if o == nil || o.X5c == nil { + if o == nil || IsNil(o.X5c) { var ret []string return ret } @@ -522,7 +529,7 @@ func (o *JsonWebKey) GetX5c() []string { // GetX5cOk returns a tuple with the X5c field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetX5cOk() ([]string, bool) { - if o == nil || o.X5c == nil { + if o == nil || IsNil(o.X5c) { return nil, false } return o.X5c, true @@ -530,7 +537,7 @@ func (o *JsonWebKey) GetX5cOk() ([]string, bool) { // HasX5c returns a boolean if a field has been set. func (o *JsonWebKey) HasX5c() bool { - if o != nil && o.X5c != nil { + if o != nil && !IsNil(o.X5c) { return true } @@ -544,7 +551,7 @@ func (o *JsonWebKey) SetX5c(v []string) { // GetY returns the Y field value if set, zero value otherwise. 
func (o *JsonWebKey) GetY() string { - if o == nil || o.Y == nil { + if o == nil || IsNil(o.Y) { var ret string return ret } @@ -554,7 +561,7 @@ func (o *JsonWebKey) GetY() string { // GetYOk returns a tuple with the Y field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKey) GetYOk() (*string, bool) { - if o == nil || o.Y == nil { + if o == nil || IsNil(o.Y) { return nil, false } return o.Y, true @@ -562,7 +569,7 @@ func (o *JsonWebKey) GetYOk() (*string, bool) { // HasY returns a boolean if a field has been set. func (o *JsonWebKey) HasY() bool { - if o != nil && o.Y != nil { + if o != nil && !IsNil(o.Y) { return true } @@ -575,59 +582,99 @@ func (o *JsonWebKey) SetY(v string) { } func (o JsonWebKey) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if true { - toSerialize["alg"] = o.Alg + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err } - if o.Crv != nil { + return json.Marshal(toSerialize) +} + +func (o JsonWebKey) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["alg"] = o.Alg + if !IsNil(o.Crv) { toSerialize["crv"] = o.Crv } - if o.D != nil { + if !IsNil(o.D) { toSerialize["d"] = o.D } - if o.Dp != nil { + if !IsNil(o.Dp) { toSerialize["dp"] = o.Dp } - if o.Dq != nil { + if !IsNil(o.Dq) { toSerialize["dq"] = o.Dq } - if o.E != nil { + if !IsNil(o.E) { toSerialize["e"] = o.E } - if o.K != nil { + if !IsNil(o.K) { toSerialize["k"] = o.K } - if true { - toSerialize["kid"] = o.Kid - } - if true { - toSerialize["kty"] = o.Kty - } - if o.N != nil { + toSerialize["kid"] = o.Kid + toSerialize["kty"] = o.Kty + if !IsNil(o.N) { toSerialize["n"] = o.N } - if o.P != nil { + if !IsNil(o.P) { toSerialize["p"] = o.P } - if o.Q != nil { + if !IsNil(o.Q) { toSerialize["q"] = o.Q } - if o.Qi != nil { + if !IsNil(o.Qi) { toSerialize["qi"] = o.Qi } - if true { - toSerialize["use"] = o.Use - } - if o.X != nil { + toSerialize["use"] = o.Use + if !IsNil(o.X) { toSerialize["x"] = o.X } - if o.X5c != nil { + if !IsNil(o.X5c) { toSerialize["x5c"] = o.X5c } - if o.Y != nil { + if !IsNil(o.Y) { toSerialize["y"] = o.Y } - return json.Marshal(toSerialize) + return toSerialize, nil +} + +func (o *JsonWebKey) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. 
+ requiredProperties := []string{ + "alg", + "kid", + "kty", + "use", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varJsonWebKey := _JsonWebKey{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varJsonWebKey) + + if err != nil { + return err + } + + *o = JsonWebKey(varJsonWebKey) + + return err } type NullableJsonWebKey struct { diff --git a/internal/httpclient/model_json_web_key_set.go b/internal/httpclient/model_json_web_key_set.go index 6d328f6615d..28820351167 100644 --- a/internal/httpclient/model_json_web_key_set.go +++ b/internal/httpclient/model_json_web_key_set.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the JsonWebKeySet type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &JsonWebKeySet{} + // JsonWebKeySet JSON Web Key Set type JsonWebKeySet struct { // List of JSON Web Keys The value of the \"keys\" parameter is an array of JSON Web Key (JWK) values. By default, the order of the JWK values within the array does not imply an order of preference among them, although applications of JWK Sets can choose to assign a meaning to the order for their purposes, if desired. @@ -40,7 +43,7 @@ func NewJsonWebKeySetWithDefaults() *JsonWebKeySet { // GetKeys returns the Keys field value if set, zero value otherwise. func (o *JsonWebKeySet) GetKeys() []JsonWebKey { - if o == nil || o.Keys == nil { + if o == nil || IsNil(o.Keys) { var ret []JsonWebKey return ret } @@ -50,7 +53,7 @@ func (o *JsonWebKeySet) GetKeys() []JsonWebKey { // GetKeysOk returns a tuple with the Keys field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *JsonWebKeySet) GetKeysOk() ([]JsonWebKey, bool) { - if o == nil || o.Keys == nil { + if o == nil || IsNil(o.Keys) { return nil, false } return o.Keys, true @@ -58,7 +61,7 @@ func (o *JsonWebKeySet) GetKeysOk() ([]JsonWebKey, bool) { // HasKeys returns a boolean if a field has been set. func (o *JsonWebKeySet) HasKeys() bool { - if o != nil && o.Keys != nil { + if o != nil && !IsNil(o.Keys) { return true } @@ -71,11 +74,19 @@ func (o *JsonWebKeySet) SetKeys(v []JsonWebKey) { } func (o JsonWebKeySet) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o JsonWebKeySet) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Keys != nil { + if !IsNil(o.Keys) { toSerialize["keys"] = o.Keys } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableJsonWebKeySet struct { diff --git a/internal/httpclient/model_keyset_pagination_request_parameters.go b/internal/httpclient/model_keyset_pagination_request_parameters.go new file mode 100644 index 00000000000..a4eb09f50dc --- /dev/null +++ b/internal/httpclient/model_keyset_pagination_request_parameters.go @@ -0,0 +1,167 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package openapi + +import ( + "encoding/json" +) + +// checks if the KeysetPaginationRequestParameters type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &KeysetPaginationRequestParameters{} + +// KeysetPaginationRequestParameters For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +type KeysetPaginationRequestParameters struct { + // Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + PageSize *int64 `json:"page_size,omitempty"` + // Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + PageToken *string `json:"page_token,omitempty"` +} + +// NewKeysetPaginationRequestParameters instantiates a new KeysetPaginationRequestParameters object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewKeysetPaginationRequestParameters() *KeysetPaginationRequestParameters { + this := KeysetPaginationRequestParameters{} + var pageSize int64 = 250 + this.PageSize = &pageSize + return &this +} + +// NewKeysetPaginationRequestParametersWithDefaults instantiates a new KeysetPaginationRequestParameters object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewKeysetPaginationRequestParametersWithDefaults() *KeysetPaginationRequestParameters { + this := KeysetPaginationRequestParameters{} + var pageSize int64 = 250 + this.PageSize = &pageSize + return &this +} + +// GetPageSize returns the PageSize field value if set, zero value otherwise. +func (o *KeysetPaginationRequestParameters) GetPageSize() int64 { + if o == nil || IsNil(o.PageSize) { + var ret int64 + return ret + } + return *o.PageSize +} + +// GetPageSizeOk returns a tuple with the PageSize field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *KeysetPaginationRequestParameters) GetPageSizeOk() (*int64, bool) { + if o == nil || IsNil(o.PageSize) { + return nil, false + } + return o.PageSize, true +} + +// HasPageSize returns a boolean if a field has been set. +func (o *KeysetPaginationRequestParameters) HasPageSize() bool { + if o != nil && !IsNil(o.PageSize) { + return true + } + + return false +} + +// SetPageSize gets a reference to the given int64 and assigns it to the PageSize field. +func (o *KeysetPaginationRequestParameters) SetPageSize(v int64) { + o.PageSize = &v +} + +// GetPageToken returns the PageToken field value if set, zero value otherwise. +func (o *KeysetPaginationRequestParameters) GetPageToken() string { + if o == nil || IsNil(o.PageToken) { + var ret string + return ret + } + return *o.PageToken +} + +// GetPageTokenOk returns a tuple with the PageToken field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *KeysetPaginationRequestParameters) GetPageTokenOk() (*string, bool) { + if o == nil || IsNil(o.PageToken) { + return nil, false + } + return o.PageToken, true +} + +// HasPageToken returns a boolean if a field has been set. 
+func (o *KeysetPaginationRequestParameters) HasPageToken() bool { + if o != nil && !IsNil(o.PageToken) { + return true + } + + return false +} + +// SetPageToken gets a reference to the given string and assigns it to the PageToken field. +func (o *KeysetPaginationRequestParameters) SetPageToken(v string) { + o.PageToken = &v +} + +func (o KeysetPaginationRequestParameters) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o KeysetPaginationRequestParameters) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.PageSize) { + toSerialize["page_size"] = o.PageSize + } + if !IsNil(o.PageToken) { + toSerialize["page_token"] = o.PageToken + } + return toSerialize, nil +} + +type NullableKeysetPaginationRequestParameters struct { + value *KeysetPaginationRequestParameters + isSet bool +} + +func (v NullableKeysetPaginationRequestParameters) Get() *KeysetPaginationRequestParameters { + return v.value +} + +func (v *NullableKeysetPaginationRequestParameters) Set(val *KeysetPaginationRequestParameters) { + v.value = val + v.isSet = true +} + +func (v NullableKeysetPaginationRequestParameters) IsSet() bool { + return v.isSet +} + +func (v *NullableKeysetPaginationRequestParameters) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableKeysetPaginationRequestParameters(val *KeysetPaginationRequestParameters) *NullableKeysetPaginationRequestParameters { + return &NullableKeysetPaginationRequestParameters{value: val, isSet: true} +} + +func (v NullableKeysetPaginationRequestParameters) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableKeysetPaginationRequestParameters) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_keyset_pagination_response_headers.go b/internal/httpclient/model_keyset_pagination_response_headers.go new file mode 100644 index 00000000000..570230de4d8 --- /dev/null +++ b/internal/httpclient/model_keyset_pagination_response_headers.go @@ -0,0 +1,126 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the KeysetPaginationResponseHeaders type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &KeysetPaginationResponseHeaders{} + +// KeysetPaginationResponseHeaders The `Link` HTTP header contains multiple links (`first`, `next`) formatted as: `; rel=\"first\"` For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +type KeysetPaginationResponseHeaders struct { + // The Link HTTP Header The `Link` header contains a comma-delimited list of links to the following pages: first: The first page of results. next: The next page of results. Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. 
Examples: ; rel=\"next\" + Link *string `json:"link,omitempty"` +} + +// NewKeysetPaginationResponseHeaders instantiates a new KeysetPaginationResponseHeaders object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewKeysetPaginationResponseHeaders() *KeysetPaginationResponseHeaders { + this := KeysetPaginationResponseHeaders{} + return &this +} + +// NewKeysetPaginationResponseHeadersWithDefaults instantiates a new KeysetPaginationResponseHeaders object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewKeysetPaginationResponseHeadersWithDefaults() *KeysetPaginationResponseHeaders { + this := KeysetPaginationResponseHeaders{} + return &this +} + +// GetLink returns the Link field value if set, zero value otherwise. +func (o *KeysetPaginationResponseHeaders) GetLink() string { + if o == nil || IsNil(o.Link) { + var ret string + return ret + } + return *o.Link +} + +// GetLinkOk returns a tuple with the Link field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *KeysetPaginationResponseHeaders) GetLinkOk() (*string, bool) { + if o == nil || IsNil(o.Link) { + return nil, false + } + return o.Link, true +} + +// HasLink returns a boolean if a field has been set. +func (o *KeysetPaginationResponseHeaders) HasLink() bool { + if o != nil && !IsNil(o.Link) { + return true + } + + return false +} + +// SetLink gets a reference to the given string and assigns it to the Link field. +func (o *KeysetPaginationResponseHeaders) SetLink(v string) { + o.Link = &v +} + +func (o KeysetPaginationResponseHeaders) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o KeysetPaginationResponseHeaders) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Link) { + toSerialize["link"] = o.Link + } + return toSerialize, nil +} + +type NullableKeysetPaginationResponseHeaders struct { + value *KeysetPaginationResponseHeaders + isSet bool +} + +func (v NullableKeysetPaginationResponseHeaders) Get() *KeysetPaginationResponseHeaders { + return v.value +} + +func (v *NullableKeysetPaginationResponseHeaders) Set(val *KeysetPaginationResponseHeaders) { + v.value = val + v.isSet = true +} + +func (v NullableKeysetPaginationResponseHeaders) IsSet() bool { + return v.isSet +} + +func (v *NullableKeysetPaginationResponseHeaders) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableKeysetPaginationResponseHeaders(val *KeysetPaginationResponseHeaders) *NullableKeysetPaginationResponseHeaders { + return &NullableKeysetPaginationResponseHeaders{value: val, isSet: true} +} + +func (v NullableKeysetPaginationResponseHeaders) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableKeysetPaginationResponseHeaders) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_o_auth2_client.go b/internal/httpclient/model_o_auth2_client.go index 976219bd5e4..6420532c692 100644 --- a/internal/httpclient/model_o_auth2_client.go +++ b/internal/httpclient/model_o_auth2_client.go @@ -16,23 +16,28 @@ import ( "time" ) +// checks if the OAuth2Client 
type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2Client{} + // OAuth2Client OAuth 2.0 Clients are used to perform OAuth 2.0 and OpenID Connect flows. Usually, OAuth 2.0 clients are generated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities. type OAuth2Client struct { - AllowedCorsOrigins []string `json:"allowed_cors_origins,omitempty"` - Audience []string `json:"audience,omitempty"` + // OAuth 2.0 Access Token Strategy AccessTokenStrategy is the strategy used to generate access tokens. Valid options are `jwt` and `opaque`. `jwt` is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token Setting the strategy here overrides the global setting in `strategies.access_token`. + AccessTokenStrategy *string `json:"access_token_strategy,omitempty"` + AllowedCorsOrigins []string `json:"allowed_cors_origins,omitempty"` + Audience []string `json:"audience,omitempty"` // Specify a time duration in milliseconds, seconds, minutes, hours. - AuthorizationCodeGrantAccessTokenLifespan *string `json:"authorization_code_grant_access_token_lifespan,omitempty"` + AuthorizationCodeGrantAccessTokenLifespan *string `json:"authorization_code_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - AuthorizationCodeGrantIdTokenLifespan *string `json:"authorization_code_grant_id_token_lifespan,omitempty"` + AuthorizationCodeGrantIdTokenLifespan *string `json:"authorization_code_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - AuthorizationCodeGrantRefreshTokenLifespan *string `json:"authorization_code_grant_refresh_token_lifespan,omitempty"` + AuthorizationCodeGrantRefreshTokenLifespan *string `json:"authorization_code_grant_refresh_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // OpenID Connect Back-Channel Logout Session Required Boolean value specifying whether the RP requires that a sid (session ID) Claim be included in the Logout Token to identify the RP session with the OP when the backchannel_logout_uri is used. If omitted, the default value is false. BackchannelLogoutSessionRequired *bool `json:"backchannel_logout_session_required,omitempty"` // OpenID Connect Back-Channel Logout URI RP URL that will cause the RP to log itself out when sent a Logout Token by the OP. BackchannelLogoutUri *string `json:"backchannel_logout_uri,omitempty"` // Specify a time duration in milliseconds, seconds, minutes, hours. - ClientCredentialsGrantAccessTokenLifespan *string `json:"client_credentials_grant_access_token_lifespan,omitempty"` - // OAuth 2.0 Client ID The ID is autogenerated and immutable. + ClientCredentialsGrantAccessTokenLifespan *string `json:"client_credentials_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + // OAuth 2.0 Client ID The ID is immutable. If no ID is provided, a UUID4 will be generated. ClientId *string `json:"client_id,omitempty"` // OAuth 2.0 Client Name The human-readable name of the client to be presented to the end-user during authorization. ClientName *string `json:"client_name,omitempty"` @@ -45,21 +50,26 @@ type OAuth2Client struct { Contacts []string `json:"contacts,omitempty"` // OAuth 2.0 Client Creation Date CreatedAt returns the timestamp of the client's creation. 
CreatedAt *time.Time `json:"created_at,omitempty"` + // Specify a time duration in milliseconds, seconds, minutes, hours. + DeviceAuthorizationGrantAccessTokenLifespan *string `json:"device_authorization_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + // Specify a time duration in milliseconds, seconds, minutes, hours. + DeviceAuthorizationGrantIdTokenLifespan *string `json:"device_authorization_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + // Specify a time duration in milliseconds, seconds, minutes, hours. + DeviceAuthorizationGrantRefreshTokenLifespan *string `json:"device_authorization_grant_refresh_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // OpenID Connect Front-Channel Logout Session Required Boolean value specifying whether the RP requires that iss (issuer) and sid (session ID) query parameters be included to identify the RP session with the OP when the frontchannel_logout_uri is used. If omitted, the default value is false. FrontchannelLogoutSessionRequired *bool `json:"frontchannel_logout_session_required,omitempty"` // OpenID Connect Front-Channel Logout URI RP URL that will cause the RP to log itself out when rendered in an iframe by the OP. An iss (issuer) query parameter and a sid (session ID) query parameter MAY be included by the OP to enable the RP to validate the request and to determine which of the potentially multiple sessions is to be logged out; if either is included, both MUST be. FrontchannelLogoutUri *string `json:"frontchannel_logout_uri,omitempty"` GrantTypes []string `json:"grant_types,omitempty"` // Specify a time duration in milliseconds, seconds, minutes, hours. - ImplicitGrantAccessTokenLifespan *string `json:"implicit_grant_access_token_lifespan,omitempty"` + ImplicitGrantAccessTokenLifespan *string `json:"implicit_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - ImplicitGrantIdTokenLifespan *string `json:"implicit_grant_id_token_lifespan,omitempty"` - // OAuth 2.0 Client JSON Web Key Set Client's JSON Web Key Set [JWK] document, passed by value. The semantics of the jwks parameter are the same as the jwks_uri parameter, other than that the JWK Set is passed by value, rather than by reference. This parameter is intended only to be used by Clients that, for some reason, are unable to use the jwks_uri parameter, for instance, by native applications that might not have a location to host the contents of the JWK Set. If a Client can use jwks_uri, it MUST NOT use jwks. One significant downside of jwks is that it does not enable key rotation (which jwks_uri does, as described in Section 10 of OpenID Connect Core 1.0 [OpenID.Core]). The jwks_uri and jwks parameters MUST NOT be used together. - Jwks interface{} `json:"jwks,omitempty"` + ImplicitGrantIdTokenLifespan *string `json:"implicit_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + Jwks *JsonWebKeySet `json:"jwks,omitempty"` // OAuth 2.0 Client JSON Web Key Set URL URL for the Client's JSON Web Key Set [JWK] document. If the Client signs requests to the Server, it contains the signing key(s) the Server uses to validate signatures from the Client. The JWK Set MAY also contain the Client's encryption keys(s), which are used by the Server to encrypt responses to the Client. 
When both signing and encryption keys are made available, a use (Key Use) parameter value is REQUIRED for all keys in the referenced JWK Set to indicate each key's intended usage. Although some algorithms allow the same key to be used for both signatures and encryption, doing so is NOT RECOMMENDED, as it is less secure. The JWK x5c parameter MAY be used to provide X.509 representations of keys provided. When used, the bare key values MUST still be present and MUST match those in the certificate. JwksUri *string `json:"jwks_uri,omitempty"` // Specify a time duration in milliseconds, seconds, minutes, hours. - JwtBearerGrantAccessTokenLifespan *string `json:"jwt_bearer_grant_access_token_lifespan,omitempty"` + JwtBearerGrantAccessTokenLifespan *string `json:"jwt_bearer_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // OAuth 2.0 Client Logo URI A URL string referencing the client's logo. LogoUri *string `json:"logo_uri,omitempty"` Metadata interface{} `json:"metadata,omitempty"` @@ -70,11 +80,11 @@ type OAuth2Client struct { PostLogoutRedirectUris []string `json:"post_logout_redirect_uris,omitempty"` RedirectUris []string `json:"redirect_uris,omitempty"` // Specify a time duration in milliseconds, seconds, minutes, hours. - RefreshTokenGrantAccessTokenLifespan *string `json:"refresh_token_grant_access_token_lifespan,omitempty"` + RefreshTokenGrantAccessTokenLifespan *string `json:"refresh_token_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - RefreshTokenGrantIdTokenLifespan *string `json:"refresh_token_grant_id_token_lifespan,omitempty"` + RefreshTokenGrantIdTokenLifespan *string `json:"refresh_token_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - RefreshTokenGrantRefreshTokenLifespan *string `json:"refresh_token_grant_refresh_token_lifespan,omitempty"` + RefreshTokenGrantRefreshTokenLifespan *string `json:"refresh_token_grant_refresh_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // OpenID Connect Dynamic Client Registration Access Token RegistrationAccessToken can be used to update, get, or delete the OAuth2 Client. It is sent when creating a client using Dynamic Client Registration. RegistrationAccessToken *string `json:"registration_access_token,omitempty"` // OpenID Connect Dynamic Client Registration URL RegistrationClientURI is the URL used to update, get, or delete the OAuth2 Client. @@ -87,9 +97,13 @@ type OAuth2Client struct { Scope *string `json:"scope,omitempty"` // OpenID Connect Sector Identifier URI URL using the https scheme to be used in calculating Pseudonymous Identifiers by the OP. The URL references a file with a single JSON array of redirect_uri values. SectorIdentifierUri *string `json:"sector_identifier_uri,omitempty"` + // SkipConsent skips the consent screen for this client. This field can only be set from the admin API. + SkipConsent *bool `json:"skip_consent,omitempty"` + // SkipLogoutConsent skips the logout consent screen for this client. This field can only be set from the admin API. + SkipLogoutConsent *bool `json:"skip_logout_consent,omitempty"` // OpenID Connect Subject Type The `subject_types_supported` Discovery parameter contains a list of the supported subject_type values for this server. Valid types include `pairwise` and `public`. 
SubjectType *string `json:"subject_type,omitempty"` - // OAuth 2.0 Token Endpoint Authentication Method Requested Client Authentication method for the Token Endpoint. The options are: `client_secret_post`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. `client_secret_basic`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. `private_key_jwt`: Use JSON Web Tokens to authenticate the client. `none`: Used for public clients (native apps, mobile apps) which can not have secrets. + // OAuth 2.0 Token Endpoint Authentication Method Requested Client Authentication method for the Token Endpoint. The options are: `client_secret_basic`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header. `client_secret_post`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body. `private_key_jwt`: Use JSON Web Tokens to authenticate the client. `none`: Used for public clients (native apps, mobile apps) which can not have secrets. TokenEndpointAuthMethod *string `json:"token_endpoint_auth_method,omitempty"` // OAuth 2.0 Token Endpoint Signing Algorithm Requested Client Authentication signing algorithm for the Token Endpoint. TokenEndpointAuthSigningAlg *string `json:"token_endpoint_auth_signing_alg,omitempty"` @@ -107,6 +121,8 @@ type OAuth2Client struct { // will change when the set of required properties is changed func NewOAuth2Client() *OAuth2Client { this := OAuth2Client{} + var tokenEndpointAuthMethod string = "client_secret_basic" + this.TokenEndpointAuthMethod = &tokenEndpointAuthMethod return &this } @@ -115,12 +131,46 @@ func NewOAuth2Client() *OAuth2Client { // but it doesn't guarantee that properties required by API are set func NewOAuth2ClientWithDefaults() *OAuth2Client { this := OAuth2Client{} + var tokenEndpointAuthMethod string = "client_secret_basic" + this.TokenEndpointAuthMethod = &tokenEndpointAuthMethod return &this } +// GetAccessTokenStrategy returns the AccessTokenStrategy field value if set, zero value otherwise. +func (o *OAuth2Client) GetAccessTokenStrategy() string { + if o == nil || IsNil(o.AccessTokenStrategy) { + var ret string + return ret + } + return *o.AccessTokenStrategy +} + +// GetAccessTokenStrategyOk returns a tuple with the AccessTokenStrategy field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2Client) GetAccessTokenStrategyOk() (*string, bool) { + if o == nil || IsNil(o.AccessTokenStrategy) { + return nil, false + } + return o.AccessTokenStrategy, true +} + +// HasAccessTokenStrategy returns a boolean if a field has been set. +func (o *OAuth2Client) HasAccessTokenStrategy() bool { + if o != nil && !IsNil(o.AccessTokenStrategy) { + return true + } + + return false +} + +// SetAccessTokenStrategy gets a reference to the given string and assigns it to the AccessTokenStrategy field. +func (o *OAuth2Client) SetAccessTokenStrategy(v string) { + o.AccessTokenStrategy = &v +} + // GetAllowedCorsOrigins returns the AllowedCorsOrigins field value if set, zero value otherwise. 
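[Editor's note] The OAuth2Client changes visible in this part of the diff: `jwks` becomes a typed `*JsonWebKeySet` instead of `interface{}`, a per-client `access_token_strategy` is added, device-authorization-grant lifespans and the admin-only `skip_consent`/`skip_logout_consent` flags are introduced, the duration fields gain a `validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` tag, and the constructor now defaults `token_endpoint_auth_method` to `client_secret_basic` (the previous documented default was `client_secret_post`). A minimal sketch of building a client with these setters follows; the import path and all values are illustrative assumptions, and the `SkipConsent`/`SkipLogoutConsent` setters are assumed to follow the same generated pattern shown for the other fields.

```go
package main

import (
	"encoding/json"
	"fmt"

	// Assumed import path, as in the earlier sketch.
	openapi "github.com/ory/hydra-client-go/v2"
)

func main() {
	// The constructor now sets token_endpoint_auth_method to "client_secret_basic".
	c := openapi.NewOAuth2Client()

	c.SetClientName("example-app")
	c.SetGrantTypes([]string{"authorization_code", "refresh_token", "urn:ietf:params:oauth:grant-type:device_code"})
	c.SetRedirectUris([]string{"https://example.com/callback"})

	// New per-client access token strategy: "opaque" or "jwt".
	c.SetAccessTokenStrategy("opaque")

	// Lifespans are duration strings; the added validate tag accepts values
	// such as "1h", "30m", or "1h30m".
	c.SetAuthorizationCodeGrantAccessTokenLifespan("1h")
	c.SetDeviceAuthorizationGrantAccessTokenLifespan("30m")

	// Admin-API-only flags added in this diff (setters assumed to exist
	// further down in the generated file).
	c.SetSkipConsent(true)
	c.SetSkipLogoutConsent(true)

	body, _ := json.Marshal(c)
	fmt.Println(string(body))
}
```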
func (o *OAuth2Client) GetAllowedCorsOrigins() []string { - if o == nil || o.AllowedCorsOrigins == nil { + if o == nil || IsNil(o.AllowedCorsOrigins) { var ret []string return ret } @@ -130,7 +180,7 @@ func (o *OAuth2Client) GetAllowedCorsOrigins() []string { // GetAllowedCorsOriginsOk returns a tuple with the AllowedCorsOrigins field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetAllowedCorsOriginsOk() ([]string, bool) { - if o == nil || o.AllowedCorsOrigins == nil { + if o == nil || IsNil(o.AllowedCorsOrigins) { return nil, false } return o.AllowedCorsOrigins, true @@ -138,7 +188,7 @@ func (o *OAuth2Client) GetAllowedCorsOriginsOk() ([]string, bool) { // HasAllowedCorsOrigins returns a boolean if a field has been set. func (o *OAuth2Client) HasAllowedCorsOrigins() bool { - if o != nil && o.AllowedCorsOrigins != nil { + if o != nil && !IsNil(o.AllowedCorsOrigins) { return true } @@ -152,7 +202,7 @@ func (o *OAuth2Client) SetAllowedCorsOrigins(v []string) { // GetAudience returns the Audience field value if set, zero value otherwise. func (o *OAuth2Client) GetAudience() []string { - if o == nil || o.Audience == nil { + if o == nil || IsNil(o.Audience) { var ret []string return ret } @@ -162,7 +212,7 @@ func (o *OAuth2Client) GetAudience() []string { // GetAudienceOk returns a tuple with the Audience field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetAudienceOk() ([]string, bool) { - if o == nil || o.Audience == nil { + if o == nil || IsNil(o.Audience) { return nil, false } return o.Audience, true @@ -170,7 +220,7 @@ func (o *OAuth2Client) GetAudienceOk() ([]string, bool) { // HasAudience returns a boolean if a field has been set. func (o *OAuth2Client) HasAudience() bool { - if o != nil && o.Audience != nil { + if o != nil && !IsNil(o.Audience) { return true } @@ -184,7 +234,7 @@ func (o *OAuth2Client) SetAudience(v []string) { // GetAuthorizationCodeGrantAccessTokenLifespan returns the AuthorizationCodeGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetAuthorizationCodeGrantAccessTokenLifespan() string { - if o == nil || o.AuthorizationCodeGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { var ret string return ret } @@ -194,7 +244,7 @@ func (o *OAuth2Client) GetAuthorizationCodeGrantAccessTokenLifespan() string { // GetAuthorizationCodeGrantAccessTokenLifespanOk returns a tuple with the AuthorizationCodeGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetAuthorizationCodeGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.AuthorizationCodeGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { return nil, false } return o.AuthorizationCodeGrantAccessTokenLifespan, true @@ -202,7 +252,7 @@ func (o *OAuth2Client) GetAuthorizationCodeGrantAccessTokenLifespanOk() (*string // HasAuthorizationCodeGrantAccessTokenLifespan returns a boolean if a field has been set. 
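[Editor's note] The bulk of the remaining hunks are mechanical: every direct `o.Field == nil` / `o.Field != nil` comparison is replaced by the generator's `IsNil` helper, and each model gains a compile-time assertion such as `var _ MappedNullable = &OAuth2Client{}`, which only verifies at build time that the type implements the generator's `MappedNullable` interface (`ToMap`). The `IsNil` helper itself lives in the generated `utils.go`, which is not shown in this part of the diff, so the sketch below is an assumption about its typical shape in openapi-generator Go output, not a copy of it: a reflection-based check that also catches typed-nil pointers, maps, slices, channels, and funcs, which a plain `i == nil` comparison misses.

```go
package openapi

import "reflect"

// IsNil reports whether an interface value is nil, including typed-nil
// pointers, maps, slices, channels, and funcs. Sketch only; the exact body
// in the generated utils.go may differ.
func IsNil(i interface{}) bool {
	if i == nil {
		return true
	}
	switch reflect.TypeOf(i).Kind() {
	case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan, reflect.Func:
		return reflect.ValueOf(i).IsNil()
	}
	return false
}
```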
func (o *OAuth2Client) HasAuthorizationCodeGrantAccessTokenLifespan() bool { - if o != nil && o.AuthorizationCodeGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { return true } @@ -216,7 +266,7 @@ func (o *OAuth2Client) SetAuthorizationCodeGrantAccessTokenLifespan(v string) { // GetAuthorizationCodeGrantIdTokenLifespan returns the AuthorizationCodeGrantIdTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetAuthorizationCodeGrantIdTokenLifespan() string { - if o == nil || o.AuthorizationCodeGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { var ret string return ret } @@ -226,7 +276,7 @@ func (o *OAuth2Client) GetAuthorizationCodeGrantIdTokenLifespan() string { // GetAuthorizationCodeGrantIdTokenLifespanOk returns a tuple with the AuthorizationCodeGrantIdTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetAuthorizationCodeGrantIdTokenLifespanOk() (*string, bool) { - if o == nil || o.AuthorizationCodeGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { return nil, false } return o.AuthorizationCodeGrantIdTokenLifespan, true @@ -234,7 +284,7 @@ func (o *OAuth2Client) GetAuthorizationCodeGrantIdTokenLifespanOk() (*string, bo // HasAuthorizationCodeGrantIdTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasAuthorizationCodeGrantIdTokenLifespan() bool { - if o != nil && o.AuthorizationCodeGrantIdTokenLifespan != nil { + if o != nil && !IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { return true } @@ -248,7 +298,7 @@ func (o *OAuth2Client) SetAuthorizationCodeGrantIdTokenLifespan(v string) { // GetAuthorizationCodeGrantRefreshTokenLifespan returns the AuthorizationCodeGrantRefreshTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetAuthorizationCodeGrantRefreshTokenLifespan() string { - if o == nil || o.AuthorizationCodeGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { var ret string return ret } @@ -258,7 +308,7 @@ func (o *OAuth2Client) GetAuthorizationCodeGrantRefreshTokenLifespan() string { // GetAuthorizationCodeGrantRefreshTokenLifespanOk returns a tuple with the AuthorizationCodeGrantRefreshTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetAuthorizationCodeGrantRefreshTokenLifespanOk() (*string, bool) { - if o == nil || o.AuthorizationCodeGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { return nil, false } return o.AuthorizationCodeGrantRefreshTokenLifespan, true @@ -266,7 +316,7 @@ func (o *OAuth2Client) GetAuthorizationCodeGrantRefreshTokenLifespanOk() (*strin // HasAuthorizationCodeGrantRefreshTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasAuthorizationCodeGrantRefreshTokenLifespan() bool { - if o != nil && o.AuthorizationCodeGrantRefreshTokenLifespan != nil { + if o != nil && !IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { return true } @@ -280,7 +330,7 @@ func (o *OAuth2Client) SetAuthorizationCodeGrantRefreshTokenLifespan(v string) { // GetBackchannelLogoutSessionRequired returns the BackchannelLogoutSessionRequired field value if set, zero value otherwise. 
func (o *OAuth2Client) GetBackchannelLogoutSessionRequired() bool { - if o == nil || o.BackchannelLogoutSessionRequired == nil { + if o == nil || IsNil(o.BackchannelLogoutSessionRequired) { var ret bool return ret } @@ -290,7 +340,7 @@ func (o *OAuth2Client) GetBackchannelLogoutSessionRequired() bool { // GetBackchannelLogoutSessionRequiredOk returns a tuple with the BackchannelLogoutSessionRequired field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetBackchannelLogoutSessionRequiredOk() (*bool, bool) { - if o == nil || o.BackchannelLogoutSessionRequired == nil { + if o == nil || IsNil(o.BackchannelLogoutSessionRequired) { return nil, false } return o.BackchannelLogoutSessionRequired, true @@ -298,7 +348,7 @@ func (o *OAuth2Client) GetBackchannelLogoutSessionRequiredOk() (*bool, bool) { // HasBackchannelLogoutSessionRequired returns a boolean if a field has been set. func (o *OAuth2Client) HasBackchannelLogoutSessionRequired() bool { - if o != nil && o.BackchannelLogoutSessionRequired != nil { + if o != nil && !IsNil(o.BackchannelLogoutSessionRequired) { return true } @@ -312,7 +362,7 @@ func (o *OAuth2Client) SetBackchannelLogoutSessionRequired(v bool) { // GetBackchannelLogoutUri returns the BackchannelLogoutUri field value if set, zero value otherwise. func (o *OAuth2Client) GetBackchannelLogoutUri() string { - if o == nil || o.BackchannelLogoutUri == nil { + if o == nil || IsNil(o.BackchannelLogoutUri) { var ret string return ret } @@ -322,7 +372,7 @@ func (o *OAuth2Client) GetBackchannelLogoutUri() string { // GetBackchannelLogoutUriOk returns a tuple with the BackchannelLogoutUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetBackchannelLogoutUriOk() (*string, bool) { - if o == nil || o.BackchannelLogoutUri == nil { + if o == nil || IsNil(o.BackchannelLogoutUri) { return nil, false } return o.BackchannelLogoutUri, true @@ -330,7 +380,7 @@ func (o *OAuth2Client) GetBackchannelLogoutUriOk() (*string, bool) { // HasBackchannelLogoutUri returns a boolean if a field has been set. func (o *OAuth2Client) HasBackchannelLogoutUri() bool { - if o != nil && o.BackchannelLogoutUri != nil { + if o != nil && !IsNil(o.BackchannelLogoutUri) { return true } @@ -344,7 +394,7 @@ func (o *OAuth2Client) SetBackchannelLogoutUri(v string) { // GetClientCredentialsGrantAccessTokenLifespan returns the ClientCredentialsGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetClientCredentialsGrantAccessTokenLifespan() string { - if o == nil || o.ClientCredentialsGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { var ret string return ret } @@ -354,7 +404,7 @@ func (o *OAuth2Client) GetClientCredentialsGrantAccessTokenLifespan() string { // GetClientCredentialsGrantAccessTokenLifespanOk returns a tuple with the ClientCredentialsGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OAuth2Client) GetClientCredentialsGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.ClientCredentialsGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { return nil, false } return o.ClientCredentialsGrantAccessTokenLifespan, true @@ -362,7 +412,7 @@ func (o *OAuth2Client) GetClientCredentialsGrantAccessTokenLifespanOk() (*string // HasClientCredentialsGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasClientCredentialsGrantAccessTokenLifespan() bool { - if o != nil && o.ClientCredentialsGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { return true } @@ -376,7 +426,7 @@ func (o *OAuth2Client) SetClientCredentialsGrantAccessTokenLifespan(v string) { // GetClientId returns the ClientId field value if set, zero value otherwise. func (o *OAuth2Client) GetClientId() string { - if o == nil || o.ClientId == nil { + if o == nil || IsNil(o.ClientId) { var ret string return ret } @@ -386,7 +436,7 @@ func (o *OAuth2Client) GetClientId() string { // GetClientIdOk returns a tuple with the ClientId field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetClientIdOk() (*string, bool) { - if o == nil || o.ClientId == nil { + if o == nil || IsNil(o.ClientId) { return nil, false } return o.ClientId, true @@ -394,7 +444,7 @@ func (o *OAuth2Client) GetClientIdOk() (*string, bool) { // HasClientId returns a boolean if a field has been set. func (o *OAuth2Client) HasClientId() bool { - if o != nil && o.ClientId != nil { + if o != nil && !IsNil(o.ClientId) { return true } @@ -408,7 +458,7 @@ func (o *OAuth2Client) SetClientId(v string) { // GetClientName returns the ClientName field value if set, zero value otherwise. func (o *OAuth2Client) GetClientName() string { - if o == nil || o.ClientName == nil { + if o == nil || IsNil(o.ClientName) { var ret string return ret } @@ -418,7 +468,7 @@ func (o *OAuth2Client) GetClientName() string { // GetClientNameOk returns a tuple with the ClientName field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetClientNameOk() (*string, bool) { - if o == nil || o.ClientName == nil { + if o == nil || IsNil(o.ClientName) { return nil, false } return o.ClientName, true @@ -426,7 +476,7 @@ func (o *OAuth2Client) GetClientNameOk() (*string, bool) { // HasClientName returns a boolean if a field has been set. func (o *OAuth2Client) HasClientName() bool { - if o != nil && o.ClientName != nil { + if o != nil && !IsNil(o.ClientName) { return true } @@ -440,7 +490,7 @@ func (o *OAuth2Client) SetClientName(v string) { // GetClientSecret returns the ClientSecret field value if set, zero value otherwise. func (o *OAuth2Client) GetClientSecret() string { - if o == nil || o.ClientSecret == nil { + if o == nil || IsNil(o.ClientSecret) { var ret string return ret } @@ -450,7 +500,7 @@ func (o *OAuth2Client) GetClientSecret() string { // GetClientSecretOk returns a tuple with the ClientSecret field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetClientSecretOk() (*string, bool) { - if o == nil || o.ClientSecret == nil { + if o == nil || IsNil(o.ClientSecret) { return nil, false } return o.ClientSecret, true @@ -458,7 +508,7 @@ func (o *OAuth2Client) GetClientSecretOk() (*string, bool) { // HasClientSecret returns a boolean if a field has been set. 
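[Editor's note] Every optional field follows the same accessor triple seen throughout this file: `GetX` returns the zero value when the field is unset, `GetXOk` additionally reports whether it was set, and `HasX` is the boolean shorthand. Callers that must distinguish "unset" from "explicitly zero" should use the `Ok`/`Has` variants. A short sketch (import path assumed as before):

```go
package main

import (
	"fmt"

	// Assumed import path, as in the earlier sketches.
	openapi "github.com/ory/hydra-client-go/v2"
)

func main() {
	c := openapi.NewOAuth2Client()

	// GetClientSecret returns "" both when the secret is unset and when it
	// was explicitly set to the empty string ...
	fmt.Println(c.GetClientSecret() == "") // true

	// ... so use the Ok/Has variants when the distinction matters.
	if secret, ok := c.GetClientSecretOk(); ok {
		fmt.Println("secret set to:", *secret)
	} else {
		fmt.Println("secret not set") // printed here
	}
	fmt.Println(c.HasClientSecret()) // false
}
```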
func (o *OAuth2Client) HasClientSecret() bool { - if o != nil && o.ClientSecret != nil { + if o != nil && !IsNil(o.ClientSecret) { return true } @@ -472,7 +522,7 @@ func (o *OAuth2Client) SetClientSecret(v string) { // GetClientSecretExpiresAt returns the ClientSecretExpiresAt field value if set, zero value otherwise. func (o *OAuth2Client) GetClientSecretExpiresAt() int64 { - if o == nil || o.ClientSecretExpiresAt == nil { + if o == nil || IsNil(o.ClientSecretExpiresAt) { var ret int64 return ret } @@ -482,7 +532,7 @@ func (o *OAuth2Client) GetClientSecretExpiresAt() int64 { // GetClientSecretExpiresAtOk returns a tuple with the ClientSecretExpiresAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetClientSecretExpiresAtOk() (*int64, bool) { - if o == nil || o.ClientSecretExpiresAt == nil { + if o == nil || IsNil(o.ClientSecretExpiresAt) { return nil, false } return o.ClientSecretExpiresAt, true @@ -490,7 +540,7 @@ func (o *OAuth2Client) GetClientSecretExpiresAtOk() (*int64, bool) { // HasClientSecretExpiresAt returns a boolean if a field has been set. func (o *OAuth2Client) HasClientSecretExpiresAt() bool { - if o != nil && o.ClientSecretExpiresAt != nil { + if o != nil && !IsNil(o.ClientSecretExpiresAt) { return true } @@ -504,7 +554,7 @@ func (o *OAuth2Client) SetClientSecretExpiresAt(v int64) { // GetClientUri returns the ClientUri field value if set, zero value otherwise. func (o *OAuth2Client) GetClientUri() string { - if o == nil || o.ClientUri == nil { + if o == nil || IsNil(o.ClientUri) { var ret string return ret } @@ -514,7 +564,7 @@ func (o *OAuth2Client) GetClientUri() string { // GetClientUriOk returns a tuple with the ClientUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetClientUriOk() (*string, bool) { - if o == nil || o.ClientUri == nil { + if o == nil || IsNil(o.ClientUri) { return nil, false } return o.ClientUri, true @@ -522,7 +572,7 @@ func (o *OAuth2Client) GetClientUriOk() (*string, bool) { // HasClientUri returns a boolean if a field has been set. func (o *OAuth2Client) HasClientUri() bool { - if o != nil && o.ClientUri != nil { + if o != nil && !IsNil(o.ClientUri) { return true } @@ -536,7 +586,7 @@ func (o *OAuth2Client) SetClientUri(v string) { // GetContacts returns the Contacts field value if set, zero value otherwise. func (o *OAuth2Client) GetContacts() []string { - if o == nil || o.Contacts == nil { + if o == nil || IsNil(o.Contacts) { var ret []string return ret } @@ -546,7 +596,7 @@ func (o *OAuth2Client) GetContacts() []string { // GetContactsOk returns a tuple with the Contacts field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetContactsOk() ([]string, bool) { - if o == nil || o.Contacts == nil { + if o == nil || IsNil(o.Contacts) { return nil, false } return o.Contacts, true @@ -554,7 +604,7 @@ func (o *OAuth2Client) GetContactsOk() ([]string, bool) { // HasContacts returns a boolean if a field has been set. func (o *OAuth2Client) HasContacts() bool { - if o != nil && o.Contacts != nil { + if o != nil && !IsNil(o.Contacts) { return true } @@ -568,7 +618,7 @@ func (o *OAuth2Client) SetContacts(v []string) { // GetCreatedAt returns the CreatedAt field value if set, zero value otherwise. 
func (o *OAuth2Client) GetCreatedAt() time.Time { - if o == nil || o.CreatedAt == nil { + if o == nil || IsNil(o.CreatedAt) { var ret time.Time return ret } @@ -578,7 +628,7 @@ func (o *OAuth2Client) GetCreatedAt() time.Time { // GetCreatedAtOk returns a tuple with the CreatedAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetCreatedAtOk() (*time.Time, bool) { - if o == nil || o.CreatedAt == nil { + if o == nil || IsNil(o.CreatedAt) { return nil, false } return o.CreatedAt, true @@ -586,7 +636,7 @@ func (o *OAuth2Client) GetCreatedAtOk() (*time.Time, bool) { // HasCreatedAt returns a boolean if a field has been set. func (o *OAuth2Client) HasCreatedAt() bool { - if o != nil && o.CreatedAt != nil { + if o != nil && !IsNil(o.CreatedAt) { return true } @@ -598,9 +648,105 @@ func (o *OAuth2Client) SetCreatedAt(v time.Time) { o.CreatedAt = &v } +// GetDeviceAuthorizationGrantAccessTokenLifespan returns the DeviceAuthorizationGrantAccessTokenLifespan field value if set, zero value otherwise. +func (o *OAuth2Client) GetDeviceAuthorizationGrantAccessTokenLifespan() string { + if o == nil || IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + var ret string + return ret + } + return *o.DeviceAuthorizationGrantAccessTokenLifespan +} + +// GetDeviceAuthorizationGrantAccessTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantAccessTokenLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2Client) GetDeviceAuthorizationGrantAccessTokenLifespanOk() (*string, bool) { + if o == nil || IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + return nil, false + } + return o.DeviceAuthorizationGrantAccessTokenLifespan, true +} + +// HasDeviceAuthorizationGrantAccessTokenLifespan returns a boolean if a field has been set. +func (o *OAuth2Client) HasDeviceAuthorizationGrantAccessTokenLifespan() bool { + if o != nil && !IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + return true + } + + return false +} + +// SetDeviceAuthorizationGrantAccessTokenLifespan gets a reference to the given string and assigns it to the DeviceAuthorizationGrantAccessTokenLifespan field. +func (o *OAuth2Client) SetDeviceAuthorizationGrantAccessTokenLifespan(v string) { + o.DeviceAuthorizationGrantAccessTokenLifespan = &v +} + +// GetDeviceAuthorizationGrantIdTokenLifespan returns the DeviceAuthorizationGrantIdTokenLifespan field value if set, zero value otherwise. +func (o *OAuth2Client) GetDeviceAuthorizationGrantIdTokenLifespan() string { + if o == nil || IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + var ret string + return ret + } + return *o.DeviceAuthorizationGrantIdTokenLifespan +} + +// GetDeviceAuthorizationGrantIdTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantIdTokenLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2Client) GetDeviceAuthorizationGrantIdTokenLifespanOk() (*string, bool) { + if o == nil || IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + return nil, false + } + return o.DeviceAuthorizationGrantIdTokenLifespan, true +} + +// HasDeviceAuthorizationGrantIdTokenLifespan returns a boolean if a field has been set. 
+func (o *OAuth2Client) HasDeviceAuthorizationGrantIdTokenLifespan() bool { + if o != nil && !IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + return true + } + + return false +} + +// SetDeviceAuthorizationGrantIdTokenLifespan gets a reference to the given string and assigns it to the DeviceAuthorizationGrantIdTokenLifespan field. +func (o *OAuth2Client) SetDeviceAuthorizationGrantIdTokenLifespan(v string) { + o.DeviceAuthorizationGrantIdTokenLifespan = &v +} + +// GetDeviceAuthorizationGrantRefreshTokenLifespan returns the DeviceAuthorizationGrantRefreshTokenLifespan field value if set, zero value otherwise. +func (o *OAuth2Client) GetDeviceAuthorizationGrantRefreshTokenLifespan() string { + if o == nil || IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + var ret string + return ret + } + return *o.DeviceAuthorizationGrantRefreshTokenLifespan +} + +// GetDeviceAuthorizationGrantRefreshTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantRefreshTokenLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2Client) GetDeviceAuthorizationGrantRefreshTokenLifespanOk() (*string, bool) { + if o == nil || IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + return nil, false + } + return o.DeviceAuthorizationGrantRefreshTokenLifespan, true +} + +// HasDeviceAuthorizationGrantRefreshTokenLifespan returns a boolean if a field has been set. +func (o *OAuth2Client) HasDeviceAuthorizationGrantRefreshTokenLifespan() bool { + if o != nil && !IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + return true + } + + return false +} + +// SetDeviceAuthorizationGrantRefreshTokenLifespan gets a reference to the given string and assigns it to the DeviceAuthorizationGrantRefreshTokenLifespan field. +func (o *OAuth2Client) SetDeviceAuthorizationGrantRefreshTokenLifespan(v string) { + o.DeviceAuthorizationGrantRefreshTokenLifespan = &v +} + // GetFrontchannelLogoutSessionRequired returns the FrontchannelLogoutSessionRequired field value if set, zero value otherwise. func (o *OAuth2Client) GetFrontchannelLogoutSessionRequired() bool { - if o == nil || o.FrontchannelLogoutSessionRequired == nil { + if o == nil || IsNil(o.FrontchannelLogoutSessionRequired) { var ret bool return ret } @@ -610,7 +756,7 @@ func (o *OAuth2Client) GetFrontchannelLogoutSessionRequired() bool { // GetFrontchannelLogoutSessionRequiredOk returns a tuple with the FrontchannelLogoutSessionRequired field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetFrontchannelLogoutSessionRequiredOk() (*bool, bool) { - if o == nil || o.FrontchannelLogoutSessionRequired == nil { + if o == nil || IsNil(o.FrontchannelLogoutSessionRequired) { return nil, false } return o.FrontchannelLogoutSessionRequired, true @@ -618,7 +764,7 @@ func (o *OAuth2Client) GetFrontchannelLogoutSessionRequiredOk() (*bool, bool) { // HasFrontchannelLogoutSessionRequired returns a boolean if a field has been set. func (o *OAuth2Client) HasFrontchannelLogoutSessionRequired() bool { - if o != nil && o.FrontchannelLogoutSessionRequired != nil { + if o != nil && !IsNil(o.FrontchannelLogoutSessionRequired) { return true } @@ -632,7 +778,7 @@ func (o *OAuth2Client) SetFrontchannelLogoutSessionRequired(v bool) { // GetFrontchannelLogoutUri returns the FrontchannelLogoutUri field value if set, zero value otherwise. 
func (o *OAuth2Client) GetFrontchannelLogoutUri() string { - if o == nil || o.FrontchannelLogoutUri == nil { + if o == nil || IsNil(o.FrontchannelLogoutUri) { var ret string return ret } @@ -642,7 +788,7 @@ func (o *OAuth2Client) GetFrontchannelLogoutUri() string { // GetFrontchannelLogoutUriOk returns a tuple with the FrontchannelLogoutUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetFrontchannelLogoutUriOk() (*string, bool) { - if o == nil || o.FrontchannelLogoutUri == nil { + if o == nil || IsNil(o.FrontchannelLogoutUri) { return nil, false } return o.FrontchannelLogoutUri, true @@ -650,7 +796,7 @@ func (o *OAuth2Client) GetFrontchannelLogoutUriOk() (*string, bool) { // HasFrontchannelLogoutUri returns a boolean if a field has been set. func (o *OAuth2Client) HasFrontchannelLogoutUri() bool { - if o != nil && o.FrontchannelLogoutUri != nil { + if o != nil && !IsNil(o.FrontchannelLogoutUri) { return true } @@ -664,7 +810,7 @@ func (o *OAuth2Client) SetFrontchannelLogoutUri(v string) { // GetGrantTypes returns the GrantTypes field value if set, zero value otherwise. func (o *OAuth2Client) GetGrantTypes() []string { - if o == nil || o.GrantTypes == nil { + if o == nil || IsNil(o.GrantTypes) { var ret []string return ret } @@ -674,7 +820,7 @@ func (o *OAuth2Client) GetGrantTypes() []string { // GetGrantTypesOk returns a tuple with the GrantTypes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetGrantTypesOk() ([]string, bool) { - if o == nil || o.GrantTypes == nil { + if o == nil || IsNil(o.GrantTypes) { return nil, false } return o.GrantTypes, true @@ -682,7 +828,7 @@ func (o *OAuth2Client) GetGrantTypesOk() ([]string, bool) { // HasGrantTypes returns a boolean if a field has been set. func (o *OAuth2Client) HasGrantTypes() bool { - if o != nil && o.GrantTypes != nil { + if o != nil && !IsNil(o.GrantTypes) { return true } @@ -696,7 +842,7 @@ func (o *OAuth2Client) SetGrantTypes(v []string) { // GetImplicitGrantAccessTokenLifespan returns the ImplicitGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetImplicitGrantAccessTokenLifespan() string { - if o == nil || o.ImplicitGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantAccessTokenLifespan) { var ret string return ret } @@ -706,7 +852,7 @@ func (o *OAuth2Client) GetImplicitGrantAccessTokenLifespan() string { // GetImplicitGrantAccessTokenLifespanOk returns a tuple with the ImplicitGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetImplicitGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.ImplicitGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantAccessTokenLifespan) { return nil, false } return o.ImplicitGrantAccessTokenLifespan, true @@ -714,7 +860,7 @@ func (o *OAuth2Client) GetImplicitGrantAccessTokenLifespanOk() (*string, bool) { // HasImplicitGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasImplicitGrantAccessTokenLifespan() bool { - if o != nil && o.ImplicitGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.ImplicitGrantAccessTokenLifespan) { return true } @@ -728,7 +874,7 @@ func (o *OAuth2Client) SetImplicitGrantAccessTokenLifespan(v string) { // GetImplicitGrantIdTokenLifespan returns the ImplicitGrantIdTokenLifespan field value if set, zero value otherwise. 
func (o *OAuth2Client) GetImplicitGrantIdTokenLifespan() string { - if o == nil || o.ImplicitGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantIdTokenLifespan) { var ret string return ret } @@ -738,7 +884,7 @@ func (o *OAuth2Client) GetImplicitGrantIdTokenLifespan() string { // GetImplicitGrantIdTokenLifespanOk returns a tuple with the ImplicitGrantIdTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetImplicitGrantIdTokenLifespanOk() (*string, bool) { - if o == nil || o.ImplicitGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantIdTokenLifespan) { return nil, false } return o.ImplicitGrantIdTokenLifespan, true @@ -746,7 +892,7 @@ func (o *OAuth2Client) GetImplicitGrantIdTokenLifespanOk() (*string, bool) { // HasImplicitGrantIdTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasImplicitGrantIdTokenLifespan() bool { - if o != nil && o.ImplicitGrantIdTokenLifespan != nil { + if o != nil && !IsNil(o.ImplicitGrantIdTokenLifespan) { return true } @@ -758,42 +904,41 @@ func (o *OAuth2Client) SetImplicitGrantIdTokenLifespan(v string) { o.ImplicitGrantIdTokenLifespan = &v } -// GetJwks returns the Jwks field value if set, zero value otherwise (both if not set or set to explicit null). -func (o *OAuth2Client) GetJwks() interface{} { - if o == nil { - var ret interface{} +// GetJwks returns the Jwks field value if set, zero value otherwise. +func (o *OAuth2Client) GetJwks() JsonWebKeySet { + if o == nil || IsNil(o.Jwks) { + var ret JsonWebKeySet return ret } - return o.Jwks + return *o.Jwks } // GetJwksOk returns a tuple with the Jwks field value if set, nil otherwise // and a boolean to check if the value has been set. -// NOTE: If the value is an explicit nil, `nil, true` will be returned -func (o *OAuth2Client) GetJwksOk() (*interface{}, bool) { - if o == nil || o.Jwks == nil { +func (o *OAuth2Client) GetJwksOk() (*JsonWebKeySet, bool) { + if o == nil || IsNil(o.Jwks) { return nil, false } - return &o.Jwks, true + return o.Jwks, true } // HasJwks returns a boolean if a field has been set. func (o *OAuth2Client) HasJwks() bool { - if o != nil && o.Jwks != nil { + if o != nil && !IsNil(o.Jwks) { return true } return false } -// SetJwks gets a reference to the given interface{} and assigns it to the Jwks field. -func (o *OAuth2Client) SetJwks(v interface{}) { - o.Jwks = v +// SetJwks gets a reference to the given JsonWebKeySet and assigns it to the Jwks field. +func (o *OAuth2Client) SetJwks(v JsonWebKeySet) { + o.Jwks = &v } // GetJwksUri returns the JwksUri field value if set, zero value otherwise. func (o *OAuth2Client) GetJwksUri() string { - if o == nil || o.JwksUri == nil { + if o == nil || IsNil(o.JwksUri) { var ret string return ret } @@ -803,7 +948,7 @@ func (o *OAuth2Client) GetJwksUri() string { // GetJwksUriOk returns a tuple with the JwksUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetJwksUriOk() (*string, bool) { - if o == nil || o.JwksUri == nil { + if o == nil || IsNil(o.JwksUri) { return nil, false } return o.JwksUri, true @@ -811,7 +956,7 @@ func (o *OAuth2Client) GetJwksUriOk() (*string, bool) { // HasJwksUri returns a boolean if a field has been set. 
func (o *OAuth2Client) HasJwksUri() bool { - if o != nil && o.JwksUri != nil { + if o != nil && !IsNil(o.JwksUri) { return true } @@ -825,7 +970,7 @@ func (o *OAuth2Client) SetJwksUri(v string) { // GetJwtBearerGrantAccessTokenLifespan returns the JwtBearerGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetJwtBearerGrantAccessTokenLifespan() string { - if o == nil || o.JwtBearerGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.JwtBearerGrantAccessTokenLifespan) { var ret string return ret } @@ -835,7 +980,7 @@ func (o *OAuth2Client) GetJwtBearerGrantAccessTokenLifespan() string { // GetJwtBearerGrantAccessTokenLifespanOk returns a tuple with the JwtBearerGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetJwtBearerGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.JwtBearerGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.JwtBearerGrantAccessTokenLifespan) { return nil, false } return o.JwtBearerGrantAccessTokenLifespan, true @@ -843,7 +988,7 @@ func (o *OAuth2Client) GetJwtBearerGrantAccessTokenLifespanOk() (*string, bool) // HasJwtBearerGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasJwtBearerGrantAccessTokenLifespan() bool { - if o != nil && o.JwtBearerGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.JwtBearerGrantAccessTokenLifespan) { return true } @@ -857,7 +1002,7 @@ func (o *OAuth2Client) SetJwtBearerGrantAccessTokenLifespan(v string) { // GetLogoUri returns the LogoUri field value if set, zero value otherwise. func (o *OAuth2Client) GetLogoUri() string { - if o == nil || o.LogoUri == nil { + if o == nil || IsNil(o.LogoUri) { var ret string return ret } @@ -867,7 +1012,7 @@ func (o *OAuth2Client) GetLogoUri() string { // GetLogoUriOk returns a tuple with the LogoUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetLogoUriOk() (*string, bool) { - if o == nil || o.LogoUri == nil { + if o == nil || IsNil(o.LogoUri) { return nil, false } return o.LogoUri, true @@ -875,7 +1020,7 @@ func (o *OAuth2Client) GetLogoUriOk() (*string, bool) { // HasLogoUri returns a boolean if a field has been set. func (o *OAuth2Client) HasLogoUri() bool { - if o != nil && o.LogoUri != nil { + if o != nil && !IsNil(o.LogoUri) { return true } @@ -900,7 +1045,7 @@ func (o *OAuth2Client) GetMetadata() interface{} { // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *OAuth2Client) GetMetadataOk() (*interface{}, bool) { - if o == nil || o.Metadata == nil { + if o == nil || IsNil(o.Metadata) { return nil, false } return &o.Metadata, true @@ -908,7 +1053,7 @@ func (o *OAuth2Client) GetMetadataOk() (*interface{}, bool) { // HasMetadata returns a boolean if a field has been set. func (o *OAuth2Client) HasMetadata() bool { - if o != nil && o.Metadata != nil { + if o != nil && !IsNil(o.Metadata) { return true } @@ -922,7 +1067,7 @@ func (o *OAuth2Client) SetMetadata(v interface{}) { // GetOwner returns the Owner field value if set, zero value otherwise. 
func (o *OAuth2Client) GetOwner() string { - if o == nil || o.Owner == nil { + if o == nil || IsNil(o.Owner) { var ret string return ret } @@ -932,7 +1077,7 @@ func (o *OAuth2Client) GetOwner() string { // GetOwnerOk returns a tuple with the Owner field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetOwnerOk() (*string, bool) { - if o == nil || o.Owner == nil { + if o == nil || IsNil(o.Owner) { return nil, false } return o.Owner, true @@ -940,7 +1085,7 @@ func (o *OAuth2Client) GetOwnerOk() (*string, bool) { // HasOwner returns a boolean if a field has been set. func (o *OAuth2Client) HasOwner() bool { - if o != nil && o.Owner != nil { + if o != nil && !IsNil(o.Owner) { return true } @@ -954,7 +1099,7 @@ func (o *OAuth2Client) SetOwner(v string) { // GetPolicyUri returns the PolicyUri field value if set, zero value otherwise. func (o *OAuth2Client) GetPolicyUri() string { - if o == nil || o.PolicyUri == nil { + if o == nil || IsNil(o.PolicyUri) { var ret string return ret } @@ -964,7 +1109,7 @@ func (o *OAuth2Client) GetPolicyUri() string { // GetPolicyUriOk returns a tuple with the PolicyUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetPolicyUriOk() (*string, bool) { - if o == nil || o.PolicyUri == nil { + if o == nil || IsNil(o.PolicyUri) { return nil, false } return o.PolicyUri, true @@ -972,7 +1117,7 @@ func (o *OAuth2Client) GetPolicyUriOk() (*string, bool) { // HasPolicyUri returns a boolean if a field has been set. func (o *OAuth2Client) HasPolicyUri() bool { - if o != nil && o.PolicyUri != nil { + if o != nil && !IsNil(o.PolicyUri) { return true } @@ -986,7 +1131,7 @@ func (o *OAuth2Client) SetPolicyUri(v string) { // GetPostLogoutRedirectUris returns the PostLogoutRedirectUris field value if set, zero value otherwise. func (o *OAuth2Client) GetPostLogoutRedirectUris() []string { - if o == nil || o.PostLogoutRedirectUris == nil { + if o == nil || IsNil(o.PostLogoutRedirectUris) { var ret []string return ret } @@ -996,7 +1141,7 @@ func (o *OAuth2Client) GetPostLogoutRedirectUris() []string { // GetPostLogoutRedirectUrisOk returns a tuple with the PostLogoutRedirectUris field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetPostLogoutRedirectUrisOk() ([]string, bool) { - if o == nil || o.PostLogoutRedirectUris == nil { + if o == nil || IsNil(o.PostLogoutRedirectUris) { return nil, false } return o.PostLogoutRedirectUris, true @@ -1004,7 +1149,7 @@ func (o *OAuth2Client) GetPostLogoutRedirectUrisOk() ([]string, bool) { // HasPostLogoutRedirectUris returns a boolean if a field has been set. func (o *OAuth2Client) HasPostLogoutRedirectUris() bool { - if o != nil && o.PostLogoutRedirectUris != nil { + if o != nil && !IsNil(o.PostLogoutRedirectUris) { return true } @@ -1018,7 +1163,7 @@ func (o *OAuth2Client) SetPostLogoutRedirectUris(v []string) { // GetRedirectUris returns the RedirectUris field value if set, zero value otherwise. func (o *OAuth2Client) GetRedirectUris() []string { - if o == nil || o.RedirectUris == nil { + if o == nil || IsNil(o.RedirectUris) { var ret []string return ret } @@ -1028,7 +1173,7 @@ func (o *OAuth2Client) GetRedirectUris() []string { // GetRedirectUrisOk returns a tuple with the RedirectUris field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OAuth2Client) GetRedirectUrisOk() ([]string, bool) { - if o == nil || o.RedirectUris == nil { + if o == nil || IsNil(o.RedirectUris) { return nil, false } return o.RedirectUris, true @@ -1036,7 +1181,7 @@ func (o *OAuth2Client) GetRedirectUrisOk() ([]string, bool) { // HasRedirectUris returns a boolean if a field has been set. func (o *OAuth2Client) HasRedirectUris() bool { - if o != nil && o.RedirectUris != nil { + if o != nil && !IsNil(o.RedirectUris) { return true } @@ -1050,7 +1195,7 @@ func (o *OAuth2Client) SetRedirectUris(v []string) { // GetRefreshTokenGrantAccessTokenLifespan returns the RefreshTokenGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetRefreshTokenGrantAccessTokenLifespan() string { - if o == nil || o.RefreshTokenGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantAccessTokenLifespan) { var ret string return ret } @@ -1060,7 +1205,7 @@ func (o *OAuth2Client) GetRefreshTokenGrantAccessTokenLifespan() string { // GetRefreshTokenGrantAccessTokenLifespanOk returns a tuple with the RefreshTokenGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRefreshTokenGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.RefreshTokenGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantAccessTokenLifespan) { return nil, false } return o.RefreshTokenGrantAccessTokenLifespan, true @@ -1068,7 +1213,7 @@ func (o *OAuth2Client) GetRefreshTokenGrantAccessTokenLifespanOk() (*string, boo // HasRefreshTokenGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasRefreshTokenGrantAccessTokenLifespan() bool { - if o != nil && o.RefreshTokenGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.RefreshTokenGrantAccessTokenLifespan) { return true } @@ -1082,7 +1227,7 @@ func (o *OAuth2Client) SetRefreshTokenGrantAccessTokenLifespan(v string) { // GetRefreshTokenGrantIdTokenLifespan returns the RefreshTokenGrantIdTokenLifespan field value if set, zero value otherwise. func (o *OAuth2Client) GetRefreshTokenGrantIdTokenLifespan() string { - if o == nil || o.RefreshTokenGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantIdTokenLifespan) { var ret string return ret } @@ -1092,7 +1237,7 @@ func (o *OAuth2Client) GetRefreshTokenGrantIdTokenLifespan() string { // GetRefreshTokenGrantIdTokenLifespanOk returns a tuple with the RefreshTokenGrantIdTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRefreshTokenGrantIdTokenLifespanOk() (*string, bool) { - if o == nil || o.RefreshTokenGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantIdTokenLifespan) { return nil, false } return o.RefreshTokenGrantIdTokenLifespan, true @@ -1100,7 +1245,7 @@ func (o *OAuth2Client) GetRefreshTokenGrantIdTokenLifespanOk() (*string, bool) { // HasRefreshTokenGrantIdTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasRefreshTokenGrantIdTokenLifespan() bool { - if o != nil && o.RefreshTokenGrantIdTokenLifespan != nil { + if o != nil && !IsNil(o.RefreshTokenGrantIdTokenLifespan) { return true } @@ -1114,7 +1259,7 @@ func (o *OAuth2Client) SetRefreshTokenGrantIdTokenLifespan(v string) { // GetRefreshTokenGrantRefreshTokenLifespan returns the RefreshTokenGrantRefreshTokenLifespan field value if set, zero value otherwise. 
func (o *OAuth2Client) GetRefreshTokenGrantRefreshTokenLifespan() string { - if o == nil || o.RefreshTokenGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { var ret string return ret } @@ -1124,7 +1269,7 @@ func (o *OAuth2Client) GetRefreshTokenGrantRefreshTokenLifespan() string { // GetRefreshTokenGrantRefreshTokenLifespanOk returns a tuple with the RefreshTokenGrantRefreshTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRefreshTokenGrantRefreshTokenLifespanOk() (*string, bool) { - if o == nil || o.RefreshTokenGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { return nil, false } return o.RefreshTokenGrantRefreshTokenLifespan, true @@ -1132,7 +1277,7 @@ func (o *OAuth2Client) GetRefreshTokenGrantRefreshTokenLifespanOk() (*string, bo // HasRefreshTokenGrantRefreshTokenLifespan returns a boolean if a field has been set. func (o *OAuth2Client) HasRefreshTokenGrantRefreshTokenLifespan() bool { - if o != nil && o.RefreshTokenGrantRefreshTokenLifespan != nil { + if o != nil && !IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { return true } @@ -1146,7 +1291,7 @@ func (o *OAuth2Client) SetRefreshTokenGrantRefreshTokenLifespan(v string) { // GetRegistrationAccessToken returns the RegistrationAccessToken field value if set, zero value otherwise. func (o *OAuth2Client) GetRegistrationAccessToken() string { - if o == nil || o.RegistrationAccessToken == nil { + if o == nil || IsNil(o.RegistrationAccessToken) { var ret string return ret } @@ -1156,7 +1301,7 @@ func (o *OAuth2Client) GetRegistrationAccessToken() string { // GetRegistrationAccessTokenOk returns a tuple with the RegistrationAccessToken field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRegistrationAccessTokenOk() (*string, bool) { - if o == nil || o.RegistrationAccessToken == nil { + if o == nil || IsNil(o.RegistrationAccessToken) { return nil, false } return o.RegistrationAccessToken, true @@ -1164,7 +1309,7 @@ func (o *OAuth2Client) GetRegistrationAccessTokenOk() (*string, bool) { // HasRegistrationAccessToken returns a boolean if a field has been set. func (o *OAuth2Client) HasRegistrationAccessToken() bool { - if o != nil && o.RegistrationAccessToken != nil { + if o != nil && !IsNil(o.RegistrationAccessToken) { return true } @@ -1178,7 +1323,7 @@ func (o *OAuth2Client) SetRegistrationAccessToken(v string) { // GetRegistrationClientUri returns the RegistrationClientUri field value if set, zero value otherwise. func (o *OAuth2Client) GetRegistrationClientUri() string { - if o == nil || o.RegistrationClientUri == nil { + if o == nil || IsNil(o.RegistrationClientUri) { var ret string return ret } @@ -1188,7 +1333,7 @@ func (o *OAuth2Client) GetRegistrationClientUri() string { // GetRegistrationClientUriOk returns a tuple with the RegistrationClientUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRegistrationClientUriOk() (*string, bool) { - if o == nil || o.RegistrationClientUri == nil { + if o == nil || IsNil(o.RegistrationClientUri) { return nil, false } return o.RegistrationClientUri, true @@ -1196,7 +1341,7 @@ func (o *OAuth2Client) GetRegistrationClientUriOk() (*string, bool) { // HasRegistrationClientUri returns a boolean if a field has been set. 
func (o *OAuth2Client) HasRegistrationClientUri() bool { - if o != nil && o.RegistrationClientUri != nil { + if o != nil && !IsNil(o.RegistrationClientUri) { return true } @@ -1210,7 +1355,7 @@ func (o *OAuth2Client) SetRegistrationClientUri(v string) { // GetRequestObjectSigningAlg returns the RequestObjectSigningAlg field value if set, zero value otherwise. func (o *OAuth2Client) GetRequestObjectSigningAlg() string { - if o == nil || o.RequestObjectSigningAlg == nil { + if o == nil || IsNil(o.RequestObjectSigningAlg) { var ret string return ret } @@ -1220,7 +1365,7 @@ func (o *OAuth2Client) GetRequestObjectSigningAlg() string { // GetRequestObjectSigningAlgOk returns a tuple with the RequestObjectSigningAlg field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRequestObjectSigningAlgOk() (*string, bool) { - if o == nil || o.RequestObjectSigningAlg == nil { + if o == nil || IsNil(o.RequestObjectSigningAlg) { return nil, false } return o.RequestObjectSigningAlg, true @@ -1228,7 +1373,7 @@ func (o *OAuth2Client) GetRequestObjectSigningAlgOk() (*string, bool) { // HasRequestObjectSigningAlg returns a boolean if a field has been set. func (o *OAuth2Client) HasRequestObjectSigningAlg() bool { - if o != nil && o.RequestObjectSigningAlg != nil { + if o != nil && !IsNil(o.RequestObjectSigningAlg) { return true } @@ -1242,7 +1387,7 @@ func (o *OAuth2Client) SetRequestObjectSigningAlg(v string) { // GetRequestUris returns the RequestUris field value if set, zero value otherwise. func (o *OAuth2Client) GetRequestUris() []string { - if o == nil || o.RequestUris == nil { + if o == nil || IsNil(o.RequestUris) { var ret []string return ret } @@ -1252,7 +1397,7 @@ func (o *OAuth2Client) GetRequestUris() []string { // GetRequestUrisOk returns a tuple with the RequestUris field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetRequestUrisOk() ([]string, bool) { - if o == nil || o.RequestUris == nil { + if o == nil || IsNil(o.RequestUris) { return nil, false } return o.RequestUris, true @@ -1260,7 +1405,7 @@ func (o *OAuth2Client) GetRequestUrisOk() ([]string, bool) { // HasRequestUris returns a boolean if a field has been set. func (o *OAuth2Client) HasRequestUris() bool { - if o != nil && o.RequestUris != nil { + if o != nil && !IsNil(o.RequestUris) { return true } @@ -1274,7 +1419,7 @@ func (o *OAuth2Client) SetRequestUris(v []string) { // GetResponseTypes returns the ResponseTypes field value if set, zero value otherwise. func (o *OAuth2Client) GetResponseTypes() []string { - if o == nil || o.ResponseTypes == nil { + if o == nil || IsNil(o.ResponseTypes) { var ret []string return ret } @@ -1284,7 +1429,7 @@ func (o *OAuth2Client) GetResponseTypes() []string { // GetResponseTypesOk returns a tuple with the ResponseTypes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetResponseTypesOk() ([]string, bool) { - if o == nil || o.ResponseTypes == nil { + if o == nil || IsNil(o.ResponseTypes) { return nil, false } return o.ResponseTypes, true @@ -1292,7 +1437,7 @@ func (o *OAuth2Client) GetResponseTypesOk() ([]string, bool) { // HasResponseTypes returns a boolean if a field has been set. 
func (o *OAuth2Client) HasResponseTypes() bool { - if o != nil && o.ResponseTypes != nil { + if o != nil && !IsNil(o.ResponseTypes) { return true } @@ -1306,7 +1451,7 @@ func (o *OAuth2Client) SetResponseTypes(v []string) { // GetScope returns the Scope field value if set, zero value otherwise. func (o *OAuth2Client) GetScope() string { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { var ret string return ret } @@ -1316,7 +1461,7 @@ func (o *OAuth2Client) GetScope() string { // GetScopeOk returns a tuple with the Scope field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetScopeOk() (*string, bool) { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { return nil, false } return o.Scope, true @@ -1324,7 +1469,7 @@ func (o *OAuth2Client) GetScopeOk() (*string, bool) { // HasScope returns a boolean if a field has been set. func (o *OAuth2Client) HasScope() bool { - if o != nil && o.Scope != nil { + if o != nil && !IsNil(o.Scope) { return true } @@ -1338,7 +1483,7 @@ func (o *OAuth2Client) SetScope(v string) { // GetSectorIdentifierUri returns the SectorIdentifierUri field value if set, zero value otherwise. func (o *OAuth2Client) GetSectorIdentifierUri() string { - if o == nil || o.SectorIdentifierUri == nil { + if o == nil || IsNil(o.SectorIdentifierUri) { var ret string return ret } @@ -1348,7 +1493,7 @@ func (o *OAuth2Client) GetSectorIdentifierUri() string { // GetSectorIdentifierUriOk returns a tuple with the SectorIdentifierUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetSectorIdentifierUriOk() (*string, bool) { - if o == nil || o.SectorIdentifierUri == nil { + if o == nil || IsNil(o.SectorIdentifierUri) { return nil, false } return o.SectorIdentifierUri, true @@ -1356,7 +1501,7 @@ func (o *OAuth2Client) GetSectorIdentifierUriOk() (*string, bool) { // HasSectorIdentifierUri returns a boolean if a field has been set. func (o *OAuth2Client) HasSectorIdentifierUri() bool { - if o != nil && o.SectorIdentifierUri != nil { + if o != nil && !IsNil(o.SectorIdentifierUri) { return true } @@ -1368,9 +1513,73 @@ func (o *OAuth2Client) SetSectorIdentifierUri(v string) { o.SectorIdentifierUri = &v } +// GetSkipConsent returns the SkipConsent field value if set, zero value otherwise. +func (o *OAuth2Client) GetSkipConsent() bool { + if o == nil || IsNil(o.SkipConsent) { + var ret bool + return ret + } + return *o.SkipConsent +} + +// GetSkipConsentOk returns a tuple with the SkipConsent field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2Client) GetSkipConsentOk() (*bool, bool) { + if o == nil || IsNil(o.SkipConsent) { + return nil, false + } + return o.SkipConsent, true +} + +// HasSkipConsent returns a boolean if a field has been set. +func (o *OAuth2Client) HasSkipConsent() bool { + if o != nil && !IsNil(o.SkipConsent) { + return true + } + + return false +} + +// SetSkipConsent gets a reference to the given bool and assigns it to the SkipConsent field. +func (o *OAuth2Client) SetSkipConsent(v bool) { + o.SkipConsent = &v +} + +// GetSkipLogoutConsent returns the SkipLogoutConsent field value if set, zero value otherwise. 
+func (o *OAuth2Client) GetSkipLogoutConsent() bool { + if o == nil || IsNil(o.SkipLogoutConsent) { + var ret bool + return ret + } + return *o.SkipLogoutConsent +} + +// GetSkipLogoutConsentOk returns a tuple with the SkipLogoutConsent field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2Client) GetSkipLogoutConsentOk() (*bool, bool) { + if o == nil || IsNil(o.SkipLogoutConsent) { + return nil, false + } + return o.SkipLogoutConsent, true +} + +// HasSkipLogoutConsent returns a boolean if a field has been set. +func (o *OAuth2Client) HasSkipLogoutConsent() bool { + if o != nil && !IsNil(o.SkipLogoutConsent) { + return true + } + + return false +} + +// SetSkipLogoutConsent gets a reference to the given bool and assigns it to the SkipLogoutConsent field. +func (o *OAuth2Client) SetSkipLogoutConsent(v bool) { + o.SkipLogoutConsent = &v +} + // GetSubjectType returns the SubjectType field value if set, zero value otherwise. func (o *OAuth2Client) GetSubjectType() string { - if o == nil || o.SubjectType == nil { + if o == nil || IsNil(o.SubjectType) { var ret string return ret } @@ -1380,7 +1589,7 @@ func (o *OAuth2Client) GetSubjectType() string { // GetSubjectTypeOk returns a tuple with the SubjectType field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetSubjectTypeOk() (*string, bool) { - if o == nil || o.SubjectType == nil { + if o == nil || IsNil(o.SubjectType) { return nil, false } return o.SubjectType, true @@ -1388,7 +1597,7 @@ func (o *OAuth2Client) GetSubjectTypeOk() (*string, bool) { // HasSubjectType returns a boolean if a field has been set. func (o *OAuth2Client) HasSubjectType() bool { - if o != nil && o.SubjectType != nil { + if o != nil && !IsNil(o.SubjectType) { return true } @@ -1402,7 +1611,7 @@ func (o *OAuth2Client) SetSubjectType(v string) { // GetTokenEndpointAuthMethod returns the TokenEndpointAuthMethod field value if set, zero value otherwise. func (o *OAuth2Client) GetTokenEndpointAuthMethod() string { - if o == nil || o.TokenEndpointAuthMethod == nil { + if o == nil || IsNil(o.TokenEndpointAuthMethod) { var ret string return ret } @@ -1412,7 +1621,7 @@ func (o *OAuth2Client) GetTokenEndpointAuthMethod() string { // GetTokenEndpointAuthMethodOk returns a tuple with the TokenEndpointAuthMethod field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetTokenEndpointAuthMethodOk() (*string, bool) { - if o == nil || o.TokenEndpointAuthMethod == nil { + if o == nil || IsNil(o.TokenEndpointAuthMethod) { return nil, false } return o.TokenEndpointAuthMethod, true @@ -1420,7 +1629,7 @@ func (o *OAuth2Client) GetTokenEndpointAuthMethodOk() (*string, bool) { // HasTokenEndpointAuthMethod returns a boolean if a field has been set. func (o *OAuth2Client) HasTokenEndpointAuthMethod() bool { - if o != nil && o.TokenEndpointAuthMethod != nil { + if o != nil && !IsNil(o.TokenEndpointAuthMethod) { return true } @@ -1434,7 +1643,7 @@ func (o *OAuth2Client) SetTokenEndpointAuthMethod(v string) { // GetTokenEndpointAuthSigningAlg returns the TokenEndpointAuthSigningAlg field value if set, zero value otherwise. 
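Aside, not part of the diff: the new skip_consent and skip_logout_consent fields follow the same optional-pointer accessor pattern as the rest of OAuth2Client. A minimal usage sketch, assuming it sits in the same generated package (which already imports encoding/json); the serialized output in the final comment is an expectation, not taken from the source:

// Illustrative only -- not part of the generated file or this diff.
func exampleSkipConsent() ([]byte, error) {
	var c OAuth2Client
	c.SetSkipConsent(true)       // first-party client: skip the consent screen
	c.SetSkipLogoutConsent(true) // and skip the logout confirmation screen

	_ = c.HasSkipConsent() // true: the pointer is now set
	if v, ok := c.GetSkipConsentOk(); ok {
		_ = *v // true
	}
	// Only set fields are emitted by MarshalJSON (via ToMap, further down in
	// this file), so this yields roughly
	// {"skip_consent":true,"skip_logout_consent":true}.
	return json.Marshal(c)
}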
func (o *OAuth2Client) GetTokenEndpointAuthSigningAlg() string { - if o == nil || o.TokenEndpointAuthSigningAlg == nil { + if o == nil || IsNil(o.TokenEndpointAuthSigningAlg) { var ret string return ret } @@ -1444,7 +1653,7 @@ func (o *OAuth2Client) GetTokenEndpointAuthSigningAlg() string { // GetTokenEndpointAuthSigningAlgOk returns a tuple with the TokenEndpointAuthSigningAlg field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetTokenEndpointAuthSigningAlgOk() (*string, bool) { - if o == nil || o.TokenEndpointAuthSigningAlg == nil { + if o == nil || IsNil(o.TokenEndpointAuthSigningAlg) { return nil, false } return o.TokenEndpointAuthSigningAlg, true @@ -1452,7 +1661,7 @@ func (o *OAuth2Client) GetTokenEndpointAuthSigningAlgOk() (*string, bool) { // HasTokenEndpointAuthSigningAlg returns a boolean if a field has been set. func (o *OAuth2Client) HasTokenEndpointAuthSigningAlg() bool { - if o != nil && o.TokenEndpointAuthSigningAlg != nil { + if o != nil && !IsNil(o.TokenEndpointAuthSigningAlg) { return true } @@ -1466,7 +1675,7 @@ func (o *OAuth2Client) SetTokenEndpointAuthSigningAlg(v string) { // GetTosUri returns the TosUri field value if set, zero value otherwise. func (o *OAuth2Client) GetTosUri() string { - if o == nil || o.TosUri == nil { + if o == nil || IsNil(o.TosUri) { var ret string return ret } @@ -1476,7 +1685,7 @@ func (o *OAuth2Client) GetTosUri() string { // GetTosUriOk returns a tuple with the TosUri field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetTosUriOk() (*string, bool) { - if o == nil || o.TosUri == nil { + if o == nil || IsNil(o.TosUri) { return nil, false } return o.TosUri, true @@ -1484,7 +1693,7 @@ func (o *OAuth2Client) GetTosUriOk() (*string, bool) { // HasTosUri returns a boolean if a field has been set. func (o *OAuth2Client) HasTosUri() bool { - if o != nil && o.TosUri != nil { + if o != nil && !IsNil(o.TosUri) { return true } @@ -1498,7 +1707,7 @@ func (o *OAuth2Client) SetTosUri(v string) { // GetUpdatedAt returns the UpdatedAt field value if set, zero value otherwise. func (o *OAuth2Client) GetUpdatedAt() time.Time { - if o == nil || o.UpdatedAt == nil { + if o == nil || IsNil(o.UpdatedAt) { var ret time.Time return ret } @@ -1508,7 +1717,7 @@ func (o *OAuth2Client) GetUpdatedAt() time.Time { // GetUpdatedAtOk returns a tuple with the UpdatedAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetUpdatedAtOk() (*time.Time, bool) { - if o == nil || o.UpdatedAt == nil { + if o == nil || IsNil(o.UpdatedAt) { return nil, false } return o.UpdatedAt, true @@ -1516,7 +1725,7 @@ func (o *OAuth2Client) GetUpdatedAtOk() (*time.Time, bool) { // HasUpdatedAt returns a boolean if a field has been set. func (o *OAuth2Client) HasUpdatedAt() bool { - if o != nil && o.UpdatedAt != nil { + if o != nil && !IsNil(o.UpdatedAt) { return true } @@ -1530,7 +1739,7 @@ func (o *OAuth2Client) SetUpdatedAt(v time.Time) { // GetUserinfoSignedResponseAlg returns the UserinfoSignedResponseAlg field value if set, zero value otherwise. 
func (o *OAuth2Client) GetUserinfoSignedResponseAlg() string { - if o == nil || o.UserinfoSignedResponseAlg == nil { + if o == nil || IsNil(o.UserinfoSignedResponseAlg) { var ret string return ret } @@ -1540,7 +1749,7 @@ func (o *OAuth2Client) GetUserinfoSignedResponseAlg() string { // GetUserinfoSignedResponseAlgOk returns a tuple with the UserinfoSignedResponseAlg field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2Client) GetUserinfoSignedResponseAlgOk() (*string, bool) { - if o == nil || o.UserinfoSignedResponseAlg == nil { + if o == nil || IsNil(o.UserinfoSignedResponseAlg) { return nil, false } return o.UserinfoSignedResponseAlg, true @@ -1548,7 +1757,7 @@ func (o *OAuth2Client) GetUserinfoSignedResponseAlgOk() (*string, bool) { // HasUserinfoSignedResponseAlg returns a boolean if a field has been set. func (o *OAuth2Client) HasUserinfoSignedResponseAlg() bool { - if o != nil && o.UserinfoSignedResponseAlg != nil { + if o != nil && !IsNil(o.UserinfoSignedResponseAlg) { return true } @@ -1561,143 +1770,169 @@ func (o *OAuth2Client) SetUserinfoSignedResponseAlg(v string) { } func (o OAuth2Client) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2Client) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.AllowedCorsOrigins != nil { + if !IsNil(o.AccessTokenStrategy) { + toSerialize["access_token_strategy"] = o.AccessTokenStrategy + } + if !IsNil(o.AllowedCorsOrigins) { toSerialize["allowed_cors_origins"] = o.AllowedCorsOrigins } - if o.Audience != nil { + if !IsNil(o.Audience) { toSerialize["audience"] = o.Audience } - if o.AuthorizationCodeGrantAccessTokenLifespan != nil { + if !IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { toSerialize["authorization_code_grant_access_token_lifespan"] = o.AuthorizationCodeGrantAccessTokenLifespan } - if o.AuthorizationCodeGrantIdTokenLifespan != nil { + if !IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { toSerialize["authorization_code_grant_id_token_lifespan"] = o.AuthorizationCodeGrantIdTokenLifespan } - if o.AuthorizationCodeGrantRefreshTokenLifespan != nil { + if !IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { toSerialize["authorization_code_grant_refresh_token_lifespan"] = o.AuthorizationCodeGrantRefreshTokenLifespan } - if o.BackchannelLogoutSessionRequired != nil { + if !IsNil(o.BackchannelLogoutSessionRequired) { toSerialize["backchannel_logout_session_required"] = o.BackchannelLogoutSessionRequired } - if o.BackchannelLogoutUri != nil { + if !IsNil(o.BackchannelLogoutUri) { toSerialize["backchannel_logout_uri"] = o.BackchannelLogoutUri } - if o.ClientCredentialsGrantAccessTokenLifespan != nil { + if !IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { toSerialize["client_credentials_grant_access_token_lifespan"] = o.ClientCredentialsGrantAccessTokenLifespan } - if o.ClientId != nil { + if !IsNil(o.ClientId) { toSerialize["client_id"] = o.ClientId } - if o.ClientName != nil { + if !IsNil(o.ClientName) { toSerialize["client_name"] = o.ClientName } - if o.ClientSecret != nil { + if !IsNil(o.ClientSecret) { toSerialize["client_secret"] = o.ClientSecret } - if o.ClientSecretExpiresAt != nil { + if !IsNil(o.ClientSecretExpiresAt) { toSerialize["client_secret_expires_at"] = o.ClientSecretExpiresAt } - if o.ClientUri != nil { + if !IsNil(o.ClientUri) { toSerialize["client_uri"] = o.ClientUri } - if o.Contacts != nil { + if 
!IsNil(o.Contacts) { toSerialize["contacts"] = o.Contacts } - if o.CreatedAt != nil { + if !IsNil(o.CreatedAt) { toSerialize["created_at"] = o.CreatedAt } - if o.FrontchannelLogoutSessionRequired != nil { + if !IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + toSerialize["device_authorization_grant_access_token_lifespan"] = o.DeviceAuthorizationGrantAccessTokenLifespan + } + if !IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + toSerialize["device_authorization_grant_id_token_lifespan"] = o.DeviceAuthorizationGrantIdTokenLifespan + } + if !IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + toSerialize["device_authorization_grant_refresh_token_lifespan"] = o.DeviceAuthorizationGrantRefreshTokenLifespan + } + if !IsNil(o.FrontchannelLogoutSessionRequired) { toSerialize["frontchannel_logout_session_required"] = o.FrontchannelLogoutSessionRequired } - if o.FrontchannelLogoutUri != nil { + if !IsNil(o.FrontchannelLogoutUri) { toSerialize["frontchannel_logout_uri"] = o.FrontchannelLogoutUri } - if o.GrantTypes != nil { + if !IsNil(o.GrantTypes) { toSerialize["grant_types"] = o.GrantTypes } - if o.ImplicitGrantAccessTokenLifespan != nil { + if !IsNil(o.ImplicitGrantAccessTokenLifespan) { toSerialize["implicit_grant_access_token_lifespan"] = o.ImplicitGrantAccessTokenLifespan } - if o.ImplicitGrantIdTokenLifespan != nil { + if !IsNil(o.ImplicitGrantIdTokenLifespan) { toSerialize["implicit_grant_id_token_lifespan"] = o.ImplicitGrantIdTokenLifespan } - if o.Jwks != nil { + if !IsNil(o.Jwks) { toSerialize["jwks"] = o.Jwks } - if o.JwksUri != nil { + if !IsNil(o.JwksUri) { toSerialize["jwks_uri"] = o.JwksUri } - if o.JwtBearerGrantAccessTokenLifespan != nil { + if !IsNil(o.JwtBearerGrantAccessTokenLifespan) { toSerialize["jwt_bearer_grant_access_token_lifespan"] = o.JwtBearerGrantAccessTokenLifespan } - if o.LogoUri != nil { + if !IsNil(o.LogoUri) { toSerialize["logo_uri"] = o.LogoUri } if o.Metadata != nil { toSerialize["metadata"] = o.Metadata } - if o.Owner != nil { + if !IsNil(o.Owner) { toSerialize["owner"] = o.Owner } - if o.PolicyUri != nil { + if !IsNil(o.PolicyUri) { toSerialize["policy_uri"] = o.PolicyUri } - if o.PostLogoutRedirectUris != nil { + if !IsNil(o.PostLogoutRedirectUris) { toSerialize["post_logout_redirect_uris"] = o.PostLogoutRedirectUris } - if o.RedirectUris != nil { + if !IsNil(o.RedirectUris) { toSerialize["redirect_uris"] = o.RedirectUris } - if o.RefreshTokenGrantAccessTokenLifespan != nil { + if !IsNil(o.RefreshTokenGrantAccessTokenLifespan) { toSerialize["refresh_token_grant_access_token_lifespan"] = o.RefreshTokenGrantAccessTokenLifespan } - if o.RefreshTokenGrantIdTokenLifespan != nil { + if !IsNil(o.RefreshTokenGrantIdTokenLifespan) { toSerialize["refresh_token_grant_id_token_lifespan"] = o.RefreshTokenGrantIdTokenLifespan } - if o.RefreshTokenGrantRefreshTokenLifespan != nil { + if !IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { toSerialize["refresh_token_grant_refresh_token_lifespan"] = o.RefreshTokenGrantRefreshTokenLifespan } - if o.RegistrationAccessToken != nil { + if !IsNil(o.RegistrationAccessToken) { toSerialize["registration_access_token"] = o.RegistrationAccessToken } - if o.RegistrationClientUri != nil { + if !IsNil(o.RegistrationClientUri) { toSerialize["registration_client_uri"] = o.RegistrationClientUri } - if o.RequestObjectSigningAlg != nil { + if !IsNil(o.RequestObjectSigningAlg) { toSerialize["request_object_signing_alg"] = o.RequestObjectSigningAlg } - if o.RequestUris != nil { + if !IsNil(o.RequestUris) { 
toSerialize["request_uris"] = o.RequestUris } - if o.ResponseTypes != nil { + if !IsNil(o.ResponseTypes) { toSerialize["response_types"] = o.ResponseTypes } - if o.Scope != nil { + if !IsNil(o.Scope) { toSerialize["scope"] = o.Scope } - if o.SectorIdentifierUri != nil { + if !IsNil(o.SectorIdentifierUri) { toSerialize["sector_identifier_uri"] = o.SectorIdentifierUri } - if o.SubjectType != nil { + if !IsNil(o.SkipConsent) { + toSerialize["skip_consent"] = o.SkipConsent + } + if !IsNil(o.SkipLogoutConsent) { + toSerialize["skip_logout_consent"] = o.SkipLogoutConsent + } + if !IsNil(o.SubjectType) { toSerialize["subject_type"] = o.SubjectType } - if o.TokenEndpointAuthMethod != nil { + if !IsNil(o.TokenEndpointAuthMethod) { toSerialize["token_endpoint_auth_method"] = o.TokenEndpointAuthMethod } - if o.TokenEndpointAuthSigningAlg != nil { + if !IsNil(o.TokenEndpointAuthSigningAlg) { toSerialize["token_endpoint_auth_signing_alg"] = o.TokenEndpointAuthSigningAlg } - if o.TosUri != nil { + if !IsNil(o.TosUri) { toSerialize["tos_uri"] = o.TosUri } - if o.UpdatedAt != nil { + if !IsNil(o.UpdatedAt) { toSerialize["updated_at"] = o.UpdatedAt } - if o.UserinfoSignedResponseAlg != nil { + if !IsNil(o.UserinfoSignedResponseAlg) { toSerialize["userinfo_signed_response_alg"] = o.UserinfoSignedResponseAlg } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOAuth2Client struct { diff --git a/internal/httpclient/model_o_auth2_client_token_lifespans.go b/internal/httpclient/model_o_auth2_client_token_lifespans.go index 27af7508496..7fe43053e10 100644 --- a/internal/httpclient/model_o_auth2_client_token_lifespans.go +++ b/internal/httpclient/model_o_auth2_client_token_lifespans.go @@ -15,28 +15,37 @@ import ( "encoding/json" ) +// checks if the OAuth2ClientTokenLifespans type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2ClientTokenLifespans{} + // OAuth2ClientTokenLifespans Lifespans of different token types issued for this OAuth 2.0 Client. type OAuth2ClientTokenLifespans struct { // Specify a time duration in milliseconds, seconds, minutes, hours. - AuthorizationCodeGrantAccessTokenLifespan *string `json:"authorization_code_grant_access_token_lifespan,omitempty"` + AuthorizationCodeGrantAccessTokenLifespan *string `json:"authorization_code_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + // Specify a time duration in milliseconds, seconds, minutes, hours. + AuthorizationCodeGrantIdTokenLifespan *string `json:"authorization_code_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + // Specify a time duration in milliseconds, seconds, minutes, hours. + AuthorizationCodeGrantRefreshTokenLifespan *string `json:"authorization_code_grant_refresh_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - AuthorizationCodeGrantIdTokenLifespan *string `json:"authorization_code_grant_id_token_lifespan,omitempty"` + ClientCredentialsGrantAccessTokenLifespan *string `json:"client_credentials_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. 
- AuthorizationCodeGrantRefreshTokenLifespan *string `json:"authorization_code_grant_refresh_token_lifespan,omitempty"` + DeviceAuthorizationGrantAccessTokenLifespan *string `json:"device_authorization_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - ClientCredentialsGrantAccessTokenLifespan *string `json:"client_credentials_grant_access_token_lifespan,omitempty"` + DeviceAuthorizationGrantIdTokenLifespan *string `json:"device_authorization_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - ImplicitGrantAccessTokenLifespan *string `json:"implicit_grant_access_token_lifespan,omitempty"` + DeviceAuthorizationGrantRefreshTokenLifespan *string `json:"device_authorization_grant_refresh_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - ImplicitGrantIdTokenLifespan *string `json:"implicit_grant_id_token_lifespan,omitempty"` + ImplicitGrantAccessTokenLifespan *string `json:"implicit_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - JwtBearerGrantAccessTokenLifespan *string `json:"jwt_bearer_grant_access_token_lifespan,omitempty"` + ImplicitGrantIdTokenLifespan *string `json:"implicit_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - RefreshTokenGrantAccessTokenLifespan *string `json:"refresh_token_grant_access_token_lifespan,omitempty"` + JwtBearerGrantAccessTokenLifespan *string `json:"jwt_bearer_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - RefreshTokenGrantIdTokenLifespan *string `json:"refresh_token_grant_id_token_lifespan,omitempty"` + RefreshTokenGrantAccessTokenLifespan *string `json:"refresh_token_grant_access_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` // Specify a time duration in milliseconds, seconds, minutes, hours. - RefreshTokenGrantRefreshTokenLifespan *string `json:"refresh_token_grant_refresh_token_lifespan,omitempty"` + RefreshTokenGrantIdTokenLifespan *string `json:"refresh_token_grant_id_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` + // Specify a time duration in milliseconds, seconds, minutes, hours. + RefreshTokenGrantRefreshTokenLifespan *string `json:"refresh_token_grant_refresh_token_lifespan,omitempty" validate:"regexp=^([0-9]+(ns|us|ms|s|m|h))*$"` } // NewOAuth2ClientTokenLifespans instantiates a new OAuth2ClientTokenLifespans object @@ -58,7 +67,7 @@ func NewOAuth2ClientTokenLifespansWithDefaults() *OAuth2ClientTokenLifespans { // GetAuthorizationCodeGrantAccessTokenLifespan returns the AuthorizationCodeGrantAccessTokenLifespan field value if set, zero value otherwise. 
func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantAccessTokenLifespan() string { - if o == nil || o.AuthorizationCodeGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { var ret string return ret } @@ -68,7 +77,7 @@ func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantAccessTokenLifespa // GetAuthorizationCodeGrantAccessTokenLifespanOk returns a tuple with the AuthorizationCodeGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.AuthorizationCodeGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { return nil, false } return o.AuthorizationCodeGrantAccessTokenLifespan, true @@ -76,7 +85,7 @@ func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantAccessTokenLifespa // HasAuthorizationCodeGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasAuthorizationCodeGrantAccessTokenLifespan() bool { - if o != nil && o.AuthorizationCodeGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { return true } @@ -90,7 +99,7 @@ func (o *OAuth2ClientTokenLifespans) SetAuthorizationCodeGrantAccessTokenLifespa // GetAuthorizationCodeGrantIdTokenLifespan returns the AuthorizationCodeGrantIdTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantIdTokenLifespan() string { - if o == nil || o.AuthorizationCodeGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { var ret string return ret } @@ -100,7 +109,7 @@ func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantIdTokenLifespan() // GetAuthorizationCodeGrantIdTokenLifespanOk returns a tuple with the AuthorizationCodeGrantIdTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantIdTokenLifespanOk() (*string, bool) { - if o == nil || o.AuthorizationCodeGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { return nil, false } return o.AuthorizationCodeGrantIdTokenLifespan, true @@ -108,7 +117,7 @@ func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantIdTokenLifespanOk( // HasAuthorizationCodeGrantIdTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasAuthorizationCodeGrantIdTokenLifespan() bool { - if o != nil && o.AuthorizationCodeGrantIdTokenLifespan != nil { + if o != nil && !IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { return true } @@ -122,7 +131,7 @@ func (o *OAuth2ClientTokenLifespans) SetAuthorizationCodeGrantIdTokenLifespan(v // GetAuthorizationCodeGrantRefreshTokenLifespan returns the AuthorizationCodeGrantRefreshTokenLifespan field value if set, zero value otherwise. 
func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantRefreshTokenLifespan() string { - if o == nil || o.AuthorizationCodeGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { var ret string return ret } @@ -132,7 +141,7 @@ func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantRefreshTokenLifesp // GetAuthorizationCodeGrantRefreshTokenLifespanOk returns a tuple with the AuthorizationCodeGrantRefreshTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantRefreshTokenLifespanOk() (*string, bool) { - if o == nil || o.AuthorizationCodeGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { return nil, false } return o.AuthorizationCodeGrantRefreshTokenLifespan, true @@ -140,7 +149,7 @@ func (o *OAuth2ClientTokenLifespans) GetAuthorizationCodeGrantRefreshTokenLifesp // HasAuthorizationCodeGrantRefreshTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasAuthorizationCodeGrantRefreshTokenLifespan() bool { - if o != nil && o.AuthorizationCodeGrantRefreshTokenLifespan != nil { + if o != nil && !IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { return true } @@ -154,7 +163,7 @@ func (o *OAuth2ClientTokenLifespans) SetAuthorizationCodeGrantRefreshTokenLifesp // GetClientCredentialsGrantAccessTokenLifespan returns the ClientCredentialsGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetClientCredentialsGrantAccessTokenLifespan() string { - if o == nil || o.ClientCredentialsGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { var ret string return ret } @@ -164,7 +173,7 @@ func (o *OAuth2ClientTokenLifespans) GetClientCredentialsGrantAccessTokenLifespa // GetClientCredentialsGrantAccessTokenLifespanOk returns a tuple with the ClientCredentialsGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetClientCredentialsGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.ClientCredentialsGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { return nil, false } return o.ClientCredentialsGrantAccessTokenLifespan, true @@ -172,7 +181,7 @@ func (o *OAuth2ClientTokenLifespans) GetClientCredentialsGrantAccessTokenLifespa // HasClientCredentialsGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasClientCredentialsGrantAccessTokenLifespan() bool { - if o != nil && o.ClientCredentialsGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { return true } @@ -184,9 +193,105 @@ func (o *OAuth2ClientTokenLifespans) SetClientCredentialsGrantAccessTokenLifespa o.ClientCredentialsGrantAccessTokenLifespan = &v } +// GetDeviceAuthorizationGrantAccessTokenLifespan returns the DeviceAuthorizationGrantAccessTokenLifespan field value if set, zero value otherwise. 
+func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantAccessTokenLifespan() string { + if o == nil || IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + var ret string + return ret + } + return *o.DeviceAuthorizationGrantAccessTokenLifespan +} + +// GetDeviceAuthorizationGrantAccessTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantAccessTokenLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantAccessTokenLifespanOk() (*string, bool) { + if o == nil || IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + return nil, false + } + return o.DeviceAuthorizationGrantAccessTokenLifespan, true +} + +// HasDeviceAuthorizationGrantAccessTokenLifespan returns a boolean if a field has been set. +func (o *OAuth2ClientTokenLifespans) HasDeviceAuthorizationGrantAccessTokenLifespan() bool { + if o != nil && !IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + return true + } + + return false +} + +// SetDeviceAuthorizationGrantAccessTokenLifespan gets a reference to the given string and assigns it to the DeviceAuthorizationGrantAccessTokenLifespan field. +func (o *OAuth2ClientTokenLifespans) SetDeviceAuthorizationGrantAccessTokenLifespan(v string) { + o.DeviceAuthorizationGrantAccessTokenLifespan = &v +} + +// GetDeviceAuthorizationGrantIdTokenLifespan returns the DeviceAuthorizationGrantIdTokenLifespan field value if set, zero value otherwise. +func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantIdTokenLifespan() string { + if o == nil || IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + var ret string + return ret + } + return *o.DeviceAuthorizationGrantIdTokenLifespan +} + +// GetDeviceAuthorizationGrantIdTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantIdTokenLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantIdTokenLifespanOk() (*string, bool) { + if o == nil || IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + return nil, false + } + return o.DeviceAuthorizationGrantIdTokenLifespan, true +} + +// HasDeviceAuthorizationGrantIdTokenLifespan returns a boolean if a field has been set. +func (o *OAuth2ClientTokenLifespans) HasDeviceAuthorizationGrantIdTokenLifespan() bool { + if o != nil && !IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + return true + } + + return false +} + +// SetDeviceAuthorizationGrantIdTokenLifespan gets a reference to the given string and assigns it to the DeviceAuthorizationGrantIdTokenLifespan field. +func (o *OAuth2ClientTokenLifespans) SetDeviceAuthorizationGrantIdTokenLifespan(v string) { + o.DeviceAuthorizationGrantIdTokenLifespan = &v +} + +// GetDeviceAuthorizationGrantRefreshTokenLifespan returns the DeviceAuthorizationGrantRefreshTokenLifespan field value if set, zero value otherwise. +func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantRefreshTokenLifespan() string { + if o == nil || IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + var ret string + return ret + } + return *o.DeviceAuthorizationGrantRefreshTokenLifespan +} + +// GetDeviceAuthorizationGrantRefreshTokenLifespanOk returns a tuple with the DeviceAuthorizationGrantRefreshTokenLifespan field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *OAuth2ClientTokenLifespans) GetDeviceAuthorizationGrantRefreshTokenLifespanOk() (*string, bool) { + if o == nil || IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + return nil, false + } + return o.DeviceAuthorizationGrantRefreshTokenLifespan, true +} + +// HasDeviceAuthorizationGrantRefreshTokenLifespan returns a boolean if a field has been set. +func (o *OAuth2ClientTokenLifespans) HasDeviceAuthorizationGrantRefreshTokenLifespan() bool { + if o != nil && !IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + return true + } + + return false +} + +// SetDeviceAuthorizationGrantRefreshTokenLifespan gets a reference to the given string and assigns it to the DeviceAuthorizationGrantRefreshTokenLifespan field. +func (o *OAuth2ClientTokenLifespans) SetDeviceAuthorizationGrantRefreshTokenLifespan(v string) { + o.DeviceAuthorizationGrantRefreshTokenLifespan = &v +} + // GetImplicitGrantAccessTokenLifespan returns the ImplicitGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetImplicitGrantAccessTokenLifespan() string { - if o == nil || o.ImplicitGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantAccessTokenLifespan) { var ret string return ret } @@ -196,7 +301,7 @@ func (o *OAuth2ClientTokenLifespans) GetImplicitGrantAccessTokenLifespan() strin // GetImplicitGrantAccessTokenLifespanOk returns a tuple with the ImplicitGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetImplicitGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.ImplicitGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantAccessTokenLifespan) { return nil, false } return o.ImplicitGrantAccessTokenLifespan, true @@ -204,7 +309,7 @@ func (o *OAuth2ClientTokenLifespans) GetImplicitGrantAccessTokenLifespanOk() (*s // HasImplicitGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasImplicitGrantAccessTokenLifespan() bool { - if o != nil && o.ImplicitGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.ImplicitGrantAccessTokenLifespan) { return true } @@ -218,7 +323,7 @@ func (o *OAuth2ClientTokenLifespans) SetImplicitGrantAccessTokenLifespan(v strin // GetImplicitGrantIdTokenLifespan returns the ImplicitGrantIdTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetImplicitGrantIdTokenLifespan() string { - if o == nil || o.ImplicitGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantIdTokenLifespan) { var ret string return ret } @@ -228,7 +333,7 @@ func (o *OAuth2ClientTokenLifespans) GetImplicitGrantIdTokenLifespan() string { // GetImplicitGrantIdTokenLifespanOk returns a tuple with the ImplicitGrantIdTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetImplicitGrantIdTokenLifespanOk() (*string, bool) { - if o == nil || o.ImplicitGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.ImplicitGrantIdTokenLifespan) { return nil, false } return o.ImplicitGrantIdTokenLifespan, true @@ -236,7 +341,7 @@ func (o *OAuth2ClientTokenLifespans) GetImplicitGrantIdTokenLifespanOk() (*strin // HasImplicitGrantIdTokenLifespan returns a boolean if a field has been set. 
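Aside, not part of the diff: the three new device_authorization_grant_* lifespans are wired up exactly like the existing lifespan fields, and their validate tags expect duration strings such as "1h". A minimal sketch using only the accessors added in this hunk, written as if it sat in the same generated package:

// Illustrative only -- not part of the generated file or this diff. The
// validate tags above constrain these values to Go-style duration strings
// matching ^([0-9]+(ns|us|ms|s|m|h))*$ (e.g. "30m", "1h", "720h").
func exampleDeviceGrantLifespans() OAuth2ClientTokenLifespans {
	var ls OAuth2ClientTokenLifespans
	ls.SetDeviceAuthorizationGrantAccessTokenLifespan("1h")
	ls.SetDeviceAuthorizationGrantIdTokenLifespan("1h")
	ls.SetDeviceAuthorizationGrantRefreshTokenLifespan("720h")

	if v, ok := ls.GetDeviceAuthorizationGrantAccessTokenLifespanOk(); ok {
		_ = *v // "1h"; unset lifespans stay nil and are skipped by ToMap/MarshalJSON
	}
	return ls
}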
func (o *OAuth2ClientTokenLifespans) HasImplicitGrantIdTokenLifespan() bool { - if o != nil && o.ImplicitGrantIdTokenLifespan != nil { + if o != nil && !IsNil(o.ImplicitGrantIdTokenLifespan) { return true } @@ -250,7 +355,7 @@ func (o *OAuth2ClientTokenLifespans) SetImplicitGrantIdTokenLifespan(v string) { // GetJwtBearerGrantAccessTokenLifespan returns the JwtBearerGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetJwtBearerGrantAccessTokenLifespan() string { - if o == nil || o.JwtBearerGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.JwtBearerGrantAccessTokenLifespan) { var ret string return ret } @@ -260,7 +365,7 @@ func (o *OAuth2ClientTokenLifespans) GetJwtBearerGrantAccessTokenLifespan() stri // GetJwtBearerGrantAccessTokenLifespanOk returns a tuple with the JwtBearerGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetJwtBearerGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.JwtBearerGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.JwtBearerGrantAccessTokenLifespan) { return nil, false } return o.JwtBearerGrantAccessTokenLifespan, true @@ -268,7 +373,7 @@ func (o *OAuth2ClientTokenLifespans) GetJwtBearerGrantAccessTokenLifespanOk() (* // HasJwtBearerGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasJwtBearerGrantAccessTokenLifespan() bool { - if o != nil && o.JwtBearerGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.JwtBearerGrantAccessTokenLifespan) { return true } @@ -282,7 +387,7 @@ func (o *OAuth2ClientTokenLifespans) SetJwtBearerGrantAccessTokenLifespan(v stri // GetRefreshTokenGrantAccessTokenLifespan returns the RefreshTokenGrantAccessTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantAccessTokenLifespan() string { - if o == nil || o.RefreshTokenGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantAccessTokenLifespan) { var ret string return ret } @@ -292,7 +397,7 @@ func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantAccessTokenLifespan() s // GetRefreshTokenGrantAccessTokenLifespanOk returns a tuple with the RefreshTokenGrantAccessTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantAccessTokenLifespanOk() (*string, bool) { - if o == nil || o.RefreshTokenGrantAccessTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantAccessTokenLifespan) { return nil, false } return o.RefreshTokenGrantAccessTokenLifespan, true @@ -300,7 +405,7 @@ func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantAccessTokenLifespanOk() // HasRefreshTokenGrantAccessTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasRefreshTokenGrantAccessTokenLifespan() bool { - if o != nil && o.RefreshTokenGrantAccessTokenLifespan != nil { + if o != nil && !IsNil(o.RefreshTokenGrantAccessTokenLifespan) { return true } @@ -314,7 +419,7 @@ func (o *OAuth2ClientTokenLifespans) SetRefreshTokenGrantAccessTokenLifespan(v s // GetRefreshTokenGrantIdTokenLifespan returns the RefreshTokenGrantIdTokenLifespan field value if set, zero value otherwise. 
func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantIdTokenLifespan() string { - if o == nil || o.RefreshTokenGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantIdTokenLifespan) { var ret string return ret } @@ -324,7 +429,7 @@ func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantIdTokenLifespan() strin // GetRefreshTokenGrantIdTokenLifespanOk returns a tuple with the RefreshTokenGrantIdTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantIdTokenLifespanOk() (*string, bool) { - if o == nil || o.RefreshTokenGrantIdTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantIdTokenLifespan) { return nil, false } return o.RefreshTokenGrantIdTokenLifespan, true @@ -332,7 +437,7 @@ func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantIdTokenLifespanOk() (*s // HasRefreshTokenGrantIdTokenLifespan returns a boolean if a field has been set. func (o *OAuth2ClientTokenLifespans) HasRefreshTokenGrantIdTokenLifespan() bool { - if o != nil && o.RefreshTokenGrantIdTokenLifespan != nil { + if o != nil && !IsNil(o.RefreshTokenGrantIdTokenLifespan) { return true } @@ -346,7 +451,7 @@ func (o *OAuth2ClientTokenLifespans) SetRefreshTokenGrantIdTokenLifespan(v strin // GetRefreshTokenGrantRefreshTokenLifespan returns the RefreshTokenGrantRefreshTokenLifespan field value if set, zero value otherwise. func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantRefreshTokenLifespan() string { - if o == nil || o.RefreshTokenGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { var ret string return ret } @@ -356,7 +461,7 @@ func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantRefreshTokenLifespan() // GetRefreshTokenGrantRefreshTokenLifespanOk returns a tuple with the RefreshTokenGrantRefreshTokenLifespan field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantRefreshTokenLifespanOk() (*string, bool) { - if o == nil || o.RefreshTokenGrantRefreshTokenLifespan == nil { + if o == nil || IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { return nil, false } return o.RefreshTokenGrantRefreshTokenLifespan, true @@ -364,7 +469,7 @@ func (o *OAuth2ClientTokenLifespans) GetRefreshTokenGrantRefreshTokenLifespanOk( // HasRefreshTokenGrantRefreshTokenLifespan returns a boolean if a field has been set. 
func (o *OAuth2ClientTokenLifespans) HasRefreshTokenGrantRefreshTokenLifespan() bool { - if o != nil && o.RefreshTokenGrantRefreshTokenLifespan != nil { + if o != nil && !IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { return true } @@ -377,38 +482,55 @@ func (o *OAuth2ClientTokenLifespans) SetRefreshTokenGrantRefreshTokenLifespan(v } func (o OAuth2ClientTokenLifespans) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2ClientTokenLifespans) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.AuthorizationCodeGrantAccessTokenLifespan != nil { + if !IsNil(o.AuthorizationCodeGrantAccessTokenLifespan) { toSerialize["authorization_code_grant_access_token_lifespan"] = o.AuthorizationCodeGrantAccessTokenLifespan } - if o.AuthorizationCodeGrantIdTokenLifespan != nil { + if !IsNil(o.AuthorizationCodeGrantIdTokenLifespan) { toSerialize["authorization_code_grant_id_token_lifespan"] = o.AuthorizationCodeGrantIdTokenLifespan } - if o.AuthorizationCodeGrantRefreshTokenLifespan != nil { + if !IsNil(o.AuthorizationCodeGrantRefreshTokenLifespan) { toSerialize["authorization_code_grant_refresh_token_lifespan"] = o.AuthorizationCodeGrantRefreshTokenLifespan } - if o.ClientCredentialsGrantAccessTokenLifespan != nil { + if !IsNil(o.ClientCredentialsGrantAccessTokenLifespan) { toSerialize["client_credentials_grant_access_token_lifespan"] = o.ClientCredentialsGrantAccessTokenLifespan } - if o.ImplicitGrantAccessTokenLifespan != nil { + if !IsNil(o.DeviceAuthorizationGrantAccessTokenLifespan) { + toSerialize["device_authorization_grant_access_token_lifespan"] = o.DeviceAuthorizationGrantAccessTokenLifespan + } + if !IsNil(o.DeviceAuthorizationGrantIdTokenLifespan) { + toSerialize["device_authorization_grant_id_token_lifespan"] = o.DeviceAuthorizationGrantIdTokenLifespan + } + if !IsNil(o.DeviceAuthorizationGrantRefreshTokenLifespan) { + toSerialize["device_authorization_grant_refresh_token_lifespan"] = o.DeviceAuthorizationGrantRefreshTokenLifespan + } + if !IsNil(o.ImplicitGrantAccessTokenLifespan) { toSerialize["implicit_grant_access_token_lifespan"] = o.ImplicitGrantAccessTokenLifespan } - if o.ImplicitGrantIdTokenLifespan != nil { + if !IsNil(o.ImplicitGrantIdTokenLifespan) { toSerialize["implicit_grant_id_token_lifespan"] = o.ImplicitGrantIdTokenLifespan } - if o.JwtBearerGrantAccessTokenLifespan != nil { + if !IsNil(o.JwtBearerGrantAccessTokenLifespan) { toSerialize["jwt_bearer_grant_access_token_lifespan"] = o.JwtBearerGrantAccessTokenLifespan } - if o.RefreshTokenGrantAccessTokenLifespan != nil { + if !IsNil(o.RefreshTokenGrantAccessTokenLifespan) { toSerialize["refresh_token_grant_access_token_lifespan"] = o.RefreshTokenGrantAccessTokenLifespan } - if o.RefreshTokenGrantIdTokenLifespan != nil { + if !IsNil(o.RefreshTokenGrantIdTokenLifespan) { toSerialize["refresh_token_grant_id_token_lifespan"] = o.RefreshTokenGrantIdTokenLifespan } - if o.RefreshTokenGrantRefreshTokenLifespan != nil { + if !IsNil(o.RefreshTokenGrantRefreshTokenLifespan) { toSerialize["refresh_token_grant_refresh_token_lifespan"] = o.RefreshTokenGrantRefreshTokenLifespan } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOAuth2ClientTokenLifespans struct { diff --git a/internal/httpclient/model_o_auth2_consent_request.go b/internal/httpclient/model_o_auth2_consent_request.go index a61e14d016e..77d638dac9c 100644 --- 
a/internal/httpclient/model_o_auth2_consent_request.go +++ b/internal/httpclient/model_o_auth2_consent_request.go @@ -12,18 +12,25 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the OAuth2ConsentRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2ConsentRequest{} + // OAuth2ConsentRequest struct for OAuth2ConsentRequest type OAuth2ConsentRequest struct { // ACR represents the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two factor authentication. Acr *string `json:"acr,omitempty"` Amr []string `json:"amr,omitempty"` - // ID is the identifier (\"authorization challenge\") of the consent authorization request. It is used to identify the session. + // Challenge is used to retrieve/accept/deny the consent request. Challenge string `json:"challenge"` Client *OAuth2Client `json:"client,omitempty"` - Context interface{} `json:"context,omitempty"` + // ConsentRequestID is the ID of the consent request. + ConsentRequestId *string `json:"consent_request_id,omitempty"` + Context interface{} `json:"context,omitempty"` // LoginChallenge is the login challenge this consent challenge belongs to. It can be used to associate a login and consent request in the login & consent app. LoginChallenge *string `json:"login_challenge,omitempty"` // LoginSessionID is the login session ID. If the user-agent reuses a login session (via cookie / remember flag) this ID will remain the same. If the user-agent did not have an existing authentication session (e.g. remember is false) this will be a new random value. This value is used as the \"sid\" parameter in the ID Token and in OIDC Front-/Back- channel logout. It's value can generally be used to associate consecutive login requests by a certain user. @@ -39,6 +46,8 @@ type OAuth2ConsentRequest struct { Subject *string `json:"subject,omitempty"` } +type _OAuth2ConsentRequest OAuth2ConsentRequest + // NewOAuth2ConsentRequest instantiates a new OAuth2ConsentRequest object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -59,7 +68,7 @@ func NewOAuth2ConsentRequestWithDefaults() *OAuth2ConsentRequest { // GetAcr returns the Acr field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetAcr() string { - if o == nil || o.Acr == nil { + if o == nil || IsNil(o.Acr) { var ret string return ret } @@ -69,7 +78,7 @@ func (o *OAuth2ConsentRequest) GetAcr() string { // GetAcrOk returns a tuple with the Acr field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetAcrOk() (*string, bool) { - if o == nil || o.Acr == nil { + if o == nil || IsNil(o.Acr) { return nil, false } return o.Acr, true @@ -77,7 +86,7 @@ func (o *OAuth2ConsentRequest) GetAcrOk() (*string, bool) { // HasAcr returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasAcr() bool { - if o != nil && o.Acr != nil { + if o != nil && !IsNil(o.Acr) { return true } @@ -91,7 +100,7 @@ func (o *OAuth2ConsentRequest) SetAcr(v string) { // GetAmr returns the Amr field value if set, zero value otherwise. 
func (o *OAuth2ConsentRequest) GetAmr() []string { - if o == nil || o.Amr == nil { + if o == nil || IsNil(o.Amr) { var ret []string return ret } @@ -101,7 +110,7 @@ func (o *OAuth2ConsentRequest) GetAmr() []string { // GetAmrOk returns a tuple with the Amr field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetAmrOk() ([]string, bool) { - if o == nil || o.Amr == nil { + if o == nil || IsNil(o.Amr) { return nil, false } return o.Amr, true @@ -109,7 +118,7 @@ func (o *OAuth2ConsentRequest) GetAmrOk() ([]string, bool) { // HasAmr returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasAmr() bool { - if o != nil && o.Amr != nil { + if o != nil && !IsNil(o.Amr) { return true } @@ -147,7 +156,7 @@ func (o *OAuth2ConsentRequest) SetChallenge(v string) { // GetClient returns the Client field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetClient() OAuth2Client { - if o == nil || o.Client == nil { + if o == nil || IsNil(o.Client) { var ret OAuth2Client return ret } @@ -157,7 +166,7 @@ func (o *OAuth2ConsentRequest) GetClient() OAuth2Client { // GetClientOk returns a tuple with the Client field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetClientOk() (*OAuth2Client, bool) { - if o == nil || o.Client == nil { + if o == nil || IsNil(o.Client) { return nil, false } return o.Client, true @@ -165,7 +174,7 @@ func (o *OAuth2ConsentRequest) GetClientOk() (*OAuth2Client, bool) { // HasClient returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasClient() bool { - if o != nil && o.Client != nil { + if o != nil && !IsNil(o.Client) { return true } @@ -177,6 +186,38 @@ func (o *OAuth2ConsentRequest) SetClient(v OAuth2Client) { o.Client = &v } +// GetConsentRequestId returns the ConsentRequestId field value if set, zero value otherwise. +func (o *OAuth2ConsentRequest) GetConsentRequestId() string { + if o == nil || IsNil(o.ConsentRequestId) { + var ret string + return ret + } + return *o.ConsentRequestId +} + +// GetConsentRequestIdOk returns a tuple with the ConsentRequestId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2ConsentRequest) GetConsentRequestIdOk() (*string, bool) { + if o == nil || IsNil(o.ConsentRequestId) { + return nil, false + } + return o.ConsentRequestId, true +} + +// HasConsentRequestId returns a boolean if a field has been set. +func (o *OAuth2ConsentRequest) HasConsentRequestId() bool { + if o != nil && !IsNil(o.ConsentRequestId) { + return true + } + + return false +} + +// SetConsentRequestId gets a reference to the given string and assigns it to the ConsentRequestId field. +func (o *OAuth2ConsentRequest) SetConsentRequestId(v string) { + o.ConsentRequestId = &v +} + // GetContext returns the Context field value if set, zero value otherwise (both if not set or set to explicit null). func (o *OAuth2ConsentRequest) GetContext() interface{} { if o == nil { @@ -190,7 +231,7 @@ func (o *OAuth2ConsentRequest) GetContext() interface{} { // and a boolean to check if the value has been set. 
// NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *OAuth2ConsentRequest) GetContextOk() (*interface{}, bool) { - if o == nil || o.Context == nil { + if o == nil || IsNil(o.Context) { return nil, false } return &o.Context, true @@ -198,7 +239,7 @@ func (o *OAuth2ConsentRequest) GetContextOk() (*interface{}, bool) { // HasContext returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasContext() bool { - if o != nil && o.Context != nil { + if o != nil && !IsNil(o.Context) { return true } @@ -212,7 +253,7 @@ func (o *OAuth2ConsentRequest) SetContext(v interface{}) { // GetLoginChallenge returns the LoginChallenge field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetLoginChallenge() string { - if o == nil || o.LoginChallenge == nil { + if o == nil || IsNil(o.LoginChallenge) { var ret string return ret } @@ -222,7 +263,7 @@ func (o *OAuth2ConsentRequest) GetLoginChallenge() string { // GetLoginChallengeOk returns a tuple with the LoginChallenge field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetLoginChallengeOk() (*string, bool) { - if o == nil || o.LoginChallenge == nil { + if o == nil || IsNil(o.LoginChallenge) { return nil, false } return o.LoginChallenge, true @@ -230,7 +271,7 @@ func (o *OAuth2ConsentRequest) GetLoginChallengeOk() (*string, bool) { // HasLoginChallenge returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasLoginChallenge() bool { - if o != nil && o.LoginChallenge != nil { + if o != nil && !IsNil(o.LoginChallenge) { return true } @@ -244,7 +285,7 @@ func (o *OAuth2ConsentRequest) SetLoginChallenge(v string) { // GetLoginSessionId returns the LoginSessionId field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetLoginSessionId() string { - if o == nil || o.LoginSessionId == nil { + if o == nil || IsNil(o.LoginSessionId) { var ret string return ret } @@ -254,7 +295,7 @@ func (o *OAuth2ConsentRequest) GetLoginSessionId() string { // GetLoginSessionIdOk returns a tuple with the LoginSessionId field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetLoginSessionIdOk() (*string, bool) { - if o == nil || o.LoginSessionId == nil { + if o == nil || IsNil(o.LoginSessionId) { return nil, false } return o.LoginSessionId, true @@ -262,7 +303,7 @@ func (o *OAuth2ConsentRequest) GetLoginSessionIdOk() (*string, bool) { // HasLoginSessionId returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasLoginSessionId() bool { - if o != nil && o.LoginSessionId != nil { + if o != nil && !IsNil(o.LoginSessionId) { return true } @@ -276,7 +317,7 @@ func (o *OAuth2ConsentRequest) SetLoginSessionId(v string) { // GetOidcContext returns the OidcContext field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetOidcContext() OAuth2ConsentRequestOpenIDConnectContext { - if o == nil || o.OidcContext == nil { + if o == nil || IsNil(o.OidcContext) { var ret OAuth2ConsentRequestOpenIDConnectContext return ret } @@ -286,7 +327,7 @@ func (o *OAuth2ConsentRequest) GetOidcContext() OAuth2ConsentRequestOpenIDConnec // GetOidcContextOk returns a tuple with the OidcContext field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OAuth2ConsentRequest) GetOidcContextOk() (*OAuth2ConsentRequestOpenIDConnectContext, bool) { - if o == nil || o.OidcContext == nil { + if o == nil || IsNil(o.OidcContext) { return nil, false } return o.OidcContext, true @@ -294,7 +335,7 @@ func (o *OAuth2ConsentRequest) GetOidcContextOk() (*OAuth2ConsentRequestOpenIDCo // HasOidcContext returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasOidcContext() bool { - if o != nil && o.OidcContext != nil { + if o != nil && !IsNil(o.OidcContext) { return true } @@ -308,7 +349,7 @@ func (o *OAuth2ConsentRequest) SetOidcContext(v OAuth2ConsentRequestOpenIDConnec // GetRequestUrl returns the RequestUrl field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetRequestUrl() string { - if o == nil || o.RequestUrl == nil { + if o == nil || IsNil(o.RequestUrl) { var ret string return ret } @@ -318,7 +359,7 @@ func (o *OAuth2ConsentRequest) GetRequestUrl() string { // GetRequestUrlOk returns a tuple with the RequestUrl field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetRequestUrlOk() (*string, bool) { - if o == nil || o.RequestUrl == nil { + if o == nil || IsNil(o.RequestUrl) { return nil, false } return o.RequestUrl, true @@ -326,7 +367,7 @@ func (o *OAuth2ConsentRequest) GetRequestUrlOk() (*string, bool) { // HasRequestUrl returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasRequestUrl() bool { - if o != nil && o.RequestUrl != nil { + if o != nil && !IsNil(o.RequestUrl) { return true } @@ -340,7 +381,7 @@ func (o *OAuth2ConsentRequest) SetRequestUrl(v string) { // GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetRequestedAccessTokenAudience() []string { - if o == nil || o.RequestedAccessTokenAudience == nil { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { var ret []string return ret } @@ -350,7 +391,7 @@ func (o *OAuth2ConsentRequest) GetRequestedAccessTokenAudience() []string { // GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetRequestedAccessTokenAudienceOk() ([]string, bool) { - if o == nil || o.RequestedAccessTokenAudience == nil { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { return nil, false } return o.RequestedAccessTokenAudience, true @@ -358,7 +399,7 @@ func (o *OAuth2ConsentRequest) GetRequestedAccessTokenAudienceOk() ([]string, bo // HasRequestedAccessTokenAudience returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasRequestedAccessTokenAudience() bool { - if o != nil && o.RequestedAccessTokenAudience != nil { + if o != nil && !IsNil(o.RequestedAccessTokenAudience) { return true } @@ -372,7 +413,7 @@ func (o *OAuth2ConsentRequest) SetRequestedAccessTokenAudience(v []string) { // GetRequestedScope returns the RequestedScope field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetRequestedScope() []string { - if o == nil || o.RequestedScope == nil { + if o == nil || IsNil(o.RequestedScope) { var ret []string return ret } @@ -382,7 +423,7 @@ func (o *OAuth2ConsentRequest) GetRequestedScope() []string { // GetRequestedScopeOk returns a tuple with the RequestedScope field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OAuth2ConsentRequest) GetRequestedScopeOk() ([]string, bool) { - if o == nil || o.RequestedScope == nil { + if o == nil || IsNil(o.RequestedScope) { return nil, false } return o.RequestedScope, true @@ -390,7 +431,7 @@ func (o *OAuth2ConsentRequest) GetRequestedScopeOk() ([]string, bool) { // HasRequestedScope returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasRequestedScope() bool { - if o != nil && o.RequestedScope != nil { + if o != nil && !IsNil(o.RequestedScope) { return true } @@ -404,7 +445,7 @@ func (o *OAuth2ConsentRequest) SetRequestedScope(v []string) { // GetSkip returns the Skip field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetSkip() bool { - if o == nil || o.Skip == nil { + if o == nil || IsNil(o.Skip) { var ret bool return ret } @@ -414,7 +455,7 @@ func (o *OAuth2ConsentRequest) GetSkip() bool { // GetSkipOk returns a tuple with the Skip field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetSkipOk() (*bool, bool) { - if o == nil || o.Skip == nil { + if o == nil || IsNil(o.Skip) { return nil, false } return o.Skip, true @@ -422,7 +463,7 @@ func (o *OAuth2ConsentRequest) GetSkipOk() (*bool, bool) { // HasSkip returns a boolean if a field has been set. func (o *OAuth2ConsentRequest) HasSkip() bool { - if o != nil && o.Skip != nil { + if o != nil && !IsNil(o.Skip) { return true } @@ -436,7 +477,7 @@ func (o *OAuth2ConsentRequest) SetSkip(v bool) { // GetSubject returns the Subject field value if set, zero value otherwise. func (o *OAuth2ConsentRequest) GetSubject() string { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { var ret string return ret } @@ -446,7 +487,7 @@ func (o *OAuth2ConsentRequest) GetSubject() string { // GetSubjectOk returns a tuple with the Subject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequest) GetSubjectOk() (*string, bool) { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { return nil, false } return o.Subject, true @@ -454,7 +495,7 @@ func (o *OAuth2ConsentRequest) GetSubjectOk() (*string, bool) { // HasSubject returns a boolean if a field has been set. 
func (o *OAuth2ConsentRequest) HasSubject() bool { - if o != nil && o.Subject != nil { + if o != nil && !IsNil(o.Subject) { return true } @@ -467,47 +508,93 @@ func (o *OAuth2ConsentRequest) SetSubject(v string) { } func (o OAuth2ConsentRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2ConsentRequest) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Acr != nil { + if !IsNil(o.Acr) { toSerialize["acr"] = o.Acr } - if o.Amr != nil { + if !IsNil(o.Amr) { toSerialize["amr"] = o.Amr } - if true { - toSerialize["challenge"] = o.Challenge - } - if o.Client != nil { + toSerialize["challenge"] = o.Challenge + if !IsNil(o.Client) { toSerialize["client"] = o.Client } + if !IsNil(o.ConsentRequestId) { + toSerialize["consent_request_id"] = o.ConsentRequestId + } if o.Context != nil { toSerialize["context"] = o.Context } - if o.LoginChallenge != nil { + if !IsNil(o.LoginChallenge) { toSerialize["login_challenge"] = o.LoginChallenge } - if o.LoginSessionId != nil { + if !IsNil(o.LoginSessionId) { toSerialize["login_session_id"] = o.LoginSessionId } - if o.OidcContext != nil { + if !IsNil(o.OidcContext) { toSerialize["oidc_context"] = o.OidcContext } - if o.RequestUrl != nil { + if !IsNil(o.RequestUrl) { toSerialize["request_url"] = o.RequestUrl } - if o.RequestedAccessTokenAudience != nil { + if !IsNil(o.RequestedAccessTokenAudience) { toSerialize["requested_access_token_audience"] = o.RequestedAccessTokenAudience } - if o.RequestedScope != nil { + if !IsNil(o.RequestedScope) { toSerialize["requested_scope"] = o.RequestedScope } - if o.Skip != nil { + if !IsNil(o.Skip) { toSerialize["skip"] = o.Skip } - if o.Subject != nil { + if !IsNil(o.Subject) { toSerialize["subject"] = o.Subject } - return json.Marshal(toSerialize) + return toSerialize, nil +} + +func (o *OAuth2ConsentRequest) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. 
+ requiredProperties := []string{ + "challenge", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varOAuth2ConsentRequest := _OAuth2ConsentRequest{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varOAuth2ConsentRequest) + + if err != nil { + return err + } + + *o = OAuth2ConsentRequest(varOAuth2ConsentRequest) + + return err } type NullableOAuth2ConsentRequest struct { diff --git a/internal/httpclient/model_o_auth2_consent_request_open_id_connect_context.go b/internal/httpclient/model_o_auth2_consent_request_open_id_connect_context.go index 8bc15fafe33..962ab736c66 100644 --- a/internal/httpclient/model_o_auth2_consent_request_open_id_connect_context.go +++ b/internal/httpclient/model_o_auth2_consent_request_open_id_connect_context.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the OAuth2ConsentRequestOpenIDConnectContext type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2ConsentRequestOpenIDConnectContext{} + // OAuth2ConsentRequestOpenIDConnectContext struct for OAuth2ConsentRequestOpenIDConnectContext type OAuth2ConsentRequestOpenIDConnectContext struct { // ACRValues is the Authentication AuthorizationContext Class Reference requested in the OAuth 2.0 Authorization request. It is a parameter defined by OpenID Connect and expresses which level of authentication (e.g. 2FA) is required. OpenID Connect defines it as follows: > Requested Authentication AuthorizationContext Class Reference values. Space-separated string that specifies the acr values that the Authorization Server is being requested to use for processing this Authentication Request, with the values appearing in order of preference. The Authentication AuthorizationContext Class satisfied by the authentication performed is returned as the acr Claim Value, as specified in Section 2. The acr Claim is requested as a Voluntary Claim by this parameter. @@ -48,7 +51,7 @@ func NewOAuth2ConsentRequestOpenIDConnectContextWithDefaults() *OAuth2ConsentReq // GetAcrValues returns the AcrValues field value if set, zero value otherwise. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetAcrValues() []string { - if o == nil || o.AcrValues == nil { + if o == nil || IsNil(o.AcrValues) { var ret []string return ret } @@ -58,7 +61,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetAcrValues() []string { // GetAcrValuesOk returns a tuple with the AcrValues field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetAcrValuesOk() ([]string, bool) { - if o == nil || o.AcrValues == nil { + if o == nil || IsNil(o.AcrValues) { return nil, false } return o.AcrValues, true @@ -66,7 +69,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetAcrValuesOk() ([]string, b // HasAcrValues returns a boolean if a field has been set. 
func (o *OAuth2ConsentRequestOpenIDConnectContext) HasAcrValues() bool { - if o != nil && o.AcrValues != nil { + if o != nil && !IsNil(o.AcrValues) { return true } @@ -80,7 +83,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) SetAcrValues(v []string) { // GetDisplay returns the Display field value if set, zero value otherwise. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetDisplay() string { - if o == nil || o.Display == nil { + if o == nil || IsNil(o.Display) { var ret string return ret } @@ -90,7 +93,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetDisplay() string { // GetDisplayOk returns a tuple with the Display field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetDisplayOk() (*string, bool) { - if o == nil || o.Display == nil { + if o == nil || IsNil(o.Display) { return nil, false } return o.Display, true @@ -98,7 +101,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetDisplayOk() (*string, bool // HasDisplay returns a boolean if a field has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) HasDisplay() bool { - if o != nil && o.Display != nil { + if o != nil && !IsNil(o.Display) { return true } @@ -112,7 +115,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) SetDisplay(v string) { // GetIdTokenHintClaims returns the IdTokenHintClaims field value if set, zero value otherwise. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetIdTokenHintClaims() map[string]interface{} { - if o == nil || o.IdTokenHintClaims == nil { + if o == nil || IsNil(o.IdTokenHintClaims) { var ret map[string]interface{} return ret } @@ -122,15 +125,15 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetIdTokenHintClaims() map[st // GetIdTokenHintClaimsOk returns a tuple with the IdTokenHintClaims field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetIdTokenHintClaimsOk() (map[string]interface{}, bool) { - if o == nil || o.IdTokenHintClaims == nil { - return nil, false + if o == nil || IsNil(o.IdTokenHintClaims) { + return map[string]interface{}{}, false } return o.IdTokenHintClaims, true } // HasIdTokenHintClaims returns a boolean if a field has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) HasIdTokenHintClaims() bool { - if o != nil && o.IdTokenHintClaims != nil { + if o != nil && !IsNil(o.IdTokenHintClaims) { return true } @@ -144,7 +147,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) SetIdTokenHintClaims(v map[st // GetLoginHint returns the LoginHint field value if set, zero value otherwise. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetLoginHint() string { - if o == nil || o.LoginHint == nil { + if o == nil || IsNil(o.LoginHint) { var ret string return ret } @@ -154,7 +157,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetLoginHint() string { // GetLoginHintOk returns a tuple with the LoginHint field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetLoginHintOk() (*string, bool) { - if o == nil || o.LoginHint == nil { + if o == nil || IsNil(o.LoginHint) { return nil, false } return o.LoginHint, true @@ -162,7 +165,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetLoginHintOk() (*string, bo // HasLoginHint returns a boolean if a field has been set. 
func (o *OAuth2ConsentRequestOpenIDConnectContext) HasLoginHint() bool { - if o != nil && o.LoginHint != nil { + if o != nil && !IsNil(o.LoginHint) { return true } @@ -176,7 +179,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) SetLoginHint(v string) { // GetUiLocales returns the UiLocales field value if set, zero value otherwise. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetUiLocales() []string { - if o == nil || o.UiLocales == nil { + if o == nil || IsNil(o.UiLocales) { var ret []string return ret } @@ -186,7 +189,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetUiLocales() []string { // GetUiLocalesOk returns a tuple with the UiLocales field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) GetUiLocalesOk() ([]string, bool) { - if o == nil || o.UiLocales == nil { + if o == nil || IsNil(o.UiLocales) { return nil, false } return o.UiLocales, true @@ -194,7 +197,7 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) GetUiLocalesOk() ([]string, b // HasUiLocales returns a boolean if a field has been set. func (o *OAuth2ConsentRequestOpenIDConnectContext) HasUiLocales() bool { - if o != nil && o.UiLocales != nil { + if o != nil && !IsNil(o.UiLocales) { return true } @@ -207,23 +210,31 @@ func (o *OAuth2ConsentRequestOpenIDConnectContext) SetUiLocales(v []string) { } func (o OAuth2ConsentRequestOpenIDConnectContext) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2ConsentRequestOpenIDConnectContext) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.AcrValues != nil { + if !IsNil(o.AcrValues) { toSerialize["acr_values"] = o.AcrValues } - if o.Display != nil { + if !IsNil(o.Display) { toSerialize["display"] = o.Display } - if o.IdTokenHintClaims != nil { + if !IsNil(o.IdTokenHintClaims) { toSerialize["id_token_hint_claims"] = o.IdTokenHintClaims } - if o.LoginHint != nil { + if !IsNil(o.LoginHint) { toSerialize["login_hint"] = o.LoginHint } - if o.UiLocales != nil { + if !IsNil(o.UiLocales) { toSerialize["ui_locales"] = o.UiLocales } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOAuth2ConsentRequestOpenIDConnectContext struct { diff --git a/internal/httpclient/model_o_auth2_consent_session.go b/internal/httpclient/model_o_auth2_consent_session.go index 10d5e797cc5..93f6cd886d8 100644 --- a/internal/httpclient/model_o_auth2_consent_session.go +++ b/internal/httpclient/model_o_auth2_consent_session.go @@ -16,13 +16,18 @@ import ( "time" ) +// checks if the OAuth2ConsentSession type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2ConsentSession{} + // OAuth2ConsentSession A completed OAuth 2.0 Consent Session. type OAuth2ConsentSession struct { - ConsentRequest *OAuth2ConsentRequest `json:"consent_request,omitempty"` - ExpiresAt *OAuth2ConsentSessionExpiresAt `json:"expires_at,omitempty"` - GrantAccessTokenAudience []string `json:"grant_access_token_audience,omitempty"` - GrantScope []string `json:"grant_scope,omitempty"` - HandledAt *time.Time `json:"handled_at,omitempty"` + ConsentRequest *OAuth2ConsentRequest `json:"consent_request,omitempty"` + // ConsentRequestID is the identifier of the consent request that initiated this consent session. 
+ ConsentRequestId *string `json:"consent_request_id,omitempty"` + Context interface{} `json:"context,omitempty"` + GrantAccessTokenAudience []string `json:"grant_access_token_audience,omitempty"` + GrantScope []string `json:"grant_scope,omitempty"` + HandledAt *time.Time `json:"handled_at,omitempty"` // Remember Consent Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same client asks the same user for the same, or a subset of, scope. Remember *bool `json:"remember,omitempty"` // Remember Consent For RememberFor sets how long the consent authorization should be remembered for in seconds. If set to `0`, the authorization will be remembered indefinitely. @@ -49,7 +54,7 @@ func NewOAuth2ConsentSessionWithDefaults() *OAuth2ConsentSession { // GetConsentRequest returns the ConsentRequest field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetConsentRequest() OAuth2ConsentRequest { - if o == nil || o.ConsentRequest == nil { + if o == nil || IsNil(o.ConsentRequest) { var ret OAuth2ConsentRequest return ret } @@ -59,7 +64,7 @@ func (o *OAuth2ConsentSession) GetConsentRequest() OAuth2ConsentRequest { // GetConsentRequestOk returns a tuple with the ConsentRequest field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentSession) GetConsentRequestOk() (*OAuth2ConsentRequest, bool) { - if o == nil || o.ConsentRequest == nil { + if o == nil || IsNil(o.ConsentRequest) { return nil, false } return o.ConsentRequest, true @@ -67,7 +72,7 @@ func (o *OAuth2ConsentSession) GetConsentRequestOk() (*OAuth2ConsentRequest, boo // HasConsentRequest returns a boolean if a field has been set. func (o *OAuth2ConsentSession) HasConsentRequest() bool { - if o != nil && o.ConsentRequest != nil { + if o != nil && !IsNil(o.ConsentRequest) { return true } @@ -79,41 +84,74 @@ func (o *OAuth2ConsentSession) SetConsentRequest(v OAuth2ConsentRequest) { o.ConsentRequest = &v } -// GetExpiresAt returns the ExpiresAt field value if set, zero value otherwise. -func (o *OAuth2ConsentSession) GetExpiresAt() OAuth2ConsentSessionExpiresAt { - if o == nil || o.ExpiresAt == nil { - var ret OAuth2ConsentSessionExpiresAt +// GetConsentRequestId returns the ConsentRequestId field value if set, zero value otherwise. +func (o *OAuth2ConsentSession) GetConsentRequestId() string { + if o == nil || IsNil(o.ConsentRequestId) { + var ret string + return ret + } + return *o.ConsentRequestId +} + +// GetConsentRequestIdOk returns a tuple with the ConsentRequestId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2ConsentSession) GetConsentRequestIdOk() (*string, bool) { + if o == nil || IsNil(o.ConsentRequestId) { + return nil, false + } + return o.ConsentRequestId, true +} + +// HasConsentRequestId returns a boolean if a field has been set. +func (o *OAuth2ConsentSession) HasConsentRequestId() bool { + if o != nil && !IsNil(o.ConsentRequestId) { + return true + } + + return false +} + +// SetConsentRequestId gets a reference to the given string and assigns it to the ConsentRequestId field. +func (o *OAuth2ConsentSession) SetConsentRequestId(v string) { + o.ConsentRequestId = &v +} + +// GetContext returns the Context field value if set, zero value otherwise (both if not set or set to explicit null). 
+func (o *OAuth2ConsentSession) GetContext() interface{} { + if o == nil { + var ret interface{} return ret } - return *o.ExpiresAt + return o.Context } -// GetExpiresAtOk returns a tuple with the ExpiresAt field value if set, nil otherwise +// GetContextOk returns a tuple with the Context field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *OAuth2ConsentSession) GetExpiresAtOk() (*OAuth2ConsentSessionExpiresAt, bool) { - if o == nil || o.ExpiresAt == nil { +// NOTE: If the value is an explicit nil, `nil, true` will be returned +func (o *OAuth2ConsentSession) GetContextOk() (*interface{}, bool) { + if o == nil || IsNil(o.Context) { return nil, false } - return o.ExpiresAt, true + return &o.Context, true } -// HasExpiresAt returns a boolean if a field has been set. -func (o *OAuth2ConsentSession) HasExpiresAt() bool { - if o != nil && o.ExpiresAt != nil { +// HasContext returns a boolean if a field has been set. +func (o *OAuth2ConsentSession) HasContext() bool { + if o != nil && !IsNil(o.Context) { return true } return false } -// SetExpiresAt gets a reference to the given OAuth2ConsentSessionExpiresAt and assigns it to the ExpiresAt field. -func (o *OAuth2ConsentSession) SetExpiresAt(v OAuth2ConsentSessionExpiresAt) { - o.ExpiresAt = &v +// SetContext gets a reference to the given interface{} and assigns it to the Context field. +func (o *OAuth2ConsentSession) SetContext(v interface{}) { + o.Context = v } // GetGrantAccessTokenAudience returns the GrantAccessTokenAudience field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetGrantAccessTokenAudience() []string { - if o == nil || o.GrantAccessTokenAudience == nil { + if o == nil || IsNil(o.GrantAccessTokenAudience) { var ret []string return ret } @@ -123,7 +161,7 @@ func (o *OAuth2ConsentSession) GetGrantAccessTokenAudience() []string { // GetGrantAccessTokenAudienceOk returns a tuple with the GrantAccessTokenAudience field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentSession) GetGrantAccessTokenAudienceOk() ([]string, bool) { - if o == nil || o.GrantAccessTokenAudience == nil { + if o == nil || IsNil(o.GrantAccessTokenAudience) { return nil, false } return o.GrantAccessTokenAudience, true @@ -131,7 +169,7 @@ func (o *OAuth2ConsentSession) GetGrantAccessTokenAudienceOk() ([]string, bool) // HasGrantAccessTokenAudience returns a boolean if a field has been set. func (o *OAuth2ConsentSession) HasGrantAccessTokenAudience() bool { - if o != nil && o.GrantAccessTokenAudience != nil { + if o != nil && !IsNil(o.GrantAccessTokenAudience) { return true } @@ -145,7 +183,7 @@ func (o *OAuth2ConsentSession) SetGrantAccessTokenAudience(v []string) { // GetGrantScope returns the GrantScope field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetGrantScope() []string { - if o == nil || o.GrantScope == nil { + if o == nil || IsNil(o.GrantScope) { var ret []string return ret } @@ -155,7 +193,7 @@ func (o *OAuth2ConsentSession) GetGrantScope() []string { // GetGrantScopeOk returns a tuple with the GrantScope field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OAuth2ConsentSession) GetGrantScopeOk() ([]string, bool) { - if o == nil || o.GrantScope == nil { + if o == nil || IsNil(o.GrantScope) { return nil, false } return o.GrantScope, true @@ -163,7 +201,7 @@ func (o *OAuth2ConsentSession) GetGrantScopeOk() ([]string, bool) { // HasGrantScope returns a boolean if a field has been set. func (o *OAuth2ConsentSession) HasGrantScope() bool { - if o != nil && o.GrantScope != nil { + if o != nil && !IsNil(o.GrantScope) { return true } @@ -177,7 +215,7 @@ func (o *OAuth2ConsentSession) SetGrantScope(v []string) { // GetHandledAt returns the HandledAt field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetHandledAt() time.Time { - if o == nil || o.HandledAt == nil { + if o == nil || IsNil(o.HandledAt) { var ret time.Time return ret } @@ -187,7 +225,7 @@ func (o *OAuth2ConsentSession) GetHandledAt() time.Time { // GetHandledAtOk returns a tuple with the HandledAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentSession) GetHandledAtOk() (*time.Time, bool) { - if o == nil || o.HandledAt == nil { + if o == nil || IsNil(o.HandledAt) { return nil, false } return o.HandledAt, true @@ -195,7 +233,7 @@ func (o *OAuth2ConsentSession) GetHandledAtOk() (*time.Time, bool) { // HasHandledAt returns a boolean if a field has been set. func (o *OAuth2ConsentSession) HasHandledAt() bool { - if o != nil && o.HandledAt != nil { + if o != nil && !IsNil(o.HandledAt) { return true } @@ -209,7 +247,7 @@ func (o *OAuth2ConsentSession) SetHandledAt(v time.Time) { // GetRemember returns the Remember field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetRemember() bool { - if o == nil || o.Remember == nil { + if o == nil || IsNil(o.Remember) { var ret bool return ret } @@ -219,7 +257,7 @@ func (o *OAuth2ConsentSession) GetRemember() bool { // GetRememberOk returns a tuple with the Remember field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentSession) GetRememberOk() (*bool, bool) { - if o == nil || o.Remember == nil { + if o == nil || IsNil(o.Remember) { return nil, false } return o.Remember, true @@ -227,7 +265,7 @@ func (o *OAuth2ConsentSession) GetRememberOk() (*bool, bool) { // HasRemember returns a boolean if a field has been set. func (o *OAuth2ConsentSession) HasRemember() bool { - if o != nil && o.Remember != nil { + if o != nil && !IsNil(o.Remember) { return true } @@ -241,7 +279,7 @@ func (o *OAuth2ConsentSession) SetRemember(v bool) { // GetRememberFor returns the RememberFor field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetRememberFor() int64 { - if o == nil || o.RememberFor == nil { + if o == nil || IsNil(o.RememberFor) { var ret int64 return ret } @@ -251,7 +289,7 @@ func (o *OAuth2ConsentSession) GetRememberFor() int64 { // GetRememberForOk returns a tuple with the RememberFor field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentSession) GetRememberForOk() (*int64, bool) { - if o == nil || o.RememberFor == nil { + if o == nil || IsNil(o.RememberFor) { return nil, false } return o.RememberFor, true @@ -259,7 +297,7 @@ func (o *OAuth2ConsentSession) GetRememberForOk() (*int64, bool) { // HasRememberFor returns a boolean if a field has been set. 
func (o *OAuth2ConsentSession) HasRememberFor() bool { - if o != nil && o.RememberFor != nil { + if o != nil && !IsNil(o.RememberFor) { return true } @@ -273,7 +311,7 @@ func (o *OAuth2ConsentSession) SetRememberFor(v int64) { // GetSession returns the Session field value if set, zero value otherwise. func (o *OAuth2ConsentSession) GetSession() AcceptOAuth2ConsentRequestSession { - if o == nil || o.Session == nil { + if o == nil || IsNil(o.Session) { var ret AcceptOAuth2ConsentRequestSession return ret } @@ -283,7 +321,7 @@ func (o *OAuth2ConsentSession) GetSession() AcceptOAuth2ConsentRequestSession { // GetSessionOk returns a tuple with the Session field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2ConsentSession) GetSessionOk() (*AcceptOAuth2ConsentRequestSession, bool) { - if o == nil || o.Session == nil { + if o == nil || IsNil(o.Session) { return nil, false } return o.Session, true @@ -291,7 +329,7 @@ func (o *OAuth2ConsentSession) GetSessionOk() (*AcceptOAuth2ConsentRequestSessio // HasSession returns a boolean if a field has been set. func (o *OAuth2ConsentSession) HasSession() bool { - if o != nil && o.Session != nil { + if o != nil && !IsNil(o.Session) { return true } @@ -304,32 +342,43 @@ func (o *OAuth2ConsentSession) SetSession(v AcceptOAuth2ConsentRequestSession) { } func (o OAuth2ConsentSession) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2ConsentSession) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.ConsentRequest != nil { + if !IsNil(o.ConsentRequest) { toSerialize["consent_request"] = o.ConsentRequest } - if o.ExpiresAt != nil { - toSerialize["expires_at"] = o.ExpiresAt + if !IsNil(o.ConsentRequestId) { + toSerialize["consent_request_id"] = o.ConsentRequestId + } + if o.Context != nil { + toSerialize["context"] = o.Context } - if o.GrantAccessTokenAudience != nil { + if !IsNil(o.GrantAccessTokenAudience) { toSerialize["grant_access_token_audience"] = o.GrantAccessTokenAudience } - if o.GrantScope != nil { + if !IsNil(o.GrantScope) { toSerialize["grant_scope"] = o.GrantScope } - if o.HandledAt != nil { + if !IsNil(o.HandledAt) { toSerialize["handled_at"] = o.HandledAt } - if o.Remember != nil { + if !IsNil(o.Remember) { toSerialize["remember"] = o.Remember } - if o.RememberFor != nil { + if !IsNil(o.RememberFor) { toSerialize["remember_for"] = o.RememberFor } - if o.Session != nil { + if !IsNil(o.Session) { toSerialize["session"] = o.Session } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOAuth2ConsentSession struct { diff --git a/internal/httpclient/model_o_auth2_consent_session_expires_at.go b/internal/httpclient/model_o_auth2_consent_session_expires_at.go deleted file mode 100644 index a0a752b99bb..00000000000 --- a/internal/httpclient/model_o_auth2_consent_session_expires_at.go +++ /dev/null @@ -1,259 +0,0 @@ -/* -Ory Hydra API - -Documentation for all of Ory Hydra's APIs. - -API version: -Contact: hi@ory.sh -*/ - -// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
- -package openapi - -import ( - "encoding/json" - "time" -) - -// OAuth2ConsentSessionExpiresAt struct for OAuth2ConsentSessionExpiresAt -type OAuth2ConsentSessionExpiresAt struct { - AccessToken *time.Time `json:"access_token,omitempty"` - AuthorizeCode *time.Time `json:"authorize_code,omitempty"` - IdToken *time.Time `json:"id_token,omitempty"` - ParContext *time.Time `json:"par_context,omitempty"` - RefreshToken *time.Time `json:"refresh_token,omitempty"` -} - -// NewOAuth2ConsentSessionExpiresAt instantiates a new OAuth2ConsentSessionExpiresAt object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func NewOAuth2ConsentSessionExpiresAt() *OAuth2ConsentSessionExpiresAt { - this := OAuth2ConsentSessionExpiresAt{} - return &this -} - -// NewOAuth2ConsentSessionExpiresAtWithDefaults instantiates a new OAuth2ConsentSessionExpiresAt object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func NewOAuth2ConsentSessionExpiresAtWithDefaults() *OAuth2ConsentSessionExpiresAt { - this := OAuth2ConsentSessionExpiresAt{} - return &this -} - -// GetAccessToken returns the AccessToken field value if set, zero value otherwise. -func (o *OAuth2ConsentSessionExpiresAt) GetAccessToken() time.Time { - if o == nil || o.AccessToken == nil { - var ret time.Time - return ret - } - return *o.AccessToken -} - -// GetAccessTokenOk returns a tuple with the AccessToken field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *OAuth2ConsentSessionExpiresAt) GetAccessTokenOk() (*time.Time, bool) { - if o == nil || o.AccessToken == nil { - return nil, false - } - return o.AccessToken, true -} - -// HasAccessToken returns a boolean if a field has been set. -func (o *OAuth2ConsentSessionExpiresAt) HasAccessToken() bool { - if o != nil && o.AccessToken != nil { - return true - } - - return false -} - -// SetAccessToken gets a reference to the given time.Time and assigns it to the AccessToken field. -func (o *OAuth2ConsentSessionExpiresAt) SetAccessToken(v time.Time) { - o.AccessToken = &v -} - -// GetAuthorizeCode returns the AuthorizeCode field value if set, zero value otherwise. -func (o *OAuth2ConsentSessionExpiresAt) GetAuthorizeCode() time.Time { - if o == nil || o.AuthorizeCode == nil { - var ret time.Time - return ret - } - return *o.AuthorizeCode -} - -// GetAuthorizeCodeOk returns a tuple with the AuthorizeCode field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *OAuth2ConsentSessionExpiresAt) GetAuthorizeCodeOk() (*time.Time, bool) { - if o == nil || o.AuthorizeCode == nil { - return nil, false - } - return o.AuthorizeCode, true -} - -// HasAuthorizeCode returns a boolean if a field has been set. -func (o *OAuth2ConsentSessionExpiresAt) HasAuthorizeCode() bool { - if o != nil && o.AuthorizeCode != nil { - return true - } - - return false -} - -// SetAuthorizeCode gets a reference to the given time.Time and assigns it to the AuthorizeCode field. -func (o *OAuth2ConsentSessionExpiresAt) SetAuthorizeCode(v time.Time) { - o.AuthorizeCode = &v -} - -// GetIdToken returns the IdToken field value if set, zero value otherwise. 
-func (o *OAuth2ConsentSessionExpiresAt) GetIdToken() time.Time { - if o == nil || o.IdToken == nil { - var ret time.Time - return ret - } - return *o.IdToken -} - -// GetIdTokenOk returns a tuple with the IdToken field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *OAuth2ConsentSessionExpiresAt) GetIdTokenOk() (*time.Time, bool) { - if o == nil || o.IdToken == nil { - return nil, false - } - return o.IdToken, true -} - -// HasIdToken returns a boolean if a field has been set. -func (o *OAuth2ConsentSessionExpiresAt) HasIdToken() bool { - if o != nil && o.IdToken != nil { - return true - } - - return false -} - -// SetIdToken gets a reference to the given time.Time and assigns it to the IdToken field. -func (o *OAuth2ConsentSessionExpiresAt) SetIdToken(v time.Time) { - o.IdToken = &v -} - -// GetParContext returns the ParContext field value if set, zero value otherwise. -func (o *OAuth2ConsentSessionExpiresAt) GetParContext() time.Time { - if o == nil || o.ParContext == nil { - var ret time.Time - return ret - } - return *o.ParContext -} - -// GetParContextOk returns a tuple with the ParContext field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *OAuth2ConsentSessionExpiresAt) GetParContextOk() (*time.Time, bool) { - if o == nil || o.ParContext == nil { - return nil, false - } - return o.ParContext, true -} - -// HasParContext returns a boolean if a field has been set. -func (o *OAuth2ConsentSessionExpiresAt) HasParContext() bool { - if o != nil && o.ParContext != nil { - return true - } - - return false -} - -// SetParContext gets a reference to the given time.Time and assigns it to the ParContext field. -func (o *OAuth2ConsentSessionExpiresAt) SetParContext(v time.Time) { - o.ParContext = &v -} - -// GetRefreshToken returns the RefreshToken field value if set, zero value otherwise. -func (o *OAuth2ConsentSessionExpiresAt) GetRefreshToken() time.Time { - if o == nil || o.RefreshToken == nil { - var ret time.Time - return ret - } - return *o.RefreshToken -} - -// GetRefreshTokenOk returns a tuple with the RefreshToken field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *OAuth2ConsentSessionExpiresAt) GetRefreshTokenOk() (*time.Time, bool) { - if o == nil || o.RefreshToken == nil { - return nil, false - } - return o.RefreshToken, true -} - -// HasRefreshToken returns a boolean if a field has been set. -func (o *OAuth2ConsentSessionExpiresAt) HasRefreshToken() bool { - if o != nil && o.RefreshToken != nil { - return true - } - - return false -} - -// SetRefreshToken gets a reference to the given time.Time and assigns it to the RefreshToken field. 
-func (o *OAuth2ConsentSessionExpiresAt) SetRefreshToken(v time.Time) { - o.RefreshToken = &v -} - -func (o OAuth2ConsentSessionExpiresAt) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if o.AccessToken != nil { - toSerialize["access_token"] = o.AccessToken - } - if o.AuthorizeCode != nil { - toSerialize["authorize_code"] = o.AuthorizeCode - } - if o.IdToken != nil { - toSerialize["id_token"] = o.IdToken - } - if o.ParContext != nil { - toSerialize["par_context"] = o.ParContext - } - if o.RefreshToken != nil { - toSerialize["refresh_token"] = o.RefreshToken - } - return json.Marshal(toSerialize) -} - -type NullableOAuth2ConsentSessionExpiresAt struct { - value *OAuth2ConsentSessionExpiresAt - isSet bool -} - -func (v NullableOAuth2ConsentSessionExpiresAt) Get() *OAuth2ConsentSessionExpiresAt { - return v.value -} - -func (v *NullableOAuth2ConsentSessionExpiresAt) Set(val *OAuth2ConsentSessionExpiresAt) { - v.value = val - v.isSet = true -} - -func (v NullableOAuth2ConsentSessionExpiresAt) IsSet() bool { - return v.isSet -} - -func (v *NullableOAuth2ConsentSessionExpiresAt) Unset() { - v.value = nil - v.isSet = false -} - -func NewNullableOAuth2ConsentSessionExpiresAt(val *OAuth2ConsentSessionExpiresAt) *NullableOAuth2ConsentSessionExpiresAt { - return &NullableOAuth2ConsentSessionExpiresAt{value: val, isSet: true} -} - -func (v NullableOAuth2ConsentSessionExpiresAt) MarshalJSON() ([]byte, error) { - return json.Marshal(v.value) -} - -func (v *NullableOAuth2ConsentSessionExpiresAt) UnmarshalJSON(src []byte) error { - v.isSet = true - return json.Unmarshal(src, &v.value) -} diff --git a/internal/httpclient/model_o_auth2_login_request.go b/internal/httpclient/model_o_auth2_login_request.go index cf29c2a47d0..9ab2ad3e606 100644 --- a/internal/httpclient/model_o_auth2_login_request.go +++ b/internal/httpclient/model_o_auth2_login_request.go @@ -12,19 +12,24 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the OAuth2LoginRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2LoginRequest{} + // OAuth2LoginRequest struct for OAuth2LoginRequest type OAuth2LoginRequest struct { - // ID is the identifier (\"login challenge\") of the login request. It is used to identify the session. + // ID is the identifier of the login request. Challenge string `json:"challenge"` Client OAuth2Client `json:"client"` OidcContext *OAuth2ConsentRequestOpenIDConnectContext `json:"oidc_context,omitempty"` // RequestURL is the original OAuth 2.0 Authorization URL requested by the OAuth 2.0 client. It is the URL which initiates the OAuth 2.0 Authorization Code or OAuth 2.0 Implicit flow. This URL is typically not needed, but might come in handy if you want to deal with additional request parameters. RequestUrl string `json:"request_url"` - RequestedAccessTokenAudience []string `json:"requested_access_token_audience"` - RequestedScope []string `json:"requested_scope"` + RequestedAccessTokenAudience []string `json:"requested_access_token_audience,omitempty"` + RequestedScope []string `json:"requested_scope,omitempty"` // SessionID is the login session ID. If the user-agent reuses a login session (via cookie / remember flag) this ID will remain the same. If the user-agent did not have an existing authentication session (e.g. remember is false) this will be a new random value. This value is used as the \"sid\" parameter in the ID Token and in OIDC Front-/Back- channel logout. 
It's value can generally be used to associate consecutive login requests by a certain user. SessionId *string `json:"session_id,omitempty"` // Skip, if true, implies that the client has requested the same scopes from the same user previously. If true, you can skip asking the user to grant the requested scopes, and simply forward the user to the redirect URL. This feature allows you to update / set session information. @@ -33,17 +38,17 @@ type OAuth2LoginRequest struct { Subject string `json:"subject"` } +type _OAuth2LoginRequest OAuth2LoginRequest + // NewOAuth2LoginRequest instantiates a new OAuth2LoginRequest object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewOAuth2LoginRequest(challenge string, client OAuth2Client, requestUrl string, requestedAccessTokenAudience []string, requestedScope []string, skip bool, subject string) *OAuth2LoginRequest { +func NewOAuth2LoginRequest(challenge string, client OAuth2Client, requestUrl string, skip bool, subject string) *OAuth2LoginRequest { this := OAuth2LoginRequest{} this.Challenge = challenge this.Client = client this.RequestUrl = requestUrl - this.RequestedAccessTokenAudience = requestedAccessTokenAudience - this.RequestedScope = requestedScope this.Skip = skip this.Subject = subject return &this @@ -107,7 +112,7 @@ func (o *OAuth2LoginRequest) SetClient(v OAuth2Client) { // GetOidcContext returns the OidcContext field value if set, zero value otherwise. func (o *OAuth2LoginRequest) GetOidcContext() OAuth2ConsentRequestOpenIDConnectContext { - if o == nil || o.OidcContext == nil { + if o == nil || IsNil(o.OidcContext) { var ret OAuth2ConsentRequestOpenIDConnectContext return ret } @@ -117,7 +122,7 @@ func (o *OAuth2LoginRequest) GetOidcContext() OAuth2ConsentRequestOpenIDConnectC // GetOidcContextOk returns a tuple with the OidcContext field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LoginRequest) GetOidcContextOk() (*OAuth2ConsentRequestOpenIDConnectContext, bool) { - if o == nil || o.OidcContext == nil { + if o == nil || IsNil(o.OidcContext) { return nil, false } return o.OidcContext, true @@ -125,7 +130,7 @@ func (o *OAuth2LoginRequest) GetOidcContextOk() (*OAuth2ConsentRequestOpenIDConn // HasOidcContext returns a boolean if a field has been set. func (o *OAuth2LoginRequest) HasOidcContext() bool { - if o != nil && o.OidcContext != nil { + if o != nil && !IsNil(o.OidcContext) { return true } @@ -161,57 +166,73 @@ func (o *OAuth2LoginRequest) SetRequestUrl(v string) { o.RequestUrl = v } -// GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field value +// GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field value if set, zero value otherwise. func (o *OAuth2LoginRequest) GetRequestedAccessTokenAudience() []string { - if o == nil { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { var ret []string return ret } - return o.RequestedAccessTokenAudience } -// GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field value +// GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OAuth2LoginRequest) GetRequestedAccessTokenAudienceOk() ([]string, bool) { - if o == nil { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { return nil, false } return o.RequestedAccessTokenAudience, true } -// SetRequestedAccessTokenAudience sets field value +// HasRequestedAccessTokenAudience returns a boolean if a field has been set. +func (o *OAuth2LoginRequest) HasRequestedAccessTokenAudience() bool { + if o != nil && !IsNil(o.RequestedAccessTokenAudience) { + return true + } + + return false +} + +// SetRequestedAccessTokenAudience gets a reference to the given []string and assigns it to the RequestedAccessTokenAudience field. func (o *OAuth2LoginRequest) SetRequestedAccessTokenAudience(v []string) { o.RequestedAccessTokenAudience = v } -// GetRequestedScope returns the RequestedScope field value +// GetRequestedScope returns the RequestedScope field value if set, zero value otherwise. func (o *OAuth2LoginRequest) GetRequestedScope() []string { - if o == nil { + if o == nil || IsNil(o.RequestedScope) { var ret []string return ret } - return o.RequestedScope } -// GetRequestedScopeOk returns a tuple with the RequestedScope field value +// GetRequestedScopeOk returns a tuple with the RequestedScope field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LoginRequest) GetRequestedScopeOk() ([]string, bool) { - if o == nil { + if o == nil || IsNil(o.RequestedScope) { return nil, false } return o.RequestedScope, true } -// SetRequestedScope sets field value +// HasRequestedScope returns a boolean if a field has been set. +func (o *OAuth2LoginRequest) HasRequestedScope() bool { + if o != nil && !IsNil(o.RequestedScope) { + return true + } + + return false +} + +// SetRequestedScope gets a reference to the given []string and assigns it to the RequestedScope field. func (o *OAuth2LoginRequest) SetRequestedScope(v []string) { o.RequestedScope = v } // GetSessionId returns the SessionId field value if set, zero value otherwise. func (o *OAuth2LoginRequest) GetSessionId() string { - if o == nil || o.SessionId == nil { + if o == nil || IsNil(o.SessionId) { var ret string return ret } @@ -221,7 +242,7 @@ func (o *OAuth2LoginRequest) GetSessionId() string { // GetSessionIdOk returns a tuple with the SessionId field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LoginRequest) GetSessionIdOk() (*string, bool) { - if o == nil || o.SessionId == nil { + if o == nil || IsNil(o.SessionId) { return nil, false } return o.SessionId, true @@ -229,7 +250,7 @@ func (o *OAuth2LoginRequest) GetSessionIdOk() (*string, bool) { // HasSessionId returns a boolean if a field has been set. 
func (o *OAuth2LoginRequest) HasSessionId() bool { - if o != nil && o.SessionId != nil { + if o != nil && !IsNil(o.SessionId) { return true } @@ -290,35 +311,74 @@ func (o *OAuth2LoginRequest) SetSubject(v string) { } func (o OAuth2LoginRequest) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if true { - toSerialize["challenge"] = o.Challenge - } - if true { - toSerialize["client"] = o.Client + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err } - if o.OidcContext != nil { + return json.Marshal(toSerialize) +} + +func (o OAuth2LoginRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["challenge"] = o.Challenge + toSerialize["client"] = o.Client + if !IsNil(o.OidcContext) { toSerialize["oidc_context"] = o.OidcContext } - if true { - toSerialize["request_url"] = o.RequestUrl - } - if true { + toSerialize["request_url"] = o.RequestUrl + if !IsNil(o.RequestedAccessTokenAudience) { toSerialize["requested_access_token_audience"] = o.RequestedAccessTokenAudience } - if true { + if !IsNil(o.RequestedScope) { toSerialize["requested_scope"] = o.RequestedScope } - if o.SessionId != nil { + if !IsNil(o.SessionId) { toSerialize["session_id"] = o.SessionId } - if true { - toSerialize["skip"] = o.Skip + toSerialize["skip"] = o.Skip + toSerialize["subject"] = o.Subject + return toSerialize, nil +} + +func (o *OAuth2LoginRequest) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. + requiredProperties := []string{ + "challenge", + "client", + "request_url", + "skip", + "subject", } - if true { - toSerialize["subject"] = o.Subject + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err } - return json.Marshal(toSerialize) + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varOAuth2LoginRequest := _OAuth2LoginRequest{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varOAuth2LoginRequest) + + if err != nil { + return err + } + + *o = OAuth2LoginRequest(varOAuth2LoginRequest) + + return err } type NullableOAuth2LoginRequest struct { diff --git a/internal/httpclient/model_o_auth2_logout_request.go b/internal/httpclient/model_o_auth2_logout_request.go index 4a2ef7c0bc7..8a792ab43ec 100644 --- a/internal/httpclient/model_o_auth2_logout_request.go +++ b/internal/httpclient/model_o_auth2_logout_request.go @@ -13,15 +13,21 @@ package openapi import ( "encoding/json" + "time" ) +// checks if the OAuth2LogoutRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2LogoutRequest{} + // OAuth2LogoutRequest struct for OAuth2LogoutRequest type OAuth2LogoutRequest struct { - // Challenge is the identifier (\"logout challenge\") of the logout authentication request. It is used to identify the session. + // Challenge is the identifier of the logout authentication request. Challenge *string `json:"challenge,omitempty"` Client *OAuth2Client `json:"client,omitempty"` + ExpiresAt *time.Time `json:"expires_at,omitempty"` // RequestURL is the original Logout URL requested. 
- RequestUrl *string `json:"request_url,omitempty"` + RequestUrl *string `json:"request_url,omitempty"` + RequestedAt *time.Time `json:"requested_at,omitempty"` // RPInitiated is set to true if the request was initiated by a Relying Party (RP), also known as an OAuth 2.0 Client. RpInitiated *bool `json:"rp_initiated,omitempty"` // SessionID is the login session ID that was requested to log out. @@ -49,7 +55,7 @@ func NewOAuth2LogoutRequestWithDefaults() *OAuth2LogoutRequest { // GetChallenge returns the Challenge field value if set, zero value otherwise. func (o *OAuth2LogoutRequest) GetChallenge() string { - if o == nil || o.Challenge == nil { + if o == nil || IsNil(o.Challenge) { var ret string return ret } @@ -59,7 +65,7 @@ func (o *OAuth2LogoutRequest) GetChallenge() string { // GetChallengeOk returns a tuple with the Challenge field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LogoutRequest) GetChallengeOk() (*string, bool) { - if o == nil || o.Challenge == nil { + if o == nil || IsNil(o.Challenge) { return nil, false } return o.Challenge, true @@ -67,7 +73,7 @@ func (o *OAuth2LogoutRequest) GetChallengeOk() (*string, bool) { // HasChallenge returns a boolean if a field has been set. func (o *OAuth2LogoutRequest) HasChallenge() bool { - if o != nil && o.Challenge != nil { + if o != nil && !IsNil(o.Challenge) { return true } @@ -81,7 +87,7 @@ func (o *OAuth2LogoutRequest) SetChallenge(v string) { // GetClient returns the Client field value if set, zero value otherwise. func (o *OAuth2LogoutRequest) GetClient() OAuth2Client { - if o == nil || o.Client == nil { + if o == nil || IsNil(o.Client) { var ret OAuth2Client return ret } @@ -91,7 +97,7 @@ func (o *OAuth2LogoutRequest) GetClient() OAuth2Client { // GetClientOk returns a tuple with the Client field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LogoutRequest) GetClientOk() (*OAuth2Client, bool) { - if o == nil || o.Client == nil { + if o == nil || IsNil(o.Client) { return nil, false } return o.Client, true @@ -99,7 +105,7 @@ func (o *OAuth2LogoutRequest) GetClientOk() (*OAuth2Client, bool) { // HasClient returns a boolean if a field has been set. func (o *OAuth2LogoutRequest) HasClient() bool { - if o != nil && o.Client != nil { + if o != nil && !IsNil(o.Client) { return true } @@ -111,9 +117,41 @@ func (o *OAuth2LogoutRequest) SetClient(v OAuth2Client) { o.Client = &v } +// GetExpiresAt returns the ExpiresAt field value if set, zero value otherwise. +func (o *OAuth2LogoutRequest) GetExpiresAt() time.Time { + if o == nil || IsNil(o.ExpiresAt) { + var ret time.Time + return ret + } + return *o.ExpiresAt +} + +// GetExpiresAtOk returns a tuple with the ExpiresAt field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2LogoutRequest) GetExpiresAtOk() (*time.Time, bool) { + if o == nil || IsNil(o.ExpiresAt) { + return nil, false + } + return o.ExpiresAt, true +} + +// HasExpiresAt returns a boolean if a field has been set. +func (o *OAuth2LogoutRequest) HasExpiresAt() bool { + if o != nil && !IsNil(o.ExpiresAt) { + return true + } + + return false +} + +// SetExpiresAt gets a reference to the given time.Time and assigns it to the ExpiresAt field. +func (o *OAuth2LogoutRequest) SetExpiresAt(v time.Time) { + o.ExpiresAt = &v +} + // GetRequestUrl returns the RequestUrl field value if set, zero value otherwise. 
func (o *OAuth2LogoutRequest) GetRequestUrl() string { - if o == nil || o.RequestUrl == nil { + if o == nil || IsNil(o.RequestUrl) { var ret string return ret } @@ -123,7 +161,7 @@ func (o *OAuth2LogoutRequest) GetRequestUrl() string { // GetRequestUrlOk returns a tuple with the RequestUrl field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LogoutRequest) GetRequestUrlOk() (*string, bool) { - if o == nil || o.RequestUrl == nil { + if o == nil || IsNil(o.RequestUrl) { return nil, false } return o.RequestUrl, true @@ -131,7 +169,7 @@ func (o *OAuth2LogoutRequest) GetRequestUrlOk() (*string, bool) { // HasRequestUrl returns a boolean if a field has been set. func (o *OAuth2LogoutRequest) HasRequestUrl() bool { - if o != nil && o.RequestUrl != nil { + if o != nil && !IsNil(o.RequestUrl) { return true } @@ -143,9 +181,41 @@ func (o *OAuth2LogoutRequest) SetRequestUrl(v string) { o.RequestUrl = &v } +// GetRequestedAt returns the RequestedAt field value if set, zero value otherwise. +func (o *OAuth2LogoutRequest) GetRequestedAt() time.Time { + if o == nil || IsNil(o.RequestedAt) { + var ret time.Time + return ret + } + return *o.RequestedAt +} + +// GetRequestedAtOk returns a tuple with the RequestedAt field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OAuth2LogoutRequest) GetRequestedAtOk() (*time.Time, bool) { + if o == nil || IsNil(o.RequestedAt) { + return nil, false + } + return o.RequestedAt, true +} + +// HasRequestedAt returns a boolean if a field has been set. +func (o *OAuth2LogoutRequest) HasRequestedAt() bool { + if o != nil && !IsNil(o.RequestedAt) { + return true + } + + return false +} + +// SetRequestedAt gets a reference to the given time.Time and assigns it to the RequestedAt field. +func (o *OAuth2LogoutRequest) SetRequestedAt(v time.Time) { + o.RequestedAt = &v +} + // GetRpInitiated returns the RpInitiated field value if set, zero value otherwise. func (o *OAuth2LogoutRequest) GetRpInitiated() bool { - if o == nil || o.RpInitiated == nil { + if o == nil || IsNil(o.RpInitiated) { var ret bool return ret } @@ -155,7 +225,7 @@ func (o *OAuth2LogoutRequest) GetRpInitiated() bool { // GetRpInitiatedOk returns a tuple with the RpInitiated field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LogoutRequest) GetRpInitiatedOk() (*bool, bool) { - if o == nil || o.RpInitiated == nil { + if o == nil || IsNil(o.RpInitiated) { return nil, false } return o.RpInitiated, true @@ -163,7 +233,7 @@ func (o *OAuth2LogoutRequest) GetRpInitiatedOk() (*bool, bool) { // HasRpInitiated returns a boolean if a field has been set. func (o *OAuth2LogoutRequest) HasRpInitiated() bool { - if o != nil && o.RpInitiated != nil { + if o != nil && !IsNil(o.RpInitiated) { return true } @@ -177,7 +247,7 @@ func (o *OAuth2LogoutRequest) SetRpInitiated(v bool) { // GetSid returns the Sid field value if set, zero value otherwise. func (o *OAuth2LogoutRequest) GetSid() string { - if o == nil || o.Sid == nil { + if o == nil || IsNil(o.Sid) { var ret string return ret } @@ -187,7 +257,7 @@ func (o *OAuth2LogoutRequest) GetSid() string { // GetSidOk returns a tuple with the Sid field value if set, nil otherwise // and a boolean to check if the value has been set. 
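// A minimal usage sketch for the new optional ExpiresAt/RequestedAt fields
// (hypothetical helper, not generated code): like the other optional
// properties they are pointers with Has/Get accessors, so callers should
// probe them before use.
func logoutChallengeExpired(lr *OAuth2LogoutRequest, now time.Time) bool {
	// Only trust the timestamp if the server actually sent it.
	if lr.HasExpiresAt() {
		return lr.GetExpiresAt().Before(now)
	}
	return false
}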
func (o *OAuth2LogoutRequest) GetSidOk() (*string, bool) { - if o == nil || o.Sid == nil { + if o == nil || IsNil(o.Sid) { return nil, false } return o.Sid, true @@ -195,7 +265,7 @@ func (o *OAuth2LogoutRequest) GetSidOk() (*string, bool) { // HasSid returns a boolean if a field has been set. func (o *OAuth2LogoutRequest) HasSid() bool { - if o != nil && o.Sid != nil { + if o != nil && !IsNil(o.Sid) { return true } @@ -209,7 +279,7 @@ func (o *OAuth2LogoutRequest) SetSid(v string) { // GetSubject returns the Subject field value if set, zero value otherwise. func (o *OAuth2LogoutRequest) GetSubject() string { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { var ret string return ret } @@ -219,7 +289,7 @@ func (o *OAuth2LogoutRequest) GetSubject() string { // GetSubjectOk returns a tuple with the Subject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2LogoutRequest) GetSubjectOk() (*string, bool) { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { return nil, false } return o.Subject, true @@ -227,7 +297,7 @@ func (o *OAuth2LogoutRequest) GetSubjectOk() (*string, bool) { // HasSubject returns a boolean if a field has been set. func (o *OAuth2LogoutRequest) HasSubject() bool { - if o != nil && o.Subject != nil { + if o != nil && !IsNil(o.Subject) { return true } @@ -240,26 +310,40 @@ func (o *OAuth2LogoutRequest) SetSubject(v string) { } func (o OAuth2LogoutRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2LogoutRequest) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Challenge != nil { + if !IsNil(o.Challenge) { toSerialize["challenge"] = o.Challenge } - if o.Client != nil { + if !IsNil(o.Client) { toSerialize["client"] = o.Client } - if o.RequestUrl != nil { + if !IsNil(o.ExpiresAt) { + toSerialize["expires_at"] = o.ExpiresAt + } + if !IsNil(o.RequestUrl) { toSerialize["request_url"] = o.RequestUrl } - if o.RpInitiated != nil { + if !IsNil(o.RequestedAt) { + toSerialize["requested_at"] = o.RequestedAt + } + if !IsNil(o.RpInitiated) { toSerialize["rp_initiated"] = o.RpInitiated } - if o.Sid != nil { + if !IsNil(o.Sid) { toSerialize["sid"] = o.Sid } - if o.Subject != nil { + if !IsNil(o.Subject) { toSerialize["subject"] = o.Subject } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOAuth2LogoutRequest struct { diff --git a/internal/httpclient/model_o_auth2_redirect_to.go b/internal/httpclient/model_o_auth2_redirect_to.go index 5abf9d4f250..e2ff2035357 100644 --- a/internal/httpclient/model_o_auth2_redirect_to.go +++ b/internal/httpclient/model_o_auth2_redirect_to.go @@ -12,15 +12,22 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the OAuth2RedirectTo type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2RedirectTo{} + // OAuth2RedirectTo Contains a redirect URL used to complete a login, consent, or logout request. type OAuth2RedirectTo struct { // RedirectURL is the URL which you should redirect the user's browser to once the authentication process is completed. 
RedirectTo string `json:"redirect_to"` } +type _OAuth2RedirectTo OAuth2RedirectTo + // NewOAuth2RedirectTo instantiates a new OAuth2RedirectTo object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -64,13 +71,56 @@ func (o *OAuth2RedirectTo) SetRedirectTo(v string) { } func (o OAuth2RedirectTo) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if true { - toSerialize["redirect_to"] = o.RedirectTo + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err } return json.Marshal(toSerialize) } +func (o OAuth2RedirectTo) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["redirect_to"] = o.RedirectTo + return toSerialize, nil +} + +func (o *OAuth2RedirectTo) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. + requiredProperties := []string{ + "redirect_to", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varOAuth2RedirectTo := _OAuth2RedirectTo{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varOAuth2RedirectTo) + + if err != nil { + return err + } + + *o = OAuth2RedirectTo(varOAuth2RedirectTo) + + return err +} + type NullableOAuth2RedirectTo struct { value *OAuth2RedirectTo isSet bool diff --git a/internal/httpclient/model_o_auth2_token_exchange.go b/internal/httpclient/model_o_auth2_token_exchange.go index 6db3a25cb8f..f2997682d5d 100644 --- a/internal/httpclient/model_o_auth2_token_exchange.go +++ b/internal/httpclient/model_o_auth2_token_exchange.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the OAuth2TokenExchange type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OAuth2TokenExchange{} + // OAuth2TokenExchange OAuth2 Token Exchange Result type OAuth2TokenExchange struct { // The access token issued by the authorization server. @@ -22,7 +25,7 @@ type OAuth2TokenExchange struct { // The lifetime in seconds of the access token. For example, the value \"3600\" denotes that the access token will expire in one hour from the time the response was generated. ExpiresIn *int64 `json:"expires_in,omitempty"` // To retrieve a refresh token request the id_token scope. - IdToken *int64 `json:"id_token,omitempty"` + IdToken *string `json:"id_token,omitempty"` // The refresh token, which can be used to obtain new access tokens. To retrieve it add the scope \"offline\" to your access token request. RefreshToken *string `json:"refresh_token,omitempty"` // The scope of the access token @@ -50,7 +53,7 @@ func NewOAuth2TokenExchangeWithDefaults() *OAuth2TokenExchange { // GetAccessToken returns the AccessToken field value if set, zero value otherwise. 
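// A minimal decoding sketch (hypothetical helper, not generated code): with the
// custom UnmarshalJSON generated for OAuth2RedirectTo above, a payload that
// omits the required "redirect_to" key, or that carries unknown keys, now
// fails instead of silently yielding a zero value.
func decodeRedirectTo(raw []byte) (*OAuth2RedirectTo, error) {
	var rt OAuth2RedirectTo
	if err := json.Unmarshal(raw, &rt); err != nil {
		// e.g. `{}` fails with: no value given for required property redirect_to
		return nil, err
	}
	return &rt, nil
}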
func (o *OAuth2TokenExchange) GetAccessToken() string { - if o == nil || o.AccessToken == nil { + if o == nil || IsNil(o.AccessToken) { var ret string return ret } @@ -60,7 +63,7 @@ func (o *OAuth2TokenExchange) GetAccessToken() string { // GetAccessTokenOk returns a tuple with the AccessToken field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2TokenExchange) GetAccessTokenOk() (*string, bool) { - if o == nil || o.AccessToken == nil { + if o == nil || IsNil(o.AccessToken) { return nil, false } return o.AccessToken, true @@ -68,7 +71,7 @@ func (o *OAuth2TokenExchange) GetAccessTokenOk() (*string, bool) { // HasAccessToken returns a boolean if a field has been set. func (o *OAuth2TokenExchange) HasAccessToken() bool { - if o != nil && o.AccessToken != nil { + if o != nil && !IsNil(o.AccessToken) { return true } @@ -82,7 +85,7 @@ func (o *OAuth2TokenExchange) SetAccessToken(v string) { // GetExpiresIn returns the ExpiresIn field value if set, zero value otherwise. func (o *OAuth2TokenExchange) GetExpiresIn() int64 { - if o == nil || o.ExpiresIn == nil { + if o == nil || IsNil(o.ExpiresIn) { var ret int64 return ret } @@ -92,7 +95,7 @@ func (o *OAuth2TokenExchange) GetExpiresIn() int64 { // GetExpiresInOk returns a tuple with the ExpiresIn field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2TokenExchange) GetExpiresInOk() (*int64, bool) { - if o == nil || o.ExpiresIn == nil { + if o == nil || IsNil(o.ExpiresIn) { return nil, false } return o.ExpiresIn, true @@ -100,7 +103,7 @@ func (o *OAuth2TokenExchange) GetExpiresInOk() (*int64, bool) { // HasExpiresIn returns a boolean if a field has been set. func (o *OAuth2TokenExchange) HasExpiresIn() bool { - if o != nil && o.ExpiresIn != nil { + if o != nil && !IsNil(o.ExpiresIn) { return true } @@ -113,9 +116,9 @@ func (o *OAuth2TokenExchange) SetExpiresIn(v int64) { } // GetIdToken returns the IdToken field value if set, zero value otherwise. -func (o *OAuth2TokenExchange) GetIdToken() int64 { - if o == nil || o.IdToken == nil { - var ret int64 +func (o *OAuth2TokenExchange) GetIdToken() string { + if o == nil || IsNil(o.IdToken) { + var ret string return ret } return *o.IdToken @@ -123,8 +126,8 @@ func (o *OAuth2TokenExchange) GetIdToken() int64 { // GetIdTokenOk returns a tuple with the IdToken field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *OAuth2TokenExchange) GetIdTokenOk() (*int64, bool) { - if o == nil || o.IdToken == nil { +func (o *OAuth2TokenExchange) GetIdTokenOk() (*string, bool) { + if o == nil || IsNil(o.IdToken) { return nil, false } return o.IdToken, true @@ -132,21 +135,21 @@ func (o *OAuth2TokenExchange) GetIdTokenOk() (*int64, bool) { // HasIdToken returns a boolean if a field has been set. func (o *OAuth2TokenExchange) HasIdToken() bool { - if o != nil && o.IdToken != nil { + if o != nil && !IsNil(o.IdToken) { return true } return false } -// SetIdToken gets a reference to the given int64 and assigns it to the IdToken field. -func (o *OAuth2TokenExchange) SetIdToken(v int64) { +// SetIdToken gets a reference to the given string and assigns it to the IdToken field. +func (o *OAuth2TokenExchange) SetIdToken(v string) { o.IdToken = &v } // GetRefreshToken returns the RefreshToken field value if set, zero value otherwise. 
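// A minimal usage sketch (hypothetical helper, not generated code): id_token is
// now exposed as a string (the serialized ID Token) instead of the earlier
// int64, so GetIdToken/SetIdToken operate on string values.
func idTokenFromExchange(tok *OAuth2TokenExchange) (string, bool) {
	if tok.HasIdToken() {
		return tok.GetIdToken(), true
	}
	return "", false
}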
func (o *OAuth2TokenExchange) GetRefreshToken() string { - if o == nil || o.RefreshToken == nil { + if o == nil || IsNil(o.RefreshToken) { var ret string return ret } @@ -156,7 +159,7 @@ func (o *OAuth2TokenExchange) GetRefreshToken() string { // GetRefreshTokenOk returns a tuple with the RefreshToken field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2TokenExchange) GetRefreshTokenOk() (*string, bool) { - if o == nil || o.RefreshToken == nil { + if o == nil || IsNil(o.RefreshToken) { return nil, false } return o.RefreshToken, true @@ -164,7 +167,7 @@ func (o *OAuth2TokenExchange) GetRefreshTokenOk() (*string, bool) { // HasRefreshToken returns a boolean if a field has been set. func (o *OAuth2TokenExchange) HasRefreshToken() bool { - if o != nil && o.RefreshToken != nil { + if o != nil && !IsNil(o.RefreshToken) { return true } @@ -178,7 +181,7 @@ func (o *OAuth2TokenExchange) SetRefreshToken(v string) { // GetScope returns the Scope field value if set, zero value otherwise. func (o *OAuth2TokenExchange) GetScope() string { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { var ret string return ret } @@ -188,7 +191,7 @@ func (o *OAuth2TokenExchange) GetScope() string { // GetScopeOk returns a tuple with the Scope field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2TokenExchange) GetScopeOk() (*string, bool) { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { return nil, false } return o.Scope, true @@ -196,7 +199,7 @@ func (o *OAuth2TokenExchange) GetScopeOk() (*string, bool) { // HasScope returns a boolean if a field has been set. func (o *OAuth2TokenExchange) HasScope() bool { - if o != nil && o.Scope != nil { + if o != nil && !IsNil(o.Scope) { return true } @@ -210,7 +213,7 @@ func (o *OAuth2TokenExchange) SetScope(v string) { // GetTokenType returns the TokenType field value if set, zero value otherwise. func (o *OAuth2TokenExchange) GetTokenType() string { - if o == nil || o.TokenType == nil { + if o == nil || IsNil(o.TokenType) { var ret string return ret } @@ -220,7 +223,7 @@ func (o *OAuth2TokenExchange) GetTokenType() string { // GetTokenTypeOk returns a tuple with the TokenType field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OAuth2TokenExchange) GetTokenTypeOk() (*string, bool) { - if o == nil || o.TokenType == nil { + if o == nil || IsNil(o.TokenType) { return nil, false } return o.TokenType, true @@ -228,7 +231,7 @@ func (o *OAuth2TokenExchange) GetTokenTypeOk() (*string, bool) { // HasTokenType returns a boolean if a field has been set. 
func (o *OAuth2TokenExchange) HasTokenType() bool { - if o != nil && o.TokenType != nil { + if o != nil && !IsNil(o.TokenType) { return true } @@ -241,26 +244,34 @@ func (o *OAuth2TokenExchange) SetTokenType(v string) { } func (o OAuth2TokenExchange) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OAuth2TokenExchange) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.AccessToken != nil { + if !IsNil(o.AccessToken) { toSerialize["access_token"] = o.AccessToken } - if o.ExpiresIn != nil { + if !IsNil(o.ExpiresIn) { toSerialize["expires_in"] = o.ExpiresIn } - if o.IdToken != nil { + if !IsNil(o.IdToken) { toSerialize["id_token"] = o.IdToken } - if o.RefreshToken != nil { + if !IsNil(o.RefreshToken) { toSerialize["refresh_token"] = o.RefreshToken } - if o.Scope != nil { + if !IsNil(o.Scope) { toSerialize["scope"] = o.Scope } - if o.TokenType != nil { + if !IsNil(o.TokenType) { toSerialize["token_type"] = o.TokenType } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOAuth2TokenExchange struct { diff --git a/internal/httpclient/model_oidc_configuration.go b/internal/httpclient/model_oidc_configuration.go index 78411e52b75..465fa997f3f 100644 --- a/internal/httpclient/model_oidc_configuration.go +++ b/internal/httpclient/model_oidc_configuration.go @@ -12,9 +12,14 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" ) +// checks if the OidcConfiguration type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OidcConfiguration{} + // OidcConfiguration Includes links to several endpoints (for example `/oauth2/token`) and exposes information on supported signature algorithms among others. type OidcConfiguration struct { // OAuth 2.0 Authorization Endpoint URL @@ -29,6 +34,12 @@ type OidcConfiguration struct { ClaimsSupported []string `json:"claims_supported,omitempty"` // OAuth 2.0 PKCE Supported Code Challenge Methods JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by this authorization server. CodeChallengeMethodsSupported []string `json:"code_challenge_methods_supported,omitempty"` + // OpenID Connect Verifiable Credentials Endpoint Contains the URL of the Verifiable Credentials Endpoint. + CredentialsEndpointDraft00 *string `json:"credentials_endpoint_draft_00,omitempty"` + // OpenID Connect Verifiable Credentials Supported JSON array containing a list of the Verifiable Credentials supported by this authorization server. + CredentialsSupportedDraft00 []CredentialSupportedDraft00 `json:"credentials_supported_draft_00,omitempty"` + // OAuth 2.0 Device Authorization Endpoint URL + DeviceAuthorizationEndpoint string `json:"device_authorization_endpoint"` // OpenID Connect End-Session Endpoint URL at the OP to which an RP can perform a redirect to request that the End-User be logged out at the OP. EndSessionEndpoint *string `json:"end_session_endpoint,omitempty"` // OpenID Connect Front-Channel Logout Session Required Boolean value specifying whether the OP can pass iss (issuer) and sid (session ID) query parameters to identify the RP session with the OP when the frontchannel_logout_uri is used. If supported, the sid Claim is also included in ID Tokens issued by the OP. 
@@ -77,13 +88,16 @@ type OidcConfiguration struct { UserinfoSigningAlgValuesSupported []string `json:"userinfo_signing_alg_values_supported,omitempty"` } +type _OidcConfiguration OidcConfiguration + // NewOidcConfiguration instantiates a new OidcConfiguration object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewOidcConfiguration(authorizationEndpoint string, idTokenSignedResponseAlg []string, idTokenSigningAlgValuesSupported []string, issuer string, jwksUri string, responseTypesSupported []string, subjectTypesSupported []string, tokenEndpoint string, userinfoSignedResponseAlg []string) *OidcConfiguration { +func NewOidcConfiguration(authorizationEndpoint string, deviceAuthorizationEndpoint string, idTokenSignedResponseAlg []string, idTokenSigningAlgValuesSupported []string, issuer string, jwksUri string, responseTypesSupported []string, subjectTypesSupported []string, tokenEndpoint string, userinfoSignedResponseAlg []string) *OidcConfiguration { this := OidcConfiguration{} this.AuthorizationEndpoint = authorizationEndpoint + this.DeviceAuthorizationEndpoint = deviceAuthorizationEndpoint this.IdTokenSignedResponseAlg = idTokenSignedResponseAlg this.IdTokenSigningAlgValuesSupported = idTokenSigningAlgValuesSupported this.Issuer = issuer @@ -129,7 +143,7 @@ func (o *OidcConfiguration) SetAuthorizationEndpoint(v string) { // GetBackchannelLogoutSessionSupported returns the BackchannelLogoutSessionSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetBackchannelLogoutSessionSupported() bool { - if o == nil || o.BackchannelLogoutSessionSupported == nil { + if o == nil || IsNil(o.BackchannelLogoutSessionSupported) { var ret bool return ret } @@ -139,7 +153,7 @@ func (o *OidcConfiguration) GetBackchannelLogoutSessionSupported() bool { // GetBackchannelLogoutSessionSupportedOk returns a tuple with the BackchannelLogoutSessionSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetBackchannelLogoutSessionSupportedOk() (*bool, bool) { - if o == nil || o.BackchannelLogoutSessionSupported == nil { + if o == nil || IsNil(o.BackchannelLogoutSessionSupported) { return nil, false } return o.BackchannelLogoutSessionSupported, true @@ -147,7 +161,7 @@ func (o *OidcConfiguration) GetBackchannelLogoutSessionSupportedOk() (*bool, boo // HasBackchannelLogoutSessionSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasBackchannelLogoutSessionSupported() bool { - if o != nil && o.BackchannelLogoutSessionSupported != nil { + if o != nil && !IsNil(o.BackchannelLogoutSessionSupported) { return true } @@ -161,7 +175,7 @@ func (o *OidcConfiguration) SetBackchannelLogoutSessionSupported(v bool) { // GetBackchannelLogoutSupported returns the BackchannelLogoutSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetBackchannelLogoutSupported() bool { - if o == nil || o.BackchannelLogoutSupported == nil { + if o == nil || IsNil(o.BackchannelLogoutSupported) { var ret bool return ret } @@ -171,7 +185,7 @@ func (o *OidcConfiguration) GetBackchannelLogoutSupported() bool { // GetBackchannelLogoutSupportedOk returns a tuple with the BackchannelLogoutSupported field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OidcConfiguration) GetBackchannelLogoutSupportedOk() (*bool, bool) { - if o == nil || o.BackchannelLogoutSupported == nil { + if o == nil || IsNil(o.BackchannelLogoutSupported) { return nil, false } return o.BackchannelLogoutSupported, true @@ -179,7 +193,7 @@ func (o *OidcConfiguration) GetBackchannelLogoutSupportedOk() (*bool, bool) { // HasBackchannelLogoutSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasBackchannelLogoutSupported() bool { - if o != nil && o.BackchannelLogoutSupported != nil { + if o != nil && !IsNil(o.BackchannelLogoutSupported) { return true } @@ -193,7 +207,7 @@ func (o *OidcConfiguration) SetBackchannelLogoutSupported(v bool) { // GetClaimsParameterSupported returns the ClaimsParameterSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetClaimsParameterSupported() bool { - if o == nil || o.ClaimsParameterSupported == nil { + if o == nil || IsNil(o.ClaimsParameterSupported) { var ret bool return ret } @@ -203,7 +217,7 @@ func (o *OidcConfiguration) GetClaimsParameterSupported() bool { // GetClaimsParameterSupportedOk returns a tuple with the ClaimsParameterSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetClaimsParameterSupportedOk() (*bool, bool) { - if o == nil || o.ClaimsParameterSupported == nil { + if o == nil || IsNil(o.ClaimsParameterSupported) { return nil, false } return o.ClaimsParameterSupported, true @@ -211,7 +225,7 @@ func (o *OidcConfiguration) GetClaimsParameterSupportedOk() (*bool, bool) { // HasClaimsParameterSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasClaimsParameterSupported() bool { - if o != nil && o.ClaimsParameterSupported != nil { + if o != nil && !IsNil(o.ClaimsParameterSupported) { return true } @@ -225,7 +239,7 @@ func (o *OidcConfiguration) SetClaimsParameterSupported(v bool) { // GetClaimsSupported returns the ClaimsSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetClaimsSupported() []string { - if o == nil || o.ClaimsSupported == nil { + if o == nil || IsNil(o.ClaimsSupported) { var ret []string return ret } @@ -235,7 +249,7 @@ func (o *OidcConfiguration) GetClaimsSupported() []string { // GetClaimsSupportedOk returns a tuple with the ClaimsSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetClaimsSupportedOk() ([]string, bool) { - if o == nil || o.ClaimsSupported == nil { + if o == nil || IsNil(o.ClaimsSupported) { return nil, false } return o.ClaimsSupported, true @@ -243,7 +257,7 @@ func (o *OidcConfiguration) GetClaimsSupportedOk() ([]string, bool) { // HasClaimsSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasClaimsSupported() bool { - if o != nil && o.ClaimsSupported != nil { + if o != nil && !IsNil(o.ClaimsSupported) { return true } @@ -257,7 +271,7 @@ func (o *OidcConfiguration) SetClaimsSupported(v []string) { // GetCodeChallengeMethodsSupported returns the CodeChallengeMethodsSupported field value if set, zero value otherwise. 
func (o *OidcConfiguration) GetCodeChallengeMethodsSupported() []string { - if o == nil || o.CodeChallengeMethodsSupported == nil { + if o == nil || IsNil(o.CodeChallengeMethodsSupported) { var ret []string return ret } @@ -267,7 +281,7 @@ func (o *OidcConfiguration) GetCodeChallengeMethodsSupported() []string { // GetCodeChallengeMethodsSupportedOk returns a tuple with the CodeChallengeMethodsSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetCodeChallengeMethodsSupportedOk() ([]string, bool) { - if o == nil || o.CodeChallengeMethodsSupported == nil { + if o == nil || IsNil(o.CodeChallengeMethodsSupported) { return nil, false } return o.CodeChallengeMethodsSupported, true @@ -275,7 +289,7 @@ func (o *OidcConfiguration) GetCodeChallengeMethodsSupportedOk() ([]string, bool // HasCodeChallengeMethodsSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasCodeChallengeMethodsSupported() bool { - if o != nil && o.CodeChallengeMethodsSupported != nil { + if o != nil && !IsNil(o.CodeChallengeMethodsSupported) { return true } @@ -287,9 +301,97 @@ func (o *OidcConfiguration) SetCodeChallengeMethodsSupported(v []string) { o.CodeChallengeMethodsSupported = v } +// GetCredentialsEndpointDraft00 returns the CredentialsEndpointDraft00 field value if set, zero value otherwise. +func (o *OidcConfiguration) GetCredentialsEndpointDraft00() string { + if o == nil || IsNil(o.CredentialsEndpointDraft00) { + var ret string + return ret + } + return *o.CredentialsEndpointDraft00 +} + +// GetCredentialsEndpointDraft00Ok returns a tuple with the CredentialsEndpointDraft00 field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OidcConfiguration) GetCredentialsEndpointDraft00Ok() (*string, bool) { + if o == nil || IsNil(o.CredentialsEndpointDraft00) { + return nil, false + } + return o.CredentialsEndpointDraft00, true +} + +// HasCredentialsEndpointDraft00 returns a boolean if a field has been set. +func (o *OidcConfiguration) HasCredentialsEndpointDraft00() bool { + if o != nil && !IsNil(o.CredentialsEndpointDraft00) { + return true + } + + return false +} + +// SetCredentialsEndpointDraft00 gets a reference to the given string and assigns it to the CredentialsEndpointDraft00 field. +func (o *OidcConfiguration) SetCredentialsEndpointDraft00(v string) { + o.CredentialsEndpointDraft00 = &v +} + +// GetCredentialsSupportedDraft00 returns the CredentialsSupportedDraft00 field value if set, zero value otherwise. +func (o *OidcConfiguration) GetCredentialsSupportedDraft00() []CredentialSupportedDraft00 { + if o == nil || IsNil(o.CredentialsSupportedDraft00) { + var ret []CredentialSupportedDraft00 + return ret + } + return o.CredentialsSupportedDraft00 +} + +// GetCredentialsSupportedDraft00Ok returns a tuple with the CredentialsSupportedDraft00 field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *OidcConfiguration) GetCredentialsSupportedDraft00Ok() ([]CredentialSupportedDraft00, bool) { + if o == nil || IsNil(o.CredentialsSupportedDraft00) { + return nil, false + } + return o.CredentialsSupportedDraft00, true +} + +// HasCredentialsSupportedDraft00 returns a boolean if a field has been set. 
+func (o *OidcConfiguration) HasCredentialsSupportedDraft00() bool { + if o != nil && !IsNil(o.CredentialsSupportedDraft00) { + return true + } + + return false +} + +// SetCredentialsSupportedDraft00 gets a reference to the given []CredentialSupportedDraft00 and assigns it to the CredentialsSupportedDraft00 field. +func (o *OidcConfiguration) SetCredentialsSupportedDraft00(v []CredentialSupportedDraft00) { + o.CredentialsSupportedDraft00 = v +} + +// GetDeviceAuthorizationEndpoint returns the DeviceAuthorizationEndpoint field value +func (o *OidcConfiguration) GetDeviceAuthorizationEndpoint() string { + if o == nil { + var ret string + return ret + } + + return o.DeviceAuthorizationEndpoint +} + +// GetDeviceAuthorizationEndpointOk returns a tuple with the DeviceAuthorizationEndpoint field value +// and a boolean to check if the value has been set. +func (o *OidcConfiguration) GetDeviceAuthorizationEndpointOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.DeviceAuthorizationEndpoint, true +} + +// SetDeviceAuthorizationEndpoint sets field value +func (o *OidcConfiguration) SetDeviceAuthorizationEndpoint(v string) { + o.DeviceAuthorizationEndpoint = v +} + // GetEndSessionEndpoint returns the EndSessionEndpoint field value if set, zero value otherwise. func (o *OidcConfiguration) GetEndSessionEndpoint() string { - if o == nil || o.EndSessionEndpoint == nil { + if o == nil || IsNil(o.EndSessionEndpoint) { var ret string return ret } @@ -299,7 +401,7 @@ func (o *OidcConfiguration) GetEndSessionEndpoint() string { // GetEndSessionEndpointOk returns a tuple with the EndSessionEndpoint field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetEndSessionEndpointOk() (*string, bool) { - if o == nil || o.EndSessionEndpoint == nil { + if o == nil || IsNil(o.EndSessionEndpoint) { return nil, false } return o.EndSessionEndpoint, true @@ -307,7 +409,7 @@ func (o *OidcConfiguration) GetEndSessionEndpointOk() (*string, bool) { // HasEndSessionEndpoint returns a boolean if a field has been set. func (o *OidcConfiguration) HasEndSessionEndpoint() bool { - if o != nil && o.EndSessionEndpoint != nil { + if o != nil && !IsNil(o.EndSessionEndpoint) { return true } @@ -321,7 +423,7 @@ func (o *OidcConfiguration) SetEndSessionEndpoint(v string) { // GetFrontchannelLogoutSessionSupported returns the FrontchannelLogoutSessionSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetFrontchannelLogoutSessionSupported() bool { - if o == nil || o.FrontchannelLogoutSessionSupported == nil { + if o == nil || IsNil(o.FrontchannelLogoutSessionSupported) { var ret bool return ret } @@ -331,7 +433,7 @@ func (o *OidcConfiguration) GetFrontchannelLogoutSessionSupported() bool { // GetFrontchannelLogoutSessionSupportedOk returns a tuple with the FrontchannelLogoutSessionSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetFrontchannelLogoutSessionSupportedOk() (*bool, bool) { - if o == nil || o.FrontchannelLogoutSessionSupported == nil { + if o == nil || IsNil(o.FrontchannelLogoutSessionSupported) { return nil, false } return o.FrontchannelLogoutSessionSupported, true @@ -339,7 +441,7 @@ func (o *OidcConfiguration) GetFrontchannelLogoutSessionSupportedOk() (*bool, bo // HasFrontchannelLogoutSessionSupported returns a boolean if a field has been set. 
func (o *OidcConfiguration) HasFrontchannelLogoutSessionSupported() bool { - if o != nil && o.FrontchannelLogoutSessionSupported != nil { + if o != nil && !IsNil(o.FrontchannelLogoutSessionSupported) { return true } @@ -353,7 +455,7 @@ func (o *OidcConfiguration) SetFrontchannelLogoutSessionSupported(v bool) { // GetFrontchannelLogoutSupported returns the FrontchannelLogoutSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetFrontchannelLogoutSupported() bool { - if o == nil || o.FrontchannelLogoutSupported == nil { + if o == nil || IsNil(o.FrontchannelLogoutSupported) { var ret bool return ret } @@ -363,7 +465,7 @@ func (o *OidcConfiguration) GetFrontchannelLogoutSupported() bool { // GetFrontchannelLogoutSupportedOk returns a tuple with the FrontchannelLogoutSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetFrontchannelLogoutSupportedOk() (*bool, bool) { - if o == nil || o.FrontchannelLogoutSupported == nil { + if o == nil || IsNil(o.FrontchannelLogoutSupported) { return nil, false } return o.FrontchannelLogoutSupported, true @@ -371,7 +473,7 @@ func (o *OidcConfiguration) GetFrontchannelLogoutSupportedOk() (*bool, bool) { // HasFrontchannelLogoutSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasFrontchannelLogoutSupported() bool { - if o != nil && o.FrontchannelLogoutSupported != nil { + if o != nil && !IsNil(o.FrontchannelLogoutSupported) { return true } @@ -385,7 +487,7 @@ func (o *OidcConfiguration) SetFrontchannelLogoutSupported(v bool) { // GetGrantTypesSupported returns the GrantTypesSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetGrantTypesSupported() []string { - if o == nil || o.GrantTypesSupported == nil { + if o == nil || IsNil(o.GrantTypesSupported) { var ret []string return ret } @@ -395,7 +497,7 @@ func (o *OidcConfiguration) GetGrantTypesSupported() []string { // GetGrantTypesSupportedOk returns a tuple with the GrantTypesSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetGrantTypesSupportedOk() ([]string, bool) { - if o == nil || o.GrantTypesSupported == nil { + if o == nil || IsNil(o.GrantTypesSupported) { return nil, false } return o.GrantTypesSupported, true @@ -403,7 +505,7 @@ func (o *OidcConfiguration) GetGrantTypesSupportedOk() ([]string, bool) { // HasGrantTypesSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasGrantTypesSupported() bool { - if o != nil && o.GrantTypesSupported != nil { + if o != nil && !IsNil(o.GrantTypesSupported) { return true } @@ -513,7 +615,7 @@ func (o *OidcConfiguration) SetJwksUri(v string) { // GetRegistrationEndpoint returns the RegistrationEndpoint field value if set, zero value otherwise. func (o *OidcConfiguration) GetRegistrationEndpoint() string { - if o == nil || o.RegistrationEndpoint == nil { + if o == nil || IsNil(o.RegistrationEndpoint) { var ret string return ret } @@ -523,7 +625,7 @@ func (o *OidcConfiguration) GetRegistrationEndpoint() string { // GetRegistrationEndpointOk returns a tuple with the RegistrationEndpoint field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OidcConfiguration) GetRegistrationEndpointOk() (*string, bool) { - if o == nil || o.RegistrationEndpoint == nil { + if o == nil || IsNil(o.RegistrationEndpoint) { return nil, false } return o.RegistrationEndpoint, true @@ -531,7 +633,7 @@ func (o *OidcConfiguration) GetRegistrationEndpointOk() (*string, bool) { // HasRegistrationEndpoint returns a boolean if a field has been set. func (o *OidcConfiguration) HasRegistrationEndpoint() bool { - if o != nil && o.RegistrationEndpoint != nil { + if o != nil && !IsNil(o.RegistrationEndpoint) { return true } @@ -545,7 +647,7 @@ func (o *OidcConfiguration) SetRegistrationEndpoint(v string) { // GetRequestObjectSigningAlgValuesSupported returns the RequestObjectSigningAlgValuesSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetRequestObjectSigningAlgValuesSupported() []string { - if o == nil || o.RequestObjectSigningAlgValuesSupported == nil { + if o == nil || IsNil(o.RequestObjectSigningAlgValuesSupported) { var ret []string return ret } @@ -555,7 +657,7 @@ func (o *OidcConfiguration) GetRequestObjectSigningAlgValuesSupported() []string // GetRequestObjectSigningAlgValuesSupportedOk returns a tuple with the RequestObjectSigningAlgValuesSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetRequestObjectSigningAlgValuesSupportedOk() ([]string, bool) { - if o == nil || o.RequestObjectSigningAlgValuesSupported == nil { + if o == nil || IsNil(o.RequestObjectSigningAlgValuesSupported) { return nil, false } return o.RequestObjectSigningAlgValuesSupported, true @@ -563,7 +665,7 @@ func (o *OidcConfiguration) GetRequestObjectSigningAlgValuesSupportedOk() ([]str // HasRequestObjectSigningAlgValuesSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasRequestObjectSigningAlgValuesSupported() bool { - if o != nil && o.RequestObjectSigningAlgValuesSupported != nil { + if o != nil && !IsNil(o.RequestObjectSigningAlgValuesSupported) { return true } @@ -577,7 +679,7 @@ func (o *OidcConfiguration) SetRequestObjectSigningAlgValuesSupported(v []string // GetRequestParameterSupported returns the RequestParameterSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetRequestParameterSupported() bool { - if o == nil || o.RequestParameterSupported == nil { + if o == nil || IsNil(o.RequestParameterSupported) { var ret bool return ret } @@ -587,7 +689,7 @@ func (o *OidcConfiguration) GetRequestParameterSupported() bool { // GetRequestParameterSupportedOk returns a tuple with the RequestParameterSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetRequestParameterSupportedOk() (*bool, bool) { - if o == nil || o.RequestParameterSupported == nil { + if o == nil || IsNil(o.RequestParameterSupported) { return nil, false } return o.RequestParameterSupported, true @@ -595,7 +697,7 @@ func (o *OidcConfiguration) GetRequestParameterSupportedOk() (*bool, bool) { // HasRequestParameterSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasRequestParameterSupported() bool { - if o != nil && o.RequestParameterSupported != nil { + if o != nil && !IsNil(o.RequestParameterSupported) { return true } @@ -609,7 +711,7 @@ func (o *OidcConfiguration) SetRequestParameterSupported(v bool) { // GetRequestUriParameterSupported returns the RequestUriParameterSupported field value if set, zero value otherwise. 
func (o *OidcConfiguration) GetRequestUriParameterSupported() bool { - if o == nil || o.RequestUriParameterSupported == nil { + if o == nil || IsNil(o.RequestUriParameterSupported) { var ret bool return ret } @@ -619,7 +721,7 @@ func (o *OidcConfiguration) GetRequestUriParameterSupported() bool { // GetRequestUriParameterSupportedOk returns a tuple with the RequestUriParameterSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetRequestUriParameterSupportedOk() (*bool, bool) { - if o == nil || o.RequestUriParameterSupported == nil { + if o == nil || IsNil(o.RequestUriParameterSupported) { return nil, false } return o.RequestUriParameterSupported, true @@ -627,7 +729,7 @@ func (o *OidcConfiguration) GetRequestUriParameterSupportedOk() (*bool, bool) { // HasRequestUriParameterSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasRequestUriParameterSupported() bool { - if o != nil && o.RequestUriParameterSupported != nil { + if o != nil && !IsNil(o.RequestUriParameterSupported) { return true } @@ -641,7 +743,7 @@ func (o *OidcConfiguration) SetRequestUriParameterSupported(v bool) { // GetRequireRequestUriRegistration returns the RequireRequestUriRegistration field value if set, zero value otherwise. func (o *OidcConfiguration) GetRequireRequestUriRegistration() bool { - if o == nil || o.RequireRequestUriRegistration == nil { + if o == nil || IsNil(o.RequireRequestUriRegistration) { var ret bool return ret } @@ -651,7 +753,7 @@ func (o *OidcConfiguration) GetRequireRequestUriRegistration() bool { // GetRequireRequestUriRegistrationOk returns a tuple with the RequireRequestUriRegistration field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetRequireRequestUriRegistrationOk() (*bool, bool) { - if o == nil || o.RequireRequestUriRegistration == nil { + if o == nil || IsNil(o.RequireRequestUriRegistration) { return nil, false } return o.RequireRequestUriRegistration, true @@ -659,7 +761,7 @@ func (o *OidcConfiguration) GetRequireRequestUriRegistrationOk() (*bool, bool) { // HasRequireRequestUriRegistration returns a boolean if a field has been set. func (o *OidcConfiguration) HasRequireRequestUriRegistration() bool { - if o != nil && o.RequireRequestUriRegistration != nil { + if o != nil && !IsNil(o.RequireRequestUriRegistration) { return true } @@ -673,7 +775,7 @@ func (o *OidcConfiguration) SetRequireRequestUriRegistration(v bool) { // GetResponseModesSupported returns the ResponseModesSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetResponseModesSupported() []string { - if o == nil || o.ResponseModesSupported == nil { + if o == nil || IsNil(o.ResponseModesSupported) { var ret []string return ret } @@ -683,7 +785,7 @@ func (o *OidcConfiguration) GetResponseModesSupported() []string { // GetResponseModesSupportedOk returns a tuple with the ResponseModesSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetResponseModesSupportedOk() ([]string, bool) { - if o == nil || o.ResponseModesSupported == nil { + if o == nil || IsNil(o.ResponseModesSupported) { return nil, false } return o.ResponseModesSupported, true @@ -691,7 +793,7 @@ func (o *OidcConfiguration) GetResponseModesSupportedOk() ([]string, bool) { // HasResponseModesSupported returns a boolean if a field has been set. 
func (o *OidcConfiguration) HasResponseModesSupported() bool { - if o != nil && o.ResponseModesSupported != nil { + if o != nil && !IsNil(o.ResponseModesSupported) { return true } @@ -729,7 +831,7 @@ func (o *OidcConfiguration) SetResponseTypesSupported(v []string) { // GetRevocationEndpoint returns the RevocationEndpoint field value if set, zero value otherwise. func (o *OidcConfiguration) GetRevocationEndpoint() string { - if o == nil || o.RevocationEndpoint == nil { + if o == nil || IsNil(o.RevocationEndpoint) { var ret string return ret } @@ -739,7 +841,7 @@ func (o *OidcConfiguration) GetRevocationEndpoint() string { // GetRevocationEndpointOk returns a tuple with the RevocationEndpoint field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetRevocationEndpointOk() (*string, bool) { - if o == nil || o.RevocationEndpoint == nil { + if o == nil || IsNil(o.RevocationEndpoint) { return nil, false } return o.RevocationEndpoint, true @@ -747,7 +849,7 @@ func (o *OidcConfiguration) GetRevocationEndpointOk() (*string, bool) { // HasRevocationEndpoint returns a boolean if a field has been set. func (o *OidcConfiguration) HasRevocationEndpoint() bool { - if o != nil && o.RevocationEndpoint != nil { + if o != nil && !IsNil(o.RevocationEndpoint) { return true } @@ -761,7 +863,7 @@ func (o *OidcConfiguration) SetRevocationEndpoint(v string) { // GetScopesSupported returns the ScopesSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetScopesSupported() []string { - if o == nil || o.ScopesSupported == nil { + if o == nil || IsNil(o.ScopesSupported) { var ret []string return ret } @@ -771,7 +873,7 @@ func (o *OidcConfiguration) GetScopesSupported() []string { // GetScopesSupportedOk returns a tuple with the ScopesSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetScopesSupportedOk() ([]string, bool) { - if o == nil || o.ScopesSupported == nil { + if o == nil || IsNil(o.ScopesSupported) { return nil, false } return o.ScopesSupported, true @@ -779,7 +881,7 @@ func (o *OidcConfiguration) GetScopesSupportedOk() ([]string, bool) { // HasScopesSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasScopesSupported() bool { - if o != nil && o.ScopesSupported != nil { + if o != nil && !IsNil(o.ScopesSupported) { return true } @@ -841,7 +943,7 @@ func (o *OidcConfiguration) SetTokenEndpoint(v string) { // GetTokenEndpointAuthMethodsSupported returns the TokenEndpointAuthMethodsSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetTokenEndpointAuthMethodsSupported() []string { - if o == nil || o.TokenEndpointAuthMethodsSupported == nil { + if o == nil || IsNil(o.TokenEndpointAuthMethodsSupported) { var ret []string return ret } @@ -851,7 +953,7 @@ func (o *OidcConfiguration) GetTokenEndpointAuthMethodsSupported() []string { // GetTokenEndpointAuthMethodsSupportedOk returns a tuple with the TokenEndpointAuthMethodsSupported field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *OidcConfiguration) GetTokenEndpointAuthMethodsSupportedOk() ([]string, bool) { - if o == nil || o.TokenEndpointAuthMethodsSupported == nil { + if o == nil || IsNil(o.TokenEndpointAuthMethodsSupported) { return nil, false } return o.TokenEndpointAuthMethodsSupported, true @@ -859,7 +961,7 @@ func (o *OidcConfiguration) GetTokenEndpointAuthMethodsSupportedOk() ([]string, // HasTokenEndpointAuthMethodsSupported returns a boolean if a field has been set. func (o *OidcConfiguration) HasTokenEndpointAuthMethodsSupported() bool { - if o != nil && o.TokenEndpointAuthMethodsSupported != nil { + if o != nil && !IsNil(o.TokenEndpointAuthMethodsSupported) { return true } @@ -873,7 +975,7 @@ func (o *OidcConfiguration) SetTokenEndpointAuthMethodsSupported(v []string) { // GetUserinfoEndpoint returns the UserinfoEndpoint field value if set, zero value otherwise. func (o *OidcConfiguration) GetUserinfoEndpoint() string { - if o == nil || o.UserinfoEndpoint == nil { + if o == nil || IsNil(o.UserinfoEndpoint) { var ret string return ret } @@ -883,7 +985,7 @@ func (o *OidcConfiguration) GetUserinfoEndpoint() string { // GetUserinfoEndpointOk returns a tuple with the UserinfoEndpoint field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetUserinfoEndpointOk() (*string, bool) { - if o == nil || o.UserinfoEndpoint == nil { + if o == nil || IsNil(o.UserinfoEndpoint) { return nil, false } return o.UserinfoEndpoint, true @@ -891,7 +993,7 @@ func (o *OidcConfiguration) GetUserinfoEndpointOk() (*string, bool) { // HasUserinfoEndpoint returns a boolean if a field has been set. func (o *OidcConfiguration) HasUserinfoEndpoint() bool { - if o != nil && o.UserinfoEndpoint != nil { + if o != nil && !IsNil(o.UserinfoEndpoint) { return true } @@ -929,7 +1031,7 @@ func (o *OidcConfiguration) SetUserinfoSignedResponseAlg(v []string) { // GetUserinfoSigningAlgValuesSupported returns the UserinfoSigningAlgValuesSupported field value if set, zero value otherwise. func (o *OidcConfiguration) GetUserinfoSigningAlgValuesSupported() []string { - if o == nil || o.UserinfoSigningAlgValuesSupported == nil { + if o == nil || IsNil(o.UserinfoSigningAlgValuesSupported) { var ret []string return ret } @@ -939,7 +1041,7 @@ func (o *OidcConfiguration) GetUserinfoSigningAlgValuesSupported() []string { // GetUserinfoSigningAlgValuesSupportedOk returns a tuple with the UserinfoSigningAlgValuesSupported field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcConfiguration) GetUserinfoSigningAlgValuesSupportedOk() ([]string, bool) { - if o == nil || o.UserinfoSigningAlgValuesSupported == nil { + if o == nil || IsNil(o.UserinfoSigningAlgValuesSupported) { return nil, false } return o.UserinfoSigningAlgValuesSupported, true @@ -947,7 +1049,7 @@ func (o *OidcConfiguration) GetUserinfoSigningAlgValuesSupportedOk() ([]string, // HasUserinfoSigningAlgValuesSupported returns a boolean if a field has been set. 
func (o *OidcConfiguration) HasUserinfoSigningAlgValuesSupported() bool { - if o != nil && o.UserinfoSigningAlgValuesSupported != nil { + if o != nil && !IsNil(o.UserinfoSigningAlgValuesSupported) { return true } @@ -960,95 +1062,138 @@ func (o *OidcConfiguration) SetUserinfoSigningAlgValuesSupported(v []string) { } func (o OidcConfiguration) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if true { - toSerialize["authorization_endpoint"] = o.AuthorizationEndpoint + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err } - if o.BackchannelLogoutSessionSupported != nil { + return json.Marshal(toSerialize) +} + +func (o OidcConfiguration) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["authorization_endpoint"] = o.AuthorizationEndpoint + if !IsNil(o.BackchannelLogoutSessionSupported) { toSerialize["backchannel_logout_session_supported"] = o.BackchannelLogoutSessionSupported } - if o.BackchannelLogoutSupported != nil { + if !IsNil(o.BackchannelLogoutSupported) { toSerialize["backchannel_logout_supported"] = o.BackchannelLogoutSupported } - if o.ClaimsParameterSupported != nil { + if !IsNil(o.ClaimsParameterSupported) { toSerialize["claims_parameter_supported"] = o.ClaimsParameterSupported } - if o.ClaimsSupported != nil { + if !IsNil(o.ClaimsSupported) { toSerialize["claims_supported"] = o.ClaimsSupported } - if o.CodeChallengeMethodsSupported != nil { + if !IsNil(o.CodeChallengeMethodsSupported) { toSerialize["code_challenge_methods_supported"] = o.CodeChallengeMethodsSupported } - if o.EndSessionEndpoint != nil { + if !IsNil(o.CredentialsEndpointDraft00) { + toSerialize["credentials_endpoint_draft_00"] = o.CredentialsEndpointDraft00 + } + if !IsNil(o.CredentialsSupportedDraft00) { + toSerialize["credentials_supported_draft_00"] = o.CredentialsSupportedDraft00 + } + toSerialize["device_authorization_endpoint"] = o.DeviceAuthorizationEndpoint + if !IsNil(o.EndSessionEndpoint) { toSerialize["end_session_endpoint"] = o.EndSessionEndpoint } - if o.FrontchannelLogoutSessionSupported != nil { + if !IsNil(o.FrontchannelLogoutSessionSupported) { toSerialize["frontchannel_logout_session_supported"] = o.FrontchannelLogoutSessionSupported } - if o.FrontchannelLogoutSupported != nil { + if !IsNil(o.FrontchannelLogoutSupported) { toSerialize["frontchannel_logout_supported"] = o.FrontchannelLogoutSupported } - if o.GrantTypesSupported != nil { + if !IsNil(o.GrantTypesSupported) { toSerialize["grant_types_supported"] = o.GrantTypesSupported } - if true { - toSerialize["id_token_signed_response_alg"] = o.IdTokenSignedResponseAlg - } - if true { - toSerialize["id_token_signing_alg_values_supported"] = o.IdTokenSigningAlgValuesSupported - } - if true { - toSerialize["issuer"] = o.Issuer - } - if true { - toSerialize["jwks_uri"] = o.JwksUri - } - if o.RegistrationEndpoint != nil { + toSerialize["id_token_signed_response_alg"] = o.IdTokenSignedResponseAlg + toSerialize["id_token_signing_alg_values_supported"] = o.IdTokenSigningAlgValuesSupported + toSerialize["issuer"] = o.Issuer + toSerialize["jwks_uri"] = o.JwksUri + if !IsNil(o.RegistrationEndpoint) { toSerialize["registration_endpoint"] = o.RegistrationEndpoint } - if o.RequestObjectSigningAlgValuesSupported != nil { + if !IsNil(o.RequestObjectSigningAlgValuesSupported) { toSerialize["request_object_signing_alg_values_supported"] = o.RequestObjectSigningAlgValuesSupported } - if o.RequestParameterSupported != nil { + if !IsNil(o.RequestParameterSupported) { 
toSerialize["request_parameter_supported"] = o.RequestParameterSupported } - if o.RequestUriParameterSupported != nil { + if !IsNil(o.RequestUriParameterSupported) { toSerialize["request_uri_parameter_supported"] = o.RequestUriParameterSupported } - if o.RequireRequestUriRegistration != nil { + if !IsNil(o.RequireRequestUriRegistration) { toSerialize["require_request_uri_registration"] = o.RequireRequestUriRegistration } - if o.ResponseModesSupported != nil { + if !IsNil(o.ResponseModesSupported) { toSerialize["response_modes_supported"] = o.ResponseModesSupported } - if true { - toSerialize["response_types_supported"] = o.ResponseTypesSupported - } - if o.RevocationEndpoint != nil { + toSerialize["response_types_supported"] = o.ResponseTypesSupported + if !IsNil(o.RevocationEndpoint) { toSerialize["revocation_endpoint"] = o.RevocationEndpoint } - if o.ScopesSupported != nil { + if !IsNil(o.ScopesSupported) { toSerialize["scopes_supported"] = o.ScopesSupported } - if true { - toSerialize["subject_types_supported"] = o.SubjectTypesSupported - } - if true { - toSerialize["token_endpoint"] = o.TokenEndpoint - } - if o.TokenEndpointAuthMethodsSupported != nil { + toSerialize["subject_types_supported"] = o.SubjectTypesSupported + toSerialize["token_endpoint"] = o.TokenEndpoint + if !IsNil(o.TokenEndpointAuthMethodsSupported) { toSerialize["token_endpoint_auth_methods_supported"] = o.TokenEndpointAuthMethodsSupported } - if o.UserinfoEndpoint != nil { + if !IsNil(o.UserinfoEndpoint) { toSerialize["userinfo_endpoint"] = o.UserinfoEndpoint } - if true { - toSerialize["userinfo_signed_response_alg"] = o.UserinfoSignedResponseAlg - } - if o.UserinfoSigningAlgValuesSupported != nil { + toSerialize["userinfo_signed_response_alg"] = o.UserinfoSignedResponseAlg + if !IsNil(o.UserinfoSigningAlgValuesSupported) { toSerialize["userinfo_signing_alg_values_supported"] = o.UserinfoSigningAlgValuesSupported } - return json.Marshal(toSerialize) + return toSerialize, nil +} + +func (o *OidcConfiguration) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. 
+ requiredProperties := []string{ + "authorization_endpoint", + "device_authorization_endpoint", + "id_token_signed_response_alg", + "id_token_signing_alg_values_supported", + "issuer", + "jwks_uri", + "response_types_supported", + "subject_types_supported", + "token_endpoint", + "userinfo_signed_response_alg", + } + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err + } + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } + } + + varOidcConfiguration := _OidcConfiguration{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varOidcConfiguration) + + if err != nil { + return err + } + + *o = OidcConfiguration(varOidcConfiguration) + + return err } type NullableOidcConfiguration struct { diff --git a/internal/httpclient/model_oidc_user_info.go b/internal/httpclient/model_oidc_user_info.go index f1b942a7b6e..d6a08aedfe8 100644 --- a/internal/httpclient/model_oidc_user_info.go +++ b/internal/httpclient/model_oidc_user_info.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the OidcUserInfo type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &OidcUserInfo{} + // OidcUserInfo OpenID Connect Userinfo type OidcUserInfo struct { // End-User's birthday, represented as an ISO 8601:2004 [ISO8601‑2004] YYYY-MM-DD format. The year MAY be 0000, indicating that it is omitted. To represent only the year, YYYY format is allowed. Note that depending on the underlying platform's date related function, providing just year can result in varying month and day, so the implementers need to take this factor into account to correctly process the dates. @@ -76,7 +79,7 @@ func NewOidcUserInfoWithDefaults() *OidcUserInfo { // GetBirthdate returns the Birthdate field value if set, zero value otherwise. func (o *OidcUserInfo) GetBirthdate() string { - if o == nil || o.Birthdate == nil { + if o == nil || IsNil(o.Birthdate) { var ret string return ret } @@ -86,7 +89,7 @@ func (o *OidcUserInfo) GetBirthdate() string { // GetBirthdateOk returns a tuple with the Birthdate field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetBirthdateOk() (*string, bool) { - if o == nil || o.Birthdate == nil { + if o == nil || IsNil(o.Birthdate) { return nil, false } return o.Birthdate, true @@ -94,7 +97,7 @@ func (o *OidcUserInfo) GetBirthdateOk() (*string, bool) { // HasBirthdate returns a boolean if a field has been set. func (o *OidcUserInfo) HasBirthdate() bool { - if o != nil && o.Birthdate != nil { + if o != nil && !IsNil(o.Birthdate) { return true } @@ -108,7 +111,7 @@ func (o *OidcUserInfo) SetBirthdate(v string) { // GetEmail returns the Email field value if set, zero value otherwise. func (o *OidcUserInfo) GetEmail() string { - if o == nil || o.Email == nil { + if o == nil || IsNil(o.Email) { var ret string return ret } @@ -118,7 +121,7 @@ func (o *OidcUserInfo) GetEmail() string { // GetEmailOk returns a tuple with the Email field value if set, nil otherwise // and a boolean to check if the value has been set. 
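// A minimal construction sketch (hypothetical helper with placeholder URLs, not
// generated code): device_authorization_endpoint is now a required property, so
// it is part of NewOidcConfiguration's parameter list and of the
// required-property check in UnmarshalJSON above.
func exampleOidcConfiguration() *OidcConfiguration {
	return NewOidcConfiguration(
		"https://example.org/oauth2/auth",                  // authorization_endpoint
		"https://example.org/oauth2/device_authorization",  // device_authorization_endpoint (newly required)
		[]string{"RS256"},                                   // id_token_signed_response_alg
		[]string{"RS256"},                                   // id_token_signing_alg_values_supported
		"https://example.org/",                              // issuer
		"https://example.org/.well-known/jwks.json",         // jwks_uri
		[]string{"code"},                                    // response_types_supported
		[]string{"public"},                                  // subject_types_supported
		"https://example.org/oauth2/token",                  // token_endpoint
		[]string{"none", "RS256"},                           // userinfo_signed_response_alg
	)
}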
func (o *OidcUserInfo) GetEmailOk() (*string, bool) { - if o == nil || o.Email == nil { + if o == nil || IsNil(o.Email) { return nil, false } return o.Email, true @@ -126,7 +129,7 @@ func (o *OidcUserInfo) GetEmailOk() (*string, bool) { // HasEmail returns a boolean if a field has been set. func (o *OidcUserInfo) HasEmail() bool { - if o != nil && o.Email != nil { + if o != nil && !IsNil(o.Email) { return true } @@ -140,7 +143,7 @@ func (o *OidcUserInfo) SetEmail(v string) { // GetEmailVerified returns the EmailVerified field value if set, zero value otherwise. func (o *OidcUserInfo) GetEmailVerified() bool { - if o == nil || o.EmailVerified == nil { + if o == nil || IsNil(o.EmailVerified) { var ret bool return ret } @@ -150,7 +153,7 @@ func (o *OidcUserInfo) GetEmailVerified() bool { // GetEmailVerifiedOk returns a tuple with the EmailVerified field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetEmailVerifiedOk() (*bool, bool) { - if o == nil || o.EmailVerified == nil { + if o == nil || IsNil(o.EmailVerified) { return nil, false } return o.EmailVerified, true @@ -158,7 +161,7 @@ func (o *OidcUserInfo) GetEmailVerifiedOk() (*bool, bool) { // HasEmailVerified returns a boolean if a field has been set. func (o *OidcUserInfo) HasEmailVerified() bool { - if o != nil && o.EmailVerified != nil { + if o != nil && !IsNil(o.EmailVerified) { return true } @@ -172,7 +175,7 @@ func (o *OidcUserInfo) SetEmailVerified(v bool) { // GetFamilyName returns the FamilyName field value if set, zero value otherwise. func (o *OidcUserInfo) GetFamilyName() string { - if o == nil || o.FamilyName == nil { + if o == nil || IsNil(o.FamilyName) { var ret string return ret } @@ -182,7 +185,7 @@ func (o *OidcUserInfo) GetFamilyName() string { // GetFamilyNameOk returns a tuple with the FamilyName field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetFamilyNameOk() (*string, bool) { - if o == nil || o.FamilyName == nil { + if o == nil || IsNil(o.FamilyName) { return nil, false } return o.FamilyName, true @@ -190,7 +193,7 @@ func (o *OidcUserInfo) GetFamilyNameOk() (*string, bool) { // HasFamilyName returns a boolean if a field has been set. func (o *OidcUserInfo) HasFamilyName() bool { - if o != nil && o.FamilyName != nil { + if o != nil && !IsNil(o.FamilyName) { return true } @@ -204,7 +207,7 @@ func (o *OidcUserInfo) SetFamilyName(v string) { // GetGender returns the Gender field value if set, zero value otherwise. func (o *OidcUserInfo) GetGender() string { - if o == nil || o.Gender == nil { + if o == nil || IsNil(o.Gender) { var ret string return ret } @@ -214,7 +217,7 @@ func (o *OidcUserInfo) GetGender() string { // GetGenderOk returns a tuple with the Gender field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetGenderOk() (*string, bool) { - if o == nil || o.Gender == nil { + if o == nil || IsNil(o.Gender) { return nil, false } return o.Gender, true @@ -222,7 +225,7 @@ func (o *OidcUserInfo) GetGenderOk() (*string, bool) { // HasGender returns a boolean if a field has been set. func (o *OidcUserInfo) HasGender() bool { - if o != nil && o.Gender != nil { + if o != nil && !IsNil(o.Gender) { return true } @@ -236,7 +239,7 @@ func (o *OidcUserInfo) SetGender(v string) { // GetGivenName returns the GivenName field value if set, zero value otherwise. 
func (o *OidcUserInfo) GetGivenName() string { - if o == nil || o.GivenName == nil { + if o == nil || IsNil(o.GivenName) { var ret string return ret } @@ -246,7 +249,7 @@ func (o *OidcUserInfo) GetGivenName() string { // GetGivenNameOk returns a tuple with the GivenName field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetGivenNameOk() (*string, bool) { - if o == nil || o.GivenName == nil { + if o == nil || IsNil(o.GivenName) { return nil, false } return o.GivenName, true @@ -254,7 +257,7 @@ func (o *OidcUserInfo) GetGivenNameOk() (*string, bool) { // HasGivenName returns a boolean if a field has been set. func (o *OidcUserInfo) HasGivenName() bool { - if o != nil && o.GivenName != nil { + if o != nil && !IsNil(o.GivenName) { return true } @@ -268,7 +271,7 @@ func (o *OidcUserInfo) SetGivenName(v string) { // GetLocale returns the Locale field value if set, zero value otherwise. func (o *OidcUserInfo) GetLocale() string { - if o == nil || o.Locale == nil { + if o == nil || IsNil(o.Locale) { var ret string return ret } @@ -278,7 +281,7 @@ func (o *OidcUserInfo) GetLocale() string { // GetLocaleOk returns a tuple with the Locale field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetLocaleOk() (*string, bool) { - if o == nil || o.Locale == nil { + if o == nil || IsNil(o.Locale) { return nil, false } return o.Locale, true @@ -286,7 +289,7 @@ func (o *OidcUserInfo) GetLocaleOk() (*string, bool) { // HasLocale returns a boolean if a field has been set. func (o *OidcUserInfo) HasLocale() bool { - if o != nil && o.Locale != nil { + if o != nil && !IsNil(o.Locale) { return true } @@ -300,7 +303,7 @@ func (o *OidcUserInfo) SetLocale(v string) { // GetMiddleName returns the MiddleName field value if set, zero value otherwise. func (o *OidcUserInfo) GetMiddleName() string { - if o == nil || o.MiddleName == nil { + if o == nil || IsNil(o.MiddleName) { var ret string return ret } @@ -310,7 +313,7 @@ func (o *OidcUserInfo) GetMiddleName() string { // GetMiddleNameOk returns a tuple with the MiddleName field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetMiddleNameOk() (*string, bool) { - if o == nil || o.MiddleName == nil { + if o == nil || IsNil(o.MiddleName) { return nil, false } return o.MiddleName, true @@ -318,7 +321,7 @@ func (o *OidcUserInfo) GetMiddleNameOk() (*string, bool) { // HasMiddleName returns a boolean if a field has been set. func (o *OidcUserInfo) HasMiddleName() bool { - if o != nil && o.MiddleName != nil { + if o != nil && !IsNil(o.MiddleName) { return true } @@ -332,7 +335,7 @@ func (o *OidcUserInfo) SetMiddleName(v string) { // GetName returns the Name field value if set, zero value otherwise. func (o *OidcUserInfo) GetName() string { - if o == nil || o.Name == nil { + if o == nil || IsNil(o.Name) { var ret string return ret } @@ -342,7 +345,7 @@ func (o *OidcUserInfo) GetName() string { // GetNameOk returns a tuple with the Name field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetNameOk() (*string, bool) { - if o == nil || o.Name == nil { + if o == nil || IsNil(o.Name) { return nil, false } return o.Name, true @@ -350,7 +353,7 @@ func (o *OidcUserInfo) GetNameOk() (*string, bool) { // HasName returns a boolean if a field has been set. 
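
All of the accessor changes in these models swap direct pointer comparisons for an IsNil helper. The helper itself lives in the client's shared utils file, which is not part of this diff; openapi-generator normally emits a small reflection-based check along the following lines, so treat this body as an assumption rather than the exact source:

package openapi

import "reflect"

// IsNil reports whether a value held in an interface is nil, including
// typed nil pointers, maps, slices, channels and functions that a plain
// `== nil` comparison would miss. (Sketch; the real helper is in utils.go.)
func IsNil(i interface{}) bool {
	if i == nil {
		return true
	}
	switch reflect.TypeOf(i).Kind() {
	case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan, reflect.Func:
		return reflect.ValueOf(i).IsNil()
	default:
		return false
	}
}
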
func (o *OidcUserInfo) HasName() bool { - if o != nil && o.Name != nil { + if o != nil && !IsNil(o.Name) { return true } @@ -364,7 +367,7 @@ func (o *OidcUserInfo) SetName(v string) { // GetNickname returns the Nickname field value if set, zero value otherwise. func (o *OidcUserInfo) GetNickname() string { - if o == nil || o.Nickname == nil { + if o == nil || IsNil(o.Nickname) { var ret string return ret } @@ -374,7 +377,7 @@ func (o *OidcUserInfo) GetNickname() string { // GetNicknameOk returns a tuple with the Nickname field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetNicknameOk() (*string, bool) { - if o == nil || o.Nickname == nil { + if o == nil || IsNil(o.Nickname) { return nil, false } return o.Nickname, true @@ -382,7 +385,7 @@ func (o *OidcUserInfo) GetNicknameOk() (*string, bool) { // HasNickname returns a boolean if a field has been set. func (o *OidcUserInfo) HasNickname() bool { - if o != nil && o.Nickname != nil { + if o != nil && !IsNil(o.Nickname) { return true } @@ -396,7 +399,7 @@ func (o *OidcUserInfo) SetNickname(v string) { // GetPhoneNumber returns the PhoneNumber field value if set, zero value otherwise. func (o *OidcUserInfo) GetPhoneNumber() string { - if o == nil || o.PhoneNumber == nil { + if o == nil || IsNil(o.PhoneNumber) { var ret string return ret } @@ -406,7 +409,7 @@ func (o *OidcUserInfo) GetPhoneNumber() string { // GetPhoneNumberOk returns a tuple with the PhoneNumber field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetPhoneNumberOk() (*string, bool) { - if o == nil || o.PhoneNumber == nil { + if o == nil || IsNil(o.PhoneNumber) { return nil, false } return o.PhoneNumber, true @@ -414,7 +417,7 @@ func (o *OidcUserInfo) GetPhoneNumberOk() (*string, bool) { // HasPhoneNumber returns a boolean if a field has been set. func (o *OidcUserInfo) HasPhoneNumber() bool { - if o != nil && o.PhoneNumber != nil { + if o != nil && !IsNil(o.PhoneNumber) { return true } @@ -428,7 +431,7 @@ func (o *OidcUserInfo) SetPhoneNumber(v string) { // GetPhoneNumberVerified returns the PhoneNumberVerified field value if set, zero value otherwise. func (o *OidcUserInfo) GetPhoneNumberVerified() bool { - if o == nil || o.PhoneNumberVerified == nil { + if o == nil || IsNil(o.PhoneNumberVerified) { var ret bool return ret } @@ -438,7 +441,7 @@ func (o *OidcUserInfo) GetPhoneNumberVerified() bool { // GetPhoneNumberVerifiedOk returns a tuple with the PhoneNumberVerified field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetPhoneNumberVerifiedOk() (*bool, bool) { - if o == nil || o.PhoneNumberVerified == nil { + if o == nil || IsNil(o.PhoneNumberVerified) { return nil, false } return o.PhoneNumberVerified, true @@ -446,7 +449,7 @@ func (o *OidcUserInfo) GetPhoneNumberVerifiedOk() (*bool, bool) { // HasPhoneNumberVerified returns a boolean if a field has been set. func (o *OidcUserInfo) HasPhoneNumberVerified() bool { - if o != nil && o.PhoneNumberVerified != nil { + if o != nil && !IsNil(o.PhoneNumberVerified) { return true } @@ -460,7 +463,7 @@ func (o *OidcUserInfo) SetPhoneNumberVerified(v bool) { // GetPicture returns the Picture field value if set, zero value otherwise. 
func (o *OidcUserInfo) GetPicture() string { - if o == nil || o.Picture == nil { + if o == nil || IsNil(o.Picture) { var ret string return ret } @@ -470,7 +473,7 @@ func (o *OidcUserInfo) GetPicture() string { // GetPictureOk returns a tuple with the Picture field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetPictureOk() (*string, bool) { - if o == nil || o.Picture == nil { + if o == nil || IsNil(o.Picture) { return nil, false } return o.Picture, true @@ -478,7 +481,7 @@ func (o *OidcUserInfo) GetPictureOk() (*string, bool) { // HasPicture returns a boolean if a field has been set. func (o *OidcUserInfo) HasPicture() bool { - if o != nil && o.Picture != nil { + if o != nil && !IsNil(o.Picture) { return true } @@ -492,7 +495,7 @@ func (o *OidcUserInfo) SetPicture(v string) { // GetPreferredUsername returns the PreferredUsername field value if set, zero value otherwise. func (o *OidcUserInfo) GetPreferredUsername() string { - if o == nil || o.PreferredUsername == nil { + if o == nil || IsNil(o.PreferredUsername) { var ret string return ret } @@ -502,7 +505,7 @@ func (o *OidcUserInfo) GetPreferredUsername() string { // GetPreferredUsernameOk returns a tuple with the PreferredUsername field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetPreferredUsernameOk() (*string, bool) { - if o == nil || o.PreferredUsername == nil { + if o == nil || IsNil(o.PreferredUsername) { return nil, false } return o.PreferredUsername, true @@ -510,7 +513,7 @@ func (o *OidcUserInfo) GetPreferredUsernameOk() (*string, bool) { // HasPreferredUsername returns a boolean if a field has been set. func (o *OidcUserInfo) HasPreferredUsername() bool { - if o != nil && o.PreferredUsername != nil { + if o != nil && !IsNil(o.PreferredUsername) { return true } @@ -524,7 +527,7 @@ func (o *OidcUserInfo) SetPreferredUsername(v string) { // GetProfile returns the Profile field value if set, zero value otherwise. func (o *OidcUserInfo) GetProfile() string { - if o == nil || o.Profile == nil { + if o == nil || IsNil(o.Profile) { var ret string return ret } @@ -534,7 +537,7 @@ func (o *OidcUserInfo) GetProfile() string { // GetProfileOk returns a tuple with the Profile field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetProfileOk() (*string, bool) { - if o == nil || o.Profile == nil { + if o == nil || IsNil(o.Profile) { return nil, false } return o.Profile, true @@ -542,7 +545,7 @@ func (o *OidcUserInfo) GetProfileOk() (*string, bool) { // HasProfile returns a boolean if a field has been set. func (o *OidcUserInfo) HasProfile() bool { - if o != nil && o.Profile != nil { + if o != nil && !IsNil(o.Profile) { return true } @@ -556,7 +559,7 @@ func (o *OidcUserInfo) SetProfile(v string) { // GetSub returns the Sub field value if set, zero value otherwise. func (o *OidcUserInfo) GetSub() string { - if o == nil || o.Sub == nil { + if o == nil || IsNil(o.Sub) { var ret string return ret } @@ -566,7 +569,7 @@ func (o *OidcUserInfo) GetSub() string { // GetSubOk returns a tuple with the Sub field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetSubOk() (*string, bool) { - if o == nil || o.Sub == nil { + if o == nil || IsNil(o.Sub) { return nil, false } return o.Sub, true @@ -574,7 +577,7 @@ func (o *OidcUserInfo) GetSubOk() (*string, bool) { // HasSub returns a boolean if a field has been set. 
func (o *OidcUserInfo) HasSub() bool { - if o != nil && o.Sub != nil { + if o != nil && !IsNil(o.Sub) { return true } @@ -588,7 +591,7 @@ func (o *OidcUserInfo) SetSub(v string) { // GetUpdatedAt returns the UpdatedAt field value if set, zero value otherwise. func (o *OidcUserInfo) GetUpdatedAt() int64 { - if o == nil || o.UpdatedAt == nil { + if o == nil || IsNil(o.UpdatedAt) { var ret int64 return ret } @@ -598,7 +601,7 @@ func (o *OidcUserInfo) GetUpdatedAt() int64 { // GetUpdatedAtOk returns a tuple with the UpdatedAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetUpdatedAtOk() (*int64, bool) { - if o == nil || o.UpdatedAt == nil { + if o == nil || IsNil(o.UpdatedAt) { return nil, false } return o.UpdatedAt, true @@ -606,7 +609,7 @@ func (o *OidcUserInfo) GetUpdatedAtOk() (*int64, bool) { // HasUpdatedAt returns a boolean if a field has been set. func (o *OidcUserInfo) HasUpdatedAt() bool { - if o != nil && o.UpdatedAt != nil { + if o != nil && !IsNil(o.UpdatedAt) { return true } @@ -620,7 +623,7 @@ func (o *OidcUserInfo) SetUpdatedAt(v int64) { // GetWebsite returns the Website field value if set, zero value otherwise. func (o *OidcUserInfo) GetWebsite() string { - if o == nil || o.Website == nil { + if o == nil || IsNil(o.Website) { var ret string return ret } @@ -630,7 +633,7 @@ func (o *OidcUserInfo) GetWebsite() string { // GetWebsiteOk returns a tuple with the Website field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetWebsiteOk() (*string, bool) { - if o == nil || o.Website == nil { + if o == nil || IsNil(o.Website) { return nil, false } return o.Website, true @@ -638,7 +641,7 @@ func (o *OidcUserInfo) GetWebsiteOk() (*string, bool) { // HasWebsite returns a boolean if a field has been set. func (o *OidcUserInfo) HasWebsite() bool { - if o != nil && o.Website != nil { + if o != nil && !IsNil(o.Website) { return true } @@ -652,7 +655,7 @@ func (o *OidcUserInfo) SetWebsite(v string) { // GetZoneinfo returns the Zoneinfo field value if set, zero value otherwise. func (o *OidcUserInfo) GetZoneinfo() string { - if o == nil || o.Zoneinfo == nil { + if o == nil || IsNil(o.Zoneinfo) { var ret string return ret } @@ -662,7 +665,7 @@ func (o *OidcUserInfo) GetZoneinfo() string { // GetZoneinfoOk returns a tuple with the Zoneinfo field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *OidcUserInfo) GetZoneinfoOk() (*string, bool) { - if o == nil || o.Zoneinfo == nil { + if o == nil || IsNil(o.Zoneinfo) { return nil, false } return o.Zoneinfo, true @@ -670,7 +673,7 @@ func (o *OidcUserInfo) GetZoneinfoOk() (*string, bool) { // HasZoneinfo returns a boolean if a field has been set. 
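
For callers, the Get/GetOk/Has/Set contract on optional claims is unchanged by this refactor; only the internal nil test differs. A short usage sketch (the import path is assumed, since this hunk only shows the internal/httpclient copy of the generated client):

package main

import (
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

func main() {
	info := openapi.NewOidcUserInfoWithDefaults()
	info.SetEmail("user@example.org")

	// GetEmailOk distinguishes "claim present" from "zero value".
	if email, ok := info.GetEmailOk(); ok {
		fmt.Println("email claim:", *email)
	}

	// Unset claims report false from Has* and return the zero value from Get*.
	fmt.Println(info.HasGivenName(), info.GetGivenName() == "") // false true
}
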
func (o *OidcUserInfo) HasZoneinfo() bool { - if o != nil && o.Zoneinfo != nil { + if o != nil && !IsNil(o.Zoneinfo) { return true } @@ -683,65 +686,73 @@ func (o *OidcUserInfo) SetZoneinfo(v string) { } func (o OidcUserInfo) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o OidcUserInfo) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Birthdate != nil { + if !IsNil(o.Birthdate) { toSerialize["birthdate"] = o.Birthdate } - if o.Email != nil { + if !IsNil(o.Email) { toSerialize["email"] = o.Email } - if o.EmailVerified != nil { + if !IsNil(o.EmailVerified) { toSerialize["email_verified"] = o.EmailVerified } - if o.FamilyName != nil { + if !IsNil(o.FamilyName) { toSerialize["family_name"] = o.FamilyName } - if o.Gender != nil { + if !IsNil(o.Gender) { toSerialize["gender"] = o.Gender } - if o.GivenName != nil { + if !IsNil(o.GivenName) { toSerialize["given_name"] = o.GivenName } - if o.Locale != nil { + if !IsNil(o.Locale) { toSerialize["locale"] = o.Locale } - if o.MiddleName != nil { + if !IsNil(o.MiddleName) { toSerialize["middle_name"] = o.MiddleName } - if o.Name != nil { + if !IsNil(o.Name) { toSerialize["name"] = o.Name } - if o.Nickname != nil { + if !IsNil(o.Nickname) { toSerialize["nickname"] = o.Nickname } - if o.PhoneNumber != nil { + if !IsNil(o.PhoneNumber) { toSerialize["phone_number"] = o.PhoneNumber } - if o.PhoneNumberVerified != nil { + if !IsNil(o.PhoneNumberVerified) { toSerialize["phone_number_verified"] = o.PhoneNumberVerified } - if o.Picture != nil { + if !IsNil(o.Picture) { toSerialize["picture"] = o.Picture } - if o.PreferredUsername != nil { + if !IsNil(o.PreferredUsername) { toSerialize["preferred_username"] = o.PreferredUsername } - if o.Profile != nil { + if !IsNil(o.Profile) { toSerialize["profile"] = o.Profile } - if o.Sub != nil { + if !IsNil(o.Sub) { toSerialize["sub"] = o.Sub } - if o.UpdatedAt != nil { + if !IsNil(o.UpdatedAt) { toSerialize["updated_at"] = o.UpdatedAt } - if o.Website != nil { + if !IsNil(o.Website) { toSerialize["website"] = o.Website } - if o.Zoneinfo != nil { + if !IsNil(o.Zoneinfo) { toSerialize["zoneinfo"] = o.Zoneinfo } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableOidcUserInfo struct { diff --git a/internal/httpclient/model_pagination.go b/internal/httpclient/model_pagination.go deleted file mode 100644 index 66402865828..00000000000 --- a/internal/httpclient/model_pagination.go +++ /dev/null @@ -1,160 +0,0 @@ -/* -Ory Hydra API - -Documentation for all of Ory Hydra's APIs. - -API version: -Contact: hi@ory.sh -*/ - -// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. - -package openapi - -import ( - "encoding/json" -) - -// Pagination struct for Pagination -type Pagination struct { - // Items per page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). - PageSize *int64 `json:"page_size,omitempty"` - // Next Page Token The next page token. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). 
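
The MarshalJSON rewrite seen for OidcUserInfo (and every other model in this diff) splits serialization into a ToMap step: required properties are written unconditionally, replacing the old `if true { ... }` blocks, while optional properties stay behind a nil check, and MarshalJSON simply marshals the resulting map. A minimal sketch of that shape with a hypothetical two-field model (the generated code uses the shared IsNil helper where this sketch compares against nil directly):

package openapi

import "encoding/json"

// example is a hypothetical model with one required and one optional field.
type example struct {
	Name  string  `json:"name"`            // required
	Email *string `json:"email,omitempty"` // optional
}

// ToMap builds the serializable map; required fields are always present.
func (o example) ToMap() (map[string]interface{}, error) {
	m := map[string]interface{}{}
	m["name"] = o.Name
	if o.Email != nil { // generated code: !IsNil(o.Email)
		m["email"] = o.Email
	}
	return m, nil
}

// MarshalJSON delegates to ToMap, so the map is reusable when a model needs
// to be embedded into a larger payload before encoding.
func (o example) MarshalJSON() ([]byte, error) {
	m, err := o.ToMap()
	if err != nil {
		return []byte{}, err
	}
	return json.Marshal(m)
}
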
- PageToken *string `json:"page_token,omitempty"` -} - -// NewPagination instantiates a new Pagination object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func NewPagination() *Pagination { - this := Pagination{} - var pageSize int64 = 250 - this.PageSize = &pageSize - var pageToken string = "1" - this.PageToken = &pageToken - return &this -} - -// NewPaginationWithDefaults instantiates a new Pagination object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func NewPaginationWithDefaults() *Pagination { - this := Pagination{} - var pageSize int64 = 250 - this.PageSize = &pageSize - var pageToken string = "1" - this.PageToken = &pageToken - return &this -} - -// GetPageSize returns the PageSize field value if set, zero value otherwise. -func (o *Pagination) GetPageSize() int64 { - if o == nil || o.PageSize == nil { - var ret int64 - return ret - } - return *o.PageSize -} - -// GetPageSizeOk returns a tuple with the PageSize field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *Pagination) GetPageSizeOk() (*int64, bool) { - if o == nil || o.PageSize == nil { - return nil, false - } - return o.PageSize, true -} - -// HasPageSize returns a boolean if a field has been set. -func (o *Pagination) HasPageSize() bool { - if o != nil && o.PageSize != nil { - return true - } - - return false -} - -// SetPageSize gets a reference to the given int64 and assigns it to the PageSize field. -func (o *Pagination) SetPageSize(v int64) { - o.PageSize = &v -} - -// GetPageToken returns the PageToken field value if set, zero value otherwise. -func (o *Pagination) GetPageToken() string { - if o == nil || o.PageToken == nil { - var ret string - return ret - } - return *o.PageToken -} - -// GetPageTokenOk returns a tuple with the PageToken field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *Pagination) GetPageTokenOk() (*string, bool) { - if o == nil || o.PageToken == nil { - return nil, false - } - return o.PageToken, true -} - -// HasPageToken returns a boolean if a field has been set. -func (o *Pagination) HasPageToken() bool { - if o != nil && o.PageToken != nil { - return true - } - - return false -} - -// SetPageToken gets a reference to the given string and assigns it to the PageToken field. 
-func (o *Pagination) SetPageToken(v string) { - o.PageToken = &v -} - -func (o Pagination) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if o.PageSize != nil { - toSerialize["page_size"] = o.PageSize - } - if o.PageToken != nil { - toSerialize["page_token"] = o.PageToken - } - return json.Marshal(toSerialize) -} - -type NullablePagination struct { - value *Pagination - isSet bool -} - -func (v NullablePagination) Get() *Pagination { - return v.value -} - -func (v *NullablePagination) Set(val *Pagination) { - v.value = val - v.isSet = true -} - -func (v NullablePagination) IsSet() bool { - return v.isSet -} - -func (v *NullablePagination) Unset() { - v.value = nil - v.isSet = false -} - -func NewNullablePagination(val *Pagination) *NullablePagination { - return &NullablePagination{value: val, isSet: true} -} - -func (v NullablePagination) MarshalJSON() ([]byte, error) { - return json.Marshal(v.value) -} - -func (v *NullablePagination) UnmarshalJSON(src []byte) error { - v.isSet = true - return json.Unmarshal(src, &v.value) -} diff --git a/internal/httpclient/model_pagination_headers.go b/internal/httpclient/model_pagination_headers.go deleted file mode 100644 index 803a8119b4e..00000000000 --- a/internal/httpclient/model_pagination_headers.go +++ /dev/null @@ -1,152 +0,0 @@ -/* -Ory Hydra API - -Documentation for all of Ory Hydra's APIs. - -API version: -Contact: hi@ory.sh -*/ - -// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. - -package openapi - -import ( - "encoding/json" -) - -// PaginationHeaders struct for PaginationHeaders -type PaginationHeaders struct { - // The link header contains pagination links. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). in: header - Link *string `json:"link,omitempty"` - // The total number of clients. in: header - XTotalCount *string `json:"x-total-count,omitempty"` -} - -// NewPaginationHeaders instantiates a new PaginationHeaders object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func NewPaginationHeaders() *PaginationHeaders { - this := PaginationHeaders{} - return &this -} - -// NewPaginationHeadersWithDefaults instantiates a new PaginationHeaders object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func NewPaginationHeadersWithDefaults() *PaginationHeaders { - this := PaginationHeaders{} - return &this -} - -// GetLink returns the Link field value if set, zero value otherwise. -func (o *PaginationHeaders) GetLink() string { - if o == nil || o.Link == nil { - var ret string - return ret - } - return *o.Link -} - -// GetLinkOk returns a tuple with the Link field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *PaginationHeaders) GetLinkOk() (*string, bool) { - if o == nil || o.Link == nil { - return nil, false - } - return o.Link, true -} - -// HasLink returns a boolean if a field has been set. -func (o *PaginationHeaders) HasLink() bool { - if o != nil && o.Link != nil { - return true - } - - return false -} - -// SetLink gets a reference to the given string and assigns it to the Link field. 
-func (o *PaginationHeaders) SetLink(v string) { - o.Link = &v -} - -// GetXTotalCount returns the XTotalCount field value if set, zero value otherwise. -func (o *PaginationHeaders) GetXTotalCount() string { - if o == nil || o.XTotalCount == nil { - var ret string - return ret - } - return *o.XTotalCount -} - -// GetXTotalCountOk returns a tuple with the XTotalCount field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *PaginationHeaders) GetXTotalCountOk() (*string, bool) { - if o == nil || o.XTotalCount == nil { - return nil, false - } - return o.XTotalCount, true -} - -// HasXTotalCount returns a boolean if a field has been set. -func (o *PaginationHeaders) HasXTotalCount() bool { - if o != nil && o.XTotalCount != nil { - return true - } - - return false -} - -// SetXTotalCount gets a reference to the given string and assigns it to the XTotalCount field. -func (o *PaginationHeaders) SetXTotalCount(v string) { - o.XTotalCount = &v -} - -func (o PaginationHeaders) MarshalJSON() ([]byte, error) { - toSerialize := map[string]interface{}{} - if o.Link != nil { - toSerialize["link"] = o.Link - } - if o.XTotalCount != nil { - toSerialize["x-total-count"] = o.XTotalCount - } - return json.Marshal(toSerialize) -} - -type NullablePaginationHeaders struct { - value *PaginationHeaders - isSet bool -} - -func (v NullablePaginationHeaders) Get() *PaginationHeaders { - return v.value -} - -func (v *NullablePaginationHeaders) Set(val *PaginationHeaders) { - v.value = val - v.isSet = true -} - -func (v NullablePaginationHeaders) IsSet() bool { - return v.isSet -} - -func (v *NullablePaginationHeaders) Unset() { - v.value = nil - v.isSet = false -} - -func NewNullablePaginationHeaders(val *PaginationHeaders) *NullablePaginationHeaders { - return &NullablePaginationHeaders{value: val, isSet: true} -} - -func (v NullablePaginationHeaders) MarshalJSON() ([]byte, error) { - return json.Marshal(v.value) -} - -func (v *NullablePaginationHeaders) UnmarshalJSON(src []byte) error { - v.isSet = true - return json.Unmarshal(src, &v.value) -} diff --git a/internal/httpclient/model_reject_o_auth2_request.go b/internal/httpclient/model_reject_o_auth2_request.go index 4b6817491a5..8d0a178a3fb 100644 --- a/internal/httpclient/model_reject_o_auth2_request.go +++ b/internal/httpclient/model_reject_o_auth2_request.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the RejectOAuth2Request type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &RejectOAuth2Request{} + // RejectOAuth2Request struct for RejectOAuth2Request type RejectOAuth2Request struct { // The error should follow the OAuth2 error format (e.g. `invalid_request`, `login_required`). Defaults to `request_denied`. @@ -48,7 +51,7 @@ func NewRejectOAuth2RequestWithDefaults() *RejectOAuth2Request { // GetError returns the Error field value if set, zero value otherwise. func (o *RejectOAuth2Request) GetError() string { - if o == nil || o.Error == nil { + if o == nil || IsNil(o.Error) { var ret string return ret } @@ -58,7 +61,7 @@ func (o *RejectOAuth2Request) GetError() string { // GetErrorOk returns a tuple with the Error field value if set, nil otherwise // and a boolean to check if the value has been set. 
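
The Pagination and PaginationHeaders models deleted above have no direct replacement in this diff, but the TokenPagination and TokenPaginationHeaders models further down keep the same fields (page_size/page_token and link/x-total-count). Treating them as drop-in substitutes is an inference from those field shapes, not something the diff states; under that assumption a migration looks like this (import path assumed):

package main

import (
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

func main() {
	// Before: p := openapi.NewPagination(); p.SetPageSize(100)
	p := openapi.NewTokenPaginationWithDefaults()
	p.SetPageSize(100)
	p.SetPageToken("1")
	fmt.Println(p.GetPageSize(), p.GetPageToken()) // 100 1
}
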
func (o *RejectOAuth2Request) GetErrorOk() (*string, bool) { - if o == nil || o.Error == nil { + if o == nil || IsNil(o.Error) { return nil, false } return o.Error, true @@ -66,7 +69,7 @@ func (o *RejectOAuth2Request) GetErrorOk() (*string, bool) { // HasError returns a boolean if a field has been set. func (o *RejectOAuth2Request) HasError() bool { - if o != nil && o.Error != nil { + if o != nil && !IsNil(o.Error) { return true } @@ -80,7 +83,7 @@ func (o *RejectOAuth2Request) SetError(v string) { // GetErrorDebug returns the ErrorDebug field value if set, zero value otherwise. func (o *RejectOAuth2Request) GetErrorDebug() string { - if o == nil || o.ErrorDebug == nil { + if o == nil || IsNil(o.ErrorDebug) { var ret string return ret } @@ -90,7 +93,7 @@ func (o *RejectOAuth2Request) GetErrorDebug() string { // GetErrorDebugOk returns a tuple with the ErrorDebug field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *RejectOAuth2Request) GetErrorDebugOk() (*string, bool) { - if o == nil || o.ErrorDebug == nil { + if o == nil || IsNil(o.ErrorDebug) { return nil, false } return o.ErrorDebug, true @@ -98,7 +101,7 @@ func (o *RejectOAuth2Request) GetErrorDebugOk() (*string, bool) { // HasErrorDebug returns a boolean if a field has been set. func (o *RejectOAuth2Request) HasErrorDebug() bool { - if o != nil && o.ErrorDebug != nil { + if o != nil && !IsNil(o.ErrorDebug) { return true } @@ -112,7 +115,7 @@ func (o *RejectOAuth2Request) SetErrorDebug(v string) { // GetErrorDescription returns the ErrorDescription field value if set, zero value otherwise. func (o *RejectOAuth2Request) GetErrorDescription() string { - if o == nil || o.ErrorDescription == nil { + if o == nil || IsNil(o.ErrorDescription) { var ret string return ret } @@ -122,7 +125,7 @@ func (o *RejectOAuth2Request) GetErrorDescription() string { // GetErrorDescriptionOk returns a tuple with the ErrorDescription field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *RejectOAuth2Request) GetErrorDescriptionOk() (*string, bool) { - if o == nil || o.ErrorDescription == nil { + if o == nil || IsNil(o.ErrorDescription) { return nil, false } return o.ErrorDescription, true @@ -130,7 +133,7 @@ func (o *RejectOAuth2Request) GetErrorDescriptionOk() (*string, bool) { // HasErrorDescription returns a boolean if a field has been set. func (o *RejectOAuth2Request) HasErrorDescription() bool { - if o != nil && o.ErrorDescription != nil { + if o != nil && !IsNil(o.ErrorDescription) { return true } @@ -144,7 +147,7 @@ func (o *RejectOAuth2Request) SetErrorDescription(v string) { // GetErrorHint returns the ErrorHint field value if set, zero value otherwise. func (o *RejectOAuth2Request) GetErrorHint() string { - if o == nil || o.ErrorHint == nil { + if o == nil || IsNil(o.ErrorHint) { var ret string return ret } @@ -154,7 +157,7 @@ func (o *RejectOAuth2Request) GetErrorHint() string { // GetErrorHintOk returns a tuple with the ErrorHint field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *RejectOAuth2Request) GetErrorHintOk() (*string, bool) { - if o == nil || o.ErrorHint == nil { + if o == nil || IsNil(o.ErrorHint) { return nil, false } return o.ErrorHint, true @@ -162,7 +165,7 @@ func (o *RejectOAuth2Request) GetErrorHintOk() (*string, bool) { // HasErrorHint returns a boolean if a field has been set. 
func (o *RejectOAuth2Request) HasErrorHint() bool { - if o != nil && o.ErrorHint != nil { + if o != nil && !IsNil(o.ErrorHint) { return true } @@ -176,7 +179,7 @@ func (o *RejectOAuth2Request) SetErrorHint(v string) { // GetStatusCode returns the StatusCode field value if set, zero value otherwise. func (o *RejectOAuth2Request) GetStatusCode() int64 { - if o == nil || o.StatusCode == nil { + if o == nil || IsNil(o.StatusCode) { var ret int64 return ret } @@ -186,7 +189,7 @@ func (o *RejectOAuth2Request) GetStatusCode() int64 { // GetStatusCodeOk returns a tuple with the StatusCode field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *RejectOAuth2Request) GetStatusCodeOk() (*int64, bool) { - if o == nil || o.StatusCode == nil { + if o == nil || IsNil(o.StatusCode) { return nil, false } return o.StatusCode, true @@ -194,7 +197,7 @@ func (o *RejectOAuth2Request) GetStatusCodeOk() (*int64, bool) { // HasStatusCode returns a boolean if a field has been set. func (o *RejectOAuth2Request) HasStatusCode() bool { - if o != nil && o.StatusCode != nil { + if o != nil && !IsNil(o.StatusCode) { return true } @@ -207,23 +210,31 @@ func (o *RejectOAuth2Request) SetStatusCode(v int64) { } func (o RejectOAuth2Request) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o RejectOAuth2Request) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Error != nil { + if !IsNil(o.Error) { toSerialize["error"] = o.Error } - if o.ErrorDebug != nil { + if !IsNil(o.ErrorDebug) { toSerialize["error_debug"] = o.ErrorDebug } - if o.ErrorDescription != nil { + if !IsNil(o.ErrorDescription) { toSerialize["error_description"] = o.ErrorDescription } - if o.ErrorHint != nil { + if !IsNil(o.ErrorHint) { toSerialize["error_hint"] = o.ErrorHint } - if o.StatusCode != nil { + if !IsNil(o.StatusCode) { toSerialize["status_code"] = o.StatusCode } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableRejectOAuth2Request struct { diff --git a/internal/httpclient/model_rfc6749_error_json.go b/internal/httpclient/model_rfc6749_error_json.go new file mode 100644 index 00000000000..d33a2b91e08 --- /dev/null +++ b/internal/httpclient/model_rfc6749_error_json.go @@ -0,0 +1,269 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
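
All RejectOAuth2Request fields are optional, so a rejection payload only carries what was explicitly set; the new ToMap path preserves that behavior. A brief sketch of building such a payload (import path assumed):

package main

import (
	"encoding/json"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

func main() {
	reject := openapi.NewRejectOAuth2RequestWithDefaults()
	reject.SetError("access_denied")
	reject.SetErrorDescription("The resource owner denied the request")
	reject.SetStatusCode(403)

	out, _ := json.Marshal(reject)
	fmt.Println(string(out)) // only the three fields set above are emitted
}
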
+ +package openapi + +import ( + "encoding/json" +) + +// checks if the RFC6749ErrorJson type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &RFC6749ErrorJson{} + +// RFC6749ErrorJson struct for RFC6749ErrorJson +type RFC6749ErrorJson struct { + Error *string `json:"error,omitempty"` + ErrorDebug *string `json:"error_debug,omitempty"` + ErrorDescription *string `json:"error_description,omitempty"` + ErrorHint *string `json:"error_hint,omitempty"` + StatusCode *int64 `json:"status_code,omitempty"` +} + +// NewRFC6749ErrorJson instantiates a new RFC6749ErrorJson object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewRFC6749ErrorJson() *RFC6749ErrorJson { + this := RFC6749ErrorJson{} + return &this +} + +// NewRFC6749ErrorJsonWithDefaults instantiates a new RFC6749ErrorJson object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewRFC6749ErrorJsonWithDefaults() *RFC6749ErrorJson { + this := RFC6749ErrorJson{} + return &this +} + +// GetError returns the Error field value if set, zero value otherwise. +func (o *RFC6749ErrorJson) GetError() string { + if o == nil || IsNil(o.Error) { + var ret string + return ret + } + return *o.Error +} + +// GetErrorOk returns a tuple with the Error field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *RFC6749ErrorJson) GetErrorOk() (*string, bool) { + if o == nil || IsNil(o.Error) { + return nil, false + } + return o.Error, true +} + +// HasError returns a boolean if a field has been set. +func (o *RFC6749ErrorJson) HasError() bool { + if o != nil && !IsNil(o.Error) { + return true + } + + return false +} + +// SetError gets a reference to the given string and assigns it to the Error field. +func (o *RFC6749ErrorJson) SetError(v string) { + o.Error = &v +} + +// GetErrorDebug returns the ErrorDebug field value if set, zero value otherwise. +func (o *RFC6749ErrorJson) GetErrorDebug() string { + if o == nil || IsNil(o.ErrorDebug) { + var ret string + return ret + } + return *o.ErrorDebug +} + +// GetErrorDebugOk returns a tuple with the ErrorDebug field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *RFC6749ErrorJson) GetErrorDebugOk() (*string, bool) { + if o == nil || IsNil(o.ErrorDebug) { + return nil, false + } + return o.ErrorDebug, true +} + +// HasErrorDebug returns a boolean if a field has been set. +func (o *RFC6749ErrorJson) HasErrorDebug() bool { + if o != nil && !IsNil(o.ErrorDebug) { + return true + } + + return false +} + +// SetErrorDebug gets a reference to the given string and assigns it to the ErrorDebug field. +func (o *RFC6749ErrorJson) SetErrorDebug(v string) { + o.ErrorDebug = &v +} + +// GetErrorDescription returns the ErrorDescription field value if set, zero value otherwise. +func (o *RFC6749ErrorJson) GetErrorDescription() string { + if o == nil || IsNil(o.ErrorDescription) { + var ret string + return ret + } + return *o.ErrorDescription +} + +// GetErrorDescriptionOk returns a tuple with the ErrorDescription field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *RFC6749ErrorJson) GetErrorDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.ErrorDescription) { + return nil, false + } + return o.ErrorDescription, true +} + +// HasErrorDescription returns a boolean if a field has been set. +func (o *RFC6749ErrorJson) HasErrorDescription() bool { + if o != nil && !IsNil(o.ErrorDescription) { + return true + } + + return false +} + +// SetErrorDescription gets a reference to the given string and assigns it to the ErrorDescription field. +func (o *RFC6749ErrorJson) SetErrorDescription(v string) { + o.ErrorDescription = &v +} + +// GetErrorHint returns the ErrorHint field value if set, zero value otherwise. +func (o *RFC6749ErrorJson) GetErrorHint() string { + if o == nil || IsNil(o.ErrorHint) { + var ret string + return ret + } + return *o.ErrorHint +} + +// GetErrorHintOk returns a tuple with the ErrorHint field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *RFC6749ErrorJson) GetErrorHintOk() (*string, bool) { + if o == nil || IsNil(o.ErrorHint) { + return nil, false + } + return o.ErrorHint, true +} + +// HasErrorHint returns a boolean if a field has been set. +func (o *RFC6749ErrorJson) HasErrorHint() bool { + if o != nil && !IsNil(o.ErrorHint) { + return true + } + + return false +} + +// SetErrorHint gets a reference to the given string and assigns it to the ErrorHint field. +func (o *RFC6749ErrorJson) SetErrorHint(v string) { + o.ErrorHint = &v +} + +// GetStatusCode returns the StatusCode field value if set, zero value otherwise. +func (o *RFC6749ErrorJson) GetStatusCode() int64 { + if o == nil || IsNil(o.StatusCode) { + var ret int64 + return ret + } + return *o.StatusCode +} + +// GetStatusCodeOk returns a tuple with the StatusCode field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *RFC6749ErrorJson) GetStatusCodeOk() (*int64, bool) { + if o == nil || IsNil(o.StatusCode) { + return nil, false + } + return o.StatusCode, true +} + +// HasStatusCode returns a boolean if a field has been set. +func (o *RFC6749ErrorJson) HasStatusCode() bool { + if o != nil && !IsNil(o.StatusCode) { + return true + } + + return false +} + +// SetStatusCode gets a reference to the given int64 and assigns it to the StatusCode field. 
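
RFC6749ErrorJson is a new, fully optional model mirroring the RFC 6749 error body. Because it declares no required properties it keeps the default decoding behavior, so a caller can unmarshal an error response directly into it; a hedged sketch (import path and payload are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

func main() {
	body := []byte(`{"error":"invalid_request","error_description":"missing redirect_uri","status_code":400}`)

	var e openapi.RFC6749ErrorJson
	if err := json.Unmarshal(body, &e); err != nil {
		panic(err)
	}
	if desc, ok := e.GetErrorDescriptionOk(); ok {
		fmt.Println(e.GetError(), "-", *desc) // invalid_request - missing redirect_uri
	}
}
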
+func (o *RFC6749ErrorJson) SetStatusCode(v int64) { + o.StatusCode = &v +} + +func (o RFC6749ErrorJson) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o RFC6749ErrorJson) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Error) { + toSerialize["error"] = o.Error + } + if !IsNil(o.ErrorDebug) { + toSerialize["error_debug"] = o.ErrorDebug + } + if !IsNil(o.ErrorDescription) { + toSerialize["error_description"] = o.ErrorDescription + } + if !IsNil(o.ErrorHint) { + toSerialize["error_hint"] = o.ErrorHint + } + if !IsNil(o.StatusCode) { + toSerialize["status_code"] = o.StatusCode + } + return toSerialize, nil +} + +type NullableRFC6749ErrorJson struct { + value *RFC6749ErrorJson + isSet bool +} + +func (v NullableRFC6749ErrorJson) Get() *RFC6749ErrorJson { + return v.value +} + +func (v *NullableRFC6749ErrorJson) Set(val *RFC6749ErrorJson) { + v.value = val + v.isSet = true +} + +func (v NullableRFC6749ErrorJson) IsSet() bool { + return v.isSet +} + +func (v *NullableRFC6749ErrorJson) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableRFC6749ErrorJson(val *RFC6749ErrorJson) *NullableRFC6749ErrorJson { + return &NullableRFC6749ErrorJson{value: val, isSet: true} +} + +func (v NullableRFC6749ErrorJson) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableRFC6749ErrorJson) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_token_pagination.go b/internal/httpclient/model_token_pagination.go index 7d96f47f2be..d9457ccd9e8 100644 --- a/internal/httpclient/model_token_pagination.go +++ b/internal/httpclient/model_token_pagination.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the TokenPagination type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TokenPagination{} + // TokenPagination struct for TokenPagination type TokenPagination struct { // Items per page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). @@ -50,7 +53,7 @@ func NewTokenPaginationWithDefaults() *TokenPagination { // GetPageSize returns the PageSize field value if set, zero value otherwise. func (o *TokenPagination) GetPageSize() int64 { - if o == nil || o.PageSize == nil { + if o == nil || IsNil(o.PageSize) { var ret int64 return ret } @@ -60,7 +63,7 @@ func (o *TokenPagination) GetPageSize() int64 { // GetPageSizeOk returns a tuple with the PageSize field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPagination) GetPageSizeOk() (*int64, bool) { - if o == nil || o.PageSize == nil { + if o == nil || IsNil(o.PageSize) { return nil, false } return o.PageSize, true @@ -68,7 +71,7 @@ func (o *TokenPagination) GetPageSizeOk() (*int64, bool) { // HasPageSize returns a boolean if a field has been set. func (o *TokenPagination) HasPageSize() bool { - if o != nil && o.PageSize != nil { + if o != nil && !IsNil(o.PageSize) { return true } @@ -82,7 +85,7 @@ func (o *TokenPagination) SetPageSize(v int64) { // GetPageToken returns the PageToken field value if set, zero value otherwise. 
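
Each model keeps its Nullable* companion, shown here for RFC6749ErrorJson. The wrapper's isSet flag lets callers distinguish a value that was explicitly null from one that was never provided, which a bare pointer cannot express after decoding. A small behavioral sketch (import path assumed):

package main

import (
	"fmt"

	openapi "github.com/ory/hydra-client-go/v2" // assumed import path
)

func main() {
	var n openapi.NullableRFC6749ErrorJson

	// Decoding an explicit JSON null marks the wrapper as set with a nil value.
	_ = n.UnmarshalJSON([]byte(`null`))
	fmt.Println(n.IsSet(), n.Get() == nil) // true true

	// Unset clears both the value and the flag.
	n.Unset()
	fmt.Println(n.IsSet()) // false
}
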
func (o *TokenPagination) GetPageToken() string { - if o == nil || o.PageToken == nil { + if o == nil || IsNil(o.PageToken) { var ret string return ret } @@ -92,7 +95,7 @@ func (o *TokenPagination) GetPageToken() string { // GetPageTokenOk returns a tuple with the PageToken field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPagination) GetPageTokenOk() (*string, bool) { - if o == nil || o.PageToken == nil { + if o == nil || IsNil(o.PageToken) { return nil, false } return o.PageToken, true @@ -100,7 +103,7 @@ func (o *TokenPagination) GetPageTokenOk() (*string, bool) { // HasPageToken returns a boolean if a field has been set. func (o *TokenPagination) HasPageToken() bool { - if o != nil && o.PageToken != nil { + if o != nil && !IsNil(o.PageToken) { return true } @@ -113,14 +116,22 @@ func (o *TokenPagination) SetPageToken(v string) { } func (o TokenPagination) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TokenPagination) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.PageSize != nil { + if !IsNil(o.PageSize) { toSerialize["page_size"] = o.PageSize } - if o.PageToken != nil { + if !IsNil(o.PageToken) { toSerialize["page_token"] = o.PageToken } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableTokenPagination struct { diff --git a/internal/httpclient/model_token_pagination_headers.go b/internal/httpclient/model_token_pagination_headers.go index 7c4c657c968..537d5e59bdd 100644 --- a/internal/httpclient/model_token_pagination_headers.go +++ b/internal/httpclient/model_token_pagination_headers.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the TokenPaginationHeaders type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TokenPaginationHeaders{} + // TokenPaginationHeaders struct for TokenPaginationHeaders type TokenPaginationHeaders struct { // The link header contains pagination links. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). in: header @@ -42,7 +45,7 @@ func NewTokenPaginationHeadersWithDefaults() *TokenPaginationHeaders { // GetLink returns the Link field value if set, zero value otherwise. func (o *TokenPaginationHeaders) GetLink() string { - if o == nil || o.Link == nil { + if o == nil || IsNil(o.Link) { var ret string return ret } @@ -52,7 +55,7 @@ func (o *TokenPaginationHeaders) GetLink() string { // GetLinkOk returns a tuple with the Link field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPaginationHeaders) GetLinkOk() (*string, bool) { - if o == nil || o.Link == nil { + if o == nil || IsNil(o.Link) { return nil, false } return o.Link, true @@ -60,7 +63,7 @@ func (o *TokenPaginationHeaders) GetLinkOk() (*string, bool) { // HasLink returns a boolean if a field has been set. func (o *TokenPaginationHeaders) HasLink() bool { - if o != nil && o.Link != nil { + if o != nil && !IsNil(o.Link) { return true } @@ -74,7 +77,7 @@ func (o *TokenPaginationHeaders) SetLink(v string) { // GetXTotalCount returns the XTotalCount field value if set, zero value otherwise. 
func (o *TokenPaginationHeaders) GetXTotalCount() string { - if o == nil || o.XTotalCount == nil { + if o == nil || IsNil(o.XTotalCount) { var ret string return ret } @@ -84,7 +87,7 @@ func (o *TokenPaginationHeaders) GetXTotalCount() string { // GetXTotalCountOk returns a tuple with the XTotalCount field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPaginationHeaders) GetXTotalCountOk() (*string, bool) { - if o == nil || o.XTotalCount == nil { + if o == nil || IsNil(o.XTotalCount) { return nil, false } return o.XTotalCount, true @@ -92,7 +95,7 @@ func (o *TokenPaginationHeaders) GetXTotalCountOk() (*string, bool) { // HasXTotalCount returns a boolean if a field has been set. func (o *TokenPaginationHeaders) HasXTotalCount() bool { - if o != nil && o.XTotalCount != nil { + if o != nil && !IsNil(o.XTotalCount) { return true } @@ -105,14 +108,22 @@ func (o *TokenPaginationHeaders) SetXTotalCount(v string) { } func (o TokenPaginationHeaders) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TokenPaginationHeaders) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Link != nil { + if !IsNil(o.Link) { toSerialize["link"] = o.Link } - if o.XTotalCount != nil { + if !IsNil(o.XTotalCount) { toSerialize["x-total-count"] = o.XTotalCount } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableTokenPaginationHeaders struct { diff --git a/internal/httpclient/model_token_pagination_request_parameters.go b/internal/httpclient/model_token_pagination_request_parameters.go index 40ef780d684..e18c491d8fe 100644 --- a/internal/httpclient/model_token_pagination_request_parameters.go +++ b/internal/httpclient/model_token_pagination_request_parameters.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the TokenPaginationRequestParameters type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TokenPaginationRequestParameters{} + // TokenPaginationRequestParameters The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: `; rel=\"{page}\"` For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). type TokenPaginationRequestParameters struct { // Items per Page This is the number of items per page to return. For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). @@ -50,7 +53,7 @@ func NewTokenPaginationRequestParametersWithDefaults() *TokenPaginationRequestPa // GetPageSize returns the PageSize field value if set, zero value otherwise. func (o *TokenPaginationRequestParameters) GetPageSize() int64 { - if o == nil || o.PageSize == nil { + if o == nil || IsNil(o.PageSize) { var ret int64 return ret } @@ -60,7 +63,7 @@ func (o *TokenPaginationRequestParameters) GetPageSize() int64 { // GetPageSizeOk returns a tuple with the PageSize field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPaginationRequestParameters) GetPageSizeOk() (*int64, bool) { - if o == nil || o.PageSize == nil { + if o == nil || IsNil(o.PageSize) { return nil, false } return o.PageSize, true @@ -68,7 +71,7 @@ func (o *TokenPaginationRequestParameters) GetPageSizeOk() (*int64, bool) { // HasPageSize returns a boolean if a field has been set. 
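
The `var _ MappedNullable = &TokenPaginationRequestParameters{}` line (and its counterpart at the top of every model touched here) is a compile-time assertion that the type implements the client's MappedNullable interface. The interface definition sits in the shared utils file, which this diff does not include; in openapi-generator output it is usually just the ToMap method, so the following is an assumption about its shape rather than the actual source:

package openapi

// Assumed shape of the interface the generated models assert against; the
// real definition lives in the client's shared utils file (not in this diff).
type MappedNullable interface {
	ToMap() (map[string]interface{}, error)
}

The blank-identifier assignment costs nothing at runtime and turns a missing ToMap method into a build error rather than a late marshaling surprise.
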
func (o *TokenPaginationRequestParameters) HasPageSize() bool { - if o != nil && o.PageSize != nil { + if o != nil && !IsNil(o.PageSize) { return true } @@ -82,7 +85,7 @@ func (o *TokenPaginationRequestParameters) SetPageSize(v int64) { // GetPageToken returns the PageToken field value if set, zero value otherwise. func (o *TokenPaginationRequestParameters) GetPageToken() string { - if o == nil || o.PageToken == nil { + if o == nil || IsNil(o.PageToken) { var ret string return ret } @@ -92,7 +95,7 @@ func (o *TokenPaginationRequestParameters) GetPageToken() string { // GetPageTokenOk returns a tuple with the PageToken field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPaginationRequestParameters) GetPageTokenOk() (*string, bool) { - if o == nil || o.PageToken == nil { + if o == nil || IsNil(o.PageToken) { return nil, false } return o.PageToken, true @@ -100,7 +103,7 @@ func (o *TokenPaginationRequestParameters) GetPageTokenOk() (*string, bool) { // HasPageToken returns a boolean if a field has been set. func (o *TokenPaginationRequestParameters) HasPageToken() bool { - if o != nil && o.PageToken != nil { + if o != nil && !IsNil(o.PageToken) { return true } @@ -113,14 +116,22 @@ func (o *TokenPaginationRequestParameters) SetPageToken(v string) { } func (o TokenPaginationRequestParameters) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TokenPaginationRequestParameters) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.PageSize != nil { + if !IsNil(o.PageSize) { toSerialize["page_size"] = o.PageSize } - if o.PageToken != nil { + if !IsNil(o.PageToken) { toSerialize["page_token"] = o.PageToken } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableTokenPaginationRequestParameters struct { diff --git a/internal/httpclient/model_token_pagination_response_headers.go b/internal/httpclient/model_token_pagination_response_headers.go index 26722925de6..bddbcd203ea 100644 --- a/internal/httpclient/model_token_pagination_response_headers.go +++ b/internal/httpclient/model_token_pagination_response_headers.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the TokenPaginationResponseHeaders type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TokenPaginationResponseHeaders{} + // TokenPaginationResponseHeaders The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: `; rel=\"{page}\"` For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). type TokenPaginationResponseHeaders struct { // The Link HTTP Header The `Link` header contains a comma-delimited list of links to the following pages: first: The first page of results. next: The next page of results. prev: The previous page of results. last: The last page of results. Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples: ; rel=\"first\",; rel=\"next\",; rel=\"prev\",; rel=\"last\" @@ -42,7 +45,7 @@ func NewTokenPaginationResponseHeadersWithDefaults() *TokenPaginationResponseHea // GetLink returns the Link field value if set, zero value otherwise. 
func (o *TokenPaginationResponseHeaders) GetLink() string { - if o == nil || o.Link == nil { + if o == nil || IsNil(o.Link) { var ret string return ret } @@ -52,7 +55,7 @@ func (o *TokenPaginationResponseHeaders) GetLink() string { // GetLinkOk returns a tuple with the Link field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPaginationResponseHeaders) GetLinkOk() (*string, bool) { - if o == nil || o.Link == nil { + if o == nil || IsNil(o.Link) { return nil, false } return o.Link, true @@ -60,7 +63,7 @@ func (o *TokenPaginationResponseHeaders) GetLinkOk() (*string, bool) { // HasLink returns a boolean if a field has been set. func (o *TokenPaginationResponseHeaders) HasLink() bool { - if o != nil && o.Link != nil { + if o != nil && !IsNil(o.Link) { return true } @@ -74,7 +77,7 @@ func (o *TokenPaginationResponseHeaders) SetLink(v string) { // GetXTotalCount returns the XTotalCount field value if set, zero value otherwise. func (o *TokenPaginationResponseHeaders) GetXTotalCount() int64 { - if o == nil || o.XTotalCount == nil { + if o == nil || IsNil(o.XTotalCount) { var ret int64 return ret } @@ -84,7 +87,7 @@ func (o *TokenPaginationResponseHeaders) GetXTotalCount() int64 { // GetXTotalCountOk returns a tuple with the XTotalCount field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TokenPaginationResponseHeaders) GetXTotalCountOk() (*int64, bool) { - if o == nil || o.XTotalCount == nil { + if o == nil || IsNil(o.XTotalCount) { return nil, false } return o.XTotalCount, true @@ -92,7 +95,7 @@ func (o *TokenPaginationResponseHeaders) GetXTotalCountOk() (*int64, bool) { // HasXTotalCount returns a boolean if a field has been set. func (o *TokenPaginationResponseHeaders) HasXTotalCount() bool { - if o != nil && o.XTotalCount != nil { + if o != nil && !IsNil(o.XTotalCount) { return true } @@ -105,14 +108,22 @@ func (o *TokenPaginationResponseHeaders) SetXTotalCount(v int64) { } func (o TokenPaginationResponseHeaders) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TokenPaginationResponseHeaders) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Link != nil { + if !IsNil(o.Link) { toSerialize["link"] = o.Link } - if o.XTotalCount != nil { + if !IsNil(o.XTotalCount) { toSerialize["x-total-count"] = o.XTotalCount } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableTokenPaginationResponseHeaders struct { diff --git a/internal/httpclient/model_trust_o_auth2_jwt_grant_issuer.go b/internal/httpclient/model_trust_o_auth2_jwt_grant_issuer.go index 15f8d9575f4..5803dcffbfb 100644 --- a/internal/httpclient/model_trust_o_auth2_jwt_grant_issuer.go +++ b/internal/httpclient/model_trust_o_auth2_jwt_grant_issuer.go @@ -12,10 +12,15 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" "time" ) +// checks if the TrustOAuth2JwtGrantIssuer type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TrustOAuth2JwtGrantIssuer{} + // TrustOAuth2JwtGrantIssuer Trust OAuth2 JWT Bearer Grant Type Issuer Request Body type TrustOAuth2JwtGrantIssuer struct { // The \"allow_any_subject\" indicates that the issuer is allowed to have any principal as the subject of the JWT. 
@@ -31,6 +36,8 @@ type TrustOAuth2JwtGrantIssuer struct { Subject *string `json:"subject,omitempty"` } +type _TrustOAuth2JwtGrantIssuer TrustOAuth2JwtGrantIssuer + // NewTrustOAuth2JwtGrantIssuer instantiates a new TrustOAuth2JwtGrantIssuer object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments @@ -54,7 +61,7 @@ func NewTrustOAuth2JwtGrantIssuerWithDefaults() *TrustOAuth2JwtGrantIssuer { // GetAllowAnySubject returns the AllowAnySubject field value if set, zero value otherwise. func (o *TrustOAuth2JwtGrantIssuer) GetAllowAnySubject() bool { - if o == nil || o.AllowAnySubject == nil { + if o == nil || IsNil(o.AllowAnySubject) { var ret bool return ret } @@ -64,7 +71,7 @@ func (o *TrustOAuth2JwtGrantIssuer) GetAllowAnySubject() bool { // GetAllowAnySubjectOk returns a tuple with the AllowAnySubject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustOAuth2JwtGrantIssuer) GetAllowAnySubjectOk() (*bool, bool) { - if o == nil || o.AllowAnySubject == nil { + if o == nil || IsNil(o.AllowAnySubject) { return nil, false } return o.AllowAnySubject, true @@ -72,7 +79,7 @@ func (o *TrustOAuth2JwtGrantIssuer) GetAllowAnySubjectOk() (*bool, bool) { // HasAllowAnySubject returns a boolean if a field has been set. func (o *TrustOAuth2JwtGrantIssuer) HasAllowAnySubject() bool { - if o != nil && o.AllowAnySubject != nil { + if o != nil && !IsNil(o.AllowAnySubject) { return true } @@ -182,7 +189,7 @@ func (o *TrustOAuth2JwtGrantIssuer) SetScope(v []string) { // GetSubject returns the Subject field value if set, zero value otherwise. func (o *TrustOAuth2JwtGrantIssuer) GetSubject() string { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { var ret string return ret } @@ -192,7 +199,7 @@ func (o *TrustOAuth2JwtGrantIssuer) GetSubject() string { // GetSubjectOk returns a tuple with the Subject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustOAuth2JwtGrantIssuer) GetSubjectOk() (*string, bool) { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { return nil, false } return o.Subject, true @@ -200,7 +207,7 @@ func (o *TrustOAuth2JwtGrantIssuer) GetSubjectOk() (*string, bool) { // HasSubject returns a boolean if a field has been set. 
func (o *TrustOAuth2JwtGrantIssuer) HasSubject() bool { - if o != nil && o.Subject != nil { + if o != nil && !IsNil(o.Subject) { return true } @@ -213,26 +220,66 @@ func (o *TrustOAuth2JwtGrantIssuer) SetSubject(v string) { } func (o TrustOAuth2JwtGrantIssuer) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TrustOAuth2JwtGrantIssuer) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.AllowAnySubject != nil { + if !IsNil(o.AllowAnySubject) { toSerialize["allow_any_subject"] = o.AllowAnySubject } - if true { - toSerialize["expires_at"] = o.ExpiresAt + toSerialize["expires_at"] = o.ExpiresAt + toSerialize["issuer"] = o.Issuer + toSerialize["jwk"] = o.Jwk + toSerialize["scope"] = o.Scope + if !IsNil(o.Subject) { + toSerialize["subject"] = o.Subject } - if true { - toSerialize["issuer"] = o.Issuer + return toSerialize, nil +} + +func (o *TrustOAuth2JwtGrantIssuer) UnmarshalJSON(data []byte) (err error) { + // This validates that all required properties are included in the JSON object + // by unmarshalling the object into a generic map with string keys and checking + // that every required field exists as a key in the generic map. + requiredProperties := []string{ + "expires_at", + "issuer", + "jwk", + "scope", } - if true { - toSerialize["jwk"] = o.Jwk + + allProperties := make(map[string]interface{}) + + err = json.Unmarshal(data, &allProperties) + + if err != nil { + return err } - if true { - toSerialize["scope"] = o.Scope + + for _, requiredProperty := range requiredProperties { + if _, exists := allProperties[requiredProperty]; !exists { + return fmt.Errorf("no value given for required property %v", requiredProperty) + } } - if o.Subject != nil { - toSerialize["subject"] = o.Subject + + varTrustOAuth2JwtGrantIssuer := _TrustOAuth2JwtGrantIssuer{} + + decoder := json.NewDecoder(bytes.NewReader(data)) + decoder.DisallowUnknownFields() + err = decoder.Decode(&varTrustOAuth2JwtGrantIssuer) + + if err != nil { + return err } - return json.Marshal(toSerialize) + + *o = TrustOAuth2JwtGrantIssuer(varTrustOAuth2JwtGrantIssuer) + + return err } type NullableTrustOAuth2JwtGrantIssuer struct { diff --git a/internal/httpclient/model_trusted_o_auth2_jwt_grant_issuer.go b/internal/httpclient/model_trusted_o_auth2_jwt_grant_issuer.go index 80fba647b44..7b0c1fcbca9 100644 --- a/internal/httpclient/model_trusted_o_auth2_jwt_grant_issuer.go +++ b/internal/httpclient/model_trusted_o_auth2_jwt_grant_issuer.go @@ -16,6 +16,9 @@ import ( "time" ) +// checks if the TrustedOAuth2JwtGrantIssuer type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TrustedOAuth2JwtGrantIssuer{} + // TrustedOAuth2JwtGrantIssuer OAuth2 JWT Bearer Grant Type Issuer Trust Relationship type TrustedOAuth2JwtGrantIssuer struct { // The \"allow_any_subject\" indicates that the issuer is allowed to have any principal as the subject of the JWT. @@ -53,7 +56,7 @@ func NewTrustedOAuth2JwtGrantIssuerWithDefaults() *TrustedOAuth2JwtGrantIssuer { // GetAllowAnySubject returns the AllowAnySubject field value if set, zero value otherwise. 
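TrustOAuth2JwtGrantIssuer also gains an UnmarshalJSON that first checks every required property (expires_at, issuer, jwk, scope) is present and then re-decodes through a strict decoder with DisallowUnknownFields. A hedged sketch of the resulting decode behaviour, illustrative only and not part of the generated code:

package openapi

import (
	"encoding/json"
	"fmt"
)

// Illustrative only: payloads missing a required property are rejected before
// the strict decode pass runs.
func demoTrustIssuerRequiredFields() {
	payload := []byte(`{"issuer":"https://example.org","expires_at":"2030-01-01T00:00:00Z"}`)

	var issuer TrustOAuth2JwtGrantIssuer
	err := json.Unmarshal(payload, &issuer)
	fmt.Println(err) // prints: no value given for required property jwk

	// Payloads that do contain all required properties but carry unknown keys
	// are also rejected, because the second pass uses DisallowUnknownFields.
}
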
func (o *TrustedOAuth2JwtGrantIssuer) GetAllowAnySubject() bool { - if o == nil || o.AllowAnySubject == nil { + if o == nil || IsNil(o.AllowAnySubject) { var ret bool return ret } @@ -63,7 +66,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetAllowAnySubject() bool { // GetAllowAnySubjectOk returns a tuple with the AllowAnySubject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetAllowAnySubjectOk() (*bool, bool) { - if o == nil || o.AllowAnySubject == nil { + if o == nil || IsNil(o.AllowAnySubject) { return nil, false } return o.AllowAnySubject, true @@ -71,7 +74,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetAllowAnySubjectOk() (*bool, bool) { // HasAllowAnySubject returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasAllowAnySubject() bool { - if o != nil && o.AllowAnySubject != nil { + if o != nil && !IsNil(o.AllowAnySubject) { return true } @@ -85,7 +88,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetAllowAnySubject(v bool) { // GetCreatedAt returns the CreatedAt field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantIssuer) GetCreatedAt() time.Time { - if o == nil || o.CreatedAt == nil { + if o == nil || IsNil(o.CreatedAt) { var ret time.Time return ret } @@ -95,7 +98,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetCreatedAt() time.Time { // GetCreatedAtOk returns a tuple with the CreatedAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetCreatedAtOk() (*time.Time, bool) { - if o == nil || o.CreatedAt == nil { + if o == nil || IsNil(o.CreatedAt) { return nil, false } return o.CreatedAt, true @@ -103,7 +106,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetCreatedAtOk() (*time.Time, bool) { // HasCreatedAt returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasCreatedAt() bool { - if o != nil && o.CreatedAt != nil { + if o != nil && !IsNil(o.CreatedAt) { return true } @@ -117,7 +120,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetCreatedAt(v time.Time) { // GetExpiresAt returns the ExpiresAt field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantIssuer) GetExpiresAt() time.Time { - if o == nil || o.ExpiresAt == nil { + if o == nil || IsNil(o.ExpiresAt) { var ret time.Time return ret } @@ -127,7 +130,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetExpiresAt() time.Time { // GetExpiresAtOk returns a tuple with the ExpiresAt field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetExpiresAtOk() (*time.Time, bool) { - if o == nil || o.ExpiresAt == nil { + if o == nil || IsNil(o.ExpiresAt) { return nil, false } return o.ExpiresAt, true @@ -135,7 +138,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetExpiresAtOk() (*time.Time, bool) { // HasExpiresAt returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasExpiresAt() bool { - if o != nil && o.ExpiresAt != nil { + if o != nil && !IsNil(o.ExpiresAt) { return true } @@ -149,7 +152,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetExpiresAt(v time.Time) { // GetId returns the Id field value if set, zero value otherwise. 
func (o *TrustedOAuth2JwtGrantIssuer) GetId() string { - if o == nil || o.Id == nil { + if o == nil || IsNil(o.Id) { var ret string return ret } @@ -159,7 +162,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetId() string { // GetIdOk returns a tuple with the Id field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetIdOk() (*string, bool) { - if o == nil || o.Id == nil { + if o == nil || IsNil(o.Id) { return nil, false } return o.Id, true @@ -167,7 +170,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetIdOk() (*string, bool) { // HasId returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasId() bool { - if o != nil && o.Id != nil { + if o != nil && !IsNil(o.Id) { return true } @@ -181,7 +184,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetId(v string) { // GetIssuer returns the Issuer field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantIssuer) GetIssuer() string { - if o == nil || o.Issuer == nil { + if o == nil || IsNil(o.Issuer) { var ret string return ret } @@ -191,7 +194,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetIssuer() string { // GetIssuerOk returns a tuple with the Issuer field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetIssuerOk() (*string, bool) { - if o == nil || o.Issuer == nil { + if o == nil || IsNil(o.Issuer) { return nil, false } return o.Issuer, true @@ -199,7 +202,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetIssuerOk() (*string, bool) { // HasIssuer returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasIssuer() bool { - if o != nil && o.Issuer != nil { + if o != nil && !IsNil(o.Issuer) { return true } @@ -213,7 +216,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetIssuer(v string) { // GetPublicKey returns the PublicKey field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantIssuer) GetPublicKey() TrustedOAuth2JwtGrantJsonWebKey { - if o == nil || o.PublicKey == nil { + if o == nil || IsNil(o.PublicKey) { var ret TrustedOAuth2JwtGrantJsonWebKey return ret } @@ -223,7 +226,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetPublicKey() TrustedOAuth2JwtGrantJsonWe // GetPublicKeyOk returns a tuple with the PublicKey field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetPublicKeyOk() (*TrustedOAuth2JwtGrantJsonWebKey, bool) { - if o == nil || o.PublicKey == nil { + if o == nil || IsNil(o.PublicKey) { return nil, false } return o.PublicKey, true @@ -231,7 +234,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetPublicKeyOk() (*TrustedOAuth2JwtGrantJs // HasPublicKey returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasPublicKey() bool { - if o != nil && o.PublicKey != nil { + if o != nil && !IsNil(o.PublicKey) { return true } @@ -245,7 +248,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetPublicKey(v TrustedOAuth2JwtGrantJsonWe // GetScope returns the Scope field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantIssuer) GetScope() []string { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { var ret []string return ret } @@ -255,7 +258,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetScope() []string { // GetScopeOk returns a tuple with the Scope field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *TrustedOAuth2JwtGrantIssuer) GetScopeOk() ([]string, bool) { - if o == nil || o.Scope == nil { + if o == nil || IsNil(o.Scope) { return nil, false } return o.Scope, true @@ -263,7 +266,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetScopeOk() ([]string, bool) { // HasScope returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasScope() bool { - if o != nil && o.Scope != nil { + if o != nil && !IsNil(o.Scope) { return true } @@ -277,7 +280,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetScope(v []string) { // GetSubject returns the Subject field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantIssuer) GetSubject() string { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { var ret string return ret } @@ -287,7 +290,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetSubject() string { // GetSubjectOk returns a tuple with the Subject field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantIssuer) GetSubjectOk() (*string, bool) { - if o == nil || o.Subject == nil { + if o == nil || IsNil(o.Subject) { return nil, false } return o.Subject, true @@ -295,7 +298,7 @@ func (o *TrustedOAuth2JwtGrantIssuer) GetSubjectOk() (*string, bool) { // HasSubject returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantIssuer) HasSubject() bool { - if o != nil && o.Subject != nil { + if o != nil && !IsNil(o.Subject) { return true } @@ -308,32 +311,40 @@ func (o *TrustedOAuth2JwtGrantIssuer) SetSubject(v string) { } func (o TrustedOAuth2JwtGrantIssuer) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TrustedOAuth2JwtGrantIssuer) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.AllowAnySubject != nil { + if !IsNil(o.AllowAnySubject) { toSerialize["allow_any_subject"] = o.AllowAnySubject } - if o.CreatedAt != nil { + if !IsNil(o.CreatedAt) { toSerialize["created_at"] = o.CreatedAt } - if o.ExpiresAt != nil { + if !IsNil(o.ExpiresAt) { toSerialize["expires_at"] = o.ExpiresAt } - if o.Id != nil { + if !IsNil(o.Id) { toSerialize["id"] = o.Id } - if o.Issuer != nil { + if !IsNil(o.Issuer) { toSerialize["issuer"] = o.Issuer } - if o.PublicKey != nil { + if !IsNil(o.PublicKey) { toSerialize["public_key"] = o.PublicKey } - if o.Scope != nil { + if !IsNil(o.Scope) { toSerialize["scope"] = o.Scope } - if o.Subject != nil { + if !IsNil(o.Subject) { toSerialize["subject"] = o.Subject } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableTrustedOAuth2JwtGrantIssuer struct { diff --git a/internal/httpclient/model_trusted_o_auth2_jwt_grant_json_web_key.go b/internal/httpclient/model_trusted_o_auth2_jwt_grant_json_web_key.go index 7b358805c77..2752cb5eeea 100644 --- a/internal/httpclient/model_trusted_o_auth2_jwt_grant_json_web_key.go +++ b/internal/httpclient/model_trusted_o_auth2_jwt_grant_json_web_key.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the TrustedOAuth2JwtGrantJsonWebKey type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &TrustedOAuth2JwtGrantJsonWebKey{} + // TrustedOAuth2JwtGrantJsonWebKey OAuth2 JWT Bearer Grant Type Issuer Trusted JSON Web Key type TrustedOAuth2JwtGrantJsonWebKey struct { // The \"key_id\" is key unique identifier (same as kid header in jws/jwt). 
@@ -42,7 +45,7 @@ func NewTrustedOAuth2JwtGrantJsonWebKeyWithDefaults() *TrustedOAuth2JwtGrantJson // GetKid returns the Kid field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantJsonWebKey) GetKid() string { - if o == nil || o.Kid == nil { + if o == nil || IsNil(o.Kid) { var ret string return ret } @@ -52,7 +55,7 @@ func (o *TrustedOAuth2JwtGrantJsonWebKey) GetKid() string { // GetKidOk returns a tuple with the Kid field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantJsonWebKey) GetKidOk() (*string, bool) { - if o == nil || o.Kid == nil { + if o == nil || IsNil(o.Kid) { return nil, false } return o.Kid, true @@ -60,7 +63,7 @@ func (o *TrustedOAuth2JwtGrantJsonWebKey) GetKidOk() (*string, bool) { // HasKid returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantJsonWebKey) HasKid() bool { - if o != nil && o.Kid != nil { + if o != nil && !IsNil(o.Kid) { return true } @@ -74,7 +77,7 @@ func (o *TrustedOAuth2JwtGrantJsonWebKey) SetKid(v string) { // GetSet returns the Set field value if set, zero value otherwise. func (o *TrustedOAuth2JwtGrantJsonWebKey) GetSet() string { - if o == nil || o.Set == nil { + if o == nil || IsNil(o.Set) { var ret string return ret } @@ -84,7 +87,7 @@ func (o *TrustedOAuth2JwtGrantJsonWebKey) GetSet() string { // GetSetOk returns a tuple with the Set field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *TrustedOAuth2JwtGrantJsonWebKey) GetSetOk() (*string, bool) { - if o == nil || o.Set == nil { + if o == nil || IsNil(o.Set) { return nil, false } return o.Set, true @@ -92,7 +95,7 @@ func (o *TrustedOAuth2JwtGrantJsonWebKey) GetSetOk() (*string, bool) { // HasSet returns a boolean if a field has been set. func (o *TrustedOAuth2JwtGrantJsonWebKey) HasSet() bool { - if o != nil && o.Set != nil { + if o != nil && !IsNil(o.Set) { return true } @@ -105,14 +108,22 @@ func (o *TrustedOAuth2JwtGrantJsonWebKey) SetSet(v string) { } func (o TrustedOAuth2JwtGrantJsonWebKey) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o TrustedOAuth2JwtGrantJsonWebKey) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Kid != nil { + if !IsNil(o.Kid) { toSerialize["kid"] = o.Kid } - if o.Set != nil { + if !IsNil(o.Set) { toSerialize["set"] = o.Set } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableTrustedOAuth2JwtGrantJsonWebKey struct { diff --git a/internal/httpclient/model_verifiable_credential_priming_response.go b/internal/httpclient/model_verifiable_credential_priming_response.go new file mode 100644 index 00000000000..f0bdf3309b7 --- /dev/null +++ b/internal/httpclient/model_verifiable_credential_priming_response.go @@ -0,0 +1,377 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package openapi + +import ( + "encoding/json" +) + +// checks if the VerifiableCredentialPrimingResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &VerifiableCredentialPrimingResponse{} + +// VerifiableCredentialPrimingResponse struct for VerifiableCredentialPrimingResponse +type VerifiableCredentialPrimingResponse struct { + CNonce *string `json:"c_nonce,omitempty"` + CNonceExpiresIn *int64 `json:"c_nonce_expires_in,omitempty"` + Error *string `json:"error,omitempty"` + ErrorDebug *string `json:"error_debug,omitempty"` + ErrorDescription *string `json:"error_description,omitempty"` + ErrorHint *string `json:"error_hint,omitempty"` + Format *string `json:"format,omitempty"` + StatusCode *int64 `json:"status_code,omitempty"` +} + +// NewVerifiableCredentialPrimingResponse instantiates a new VerifiableCredentialPrimingResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewVerifiableCredentialPrimingResponse() *VerifiableCredentialPrimingResponse { + this := VerifiableCredentialPrimingResponse{} + return &this +} + +// NewVerifiableCredentialPrimingResponseWithDefaults instantiates a new VerifiableCredentialPrimingResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewVerifiableCredentialPrimingResponseWithDefaults() *VerifiableCredentialPrimingResponse { + this := VerifiableCredentialPrimingResponse{} + return &this +} + +// GetCNonce returns the CNonce field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetCNonce() string { + if o == nil || IsNil(o.CNonce) { + var ret string + return ret + } + return *o.CNonce +} + +// GetCNonceOk returns a tuple with the CNonce field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetCNonceOk() (*string, bool) { + if o == nil || IsNil(o.CNonce) { + return nil, false + } + return o.CNonce, true +} + +// HasCNonce returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasCNonce() bool { + if o != nil && !IsNil(o.CNonce) { + return true + } + + return false +} + +// SetCNonce gets a reference to the given string and assigns it to the CNonce field. +func (o *VerifiableCredentialPrimingResponse) SetCNonce(v string) { + o.CNonce = &v +} + +// GetCNonceExpiresIn returns the CNonceExpiresIn field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetCNonceExpiresIn() int64 { + if o == nil || IsNil(o.CNonceExpiresIn) { + var ret int64 + return ret + } + return *o.CNonceExpiresIn +} + +// GetCNonceExpiresInOk returns a tuple with the CNonceExpiresIn field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetCNonceExpiresInOk() (*int64, bool) { + if o == nil || IsNil(o.CNonceExpiresIn) { + return nil, false + } + return o.CNonceExpiresIn, true +} + +// HasCNonceExpiresIn returns a boolean if a field has been set. 
+func (o *VerifiableCredentialPrimingResponse) HasCNonceExpiresIn() bool { + if o != nil && !IsNil(o.CNonceExpiresIn) { + return true + } + + return false +} + +// SetCNonceExpiresIn gets a reference to the given int64 and assigns it to the CNonceExpiresIn field. +func (o *VerifiableCredentialPrimingResponse) SetCNonceExpiresIn(v int64) { + o.CNonceExpiresIn = &v +} + +// GetError returns the Error field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetError() string { + if o == nil || IsNil(o.Error) { + var ret string + return ret + } + return *o.Error +} + +// GetErrorOk returns a tuple with the Error field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetErrorOk() (*string, bool) { + if o == nil || IsNil(o.Error) { + return nil, false + } + return o.Error, true +} + +// HasError returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasError() bool { + if o != nil && !IsNil(o.Error) { + return true + } + + return false +} + +// SetError gets a reference to the given string and assigns it to the Error field. +func (o *VerifiableCredentialPrimingResponse) SetError(v string) { + o.Error = &v +} + +// GetErrorDebug returns the ErrorDebug field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetErrorDebug() string { + if o == nil || IsNil(o.ErrorDebug) { + var ret string + return ret + } + return *o.ErrorDebug +} + +// GetErrorDebugOk returns a tuple with the ErrorDebug field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetErrorDebugOk() (*string, bool) { + if o == nil || IsNil(o.ErrorDebug) { + return nil, false + } + return o.ErrorDebug, true +} + +// HasErrorDebug returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasErrorDebug() bool { + if o != nil && !IsNil(o.ErrorDebug) { + return true + } + + return false +} + +// SetErrorDebug gets a reference to the given string and assigns it to the ErrorDebug field. +func (o *VerifiableCredentialPrimingResponse) SetErrorDebug(v string) { + o.ErrorDebug = &v +} + +// GetErrorDescription returns the ErrorDescription field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetErrorDescription() string { + if o == nil || IsNil(o.ErrorDescription) { + var ret string + return ret + } + return *o.ErrorDescription +} + +// GetErrorDescriptionOk returns a tuple with the ErrorDescription field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetErrorDescriptionOk() (*string, bool) { + if o == nil || IsNil(o.ErrorDescription) { + return nil, false + } + return o.ErrorDescription, true +} + +// HasErrorDescription returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasErrorDescription() bool { + if o != nil && !IsNil(o.ErrorDescription) { + return true + } + + return false +} + +// SetErrorDescription gets a reference to the given string and assigns it to the ErrorDescription field. +func (o *VerifiableCredentialPrimingResponse) SetErrorDescription(v string) { + o.ErrorDescription = &v +} + +// GetErrorHint returns the ErrorHint field value if set, zero value otherwise. 
+func (o *VerifiableCredentialPrimingResponse) GetErrorHint() string { + if o == nil || IsNil(o.ErrorHint) { + var ret string + return ret + } + return *o.ErrorHint +} + +// GetErrorHintOk returns a tuple with the ErrorHint field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetErrorHintOk() (*string, bool) { + if o == nil || IsNil(o.ErrorHint) { + return nil, false + } + return o.ErrorHint, true +} + +// HasErrorHint returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasErrorHint() bool { + if o != nil && !IsNil(o.ErrorHint) { + return true + } + + return false +} + +// SetErrorHint gets a reference to the given string and assigns it to the ErrorHint field. +func (o *VerifiableCredentialPrimingResponse) SetErrorHint(v string) { + o.ErrorHint = &v +} + +// GetFormat returns the Format field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetFormat() string { + if o == nil || IsNil(o.Format) { + var ret string + return ret + } + return *o.Format +} + +// GetFormatOk returns a tuple with the Format field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetFormatOk() (*string, bool) { + if o == nil || IsNil(o.Format) { + return nil, false + } + return o.Format, true +} + +// HasFormat returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasFormat() bool { + if o != nil && !IsNil(o.Format) { + return true + } + + return false +} + +// SetFormat gets a reference to the given string and assigns it to the Format field. +func (o *VerifiableCredentialPrimingResponse) SetFormat(v string) { + o.Format = &v +} + +// GetStatusCode returns the StatusCode field value if set, zero value otherwise. +func (o *VerifiableCredentialPrimingResponse) GetStatusCode() int64 { + if o == nil || IsNil(o.StatusCode) { + var ret int64 + return ret + } + return *o.StatusCode +} + +// GetStatusCodeOk returns a tuple with the StatusCode field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialPrimingResponse) GetStatusCodeOk() (*int64, bool) { + if o == nil || IsNil(o.StatusCode) { + return nil, false + } + return o.StatusCode, true +} + +// HasStatusCode returns a boolean if a field has been set. +func (o *VerifiableCredentialPrimingResponse) HasStatusCode() bool { + if o != nil && !IsNil(o.StatusCode) { + return true + } + + return false +} + +// SetStatusCode gets a reference to the given int64 and assigns it to the StatusCode field. 
+func (o *VerifiableCredentialPrimingResponse) SetStatusCode(v int64) { + o.StatusCode = &v +} + +func (o VerifiableCredentialPrimingResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o VerifiableCredentialPrimingResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.CNonce) { + toSerialize["c_nonce"] = o.CNonce + } + if !IsNil(o.CNonceExpiresIn) { + toSerialize["c_nonce_expires_in"] = o.CNonceExpiresIn + } + if !IsNil(o.Error) { + toSerialize["error"] = o.Error + } + if !IsNil(o.ErrorDebug) { + toSerialize["error_debug"] = o.ErrorDebug + } + if !IsNil(o.ErrorDescription) { + toSerialize["error_description"] = o.ErrorDescription + } + if !IsNil(o.ErrorHint) { + toSerialize["error_hint"] = o.ErrorHint + } + if !IsNil(o.Format) { + toSerialize["format"] = o.Format + } + if !IsNil(o.StatusCode) { + toSerialize["status_code"] = o.StatusCode + } + return toSerialize, nil +} + +type NullableVerifiableCredentialPrimingResponse struct { + value *VerifiableCredentialPrimingResponse + isSet bool +} + +func (v NullableVerifiableCredentialPrimingResponse) Get() *VerifiableCredentialPrimingResponse { + return v.value +} + +func (v *NullableVerifiableCredentialPrimingResponse) Set(val *VerifiableCredentialPrimingResponse) { + v.value = val + v.isSet = true +} + +func (v NullableVerifiableCredentialPrimingResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableVerifiableCredentialPrimingResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableVerifiableCredentialPrimingResponse(val *VerifiableCredentialPrimingResponse) *NullableVerifiableCredentialPrimingResponse { + return &NullableVerifiableCredentialPrimingResponse{value: val, isSet: true} +} + +func (v NullableVerifiableCredentialPrimingResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableVerifiableCredentialPrimingResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_verifiable_credential_proof.go b/internal/httpclient/model_verifiable_credential_proof.go new file mode 100644 index 00000000000..28eedafdb9e --- /dev/null +++ b/internal/httpclient/model_verifiable_credential_proof.go @@ -0,0 +1,161 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
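VerifiableCredentialPrimingResponse is a new model; its fields (c_nonce, c_nonce_expires_in, format, plus the usual error_* fields) suggest it primes the client with a nonce for the verifiable-credential proof. An illustrative consumer follows; the payload and the format value are made up:

package openapi

import (
	"encoding/json"
	"fmt"
)

// Illustrative only: reading a priming response and branching on the error
// fields before using the nonce.
func demoPrimingResponse() {
	raw := []byte(`{"c_nonce":"nonce-123","c_nonce_expires_in":3600,"format":"jwt_vc_json"}`)

	var resp VerifiableCredentialPrimingResponse
	if err := json.Unmarshal(raw, &resp); err != nil {
		panic(err)
	}

	if resp.HasError() {
		fmt.Println("priming failed:", resp.GetError(), resp.GetErrorDescription())
		return
	}
	// The c_nonce is embedded into the credential proof before it expires.
	fmt.Println(resp.GetCNonce(), resp.GetCNonceExpiresIn())
}
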
+ +package openapi + +import ( + "encoding/json" +) + +// checks if the VerifiableCredentialProof type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &VerifiableCredentialProof{} + +// VerifiableCredentialProof struct for VerifiableCredentialProof +type VerifiableCredentialProof struct { + Jwt *string `json:"jwt,omitempty"` + ProofType *string `json:"proof_type,omitempty"` +} + +// NewVerifiableCredentialProof instantiates a new VerifiableCredentialProof object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewVerifiableCredentialProof() *VerifiableCredentialProof { + this := VerifiableCredentialProof{} + return &this +} + +// NewVerifiableCredentialProofWithDefaults instantiates a new VerifiableCredentialProof object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewVerifiableCredentialProofWithDefaults() *VerifiableCredentialProof { + this := VerifiableCredentialProof{} + return &this +} + +// GetJwt returns the Jwt field value if set, zero value otherwise. +func (o *VerifiableCredentialProof) GetJwt() string { + if o == nil || IsNil(o.Jwt) { + var ret string + return ret + } + return *o.Jwt +} + +// GetJwtOk returns a tuple with the Jwt field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialProof) GetJwtOk() (*string, bool) { + if o == nil || IsNil(o.Jwt) { + return nil, false + } + return o.Jwt, true +} + +// HasJwt returns a boolean if a field has been set. +func (o *VerifiableCredentialProof) HasJwt() bool { + if o != nil && !IsNil(o.Jwt) { + return true + } + + return false +} + +// SetJwt gets a reference to the given string and assigns it to the Jwt field. +func (o *VerifiableCredentialProof) SetJwt(v string) { + o.Jwt = &v +} + +// GetProofType returns the ProofType field value if set, zero value otherwise. +func (o *VerifiableCredentialProof) GetProofType() string { + if o == nil || IsNil(o.ProofType) { + var ret string + return ret + } + return *o.ProofType +} + +// GetProofTypeOk returns a tuple with the ProofType field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialProof) GetProofTypeOk() (*string, bool) { + if o == nil || IsNil(o.ProofType) { + return nil, false + } + return o.ProofType, true +} + +// HasProofType returns a boolean if a field has been set. +func (o *VerifiableCredentialProof) HasProofType() bool { + if o != nil && !IsNil(o.ProofType) { + return true + } + + return false +} + +// SetProofType gets a reference to the given string and assigns it to the ProofType field. 
+func (o *VerifiableCredentialProof) SetProofType(v string) { + o.ProofType = &v +} + +func (o VerifiableCredentialProof) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o VerifiableCredentialProof) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Jwt) { + toSerialize["jwt"] = o.Jwt + } + if !IsNil(o.ProofType) { + toSerialize["proof_type"] = o.ProofType + } + return toSerialize, nil +} + +type NullableVerifiableCredentialProof struct { + value *VerifiableCredentialProof + isSet bool +} + +func (v NullableVerifiableCredentialProof) Get() *VerifiableCredentialProof { + return v.value +} + +func (v *NullableVerifiableCredentialProof) Set(val *VerifiableCredentialProof) { + v.value = val + v.isSet = true +} + +func (v NullableVerifiableCredentialProof) IsSet() bool { + return v.isSet +} + +func (v *NullableVerifiableCredentialProof) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableVerifiableCredentialProof(val *VerifiableCredentialProof) *NullableVerifiableCredentialProof { + return &NullableVerifiableCredentialProof{value: val, isSet: true} +} + +func (v NullableVerifiableCredentialProof) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableVerifiableCredentialProof) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_verifiable_credential_response.go b/internal/httpclient/model_verifiable_credential_response.go new file mode 100644 index 00000000000..4c24842e668 --- /dev/null +++ b/internal/httpclient/model_verifiable_credential_response.go @@ -0,0 +1,161 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the VerifiableCredentialResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &VerifiableCredentialResponse{} + +// VerifiableCredentialResponse struct for VerifiableCredentialResponse +type VerifiableCredentialResponse struct { + CredentialDraft00 *string `json:"credential_draft_00,omitempty"` + Format *string `json:"format,omitempty"` +} + +// NewVerifiableCredentialResponse instantiates a new VerifiableCredentialResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewVerifiableCredentialResponse() *VerifiableCredentialResponse { + this := VerifiableCredentialResponse{} + return &this +} + +// NewVerifiableCredentialResponseWithDefaults instantiates a new VerifiableCredentialResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewVerifiableCredentialResponseWithDefaults() *VerifiableCredentialResponse { + this := VerifiableCredentialResponse{} + return &this +} + +// GetCredentialDraft00 returns the CredentialDraft00 field value if set, zero value otherwise. 
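Each new model also gets a Nullable* wrapper, which tracks whether a value was set at all separately from whether it was explicitly null on the wire. A small illustrative snippet using NullableVerifiableCredentialProof (not part of the diff):

package openapi

import "fmt"

// Illustrative only: the generated Nullable* wrappers distinguish "never set"
// from "set to JSON null".
func demoNullableProof() {
	var n NullableVerifiableCredentialProof
	fmt.Println(n.IsSet()) // false: nothing assigned yet

	_ = n.UnmarshalJSON([]byte(`null`))
	fmt.Println(n.IsSet(), n.Get() == nil) // true true: set, but to null

	n.Set(&VerifiableCredentialProof{})
	out, _ := n.MarshalJSON()
	fmt.Println(string(out)) // {}: an empty proof serializes without keys
}
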
+func (o *VerifiableCredentialResponse) GetCredentialDraft00() string { + if o == nil || IsNil(o.CredentialDraft00) { + var ret string + return ret + } + return *o.CredentialDraft00 +} + +// GetCredentialDraft00Ok returns a tuple with the CredentialDraft00 field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialResponse) GetCredentialDraft00Ok() (*string, bool) { + if o == nil || IsNil(o.CredentialDraft00) { + return nil, false + } + return o.CredentialDraft00, true +} + +// HasCredentialDraft00 returns a boolean if a field has been set. +func (o *VerifiableCredentialResponse) HasCredentialDraft00() bool { + if o != nil && !IsNil(o.CredentialDraft00) { + return true + } + + return false +} + +// SetCredentialDraft00 gets a reference to the given string and assigns it to the CredentialDraft00 field. +func (o *VerifiableCredentialResponse) SetCredentialDraft00(v string) { + o.CredentialDraft00 = &v +} + +// GetFormat returns the Format field value if set, zero value otherwise. +func (o *VerifiableCredentialResponse) GetFormat() string { + if o == nil || IsNil(o.Format) { + var ret string + return ret + } + return *o.Format +} + +// GetFormatOk returns a tuple with the Format field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifiableCredentialResponse) GetFormatOk() (*string, bool) { + if o == nil || IsNil(o.Format) { + return nil, false + } + return o.Format, true +} + +// HasFormat returns a boolean if a field has been set. +func (o *VerifiableCredentialResponse) HasFormat() bool { + if o != nil && !IsNil(o.Format) { + return true + } + + return false +} + +// SetFormat gets a reference to the given string and assigns it to the Format field. 
+func (o *VerifiableCredentialResponse) SetFormat(v string) { + o.Format = &v +} + +func (o VerifiableCredentialResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o VerifiableCredentialResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.CredentialDraft00) { + toSerialize["credential_draft_00"] = o.CredentialDraft00 + } + if !IsNil(o.Format) { + toSerialize["format"] = o.Format + } + return toSerialize, nil +} + +type NullableVerifiableCredentialResponse struct { + value *VerifiableCredentialResponse + isSet bool +} + +func (v NullableVerifiableCredentialResponse) Get() *VerifiableCredentialResponse { + return v.value +} + +func (v *NullableVerifiableCredentialResponse) Set(val *VerifiableCredentialResponse) { + v.value = val + v.isSet = true +} + +func (v NullableVerifiableCredentialResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableVerifiableCredentialResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableVerifiableCredentialResponse(val *VerifiableCredentialResponse) *NullableVerifiableCredentialResponse { + return &NullableVerifiableCredentialResponse{value: val, isSet: true} +} + +func (v NullableVerifiableCredentialResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableVerifiableCredentialResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_verify_user_code_request.go b/internal/httpclient/model_verify_user_code_request.go new file mode 100644 index 00000000000..783283b37cf --- /dev/null +++ b/internal/httpclient/model_verify_user_code_request.go @@ -0,0 +1,270 @@ +/* +Ory Hydra API + +Documentation for all of Ory Hydra's APIs. + +API version: +Contact: hi@ory.sh +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package openapi + +import ( + "encoding/json" +) + +// checks if the VerifyUserCodeRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &VerifyUserCodeRequest{} + +// VerifyUserCodeRequest struct for VerifyUserCodeRequest +type VerifyUserCodeRequest struct { + Client *OAuth2Client `json:"client,omitempty"` + DeviceCodeRequestId *string `json:"device_code_request_id,omitempty"` + // RequestURL is the original Device Authorization URL requested. 
+ RequestUrl *string `json:"request_url,omitempty"` + RequestedAccessTokenAudience []string `json:"requested_access_token_audience,omitempty"` + RequestedScope []string `json:"requested_scope,omitempty"` +} + +// NewVerifyUserCodeRequest instantiates a new VerifyUserCodeRequest object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewVerifyUserCodeRequest() *VerifyUserCodeRequest { + this := VerifyUserCodeRequest{} + return &this +} + +// NewVerifyUserCodeRequestWithDefaults instantiates a new VerifyUserCodeRequest object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewVerifyUserCodeRequestWithDefaults() *VerifyUserCodeRequest { + this := VerifyUserCodeRequest{} + return &this +} + +// GetClient returns the Client field value if set, zero value otherwise. +func (o *VerifyUserCodeRequest) GetClient() OAuth2Client { + if o == nil || IsNil(o.Client) { + var ret OAuth2Client + return ret + } + return *o.Client +} + +// GetClientOk returns a tuple with the Client field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifyUserCodeRequest) GetClientOk() (*OAuth2Client, bool) { + if o == nil || IsNil(o.Client) { + return nil, false + } + return o.Client, true +} + +// HasClient returns a boolean if a field has been set. +func (o *VerifyUserCodeRequest) HasClient() bool { + if o != nil && !IsNil(o.Client) { + return true + } + + return false +} + +// SetClient gets a reference to the given OAuth2Client and assigns it to the Client field. +func (o *VerifyUserCodeRequest) SetClient(v OAuth2Client) { + o.Client = &v +} + +// GetDeviceCodeRequestId returns the DeviceCodeRequestId field value if set, zero value otherwise. +func (o *VerifyUserCodeRequest) GetDeviceCodeRequestId() string { + if o == nil || IsNil(o.DeviceCodeRequestId) { + var ret string + return ret + } + return *o.DeviceCodeRequestId +} + +// GetDeviceCodeRequestIdOk returns a tuple with the DeviceCodeRequestId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifyUserCodeRequest) GetDeviceCodeRequestIdOk() (*string, bool) { + if o == nil || IsNil(o.DeviceCodeRequestId) { + return nil, false + } + return o.DeviceCodeRequestId, true +} + +// HasDeviceCodeRequestId returns a boolean if a field has been set. +func (o *VerifyUserCodeRequest) HasDeviceCodeRequestId() bool { + if o != nil && !IsNil(o.DeviceCodeRequestId) { + return true + } + + return false +} + +// SetDeviceCodeRequestId gets a reference to the given string and assigns it to the DeviceCodeRequestId field. +func (o *VerifyUserCodeRequest) SetDeviceCodeRequestId(v string) { + o.DeviceCodeRequestId = &v +} + +// GetRequestUrl returns the RequestUrl field value if set, zero value otherwise. +func (o *VerifyUserCodeRequest) GetRequestUrl() string { + if o == nil || IsNil(o.RequestUrl) { + var ret string + return ret + } + return *o.RequestUrl +} + +// GetRequestUrlOk returns a tuple with the RequestUrl field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *VerifyUserCodeRequest) GetRequestUrlOk() (*string, bool) { + if o == nil || IsNil(o.RequestUrl) { + return nil, false + } + return o.RequestUrl, true +} + +// HasRequestUrl returns a boolean if a field has been set. +func (o *VerifyUserCodeRequest) HasRequestUrl() bool { + if o != nil && !IsNil(o.RequestUrl) { + return true + } + + return false +} + +// SetRequestUrl gets a reference to the given string and assigns it to the RequestUrl field. +func (o *VerifyUserCodeRequest) SetRequestUrl(v string) { + o.RequestUrl = &v +} + +// GetRequestedAccessTokenAudience returns the RequestedAccessTokenAudience field value if set, zero value otherwise. +func (o *VerifyUserCodeRequest) GetRequestedAccessTokenAudience() []string { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { + var ret []string + return ret + } + return o.RequestedAccessTokenAudience +} + +// GetRequestedAccessTokenAudienceOk returns a tuple with the RequestedAccessTokenAudience field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifyUserCodeRequest) GetRequestedAccessTokenAudienceOk() ([]string, bool) { + if o == nil || IsNil(o.RequestedAccessTokenAudience) { + return nil, false + } + return o.RequestedAccessTokenAudience, true +} + +// HasRequestedAccessTokenAudience returns a boolean if a field has been set. +func (o *VerifyUserCodeRequest) HasRequestedAccessTokenAudience() bool { + if o != nil && !IsNil(o.RequestedAccessTokenAudience) { + return true + } + + return false +} + +// SetRequestedAccessTokenAudience gets a reference to the given []string and assigns it to the RequestedAccessTokenAudience field. +func (o *VerifyUserCodeRequest) SetRequestedAccessTokenAudience(v []string) { + o.RequestedAccessTokenAudience = v +} + +// GetRequestedScope returns the RequestedScope field value if set, zero value otherwise. +func (o *VerifyUserCodeRequest) GetRequestedScope() []string { + if o == nil || IsNil(o.RequestedScope) { + var ret []string + return ret + } + return o.RequestedScope +} + +// GetRequestedScopeOk returns a tuple with the RequestedScope field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *VerifyUserCodeRequest) GetRequestedScopeOk() ([]string, bool) { + if o == nil || IsNil(o.RequestedScope) { + return nil, false + } + return o.RequestedScope, true +} + +// HasRequestedScope returns a boolean if a field has been set. +func (o *VerifyUserCodeRequest) HasRequestedScope() bool { + if o != nil && !IsNil(o.RequestedScope) { + return true + } + + return false +} + +// SetRequestedScope gets a reference to the given []string and assigns it to the RequestedScope field. 
+func (o *VerifyUserCodeRequest) SetRequestedScope(v []string) { + o.RequestedScope = v +} + +func (o VerifyUserCodeRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o VerifyUserCodeRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Client) { + toSerialize["client"] = o.Client + } + if !IsNil(o.DeviceCodeRequestId) { + toSerialize["device_code_request_id"] = o.DeviceCodeRequestId + } + if !IsNil(o.RequestUrl) { + toSerialize["request_url"] = o.RequestUrl + } + if !IsNil(o.RequestedAccessTokenAudience) { + toSerialize["requested_access_token_audience"] = o.RequestedAccessTokenAudience + } + if !IsNil(o.RequestedScope) { + toSerialize["requested_scope"] = o.RequestedScope + } + return toSerialize, nil +} + +type NullableVerifyUserCodeRequest struct { + value *VerifyUserCodeRequest + isSet bool +} + +func (v NullableVerifyUserCodeRequest) Get() *VerifyUserCodeRequest { + return v.value +} + +func (v *NullableVerifyUserCodeRequest) Set(val *VerifyUserCodeRequest) { + v.value = val + v.isSet = true +} + +func (v NullableVerifyUserCodeRequest) IsSet() bool { + return v.isSet +} + +func (v *NullableVerifyUserCodeRequest) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableVerifyUserCodeRequest(val *VerifyUserCodeRequest) *NullableVerifyUserCodeRequest { + return &NullableVerifyUserCodeRequest{value: val, isSet: true} +} + +func (v NullableVerifyUserCodeRequest) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableVerifyUserCodeRequest) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/internal/httpclient/model_version.go b/internal/httpclient/model_version.go index 4e307b565e1..852862bcf07 100644 --- a/internal/httpclient/model_version.go +++ b/internal/httpclient/model_version.go @@ -15,6 +15,9 @@ import ( "encoding/json" ) +// checks if the Version type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &Version{} + // Version struct for Version type Version struct { // Version is the service's version. @@ -40,7 +43,7 @@ func NewVersionWithDefaults() *Version { // GetVersion returns the Version field value if set, zero value otherwise. func (o *Version) GetVersion() string { - if o == nil || o.Version == nil { + if o == nil || IsNil(o.Version) { var ret string return ret } @@ -50,7 +53,7 @@ func (o *Version) GetVersion() string { // GetVersionOk returns a tuple with the Version field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *Version) GetVersionOk() (*string, bool) { - if o == nil || o.Version == nil { + if o == nil || IsNil(o.Version) { return nil, false } return o.Version, true @@ -58,7 +61,7 @@ func (o *Version) GetVersionOk() (*string, bool) { // HasVersion returns a boolean if a field has been set. 
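VerifyUserCodeRequest is another new model, added for the user-code verification step of the OAuth2 device authorization grant. An illustrative decode with a made-up payload:

package openapi

import (
	"encoding/json"
	"fmt"
)

// Illustrative only: inspecting the request metadata the model carries.
func demoVerifyUserCode() {
	raw := []byte(`{
		"device_code_request_id": "dcr-123",
		"request_url": "https://hydra.example.org/oauth2/device/verify",
		"requested_scope": ["openid", "offline_access"]
	}`)

	var req VerifyUserCodeRequest
	if err := json.Unmarshal(raw, &req); err != nil {
		panic(err)
	}

	fmt.Println(req.GetDeviceCodeRequestId(), req.GetRequestedScope())
	fmt.Println("client attached:", req.HasClient())
}
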
func (o *Version) HasVersion() bool { - if o != nil && o.Version != nil { + if o != nil && !IsNil(o.Version) { return true } @@ -71,11 +74,19 @@ func (o *Version) SetVersion(v string) { } func (o Version) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o Version) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} - if o.Version != nil { + if !IsNil(o.Version) { toSerialize["version"] = o.Version } - return json.Marshal(toSerialize) + return toSerialize, nil } type NullableVersion struct { diff --git a/internal/httpclient/utils.go b/internal/httpclient/utils.go index 79275ec55ec..89451346f5e 100644 --- a/internal/httpclient/utils.go +++ b/internal/httpclient/utils.go @@ -12,7 +12,10 @@ Contact: hi@ory.sh package openapi import ( + "bytes" "encoding/json" + "fmt" + "reflect" "time" ) @@ -320,10 +323,40 @@ func NewNullableTime(val *time.Time) *NullableTime { } func (v NullableTime) MarshalJSON() ([]byte, error) { - return v.value.MarshalJSON() + return json.Marshal(v.value) } func (v *NullableTime) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } + +// IsNil checks if an input is nil +func IsNil(i interface{}) bool { + if i == nil { + return true + } + switch reflect.TypeOf(i).Kind() { + case reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr, reflect.UnsafePointer, reflect.Interface, reflect.Slice: + return reflect.ValueOf(i).IsNil() + case reflect.Array: + return reflect.ValueOf(i).IsZero() + } + return false +} + +type MappedNullable interface { + ToMap() (map[string]interface{}, error) +} + +// A wrapper for strict JSON decoding +func newStrictDecoder(data []byte) *json.Decoder { + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.DisallowUnknownFields() + return dec +} + +// Prevent trying to import "fmt" +func reportError(format string, a ...interface{}) error { + return fmt.Errorf(format, a...) 
+} diff --git a/internal/kratos/fake_kratos.go b/internal/kratos/fake_kratos.go new file mode 100644 index 00000000000..fc3c4d65c79 --- /dev/null +++ b/internal/kratos/fake_kratos.go @@ -0,0 +1,53 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package kratos + +import ( + "context" + + "github.com/ory/hydra/v2/fosite" + client "github.com/ory/kratos-client-go" +) + +type ( + FakeKratos struct { + DisableSessionWasCalled bool + DisableSessionCB func() + LastDisabledSession string + } +) + +const ( + FakeSessionID = "fake-kratos-session-id" + FakeUsername = "fake-kratos-username" + FakePassword = "fake-kratos-password" // nolint: gosec + FakeIdentityID = "fake-kratos-identity-id" +) + +var _ Client = new(FakeKratos) + +func NewFake() *FakeKratos { + return &FakeKratos{} +} + +func (f *FakeKratos) DisableSession(_ context.Context, identityProviderSessionID string) error { + f.DisableSessionWasCalled = true + f.LastDisabledSession = identityProviderSessionID + if f.DisableSessionCB != nil { + f.DisableSessionCB() + } + + return nil +} + +func (f *FakeKratos) Authenticate(_ context.Context, username, password string) (*client.Session, error) { + if username == FakeUsername && password == FakePassword { + return &client.Session{Identity: &client.Identity{Id: FakeIdentityID}}, nil + } + return nil, fosite.ErrNotFound +} + +func (f *FakeKratos) Reset() { + (*f) = *NewFake() +} diff --git a/internal/kratos/kratos.go b/internal/kratos/kratos.go new file mode 100644 index 00000000000..5196ad52677 --- /dev/null +++ b/internal/kratos/kratos.go @@ -0,0 +1,124 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package kratos + +import ( + "context" + "fmt" + "net/url" + + "github.com/pkg/errors" + "go.opentelemetry.io/otel/attribute" + + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" + client "github.com/ory/kratos-client-go" + "github.com/ory/x/httpx" + "github.com/ory/x/otelx" +) + +type ( + dependencies interface { + config.Provider + x.HTTPClientProvider + x.TracingProvider + x.RegistryLogger + } + Provider interface { + Kratos() Client + } + Client interface { + DisableSession(ctx context.Context, identityProviderSessionID string) error + Authenticate(ctx context.Context, name, secret string) (*client.Session, error) + } + Default struct { + dependencies + } +) + +func New(d dependencies) Client { + return &Default{dependencies: d} +} + +func (k *Default) Authenticate(ctx context.Context, name, secret string) (session *client.Session, err error) { + ctx, span := k.Tracer(ctx).Tracer().Start(ctx, "kratos.Authenticate") + otelx.End(span, &err) + + publicURL, ok := k.Config().KratosPublicURL(ctx) + span.SetAttributes(attribute.String("public_url", fmt.Sprintf("%+v", publicURL))) + if !ok { + span.SetAttributes(attribute.Bool("skipped", true)) + span.SetAttributes(attribute.String("reason", "kratos public url not set")) + + return nil, errors.New("kratos public url not set") + } + + kratos := k.newKratosClient(ctx, publicURL) + + flow, _, err := kratos.FrontendAPI.CreateNativeLoginFlow(ctx).Execute() + if err != nil { + return nil, err + } + + res, _, err := kratos.FrontendAPI.UpdateLoginFlow(ctx).Flow(flow.Id).UpdateLoginFlowBody(client.UpdateLoginFlowBody{ + UpdateLoginFlowWithPasswordMethod: &client.UpdateLoginFlowWithPasswordMethod{ + Method: "password", + 
Identifier: name, + Password: secret, + }, + }).Execute() + if err != nil { + return nil, fosite.ErrNotFound.WithWrap(err) + } + + return &res.Session, nil +} + +func (k *Default) DisableSession(ctx context.Context, identityProviderSessionID string) (err error) { + ctx, span := k.Tracer(ctx).Tracer().Start(ctx, "kratos.DisableSession") + otelx.End(span, &err) + + adminURL, ok := k.Config().KratosAdminURL(ctx) + span.SetAttributes(attribute.String("admin_url", fmt.Sprintf("%+v", adminURL))) + if !ok { + span.SetAttributes(attribute.Bool("skipped", true)) + span.SetAttributes(attribute.String("reason", "kratos admin url not set")) + + return nil + } + + if identityProviderSessionID == "" { + span.SetAttributes(attribute.Bool("skipped", true)) + span.SetAttributes(attribute.String("reason", "kratos session ID is empty")) + + return nil + } + + configuration := k.clientConfiguration(ctx, adminURL) + if header := k.Config().KratosRequestHeader(ctx); header != nil { + configuration.HTTPClient.Transport = httpx.WrapTransportWithHeader(configuration.HTTPClient.Transport, header) + } + kratos := client.NewAPIClient(configuration) + _, err = kratos.IdentityAPI.DisableSession(ctx, identityProviderSessionID).Execute() + + return err +} + +func (k *Default) clientConfiguration(ctx context.Context, adminURL *url.URL) *client.Configuration { + configuration := client.NewConfiguration() + configuration.Servers = client.ServerConfigurations{{URL: adminURL.String()}} + configuration.HTTPClient = k.HTTPClient(ctx).StandardClient() + + return configuration +} + +func (k *Default) newKratosClient(ctx context.Context, publicURL *url.URL) *client.APIClient { + configuration := k.clientConfiguration(ctx, publicURL) + if header := k.Config().KratosRequestHeader(ctx); header != nil { + configuration.HTTPClient.Transport = httpx.WrapTransportWithHeader(configuration.HTTPClient.Transport, header) + } + kratos := client.NewAPIClient(configuration) + return kratos +} diff --git a/internal/mock/config_cookie.go b/internal/mock/config_cookie.go index 5fab6d1d7dc..b326baec6b4 100644 --- a/internal/mock/config_cookie.go +++ b/internal/mock/config_cookie.go @@ -1,8 +1,8 @@ -// Copyright © 2022 Ory Corp +// Copyright © 2025 Ory Corp // SPDX-License-Identifier: Apache-2.0 // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/ory/hydra/x (interfaces: CookieConfigProvider) +// Source: github.com/ory/hydra/v2/x (interfaces: CookieConfigProvider) // Package mock is a generated GoMock package. package mock diff --git a/internal/mock_generator_rs256.go b/internal/mock_generator_rs256.go deleted file mode 100644 index c9c5bd9d526..00000000000 --- a/internal/mock_generator_rs256.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package internal - -import ( - "crypto/rand" - "crypto/rsa" - "crypto/x509" - - "github.com/pborman/uuid" - "github.com/pkg/errors" - "gopkg.in/square/go-jose.v2" -) - -type veryInsecureRS256Generator struct{} - -func (g *veryInsecureRS256Generator) Generate(id, use string) (*jose.JSONWebKeySet, error) { - /* #nosec G403 - this is ok because this generator is only used in tests. 
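The new internal/kratos package pairs a concrete client (Default, which creates a native Kratos login flow for Authenticate and calls the admin DisableSession API for logout) with the FakeKratos test double. The sketch below is illustrative only; resolveSubject is a hypothetical caller, and the import path follows the module layout shown elsewhere in the diff:

package kratos_test // illustrative sketch, not part of the diff

import (
	"context"
	"errors"
	"testing"

	"github.com/ory/hydra/v2/internal/kratos"
)

// resolveSubject shows one way a caller might use the kratos.Client
// interface: the Kratos identity ID becomes the OAuth2 subject.
func resolveSubject(ctx context.Context, kc kratos.Client, username, password string) (string, error) {
	session, err := kc.Authenticate(ctx, username, password)
	if err != nil {
		// kratos.Default wraps rejected credentials in fosite.ErrNotFound.
		return "", err
	}
	if session == nil || session.Identity == nil {
		return "", errors.New("kratos returned a session without an identity")
	}
	return session.Identity.Id, nil
}

func TestResolveSubjectWithFake(t *testing.T) {
	fake := kratos.NewFake()

	subject, err := resolveSubject(context.Background(), fake, kratos.FakeUsername, kratos.FakePassword)
	if err != nil || subject != kratos.FakeIdentityID {
		t.Fatalf("expected %q, got %q (err: %v)", kratos.FakeIdentityID, subject, err)
	}

	// The fake also records session deactivations for logout tests.
	if err := fake.DisableSession(context.Background(), kratos.FakeSessionID); err != nil {
		t.Fatal(err)
	}
	if !fake.DisableSessionWasCalled || fake.LastDisabledSession != kratos.FakeSessionID {
		t.Fatalf("expected DisableSession(%q) to be recorded", kratos.FakeSessionID)
	}
}
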
*/ - key, err := rsa.GenerateKey(rand.Reader, 512) - if err != nil { - return nil, errors.Errorf("Could not generate key because %s", err) - } else if err = key.Validate(); err != nil { - return nil, errors.Errorf("Validation failed because %s", err) - } - - if id == "" { - id = uuid.New() - } - - // jose does not support this... - key.Precomputed = rsa.PrecomputedValues{} - return &jose.JSONWebKeySet{ - Keys: []jose.JSONWebKey{ - { - Algorithm: "RS256", - Key: key, - Use: use, - KeyID: id, - Certificates: []*x509.Certificate{}, - }, - }, - }, nil -} diff --git a/internal/testhelpers/driver.go b/internal/testhelpers/driver.go new file mode 100644 index 00000000000..3b0424c7526 --- /dev/null +++ b/internal/testhelpers/driver.go @@ -0,0 +1,160 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package testhelpers + +import ( + "path/filepath" + "regexp" + "strings" + "sync" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/hydra/v2/spec" + "github.com/ory/pop/v6" + "github.com/ory/x/configx" + "github.com/ory/x/contextx" + "github.com/ory/x/dbal" + "github.com/ory/x/logrusx" + "github.com/ory/x/servicelocatorx" + "github.com/ory/x/sqlcon/dockertest" + "github.com/ory/x/testingx" +) + +var ConfigDefaults = []configx.OptionModifier{ + configx.SkipValidation(), + configx.WithValues(map[string]any{ + config.KeyBCryptCost: 4, + config.KeySubjectIdentifierAlgorithmSalt: "00000000", + config.KeyGetSystemSecret: []string{"000000000000000000000000000000000000000000000000"}, + config.KeyGetCookieSecrets: []string{"000000000000000000000000000000000000000000000000"}, + config.KeyLogLevel: "trace", + config.KeyDevelopmentMode: true, + "serve.public.host": "localhost", + }), + configx.WithValue("log.leak_sensitive_values", true), +} + +func NewConfigurationWithDefaults(t testing.TB, opts ...configx.OptionModifier) *config.DefaultProvider { + p, err := configx.New(t.Context(), spec.ConfigValidationSchema, append(ConfigDefaults, opts...)...) + require.NoError(t, err) + return config.NewCustom(logrusx.New("", ""), p, contextx.NewTestConfigProvider(spec.ConfigValidationSchema, append(ConfigDefaults, opts...)...)) +} + +func NewRegistryMemory(t testing.TB, opts ...driver.OptionsModifier) *driver.RegistrySQL { + return NewRegistrySQLFromURL(t, dbal.NewSQLiteTestDatabase(t), true, true, opts...) +} + +func NewRegistrySQLFromURL(t testing.TB, dsn string, migrate, initNetwork bool, opts ...driver.OptionsModifier) *driver.RegistrySQL { + configOpts := append(ConfigDefaults, configx.WithValue(config.KeyDSN, dsn)) + regOpts := append([]driver.OptionsModifier{ + driver.SkipNetworkInit(), + driver.WithConfigOptions(configOpts...), + driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(contextx.NewTestConfigProvider(spec.ConfigValidationSchema, configOpts...))), + }, opts...) + + reg, err := driver.New(t.Context(), regOpts...) 
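+	// Note: the registry above is built with driver.SkipNetworkInit and a test contextualizer,
+	// so whether migrations run and whether a network record is created is decided solely by the
+	// migrate and initNetwork flags handled below.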
+ require.NoError(t, err) + if migrate { + if updateDump := dbal.RestoreFromSchemaDump(t, + reg.Persister().Connection(t.Context()), + sql.Migrations, + filepath.Join(testingx.RepoRootPath(t), "internal", "testhelpers", "sql_schemas"), + ); updateDump != nil { + require.NoError(t, reg.Migrator().MigrateUp(t.Context())) + updateDump(t) + } + } + if initNetwork { + require.NoError(t, reg.InitNetwork(t.Context())) + } + return reg +} + +func ConnectToMySQL(t testing.TB) string { return dockertest.RunTestMySQLWithVersion(t, "8.0") } +func ConnectToPG(t testing.TB) string { return dockertest.RunTestPostgreSQLWithVersion(t, "16") } +func ConnectToCRDB(t testing.TB) string { + return dockertest.RunTestCockroachDBWithVersion(t, "latest-v24.1") +} + +func ConnectDatabasesURLs(t *testing.T) (pgURL, mysqlURL, crdbURL string) { + wg := sync.WaitGroup{} + + wg.Add(3) + go func() { + pgURL = ConnectToPG(t) + t.Log("Pg done") + + require.EventuallyWithT(t, func(t *assert.CollectT) { + c, err := pop.NewConnection(&pop.ConnectionDetails{URL: pgURL}) + require.NoError(t, err) + require.NoError(t, c.Open()) + dbName := "testdb" + strings.ReplaceAll(uuid.Must(uuid.NewV4()).String(), "-", "") + require.NoError(t, c.RawQuery("CREATE DATABASE "+dbName).Exec()) + pgURL = regexp.MustCompile(`/[a-z0-9]+\?`).ReplaceAllString(pgURL, "/"+dbName+"?") + }, 20*time.Second, 100*time.Millisecond) + + wg.Done() + }() + go func() { + mysqlURL = ConnectToMySQL(t) + t.Log("myssql done") + + require.EventuallyWithT(t, func(t *assert.CollectT) { + c, err := pop.NewConnection(&pop.ConnectionDetails{URL: mysqlURL}) + require.NoError(t, err) + require.NoError(t, c.Open()) + dbName := "testdb" + strings.ReplaceAll(uuid.Must(uuid.NewV4()).String(), "-", "") + require.NoError(t, c.RawQuery("CREATE DATABASE "+dbName).Exec()) + mysqlURL = regexp.MustCompile(`/[a-z0-9]+\?`).ReplaceAllString(mysqlURL, "/"+dbName+"?") + }, 20*time.Second, 100*time.Millisecond) + + wg.Done() + }() + go func() { + crdbURL = ConnectToCRDB(t) + t.Log("crdb done") + + require.EventuallyWithT(t, func(t *assert.CollectT) { + c, err := pop.NewConnection(&pop.ConnectionDetails{URL: crdbURL}) + require.NoError(t, err) + require.NoError(t, c.Open()) + dbName := "testdb" + strings.ReplaceAll(uuid.Must(uuid.NewV4()).String(), "-", "") + require.NoError(t, c.RawQuery("CREATE DATABASE "+dbName).Exec()) + crdbURL = regexp.MustCompile(`/[a-z0-9]+\?`).ReplaceAllString(crdbURL, "/"+dbName+"?") + }, 20*time.Second, 100*time.Millisecond) + + wg.Done() + }() + t.Log("beginning to wait") + wg.Wait() + t.Log("done waiting") + + return +} + +func ConnectDatabases(t *testing.T, migrate bool, opts ...driver.OptionsModifier) map[string]*driver.RegistrySQL { + regs := make(map[string]*driver.RegistrySQL) + regs["memory"] = NewRegistryMemory(t, opts...) + if !testing.Short() { + pg, mysql, crdb := ConnectDatabasesURLs(t) + regs["postgres"] = NewRegistrySQLFromURL(t, pg, migrate, true, opts...) + regs["mysql"] = NewRegistrySQLFromURL(t, mysql, migrate, true, opts...) + regs["cockroach"] = NewRegistrySQLFromURL(t, crdb, migrate, true, opts...) 
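+		// Postgres, MySQL, and CockroachDB each point at a freshly created database
+		// (see ConnectDatabasesURLs above), so the per-dialect registries stay isolated from one another.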
+ } + return regs +} + +func MustEnsureRegistryKeys(t testing.TB, r *driver.RegistrySQL, key string) { + jwk.EnsureAsymmetricKeypairExists(t, r, string(jose.ES256), key) +} diff --git a/internal/testhelpers/janitor_test_helper.go b/internal/testhelpers/janitor_test_helper.go index 5dc04e1e747..add6b838234 100644 --- a/internal/testhelpers/janitor_test_helper.go +++ b/internal/testhelpers/janitor_test_helper.go @@ -10,35 +10,25 @@ import ( "testing" "time" - "github.com/google/uuid" + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/stretchr/testify/require" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/oauth2/trust" - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - "github.com/ory/x/logrusx" - - "github.com/ory/x/sqlxx" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" ) type JanitorConsentTestHelper struct { - uniqueName string - flushLoginRequests []*consent.LoginRequest - flushConsentRequests []*consent.OAuth2ConsentRequest flushAccessRequests []*fosite.Request flushRefreshRequests []*fosite.AccessRequest flushGrants []*createGrantRequest - conf *config.DefaultProvider - Lifespan time.Duration } type createGrantRequest struct { @@ -49,45 +39,34 @@ type createGrantRequest struct { const lifespan = time.Hour func NewConsentJanitorTestHelper(uniqueName string) *JanitorConsentTestHelper { - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(context.Background(), config.KeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY") - conf.MustSet(context.Background(), config.KeyIssuerURL, "http://hydra.localhost") - conf.MustSet(context.Background(), config.KeyAccessTokenLifespan, lifespan) - conf.MustSet(context.Background(), config.KeyRefreshTokenLifespan, lifespan) - conf.MustSet(context.Background(), config.KeyConsentRequestMaxAge, lifespan) - conf.MustSet(context.Background(), config.KeyLogLevel, "trace") - return &JanitorConsentTestHelper{ - uniqueName: uniqueName, - conf: conf, - flushLoginRequests: genLoginRequests(uniqueName, lifespan), - flushConsentRequests: genConsentRequests(uniqueName, lifespan), flushAccessRequests: getAccessRequests(uniqueName, lifespan), flushRefreshRequests: getRefreshRequests(uniqueName, lifespan), flushGrants: getGrantRequests(uniqueName, lifespan), - Lifespan: lifespan, } } -func (j *JanitorConsentTestHelper) GetDSN(ctx context.Context) string { - return j.conf.DSN() -} - -func (j *JanitorConsentTestHelper) GetConfig() *config.DefaultProvider { - return j.conf +var NotAfterTestCycles = map[string]time.Duration{ + "notAfter24h": lifespan * 24, + "notAfter1h30m": lifespan + time.Hour/2, + "notAfterNow": 0, } 
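+// The janitor test cycles derive their cleanup cutoff from these durations, roughly
+// (illustrative sketch only; t is the enclosing *testing.T of the consuming test):
+//
+//	for name, d := range NotAfterTestCycles {
+//		notAfter := time.Now().Round(time.Second).Add(-d)
+//		t.Run(name, func(t *testing.T) { /* flush with notAfter and assert what was removed */ })
+//	}
+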
func (j *JanitorConsentTestHelper) GetNotAfterTestCycles() map[string]time.Duration { - return map[string]time.Duration{ - "notAfter24h": j.Lifespan * 24, - "notAfter1h30m": j.Lifespan + time.Hour/2, - "notAfterNow": 0, - } + return map[string]time.Duration{} } -func (j *JanitorConsentTestHelper) GetRegistry(ctx context.Context, dbname string) (driver.Registry, error) { - j.conf.MustSet(ctx, config.KeyDSN, fmt.Sprintf("sqlite://file:%s?mode=memory&_fk=true&cache=shared", dbname)) - return driver.NewRegistryFromDSN(ctx, j.conf, logrusx.New("test_hydra", "master"), false, true, &contextx.Default{}) +func (j *JanitorConsentTestHelper) GetRegistry(t *testing.T) *driver.RegistrySQL { + return NewRegistryMemory(t, driver.WithConfigOptions( + configx.WithValues(map[string]any{ + config.KeyScopeStrategy: "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY", + config.KeyIssuerURL: "https://hydra.localhost", + config.KeyAccessTokenLifespan: lifespan, + config.KeyRefreshTokenLifespan: lifespan, + config.KeyConsentRequestMaxAge: lifespan, + config.KeyLogLevel: "trace", + config.KeyGetSystemSecret: []string{"0000000000000000"}, + }))) } func (j *JanitorConsentTestHelper) AccessTokenNotAfterSetup(ctx context.Context, cl client.Manager, store x.FositeStorer) func(t *testing.T) { @@ -106,7 +85,7 @@ func (j *JanitorConsentTestHelper) AccessTokenNotAfterValidate(ctx context.Conte var err error ds := new(oauth2.Session) - accessTokenLifespan := time.Now().Round(time.Second).Add(-j.conf.GetAccessTokenLifespan(ctx)) + accessTokenLifespan := time.Now().Round(time.Second).Add(-lifespan) for _, r := range j.flushAccessRequests { t.Logf("access flush check: %s", r.ID) @@ -125,7 +104,7 @@ func (j *JanitorConsentTestHelper) RefreshTokenNotAfterSetup(ctx context.Context // Create refresh token clients and session for _, fr := range j.flushRefreshRequests { require.NoError(t, cl.CreateClient(ctx, fr.Client.(*client.Client))) - require.NoError(t, store.CreateRefreshTokenSession(ctx, fr.ID, fr)) + require.NoError(t, store.CreateRefreshTokenSession(ctx, fr.ID, "", fr)) } } } @@ -135,7 +114,7 @@ func (j *JanitorConsentTestHelper) RefreshTokenNotAfterValidate(ctx context.Cont var err error ds := new(oauth2.Session) - refreshTokenLifespan := time.Now().Round(time.Second).Add(-j.conf.GetRefreshTokenLifespan(ctx)) + refreshTokenLifespan := time.Now().Round(time.Second).Add(-lifespan) for _, r := range j.flushRefreshRequests { t.Logf("refresh flush check: %s", r.ID) @@ -149,10 +128,10 @@ func (j *JanitorConsentTestHelper) RefreshTokenNotAfterValidate(ctx context.Cont } } -func (j *JanitorConsentTestHelper) GrantNotAfterSetup(ctx context.Context, cl client.Manager, gr trust.GrantManager) func(t *testing.T) { +func (j *JanitorConsentTestHelper) GrantNotAfterSetup(ctx context.Context, gm trust.GrantManager) func(t *testing.T) { return func(t *testing.T) { for _, fg := range j.flushGrants { - require.NoError(t, gr.CreateGrant(ctx, fg.grant, fg.pk)) + require.NoError(t, gm.CreateGrant(ctx, fg.grant, fg.pk)) } } } @@ -180,277 +159,16 @@ func (j *JanitorConsentTestHelper) GrantNotAfterValidate(ctx context.Context, no } } -func (j *JanitorConsentTestHelper) LoginRejectionSetup(ctx context.Context, cm consent.Manager, cl client.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - // Create login requests - for _, r := range j.flushLoginRequests { - require.NoError(t, cl.CreateClient(ctx, r.Client)) - require.NoError(t, cm.CreateLoginRequest(ctx, r)) - } - - // Explicit rejection - for _, r := range j.flushLoginRequests { 
- if r.ID == j.flushLoginRequests[0].ID { - // accept this one - _, err = cm.HandleLoginRequest(ctx, r.ID, consent.NewHandledLoginRequest( - r.ID, false, r.RequestedAt, r.AuthenticatedAt)) - - require.NoError(t, err) - continue - } - - // reject flush-login-2 and 3 - _, err = cm.HandleLoginRequest(ctx, r.ID, consent.NewHandledLoginRequest( - r.ID, true, r.RequestedAt, r.AuthenticatedAt)) - require.NoError(t, err) - } - } -} - -func (j *JanitorConsentTestHelper) LoginRejectionValidate(ctx context.Context, cm consent.Manager) func(t *testing.T) { - return func(t *testing.T) { - // flush-login-2 and 3 should be cleared now - for _, r := range j.flushLoginRequests { - t.Logf("check login: %s", r.ID) - _, err := cm.GetLoginRequest(ctx, r.ID) - if r.ID == j.flushLoginRequests[0].ID { - require.NoError(t, err) - } else { - require.Error(t, err) - } - } - } -} - -func (j *JanitorConsentTestHelper) LimitSetup(ctx context.Context, cm consent.Manager, cl client.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - // Create login requests - for _, r := range j.flushLoginRequests { - require.NoError(t, cl.CreateClient(ctx, r.Client)) - require.NoError(t, cm.CreateLoginRequest(ctx, r)) - } - - // Reject each request - for _, r := range j.flushLoginRequests { - _, err = cm.HandleLoginRequest(ctx, r.ID, consent.NewHandledLoginRequest( - r.ID, true, r.RequestedAt, r.AuthenticatedAt)) - require.NoError(t, err) - } - } -} - -func (j *JanitorConsentTestHelper) LimitValidate(ctx context.Context, cm consent.Manager) func(t *testing.T) { - return func(t *testing.T) { - // flush-login-2 and 3 should be cleared now - for _, r := range j.flushLoginRequests { - t.Logf("check login: %s", r.ID) - _, err := cm.GetLoginRequest(ctx, r.ID) - if r.ID == j.flushLoginRequests[0].ID { - require.NoError(t, err) - } else { - require.Error(t, err) - } - } - } -} - -func (j *JanitorConsentTestHelper) ConsentRejectionSetup(ctx context.Context, cm consent.Manager, cl client.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - // Create login requests - for _, r := range j.flushLoginRequests { - require.NoError(t, cl.CreateClient(ctx, r.Client)) - require.NoError(t, cm.CreateLoginRequest(ctx, r)) - } - - // Create consent requests - for _, r := range j.flushConsentRequests { - require.NoError(t, cm.CreateConsentRequest(ctx, r)) - } - - //Reject the consents - for _, r := range j.flushConsentRequests { - if r.ID == j.flushConsentRequests[0].ID { - // accept this one - _, err = cm.HandleConsentRequest(ctx, consent.NewHandledConsentRequest( - r.ID, false, r.RequestedAt, r.AuthenticatedAt)) - require.NoError(t, err) - continue - } - _, err = cm.HandleConsentRequest(ctx, consent.NewHandledConsentRequest( - r.ID, true, r.RequestedAt, r.AuthenticatedAt)) - require.NoError(t, err) - } - } -} - -func (j *JanitorConsentTestHelper) ConsentRejectionValidate(ctx context.Context, cm consent.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - for _, r := range j.flushConsentRequests { - t.Logf("check consent: %s", r.ID) - _, err = cm.GetConsentRequest(ctx, r.ID) - if r.ID == j.flushConsentRequests[0].ID { - require.NoError(t, err) - } else { - require.Error(t, err) - } - } - } -} - -func (j *JanitorConsentTestHelper) LoginTimeoutSetup(ctx context.Context, cm consent.Manager, cl client.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - // Create login requests - for _, r := range j.flushLoginRequests { - require.NoError(t, 
cl.CreateClient(ctx, r.Client)) - require.NoError(t, cm.CreateLoginRequest(ctx, r)) - } - - // Creating at least 1 that has not timed out - _, err = cm.HandleLoginRequest(ctx, j.flushLoginRequests[0].ID, &consent.HandledLoginRequest{ - ID: j.flushLoginRequests[0].ID, - RequestedAt: j.flushLoginRequests[0].RequestedAt, - AuthenticatedAt: j.flushLoginRequests[0].AuthenticatedAt, - WasHandled: true, - }) - - require.NoError(t, err) - } -} - -func (j *JanitorConsentTestHelper) LoginTimeoutValidate(ctx context.Context, cm consent.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - for _, r := range j.flushLoginRequests { - _, err = cm.GetLoginRequest(ctx, r.ID) - if r.ID == j.flushLoginRequests[0].ID { - require.NoError(t, err) - } else { - require.Error(t, err) - } - } - - } -} - -func (j *JanitorConsentTestHelper) ConsentTimeoutSetup(ctx context.Context, cm consent.Manager, cl client.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - // Let's reset and accept all login requests to test the consent requests - for _, r := range j.flushLoginRequests { - require.NoError(t, cl.CreateClient(ctx, r.Client)) - require.NoError(t, cm.CreateLoginRequest(ctx, r)) - _, err = cm.HandleLoginRequest(ctx, r.ID, &consent.HandledLoginRequest{ - ID: r.ID, - AuthenticatedAt: r.AuthenticatedAt, - RequestedAt: r.RequestedAt, - WasHandled: true, - }) - require.NoError(t, err) - } - - // Create consent requests - for _, r := range j.flushConsentRequests { - require.NoError(t, cm.CreateConsentRequest(ctx, r)) - } - - // Create at least 1 consent request that has been accepted - _, err = cm.HandleConsentRequest(ctx, &consent.AcceptOAuth2ConsentRequest{ - ID: j.flushConsentRequests[0].ID, - WasHandled: true, - HandledAt: sqlxx.NullTime(time.Now()), - RequestedAt: j.flushConsentRequests[0].RequestedAt, - AuthenticatedAt: j.flushConsentRequests[0].AuthenticatedAt, - }) - require.NoError(t, err) - } -} - -func (j *JanitorConsentTestHelper) ConsentTimeoutValidate(ctx context.Context, cm consent.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - for _, r := range j.flushConsentRequests { - _, err = cm.GetConsentRequest(ctx, r.ID) - if r.ID == j.flushConsentRequests[0].ID { - require.NoError(t, err) - } else { - require.Error(t, err) - } - } - - } -} - -func (j *JanitorConsentTestHelper) LoginConsentNotAfterSetup(ctx context.Context, cm consent.Manager, cl client.Manager) func(t *testing.T) { - return func(t *testing.T) { - for _, r := range j.flushLoginRequests { - require.NoError(t, cl.CreateClient(ctx, r.Client)) - require.NoError(t, cm.CreateLoginRequest(ctx, r)) - } - - for _, r := range j.flushConsentRequests { - require.NoError(t, cm.CreateConsentRequest(ctx, r)) - } - } -} - -func (j *JanitorConsentTestHelper) LoginConsentNotAfterValidate(ctx context.Context, notAfter time.Time, consentRequestLifespan time.Time, cm consent.Manager) func(t *testing.T) { - return func(t *testing.T) { - var err error - - for _, r := range j.flushLoginRequests { - t.Logf("login flush check:\nNotAfter: %s\nConsentRequest: %s\n%+v\n", - notAfter.String(), consentRequestLifespan.String(), r) - _, err = cm.GetLoginRequest(ctx, r.ID) - // if the lowest between notAfter and consent-request-lifespan is greater than requested_at - // then the it should expect the value to be deleted. 
- if j.notAfterCheck(notAfter, consentRequestLifespan, r.RequestedAt) { - // value has been deleted here - require.Error(t, err) - } else { - // value has not been deleted here - require.NoError(t, err) - } - } - - for _, r := range j.flushConsentRequests { - t.Logf("consent flush check:\nNotAfter: %s\nConsentRequest: %s\n%+v\n", - notAfter.String(), consentRequestLifespan.String(), r) - _, err = cm.GetConsentRequest(ctx, r.ID) - // if the lowest between notAfter and consent-request-lifespan is greater than requested_at - // then the it should expect the value to be deleted. - if j.notAfterCheck(notAfter, consentRequestLifespan, r.RequestedAt) { - // value has been deleted here - require.Error(t, err) - } else { - // value has not been deleted here - require.NoError(t, err) - } - } - } -} - -func (j *JanitorConsentTestHelper) GetConsentRequestLifespan(ctx context.Context) time.Duration { - return j.conf.ConsentRequestMaxAge(ctx) +func (j *JanitorConsentTestHelper) GetConsentRequestLifespan() time.Duration { + return lifespan } -func (j *JanitorConsentTestHelper) GetAccessTokenLifespan(ctx context.Context) time.Duration { - return j.conf.GetAccessTokenLifespan(ctx) +func (j *JanitorConsentTestHelper) GetAccessTokenLifespan() time.Duration { + return lifespan } -func (j *JanitorConsentTestHelper) GetRefreshTokenLifespan(ctx context.Context) time.Duration { - return j.conf.GetRefreshTokenLifespan(ctx) +func (j *JanitorConsentTestHelper) GetRefreshTokenLifespan() time.Duration { + return lifespan } func (j *JanitorConsentTestHelper) notAfterCheck(notAfter time.Time, lifespan time.Time, requestedAt time.Time) bool { @@ -470,136 +188,12 @@ func (j *JanitorConsentTestHelper) notAfterCheck(notAfter time.Time, lifespan ti return lesser.Unix() > requestedAt.Unix() } -func JanitorTests(conf *config.DefaultProvider, consentManager consent.Manager, clientManager client.Manager, fositeManager x.FositeStorer, network string, parallel bool) func(t *testing.T) { - return func(t *testing.T) { - if parallel { - t.Parallel() - } - ctx := context.Background() - - jt := NewConsentJanitorTestHelper(network + t.Name()) - - conf.MustSet(context.Background(), config.KeyConsentRequestMaxAge, jt.GetConsentRequestLifespan(ctx)) - - t.Run("case=flush-consent-request-not-after", func(t *testing.T) { - - notAfterTests := jt.GetNotAfterTestCycles() - - for k, v := range notAfterTests { - jt := NewConsentJanitorTestHelper(network + k) - t.Run(fmt.Sprintf("case=%s", k), func(t *testing.T) { - notAfter := time.Now().Round(time.Second).Add(-v) - consentRequestLifespan := time.Now().Round(time.Second).Add(-jt.GetConsentRequestLifespan(ctx)) - - // setup test - t.Run("step=setup", jt.LoginConsentNotAfterSetup(ctx, consentManager, clientManager)) - - // run the cleanup routine - t.Run("step=cleanup", func(t *testing.T) { - require.NoError(t, fositeManager.FlushInactiveLoginConsentRequests(ctx, notAfter, 1000, 100)) - }) - - // validate test - t.Run("step=validate", jt.LoginConsentNotAfterValidate(ctx, notAfter, consentRequestLifespan, consentManager)) - }) - - } - }) - - t.Run("case=flush-consent-request-limit", func(t *testing.T) { - jt := NewConsentJanitorTestHelper(network + "limit") - - t.Run("case=limit", func(t *testing.T) { - // setup - t.Run("step=setup", jt.LimitSetup(ctx, consentManager, clientManager)) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - require.NoError(t, fositeManager.FlushInactiveLoginConsentRequests(ctx, time.Now().Round(time.Second), 2, 1)) - }) - - // validate - t.Run("step=validate", 
jt.LimitValidate(ctx, consentManager)) - }) - }) - - t.Run("case=flush-consent-request-rejection", func(t *testing.T) { - jt := NewConsentJanitorTestHelper(network + "loginRejection") - - t.Run(fmt.Sprintf("case=%s", "loginRejection"), func(t *testing.T) { - // setup - t.Run("step=setup", jt.LoginRejectionSetup(ctx, consentManager, clientManager)) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - require.NoError(t, fositeManager.FlushInactiveLoginConsentRequests(ctx, time.Now().Round(time.Second), 1000, 100)) - }) - - // validate - t.Run("step=validate", jt.LoginRejectionValidate(ctx, consentManager)) - }) - - jt = NewConsentJanitorTestHelper(network + "consentRejection") - - t.Run(fmt.Sprintf("case=%s", "consentRejection"), func(t *testing.T) { - // setup - t.Run("step=setup", jt.ConsentRejectionSetup(ctx, consentManager, clientManager)) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - require.NoError(t, fositeManager.FlushInactiveLoginConsentRequests(ctx, time.Now().Round(time.Second), 1000, 100)) - }) - - // validate - t.Run("step=validate", jt.ConsentRejectionValidate(ctx, consentManager)) - }) - - }) - - t.Run("case=flush-consent-request-timeout", func(t *testing.T) { - jt := NewConsentJanitorTestHelper(network + "loginTimeout") - - t.Run(fmt.Sprintf("case=%s", "login-timeout"), func(t *testing.T) { - - // setup - t.Run("step=setup", jt.LoginTimeoutSetup(ctx, consentManager, clientManager)) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - require.NoError(t, fositeManager.FlushInactiveLoginConsentRequests(ctx, time.Now().Round(time.Second), 1000, 100)) - }) - - // validate - t.Run("step=validate", jt.LoginTimeoutValidate(ctx, consentManager)) - - }) - - jt = NewConsentJanitorTestHelper(network + "consentTimeout") - - t.Run(fmt.Sprintf("case=%s", "consent-timeout"), func(t *testing.T) { - - // setup - t.Run("step=setup", jt.ConsentTimeoutSetup(ctx, consentManager, clientManager)) - - // cleanup - t.Run("step=cleanup", func(t *testing.T) { - require.NoError(t, fositeManager.FlushInactiveLoginConsentRequests(ctx, time.Now().Round(time.Second), 1000, 100)) - }) - - // validate - t.Run("step=validate", jt.ConsentTimeoutValidate(ctx, consentManager)) - - }) - }) - } -} - func getAccessRequests(uniqueName string, lifespan time.Duration) []*fosite.Request { return []*fosite.Request{ { ID: fmt.Sprintf("%s_flush-access-1", uniqueName), RequestedAt: time.Now().Round(time.Second), - Client: &client.Client{LegacyClientID: fmt.Sprintf("%s_flush-access-1", uniqueName)}, + Client: &client.Client{ID: fmt.Sprintf("%s_flush-access-1", uniqueName)}, RequestedScope: fosite.Arguments{"fa", "ba"}, GrantedScope: fosite.Arguments{"fa", "ba"}, Form: url.Values{"foo": []string{"bar", "baz"}}, @@ -608,7 +202,7 @@ func getAccessRequests(uniqueName string, lifespan time.Duration) []*fosite.Requ { ID: fmt.Sprintf("%s_flush-access-2", uniqueName), RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Minute)), - Client: &client.Client{LegacyClientID: fmt.Sprintf("%s_flush-access-2", uniqueName)}, + Client: &client.Client{ID: fmt.Sprintf("%s_flush-access-2", uniqueName)}, RequestedScope: fosite.Arguments{"fa", "ba"}, GrantedScope: fosite.Arguments{"fa", "ba"}, Form: url.Values{"foo": []string{"bar", "baz"}}, @@ -617,7 +211,7 @@ func getAccessRequests(uniqueName string, lifespan time.Duration) []*fosite.Requ { ID: fmt.Sprintf("%s_flush-access-3", uniqueName), RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)), - Client: &client.Client{LegacyClientID: 
fmt.Sprintf("%s_flush-access-3", uniqueName)}, + Client: &client.Client{ID: fmt.Sprintf("%s_flush-access-3", uniqueName)}, RequestedScope: fosite.Arguments{"fa", "ba"}, GrantedScope: fosite.Arguments{"fa", "ba"}, Form: url.Values{"foo": []string{"bar", "baz"}}, @@ -627,7 +221,7 @@ func getAccessRequests(uniqueName string, lifespan time.Duration) []*fosite.Requ } func getRefreshRequests(uniqueName string, lifespan time.Duration) []*fosite.AccessRequest { - var tokenSignature = "4c7c7e8b3a77ad0c3ec846a21653c48b45dbfa31" + var tokenSignature = "4c7c7e8b3a77ad0c3ec846a21653c48b45dbfa31" //nolint:gosec return []*fosite.AccessRequest{ { GrantTypes: []string{ @@ -636,7 +230,7 @@ func getRefreshRequests(uniqueName string, lifespan time.Duration) []*fosite.Acc Request: fosite.Request{ RequestedAt: time.Now().Round(time.Second), ID: fmt.Sprintf("%s_flush-refresh-1", uniqueName), - Client: &client.Client{LegacyClientID: fmt.Sprintf("%s_flush-refresh-1", uniqueName)}, + Client: &client.Client{ID: fmt.Sprintf("%s_flush-refresh-1", uniqueName)}, RequestedScope: []string{"offline"}, GrantedScope: []string{"offline"}, Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, @@ -652,7 +246,7 @@ func getRefreshRequests(uniqueName string, lifespan time.Duration) []*fosite.Acc Request: fosite.Request{ RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Minute)), ID: fmt.Sprintf("%s_flush-refresh-2", uniqueName), - Client: &client.Client{LegacyClientID: fmt.Sprintf("%s_flush-refresh-2", uniqueName)}, + Client: &client.Client{ID: fmt.Sprintf("%s_flush-refresh-2", uniqueName)}, RequestedScope: []string{"offline"}, GrantedScope: []string{"offline"}, Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, @@ -668,7 +262,7 @@ func getRefreshRequests(uniqueName string, lifespan time.Duration) []*fosite.Acc Request: fosite.Request{ RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)), ID: fmt.Sprintf("%s_flush-refresh-3", uniqueName), - Client: &client.Client{LegacyClientID: fmt.Sprintf("%s_flush-refresh-3", uniqueName)}, + Client: &client.Client{ID: fmt.Sprintf("%s_flush-refresh-3", uniqueName)}, RequestedScope: []string{"offline"}, GrantedScope: []string{"offline"}, Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, @@ -680,96 +274,11 @@ func getRefreshRequests(uniqueName string, lifespan time.Duration) []*fosite.Acc } } -func genLoginRequests(uniqueName string, lifespan time.Duration) []*consent.LoginRequest { - return []*consent.LoginRequest{ - { - ID: fmt.Sprintf("%s_flush-login-1", uniqueName), - RequestedScope: []string{"foo", "bar"}, - Subject: fmt.Sprintf("%s_flush-login-1", uniqueName), - Client: &client.Client{ - LegacyClientID: fmt.Sprintf("%s_flush-login-consent-1", uniqueName), - RedirectURIs: []string{"http://redirect"}, - }, - RequestURL: "http://redirect", - RequestedAt: time.Now().Round(time.Second), - AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second)), - Verifier: fmt.Sprintf("%s_flush-login-1", uniqueName), - }, - { - ID: fmt.Sprintf("%s_flush-login-2", uniqueName), - RequestedScope: []string{"foo", "bar"}, - Subject: fmt.Sprintf("%s_flush-login-2", uniqueName), - Client: &client.Client{ - LegacyClientID: fmt.Sprintf("%s_flush-login-consent-2", uniqueName), - RedirectURIs: []string{"http://redirect"}, - }, - RequestURL: "http://redirect", - RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Minute)), - AuthenticatedAt: 
sqlxx.NullTime(time.Now().Round(time.Second).Add(-(lifespan + time.Minute))), - Verifier: fmt.Sprintf("%s_flush-login-2", uniqueName), - }, - { - ID: fmt.Sprintf("%s_flush-login-3", uniqueName), - RequestedScope: []string{"foo", "bar"}, - Subject: fmt.Sprintf("%s_flush-login-3", uniqueName), - Client: &client.Client{ - LegacyClientID: fmt.Sprintf("%s_flush-login-consent-3", uniqueName), - RedirectURIs: []string{"http://redirect"}, - }, - RequestURL: "http://redirect", - RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)), - AuthenticatedAt: sqlxx.NullTime(time.Now().Round(time.Second).Add(-(lifespan + time.Hour))), - Verifier: fmt.Sprintf("%s_flush-login-3", uniqueName), - }, - } -} - -func genConsentRequests(uniqueName string, lifespan time.Duration) []*consent.OAuth2ConsentRequest { - return []*consent.OAuth2ConsentRequest{ - { - ID: fmt.Sprintf("%s_flush-consent-1", uniqueName), - RequestedScope: []string{"foo", "bar"}, - Subject: fmt.Sprintf("%s_flush-consent-1", uniqueName), - OpenIDConnectContext: nil, - ClientID: fmt.Sprintf("%s_flush-login-consent-1", uniqueName), - RequestURL: "http://redirect", - LoginChallenge: sqlxx.NullString(fmt.Sprintf("%s_flush-login-1", uniqueName)), - RequestedAt: time.Now().Round(time.Second), - Verifier: fmt.Sprintf("%s_flush-consent-1", uniqueName), - CSRF: fmt.Sprintf("%s_flush-consent-1", uniqueName), - }, - { - ID: fmt.Sprintf("%s_flush-consent-2", uniqueName), - RequestedScope: []string{"foo", "bar"}, - Subject: fmt.Sprintf("%s_flush-consent-2", uniqueName), - OpenIDConnectContext: nil, - ClientID: fmt.Sprintf("%s_flush-login-consent-2", uniqueName), - RequestURL: "http://redirect", - LoginChallenge: sqlxx.NullString(fmt.Sprintf("%s_flush-login-2", uniqueName)), - RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Minute)), - Verifier: fmt.Sprintf("%s_flush-consent-2", uniqueName), - CSRF: fmt.Sprintf("%s_flush-consent-2", uniqueName), - }, - { - ID: fmt.Sprintf("%s_flush-consent-3", uniqueName), - RequestedScope: []string{"foo", "bar"}, - Subject: fmt.Sprintf("%s_flush-consent-3", uniqueName), - OpenIDConnectContext: nil, - ClientID: fmt.Sprintf("%s_flush-login-consent-3", uniqueName), - RequestURL: "http://redirect", - LoginChallenge: sqlxx.NullString(fmt.Sprintf("%s_flush-login-3", uniqueName)), - RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)), - Verifier: fmt.Sprintf("%s_flush-consent-3", uniqueName), - CSRF: fmt.Sprintf("%s_flush-consent-3", uniqueName), - }, - } -} - func getGrantRequests(uniqueName string, lifespan time.Duration) []*createGrantRequest { return []*createGrantRequest{ { grant: trust.Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: fmt.Sprintf("%s_flush-grant-iss-1", uniqueName), Subject: fmt.Sprintf("%s_flush-grant-sub-1", uniqueName), Scope: []string{"foo", "bar"}, @@ -787,7 +296,7 @@ func getGrantRequests(uniqueName string, lifespan time.Duration) []*createGrantR }, { grant: trust.Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: fmt.Sprintf("%s_flush-grant-iss-2", uniqueName), Subject: fmt.Sprintf("%s_flush-grant-sub-2", uniqueName), Scope: []string{"foo", "bar"}, @@ -805,7 +314,7 @@ func getGrantRequests(uniqueName string, lifespan time.Duration) []*createGrantR }, { grant: trust.Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: fmt.Sprintf("%s_flush-grant-iss-3", uniqueName), Subject: fmt.Sprintf("%s_flush-grant-sub-3", uniqueName), Scope: []string{"foo", "bar"}, diff --git 
a/internal/testhelpers/lifespans.go b/internal/testhelpers/lifespans.go index a3d24e42806..e2ba8a218c4 100644 --- a/internal/testhelpers/lifespans.go +++ b/internal/testhelpers/lifespans.go @@ -6,21 +6,24 @@ package testhelpers import ( "time" - "github.com/ory/hydra/client" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/x" ) var TestLifespans = client.Lifespans{ - AuthorizationCodeGrantAccessTokenLifespan: x.NullDuration{Duration: 31 * time.Hour, Valid: true}, - AuthorizationCodeGrantIDTokenLifespan: x.NullDuration{Duration: 32 * time.Hour, Valid: true}, - AuthorizationCodeGrantRefreshTokenLifespan: x.NullDuration{Duration: 33 * time.Hour, Valid: true}, - ClientCredentialsGrantAccessTokenLifespan: x.NullDuration{Duration: 34 * time.Hour, Valid: true}, - ImplicitGrantAccessTokenLifespan: x.NullDuration{Duration: 35 * time.Hour, Valid: true}, - ImplicitGrantIDTokenLifespan: x.NullDuration{Duration: 36 * time.Hour, Valid: true}, - JwtBearerGrantAccessTokenLifespan: x.NullDuration{Duration: 37 * time.Hour, Valid: true}, - PasswordGrantAccessTokenLifespan: x.NullDuration{Duration: 38 * time.Hour, Valid: true}, - PasswordGrantRefreshTokenLifespan: x.NullDuration{Duration: 39 * time.Hour, Valid: true}, - RefreshTokenGrantIDTokenLifespan: x.NullDuration{Duration: 40 * time.Hour, Valid: true}, - RefreshTokenGrantAccessTokenLifespan: x.NullDuration{Duration: 41 * time.Hour, Valid: true}, - RefreshTokenGrantRefreshTokenLifespan: x.NullDuration{Duration: 42 * time.Hour, Valid: true}, + AuthorizationCodeGrantAccessTokenLifespan: x.NullDuration{Duration: 31 * time.Hour, Valid: true}, + AuthorizationCodeGrantIDTokenLifespan: x.NullDuration{Duration: 32 * time.Hour, Valid: true}, + AuthorizationCodeGrantRefreshTokenLifespan: x.NullDuration{Duration: 33 * time.Hour, Valid: true}, + ClientCredentialsGrantAccessTokenLifespan: x.NullDuration{Duration: 34 * time.Hour, Valid: true}, + ImplicitGrantAccessTokenLifespan: x.NullDuration{Duration: 35 * time.Hour, Valid: true}, + ImplicitGrantIDTokenLifespan: x.NullDuration{Duration: 36 * time.Hour, Valid: true}, + JwtBearerGrantAccessTokenLifespan: x.NullDuration{Duration: 37 * time.Hour, Valid: true}, + PasswordGrantAccessTokenLifespan: x.NullDuration{Duration: 38 * time.Hour, Valid: true}, + PasswordGrantRefreshTokenLifespan: x.NullDuration{Duration: 39 * time.Hour, Valid: true}, + RefreshTokenGrantIDTokenLifespan: x.NullDuration{Duration: 40 * time.Hour, Valid: true}, + RefreshTokenGrantAccessTokenLifespan: x.NullDuration{Duration: 41 * time.Hour, Valid: true}, + RefreshTokenGrantRefreshTokenLifespan: x.NullDuration{Duration: 42 * time.Hour, Valid: true}, + DeviceAuthorizationGrantIDTokenLifespan: x.NullDuration{Duration: 45 * time.Hour, Valid: true}, + DeviceAuthorizationGrantAccessTokenLifespan: x.NullDuration{Duration: 46 * time.Hour, Valid: true}, + DeviceAuthorizationGrantRefreshTokenLifespan: x.NullDuration{Duration: 47 * time.Hour, Valid: true}, } diff --git a/internal/testhelpers/oauth2.go b/internal/testhelpers/oauth2.go index 96a8d87761a..107bff4041a 100644 --- a/internal/testhelpers/oauth2.go +++ b/internal/testhelpers/oauth2.go @@ -6,43 +6,40 @@ package testhelpers import ( "bytes" "context" + "encoding/base64" "encoding/json" + "io" "net/http" - "net/http/cookiejar" + "net/http/httptest" "net/url" "strings" "testing" "time" "github.com/stretchr/testify/assert" - - djwt "github.com/ory/fosite/token/jwt" - - 
"github.com/ory/fosite/token/jwt" - - "github.com/julienschmidt/httprouter" "github.com/stretchr/testify/require" "github.com/tidwall/gjson" + "github.com/urfave/negroni" "golang.org/x/oauth2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/httprouterx" "github.com/ory/x/httpx" "github.com/ory/x/ioutilx" - - "net/http/httptest" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" + "github.com/ory/x/prometheusx" + "github.com/ory/x/reqlog" ) -func NewIDToken(t *testing.T, reg driver.Registry, subject string) string { +func NewIDToken(t *testing.T, reg *driver.RegistrySQL, subject string) string { return NewIDTokenWithExpiry(t, reg, subject, time.Hour) } -func NewIDTokenWithExpiry(t *testing.T, reg driver.Registry, subject string, exp time.Duration) string { - token, _, err := reg.OpenIDJWTStrategy().Generate(context.Background(), jwt.IDTokenClaims{ +func NewIDTokenWithExpiry(t *testing.T, reg *driver.RegistrySQL, subject string, exp time.Duration) string { + token, _, err := reg.OpenIDJWTSigner().Generate(context.Background(), jwt.IDTokenClaims{ Subject: subject, ExpiresAt: time.Now().Add(exp), IssuedAt: time.Now(), @@ -51,36 +48,59 @@ func NewIDTokenWithExpiry(t *testing.T, reg driver.Registry, subject string, exp return token } -func NewIDTokenWithClaims(t *testing.T, reg driver.Registry, claims djwt.MapClaims) string { - token, _, err := reg.OpenIDJWTStrategy().Generate(context.Background(), claims, jwt.NewHeaders()) +func NewIDTokenWithClaims(t *testing.T, reg *driver.RegistrySQL, claims jwt.MapClaims) string { + token, _, err := reg.OpenIDJWTSigner().Generate(context.Background(), claims, jwt.NewHeaders()) require.NoError(t, err) return token } -func NewOAuth2Server(ctx context.Context, t *testing.T, reg driver.Registry) (publicTS, adminTS *httptest.Server) { - // Lifespan is two seconds to avoid time synchronization issues with SQL. 
+// NewOAuth2Server +// Deprecated: use NewConfigurableOAuth2Server instead +func NewOAuth2Server(ctx context.Context, t testing.TB, reg *driver.RegistrySQL) (publicTS, adminTS *httptest.Server) { reg.Config().MustSet(ctx, config.KeySubjectIdentifierAlgorithmSalt, "76d5d2bf-747f-4592-9fbd-d2b895a54b3a") - reg.Config().MustSet(ctx, config.KeyAccessTokenLifespan, time.Second*2) - reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, time.Second*3) - reg.Config().MustSet(ctx, config.PublicInterface.Key(config.KeySuffixTLSEnabled), false) - reg.Config().MustSet(ctx, config.AdminInterface.Key(config.KeySuffixTLSEnabled), false) + reg.Config().MustSet(ctx, config.KeyAccessTokenLifespan, 10*time.Second) + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, 20*time.Second) reg.Config().MustSet(ctx, config.KeyScopeStrategy, "exact") - public, admin := x.NewRouterPublic(), x.NewRouterAdmin(reg.Config().AdminURL) - - publicTS = httptest.NewServer(public) - t.Cleanup(publicTS.Close) + return NewConfigurableOAuth2Server(ctx, t, reg) +} - adminTS = httptest.NewServer(admin) - t.Cleanup(adminTS.Close) +func NewConfigurableOAuth2Server(ctx context.Context, t testing.TB, reg *driver.RegistrySQL) (publicTS, adminTS *httptest.Server) { + MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) + MustEnsureRegistryKeys(t, reg, x.OAuth2JWTKeyName) + + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + { + n := negroni.New() + n.Use(reqlog.NewMiddleware()) + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + n.UseFunc(httprouterx.AddAdminPrefixIfNotPresentNegroni) + + router := x.NewRouterAdmin(metrics) + reg.RegisterAdminRoutes(router) + n.UseHandler(router) + + adminTS = httptest.NewServer(n) + t.Cleanup(adminTS.Close) + reg.Config().MustSet(ctx, config.KeyAdminURL, adminTS.URL) + } + { + n := negroni.New() + n.Use(reqlog.NewMiddleware()) + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + + router := x.NewRouterPublic(metrics) + reg.RegisterPublicRoutes(ctx, router) + n.UseHandler(router) + + publicTS = httptest.NewServer(n) + t.Cleanup(publicTS.Close) + reg.Config().MustSet(ctx, config.KeyAdminURL, publicTS.URL) + } reg.Config().MustSet(ctx, config.KeyIssuerURL, publicTS.URL) - // SendDebugMessagesToClients: true, - - internal.MustEnsureRegistryKeys(reg, x.OpenIDConnectKeyName) - internal.MustEnsureRegistryKeys(reg, x.OAuth2JWTKeyName) - - reg.RegisterRoutes(ctx, admin, public) return publicTS, adminTS } @@ -89,23 +109,36 @@ func DecodeIDToken(t *testing.T, token *oauth2.Token) gjson.Result { require.True(t, ok) assert.NotEmpty(t, idt) - body, err := x.DecodeSegment(strings.Split(idt, ".")[1]) - require.NoError(t, err) + return gjson.ParseBytes(InsecureDecodeJWT(t, idt)) +} +func IntrospectToken(t testing.TB, token string, adminTS *httptest.Server) gjson.Result { + require.NotEmpty(t, token) + + req := httpx.MustNewRequest("POST", adminTS.URL+"/admin/oauth2/introspect", + strings.NewReader((url.Values{"token": {token}}).Encode()), + "application/x-www-form-urlencoded") + + res, err := adminTS.Client().Do(req) + require.NoError(t, err) + defer res.Body.Close() //nolint:errcheck + body, err := io.ReadAll(res.Body) + require.NoError(t, err) + require.Equalf(t, http.StatusOK, res.StatusCode, "Response body: %s", body) return gjson.ParseBytes(body) } -func IntrospectToken(t *testing.T, conf *oauth2.Config, token string, adminTS *httptest.Server) 
gjson.Result { +func RevokeToken(t testing.TB, conf *oauth2.Config, token string, publicTS *httptest.Server) gjson.Result { require.NotEmpty(t, token) - req := httpx.MustNewRequest("POST", adminTS.URL+"/admin/oauth2/introspect", + req := httpx.MustNewRequest("POST", publicTS.URL+"/oauth2/revoke", strings.NewReader((url.Values{"token": {token}}).Encode()), "application/x-www-form-urlencoded") req.SetBasicAuth(conf.ClientID, conf.ClientSecret) - res, err := adminTS.Client().Do(req) + res, err := publicTS.Client().Do(req) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return gjson.ParseBytes(ioutilx.MustReadAll(res.Body)) } @@ -121,7 +154,7 @@ func UpdateClientTokenLifespans(t *testing.T, conf *oauth2.Config, clientID stri req.SetBasicAuth(conf.ClientID, conf.ClientSecret) res, err := adminTS.Client().Do(req) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck require.Equal(t, res.StatusCode, http.StatusOK) } @@ -134,7 +167,7 @@ func Userinfo(t *testing.T, token *oauth2.Token, publicTS *httptest.Server) gjso res, err := publicTS.Client().Do(req) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return gjson.ParseBytes(ioutilx.MustReadAll(res.Body)) } @@ -142,13 +175,13 @@ func HTTPServerNotImplementedHandler(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotImplemented) } -func HTTPServerNoExpectedCallHandler(t *testing.T) http.HandlerFunc { +func HTTPServerNoExpectedCallHandler(t testing.TB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { t.Fatal("This should not have been called") } } -func NewLoginConsentUI(t *testing.T, c *config.DefaultProvider, login, consent http.HandlerFunc) { +func NewLoginConsentUI(t testing.TB, c *config.DefaultProvider, login, consent http.HandlerFunc) { if login == nil { login = HTTPServerNotImplementedHandler } @@ -167,13 +200,24 @@ func NewLoginConsentUI(t *testing.T, c *config.DefaultProvider, login, consent h c.MustSet(context.Background(), config.KeyConsentURL, ct.URL) } -func NewCallbackURL(t *testing.T, prefix string, h http.HandlerFunc) string { +func NewDeviceLoginConsentUI(t testing.TB, c *config.DefaultProvider, device, login, consent http.HandlerFunc) { + if device == nil { + device = HTTPServerNotImplementedHandler + } + dt := httptest.NewServer(device) + t.Cleanup(dt.Close) + c.MustSet(context.Background(), config.KeyDeviceVerificationURL, dt.URL) + + NewLoginConsentUI(t, c, login, consent) +} + +func NewCallbackURL(t testing.TB, prefix string, h http.HandlerFunc) string { if h == nil { h = HTTPServerNotImplementedHandler } - r := httprouter.New() - r.GET("/"+prefix, func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + r := http.NewServeMux() + r.HandleFunc("/"+prefix, func(w http.ResponseWriter, r *http.Request) { h(w, r) }) ts := httptest.NewServer(r) @@ -182,14 +226,34 @@ func NewCallbackURL(t *testing.T, prefix string, h http.HandlerFunc) string { return ts.URL + "/" + prefix } -func NewEmptyCookieJar(t *testing.T) *cookiejar.Jar { - c, err := cookiejar.New(&cookiejar.Options{}) - require.NoError(t, err) - return c +// InsecureDecodeJWT decodes a JWT payload without checking the signature. 
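+// It is intended for test assertions only. For example (illustrative):
+//
+//	payload := InsecureDecodeJWT(t, token)
+//	sub := gjson.GetBytes(payload, "sub").Str
+//
+// Anything security-relevant must verify the signature instead of using this helper.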
+func InsecureDecodeJWT(t require.TestingT, token string) []byte { + parts := strings.Split(token, ".") + require.Len(t, parts, 3) + dec, err := base64.RawURLEncoding.DecodeString(parts[1]) + require.NoErrorf(t, err, "failed to decode JWT payload: %s", parts[1]) + return dec } -func NewEmptyJarClient(t *testing.T) *http.Client { - return &http.Client{ - Jar: NewEmptyCookieJar(t), - } +var ( + NewEmptyCookieJar = x.NewEmptyCookieJar + NewEmptyJarClient = x.NewEmptyJarClient +) + +func AssertTokenValid(t *testing.T, accessOrIDToken gjson.Result, sub string) { + assert.Equal(t, sub, accessOrIDToken.Get("sub").Str) + assert.WithinDurationf(t, time.Now(), time.Unix(accessOrIDToken.Get("iat").Int(), 0), time.Minute, "%s", accessOrIDToken.Raw) + assert.Truef(t, time.Now().Before(time.Unix(accessOrIDToken.Get("exp").Int(), 0)), "%s", accessOrIDToken.Raw) +} + +func AssertAccessToken(t *testing.T, token gjson.Result, sub, clientID string) { + AssertTokenValid(t, token, sub) + assert.Equalf(t, clientID, token.Get("client_id").Str, "%s", token.Raw) + assert.WithinDurationf(t, time.Now(), time.Unix(token.Get("nbf").Int(), 0), time.Minute, "%s", token.Raw) +} + +func AssertIDToken(t *testing.T, token gjson.Result, sub, clientID string) { + AssertTokenValid(t, token, sub) + assert.Equalf(t, clientID, token.Get("aud.0").Str, "%s", token.Raw) + assert.WithinDurationf(t, time.Now(), time.Unix(token.Get("rat").Int(), 0), time.Minute, "%s", token.Raw) } diff --git a/internal/testhelpers/sql_schemas/cockroach_dump.sql b/internal/testhelpers/sql_schemas/cockroach_dump.sql new file mode 100644 index 00000000000..a8a7dfbd493 --- /dev/null +++ b/internal/testhelpers/sql_schemas/cockroach_dump.sql @@ -0,0 +1,402 @@ +-- migrations hash: 55b905a5cead652db5b4a5e317d60ad18bd9e6de61d4b43edcdb79244947fbf9b106783f10765f2ffa4d7952c745900c2406f522a101dbed17e5785396484e41 + +CREATE TABLE public.schema_migration ( + version VARCHAR(48) NOT NULL, + version_self INT8 NOT NULL DEFAULT 0:::INT8, + rowid INT8 NOT VISIBLE NOT NULL DEFAULT unique_rowid(), + CONSTRAINT schema_migration_pkey PRIMARY KEY (rowid ASC), + UNIQUE INDEX schema_migration_version_idx (version ASC), + INDEX schema_migration_version_self_idx (version_self ASC) +); +CREATE TABLE public.networks ( + id UUID NOT NULL, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL, + CONSTRAINT networks_pkey PRIMARY KEY (id ASC) +); +CREATE TABLE public.hydra_client ( + id VARCHAR(255) NOT NULL, + client_name STRING NOT NULL, + client_secret STRING NOT NULL, + scope STRING NOT NULL, + owner STRING NOT NULL, + policy_uri STRING NOT NULL, + tos_uri STRING NOT NULL, + client_uri STRING NOT NULL, + logo_uri STRING NOT NULL, + client_secret_expires_at INT8 NOT NULL DEFAULT 0:::INT8, + sector_identifier_uri STRING NOT NULL, + jwks STRING NOT NULL, + jwks_uri STRING NOT NULL, + token_endpoint_auth_method VARCHAR(25) NOT NULL DEFAULT '':::STRING, + request_object_signing_alg VARCHAR(10) NOT NULL DEFAULT '':::STRING, + userinfo_signed_response_alg VARCHAR(10) NOT NULL DEFAULT '':::STRING, + subject_type VARCHAR(15) NOT NULL DEFAULT '':::STRING, + pk_deprecated INT8 NOT NULL DEFAULT unique_rowid(), + created_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + frontchannel_logout_uri STRING NOT NULL DEFAULT '':::STRING, + frontchannel_logout_session_required BOOL NOT NULL DEFAULT false, + backchannel_logout_uri STRING NOT NULL DEFAULT '':::STRING, + backchannel_logout_session_required BOOL NOT NULL DEFAULT false, + 
metadata STRING NOT NULL DEFAULT '{}':::STRING, + token_endpoint_auth_signing_alg VARCHAR(10) NOT NULL DEFAULT '':::STRING, + authorization_code_grant_access_token_lifespan INT8 NULL, + authorization_code_grant_id_token_lifespan INT8 NULL, + authorization_code_grant_refresh_token_lifespan INT8 NULL, + client_credentials_grant_access_token_lifespan INT8 NULL, + implicit_grant_access_token_lifespan INT8 NULL, + implicit_grant_id_token_lifespan INT8 NULL, + jwt_bearer_grant_access_token_lifespan INT8 NULL, + password_grant_access_token_lifespan INT8 NULL, + password_grant_refresh_token_lifespan INT8 NULL, + refresh_token_grant_id_token_lifespan INT8 NULL, + refresh_token_grant_access_token_lifespan INT8 NULL, + refresh_token_grant_refresh_token_lifespan INT8 NULL, + pk UUID NULL, + registration_access_token_signature VARCHAR(128) NOT NULL DEFAULT '':::STRING, + nid UUID NOT NULL, + redirect_uris JSONB NOT NULL, + grant_types JSONB NOT NULL, + response_types JSONB NOT NULL, + audience JSONB NOT NULL, + allowed_cors_origins JSONB NOT NULL, + contacts JSONB NOT NULL, + request_uris JSONB NOT NULL, + post_logout_redirect_uris JSONB NOT NULL DEFAULT '[]':::JSONB, + access_token_strategy VARCHAR(10) NOT NULL DEFAULT '':::STRING, + skip_consent BOOL NOT NULL DEFAULT false, + skip_logout_consent BOOL NULL, + device_authorization_grant_id_token_lifespan INT8 NULL, + device_authorization_grant_access_token_lifespan INT8 NULL, + device_authorization_grant_refresh_token_lifespan INT8 NULL, + CONSTRAINT hydra_client_pkey PRIMARY KEY (id ASC, nid ASC), + UNIQUE INDEX hydra_client_id_key (id ASC, nid ASC), + UNIQUE INDEX hydra_client_pk_key (pk ASC) +); +CREATE TABLE public.hydra_jwk ( + sid VARCHAR(255) NOT NULL, + kid VARCHAR(255) NOT NULL, + version INT8 NOT NULL DEFAULT 0:::INT8, + keydata STRING NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + pk_deprecated INT8 NOT NULL DEFAULT unique_rowid(), + pk UUID NOT NULL, + nid UUID NOT NULL, + CONSTRAINT hydra_jwk_pkey PRIMARY KEY (pk ASC), + UNIQUE INDEX hydra_jwk_sid_kid_nid_key (sid ASC, kid ASC, nid ASC), + INDEX hydra_jwk_nid_sid_created_at_idx (nid ASC, sid ASC, created_at ASC), + INDEX hydra_jwk_nid_sid_kid_created_at_idx (nid ASC, sid ASC, kid ASC, created_at ASC) +); +CREATE TABLE public.hydra_oauth2_authentication_session ( + id VARCHAR(40) NOT NULL, + authenticated_at TIMESTAMP NULL, + subject VARCHAR(255) NOT NULL, + remember BOOL NOT NULL DEFAULT false, + nid UUID NOT NULL, + identity_provider_session_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + CONSTRAINT hydra_oauth2_authentication_session_pkey PRIMARY KEY (id ASC), + INDEX hydra_oauth2_authentication_session_subject_idx (subject ASC, nid ASC) +); +CREATE TABLE public.hydra_oauth2_obfuscated_authentication_session ( + subject VARCHAR(255) NOT NULL, + client_id VARCHAR(255) NOT NULL, + subject_obfuscated VARCHAR(255) NOT NULL, + nid UUID NOT NULL, + CONSTRAINT hydra_oauth2_obfuscated_authentication_session_pkey PRIMARY KEY (subject ASC, client_id ASC, nid ASC), + UNIQUE INDEX hydra_oauth2_obfuscated_authentication_session_client_id_subject_obfuscated_idx (client_id ASC, subject_obfuscated ASC, nid ASC) +); +CREATE TABLE public.hydra_oauth2_logout_request ( + challenge VARCHAR(36) NOT NULL, + verifier VARCHAR(36) NOT NULL, + subject VARCHAR(255) NOT NULL, + sid VARCHAR(36) NOT NULL, + client_id VARCHAR(255) NULL, + request_url STRING NOT NULL, + redir_url STRING NOT NULL, + was_used BOOL NOT NULL DEFAULT false, + accepted BOOL NOT NULL DEFAULT false, + rejected BOOL 
NOT NULL DEFAULT false, + rp_initiated BOOL NOT NULL DEFAULT false, + nid UUID NOT NULL, + expires_at TIMESTAMP NULL, + requested_at TIMESTAMP NULL, + CONSTRAINT hydra_oauth2_logout_request_pkey PRIMARY KEY (challenge ASC), + UNIQUE INDEX hydra_oauth2_logout_request_verifier_key (verifier ASC), + INDEX hydra_oauth2_logout_request_client_id_idx (client_id ASC, nid ASC) +); +CREATE TABLE public.hydra_oauth2_flow ( + login_challenge VARCHAR(40) NOT NULL, + login_verifier VARCHAR(40) NULL, + login_csrf VARCHAR(40) NULL, + subject VARCHAR(255) NULL, + request_url STRING NULL, + login_skip BOOL NULL, + client_id VARCHAR(255) NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + login_initialized_at TIMESTAMP NULL, + oidc_context JSONB NULL, + login_session_id VARCHAR(40) NULL, + state INT8 NULL, + login_remember BOOL NULL, + login_remember_for INT8 NULL, + login_error STRING NULL, + acr STRING NULL, + login_authenticated_at TIMESTAMP NULL, + login_was_used BOOL NULL, + forced_subject_identifier VARCHAR(255) NULL, + context JSONB NULL, + consent_challenge_id VARCHAR(40) NULL, + consent_skip BOOL NULL, + consent_verifier VARCHAR(40) NULL, + consent_csrf VARCHAR(40) NULL, + consent_remember BOOL NULL, + consent_remember_for INT8 NULL, + consent_handled_at TIMESTAMP NULL, + consent_error STRING NULL, + session_access_token JSONB NULL, + session_id_token JSONB NULL, + consent_was_used BOOL NULL, + nid UUID NOT NULL, + requested_scope JSONB NULL, + requested_at_audience JSONB NULL, + amr JSONB NULL, + granted_scope JSONB NULL, + granted_at_audience JSONB NULL, + login_extend_session_lifespan BOOL NULL, + identity_provider_session_id VARCHAR(40) NULL, + device_challenge_id VARCHAR(255) NULL, + device_code_request_id VARCHAR(255) NULL, + device_verifier VARCHAR(40) NULL, + device_csrf VARCHAR(40) NULL, + device_was_used BOOL NULL, + device_handled_at TIMESTAMP NULL, + device_error VARCHAR(2048) NULL, + expires_at TIMESTAMP NULL AS (IF(consent_remember_for > 0:::INT8, requested_at + ('00:00:01':::INTERVAL * consent_remember_for), NULL)) VIRTUAL, + CONSTRAINT hydra_oauth2_flow_pkey PRIMARY KEY (login_challenge ASC), + UNIQUE INDEX hydra_oauth2_flow_consent_challenge_idx (consent_challenge_id ASC), + INDEX hydra_oauth2_flow_client_id_subject_idx (client_id ASC, nid ASC, subject ASC), + INDEX hydra_oauth2_flow_cid_idx (client_id ASC, nid ASC), + INDEX hydra_oauth2_flow_login_session_id_idx (login_session_id ASC, nid ASC), + INDEX hydra_oauth2_flow_sub_idx (subject ASC, nid ASC), + INDEX hydra_oauth2_flow_previous_consents_idx (subject ASC, client_id ASC, nid ASC, consent_skip ASC, consent_error ASC, consent_remember ASC), + UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx (device_challenge_id ASC) +); +CREATE TABLE public.hydra_oauth2_access ( + signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope STRING NOT NULL, + granted_scope STRING NOT NULL, + form_data STRING NOT NULL, + session_data STRING NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '':::STRING, + active BOOL NOT NULL DEFAULT true, + requested_audience STRING NULL DEFAULT '':::STRING, + granted_audience STRING NULL DEFAULT '':::STRING, + challenge_id VARCHAR(40) NULL, + nid UUID NOT NULL, + expires_at TIMESTAMP NULL, + CONSTRAINT "primary" PRIMARY KEY (signature ASC), + INDEX hydra_oauth2_access_requested_at_idx (requested_at ASC, nid ASC), + INDEX hydra_oauth2_access_client_id_idx (client_id ASC, nid ASC), + 
INDEX hydra_oauth2_access_challenge_id_idx (challenge_id ASC), + INDEX hydra_oauth2_access_request_id_idx (request_id ASC, nid ASC) +); +CREATE TABLE public.hydra_oauth2_refresh ( + signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope STRING NOT NULL, + granted_scope STRING NOT NULL, + form_data STRING NOT NULL, + session_data STRING NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '':::STRING, + active BOOL NOT NULL DEFAULT true, + requested_audience STRING NULL DEFAULT '':::STRING, + granted_audience STRING NULL DEFAULT '':::STRING, + challenge_id VARCHAR(40) NULL, + nid UUID NOT NULL, + expires_at TIMESTAMP NULL, + first_used_at TIMESTAMP NULL, + access_token_signature VARCHAR(255) NULL, + used_times INT4 NULL, + CONSTRAINT "primary" PRIMARY KEY (signature ASC), + INDEX hydra_oauth2_refresh_client_id_idx (client_id ASC, nid ASC), + INDEX hydra_oauth2_refresh_challenge_id_idx (challenge_id ASC), + INDEX hydra_oauth2_refresh_request_id_idx (request_id ASC), + INDEX hydra_oauth2_refresh_requested_at_idx (nid ASC, requested_at ASC) +); +CREATE TABLE public.hydra_oauth2_code ( + signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope STRING NOT NULL, + granted_scope STRING NOT NULL, + form_data STRING NOT NULL, + session_data STRING NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '':::STRING, + active BOOL NOT NULL DEFAULT true, + requested_audience STRING NULL DEFAULT '':::STRING, + granted_audience STRING NULL DEFAULT '':::STRING, + challenge_id VARCHAR(40) NULL, + nid UUID NOT NULL, + expires_at TIMESTAMP NULL, + CONSTRAINT "primary" PRIMARY KEY (signature ASC), + INDEX hydra_oauth2_code_client_id_idx (client_id ASC, nid ASC), + INDEX hydra_oauth2_code_challenge_id_idx (challenge_id ASC, nid ASC) +); +CREATE TABLE public.hydra_oauth2_oidc ( + signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope STRING NOT NULL, + granted_scope STRING NOT NULL, + form_data STRING NOT NULL, + session_data STRING NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '':::STRING, + active BOOL NOT NULL DEFAULT true, + requested_audience STRING NULL DEFAULT '':::STRING, + granted_audience STRING NULL DEFAULT '':::STRING, + challenge_id VARCHAR(40) NULL, + nid UUID NOT NULL, + expires_at TIMESTAMP NULL, + CONSTRAINT "primary" PRIMARY KEY (signature ASC), + INDEX hydra_oauth2_oidc_client_id_idx (client_id ASC, nid ASC), + INDEX hydra_oauth2_oidc_challenge_id_idx (challenge_id ASC) +); +CREATE TABLE public.hydra_oauth2_pkce ( + signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope STRING NOT NULL, + granted_scope STRING NOT NULL, + form_data STRING NOT NULL, + session_data STRING NOT NULL, + subject VARCHAR(255) NOT NULL, + active BOOL NOT NULL DEFAULT true, + requested_audience STRING NULL DEFAULT '':::STRING, + granted_audience STRING NULL DEFAULT '':::STRING, + challenge_id VARCHAR(40) NULL, + nid UUID NOT NULL, + expires_at TIMESTAMP NULL, + CONSTRAINT "primary" PRIMARY KEY (signature ASC), + INDEX hydra_oauth2_pkce_client_id_idx (client_id ASC, nid ASC), + INDEX hydra_oauth2_pkce_challenge_id_idx (challenge_id ASC) +); +CREATE TABLE 
public.hydra_oauth2_jti_blacklist ( + signature VARCHAR(64) NOT NULL, + expires_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + nid UUID NOT NULL, + CONSTRAINT hydra_oauth2_jti_blacklist_pkey PRIMARY KEY (signature ASC, nid ASC), + INDEX hydra_oauth2_jti_blacklist_expires_at_idx (expires_at ASC, nid ASC) +); +CREATE TABLE public.hydra_oauth2_trusted_jwt_bearer_issuer ( + id UUID NOT NULL, + issuer VARCHAR(255) NOT NULL, + subject VARCHAR(255) NOT NULL, + scope STRING NOT NULL, + key_set VARCHAR(255) NOT NULL, + key_id VARCHAR(255) NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + expires_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + nid UUID NOT NULL, + allow_any_subject BOOL NOT NULL DEFAULT false, + CONSTRAINT "primary" PRIMARY KEY (id ASC), + INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx (expires_at ASC), + INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_idx (id ASC, nid ASC), + UNIQUE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx (nid ASC, key_id ASC, issuer ASC, subject ASC) +); +CREATE TABLE public.hydra_oauth2_device_auth_codes ( + device_code_signature VARCHAR(255) NOT NULL, + user_code_signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT now():::TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope VARCHAR(1024) NOT NULL, + granted_scope VARCHAR(1024) NOT NULL, + form_data VARCHAR(4096) NOT NULL, + session_data STRING NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '':::STRING, + device_code_active BOOL NOT NULL DEFAULT true, + user_code_state INT2 NOT NULL DEFAULT 0:::INT8, + requested_audience VARCHAR(1024) NOT NULL, + granted_audience VARCHAR(1024) NOT NULL, + challenge_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + nid UUID NOT NULL, + CONSTRAINT hydra_oauth2_device_auth_codes_pkey PRIMARY KEY (device_code_signature ASC, nid ASC), + INDEX hydra_oauth2_device_auth_codes_request_id_idx (request_id ASC, nid ASC), + INDEX hydra_oauth2_device_auth_codes_client_id_idx (client_id ASC, nid ASC), + INDEX hydra_oauth2_device_auth_codes_challenge_id_idx (challenge_id ASC), + UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx (nid ASC, user_code_signature ASC) +); +ALTER TABLE public.hydra_client ADD CONSTRAINT hydra_client_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_jwk ADD CONSTRAINT hydra_jwk_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_authentication_session ADD CONSTRAINT hydra_oauth2_authentication_session_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_logout_request ADD CONSTRAINT hydra_oauth2_logout_request_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_logout_request ADD CONSTRAINT hydra_oauth2_logout_request_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES 
public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_flow ADD CONSTRAINT hydra_oauth2_flow_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_flow ADD CONSTRAINT hydra_oauth2_flow_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_flow ADD CONSTRAINT hydra_oauth2_flow_login_session_id_fk FOREIGN KEY (login_session_id) REFERENCES public.hydra_oauth2_authentication_session(id) ON DELETE SET NULL; +ALTER TABLE public.hydra_oauth2_access ADD CONSTRAINT hydra_oauth2_access_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_access ADD CONSTRAINT hydra_oauth2_access_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_access ADD CONSTRAINT hydra_oauth2_access_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_refresh ADD CONSTRAINT hydra_oauth2_refresh_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_refresh ADD CONSTRAINT hydra_oauth2_refresh_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_refresh ADD CONSTRAINT hydra_oauth2_refresh_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_code ADD CONSTRAINT hydra_oauth2_code_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_code ADD CONSTRAINT hydra_oauth2_code_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_code ADD CONSTRAINT hydra_oauth2_code_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_oidc ADD CONSTRAINT hydra_oauth2_oidc_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_oidc ADD CONSTRAINT hydra_oauth2_oidc_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_oidc ADD CONSTRAINT hydra_oauth2_oidc_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_pkce ADD CONSTRAINT hydra_oauth2_pkce_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_pkce ADD CONSTRAINT hydra_oauth2_pkce_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_pkce ADD CONSTRAINT hydra_oauth2_pkce_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_jti_blacklist ADD CONSTRAINT hydra_oauth2_jti_blacklist_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT 
hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT fk_key_set_ref_hydra_jwk FOREIGN KEY (key_set, key_id, nid) REFERENCES public.hydra_jwk(sid, kid, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_device_auth_codes ADD CONSTRAINT hydra_oauth2_device_auth_codes_client_id_nid_fkey FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; +ALTER TABLE public.hydra_oauth2_device_auth_codes ADD CONSTRAINT hydra_oauth2_device_auth_codes_nid_fkey FOREIGN KEY (nid) REFERENCES public.networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; +ALTER TABLE public.hydra_oauth2_device_auth_codes ADD CONSTRAINT hydra_oauth2_device_auth_codes_challenge_id_fkey FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; +ALTER TABLE public.hydra_client VALIDATE CONSTRAINT hydra_client_nid_fk_idx; +ALTER TABLE public.hydra_jwk VALIDATE CONSTRAINT hydra_jwk_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_authentication_session VALIDATE CONSTRAINT hydra_oauth2_authentication_session_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_obfuscated_authentication_session VALIDATE CONSTRAINT hydra_oauth2_obfuscated_authentication_session_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_obfuscated_authentication_session VALIDATE CONSTRAINT hydra_oauth2_obfuscated_authentication_session_client_id_fk; +ALTER TABLE public.hydra_oauth2_logout_request VALIDATE CONSTRAINT hydra_oauth2_logout_request_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_logout_request VALIDATE CONSTRAINT hydra_oauth2_logout_request_client_id_fk; +ALTER TABLE public.hydra_oauth2_flow VALIDATE CONSTRAINT hydra_oauth2_flow_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_flow VALIDATE CONSTRAINT hydra_oauth2_flow_client_id_fk; +ALTER TABLE public.hydra_oauth2_flow VALIDATE CONSTRAINT hydra_oauth2_flow_login_session_id_fk; +ALTER TABLE public.hydra_oauth2_access VALIDATE CONSTRAINT hydra_oauth2_access_challenge_id_fk; +ALTER TABLE public.hydra_oauth2_access VALIDATE CONSTRAINT hydra_oauth2_access_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_access VALIDATE CONSTRAINT hydra_oauth2_access_client_id_fk; +ALTER TABLE public.hydra_oauth2_refresh VALIDATE CONSTRAINT hydra_oauth2_refresh_challenge_id_fk; +ALTER TABLE public.hydra_oauth2_refresh VALIDATE CONSTRAINT hydra_oauth2_refresh_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_refresh VALIDATE CONSTRAINT hydra_oauth2_refresh_client_id_fk; +ALTER TABLE public.hydra_oauth2_code VALIDATE CONSTRAINT hydra_oauth2_code_challenge_id_fk; +ALTER TABLE public.hydra_oauth2_code VALIDATE CONSTRAINT hydra_oauth2_code_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_code VALIDATE CONSTRAINT hydra_oauth2_code_client_id_fk; +ALTER TABLE public.hydra_oauth2_oidc VALIDATE CONSTRAINT hydra_oauth2_oidc_challenge_id_fk; +ALTER TABLE public.hydra_oauth2_oidc VALIDATE CONSTRAINT hydra_oauth2_oidc_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_oidc VALIDATE CONSTRAINT hydra_oauth2_oidc_client_id_fk; +ALTER TABLE public.hydra_oauth2_pkce VALIDATE CONSTRAINT hydra_oauth2_pkce_challenge_id_fk; +ALTER TABLE public.hydra_oauth2_pkce VALIDATE CONSTRAINT hydra_oauth2_pkce_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_pkce VALIDATE CONSTRAINT hydra_oauth2_pkce_client_id_fk; +ALTER TABLE public.hydra_oauth2_jti_blacklist VALIDATE CONSTRAINT hydra_oauth2_jti_blacklist_nid_fk_idx; +ALTER TABLE 
public.hydra_oauth2_trusted_jwt_bearer_issuer VALIDATE CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx; +ALTER TABLE public.hydra_oauth2_trusted_jwt_bearer_issuer VALIDATE CONSTRAINT fk_key_set_ref_hydra_jwk; +ALTER TABLE public.hydra_oauth2_device_auth_codes VALIDATE CONSTRAINT hydra_oauth2_device_auth_codes_client_id_nid_fkey; +ALTER TABLE public.hydra_oauth2_device_auth_codes VALIDATE CONSTRAINT hydra_oauth2_device_auth_codes_nid_fkey; +ALTER TABLE public.hydra_oauth2_device_auth_codes VALIDATE CONSTRAINT hydra_oauth2_device_auth_codes_challenge_id_fkey; + diff --git a/internal/testhelpers/sql_schemas/mysql_dump.sql b/internal/testhelpers/sql_schemas/mysql_dump.sql new file mode 100644 index 00000000000..46a6b435da4 --- /dev/null +++ b/internal/testhelpers/sql_schemas/mysql_dump.sql @@ -0,0 +1,486 @@ +-- migrations hash: 55b905a5cead652db5b4a5e317d60ad18bd9e6de61d4b43edcdb79244947fbf9b106783f10765f2ffa4d7952c745900c2406f522a101dbed17e5785396484e41 + + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; +/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; +/*!50503 SET NAMES utf8mb4 */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +DROP TABLE IF EXISTS `hydra_client`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_client` ( + `id` varchar(255) NOT NULL, + `client_name` text NOT NULL, + `client_secret` text NOT NULL, + `scope` text NOT NULL, + `owner` text NOT NULL, + `policy_uri` text NOT NULL, + `tos_uri` text NOT NULL, + `client_uri` text NOT NULL, + `logo_uri` text NOT NULL, + `client_secret_expires_at` int NOT NULL DEFAULT '0', + `sector_identifier_uri` text NOT NULL, + `jwks` text NOT NULL, + `jwks_uri` text NOT NULL, + `token_endpoint_auth_method` varchar(25) NOT NULL DEFAULT '', + `request_object_signing_alg` varchar(10) NOT NULL DEFAULT '', + `userinfo_signed_response_alg` varchar(10) NOT NULL DEFAULT '', + `subject_type` varchar(15) NOT NULL DEFAULT '', + `pk_deprecated` int unsigned DEFAULT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `frontchannel_logout_uri` text NOT NULL, + `frontchannel_logout_session_required` tinyint(1) NOT NULL DEFAULT '0', + `backchannel_logout_uri` text NOT NULL, + `backchannel_logout_session_required` tinyint(1) NOT NULL DEFAULT '0', + `metadata` text NOT NULL, + `token_endpoint_auth_signing_alg` varchar(10) NOT NULL DEFAULT '', + `authorization_code_grant_access_token_lifespan` bigint DEFAULT NULL, + `authorization_code_grant_id_token_lifespan` bigint DEFAULT NULL, + `authorization_code_grant_refresh_token_lifespan` bigint DEFAULT NULL, + `client_credentials_grant_access_token_lifespan` bigint DEFAULT NULL, + `implicit_grant_access_token_lifespan` bigint DEFAULT NULL, + `implicit_grant_id_token_lifespan` bigint DEFAULT NULL, + `jwt_bearer_grant_access_token_lifespan` bigint DEFAULT NULL, + `password_grant_access_token_lifespan` bigint DEFAULT NULL, + `password_grant_refresh_token_lifespan` bigint DEFAULT NULL, + `refresh_token_grant_id_token_lifespan` 
bigint DEFAULT NULL, + `refresh_token_grant_access_token_lifespan` bigint DEFAULT NULL, + `refresh_token_grant_refresh_token_lifespan` bigint DEFAULT NULL, + `pk` char(36) DEFAULT NULL, + `registration_access_token_signature` varchar(128) NOT NULL DEFAULT '', + `nid` char(36) NOT NULL, + `redirect_uris` json NOT NULL, + `grant_types` json NOT NULL, + `response_types` json NOT NULL, + `audience` json NOT NULL, + `allowed_cors_origins` json NOT NULL, + `contacts` json NOT NULL, + `request_uris` json NOT NULL, + `post_logout_redirect_uris` json NOT NULL DEFAULT (_utf8mb4'[]'), + `access_token_strategy` varchar(10) NOT NULL DEFAULT '', + `skip_consent` tinyint(1) NOT NULL DEFAULT '0', + `skip_logout_consent` tinyint(1) DEFAULT NULL, + `device_authorization_grant_id_token_lifespan` bigint DEFAULT NULL, + `device_authorization_grant_access_token_lifespan` bigint DEFAULT NULL, + `device_authorization_grant_refresh_token_lifespan` bigint DEFAULT NULL, + PRIMARY KEY (`id`,`nid`), + UNIQUE KEY `hydra_client_id_key` (`id`,`nid`), + KEY `pk_deprecated` (`pk_deprecated`), + KEY `hydra_client_nid_fk_idx` (`nid`), + CONSTRAINT `hydra_client_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_jwk`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_jwk` ( + `sid` varchar(255) NOT NULL, + `kid` varchar(255) CHARACTER SET ascii COLLATE ascii_general_ci NOT NULL, + `version` int NOT NULL DEFAULT '0', + `keydata` text NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `pk_deprecated` int unsigned NOT NULL AUTO_INCREMENT, + `pk` char(36) NOT NULL, + `nid` char(36) NOT NULL, + PRIMARY KEY (`pk`), + UNIQUE KEY `hydra_jwk_sid_kid_nid_key` (`sid`,`kid`,`nid`), + KEY `pk_deprecated` (`pk_deprecated`), + KEY `hydra_jwk_nid_sid_created_at_idx` (`nid`,`sid`,`created_at`), + KEY `hydra_jwk_nid_sid_kid_created_at_idx` (`nid`,`sid`,`kid`,`created_at`), + CONSTRAINT `hydra_jwk_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_access`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_access` ( + `signature` varchar(255) NOT NULL, + `request_id` varchar(40) NOT NULL DEFAULT '', + `requested_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `client_id` varchar(255) NOT NULL DEFAULT '', + `scope` text NOT NULL, + `granted_scope` text NOT NULL, + `form_data` text NOT NULL, + `session_data` text NOT NULL, + `subject` varchar(255) NOT NULL DEFAULT '', + `active` tinyint(1) NOT NULL DEFAULT '1', + `requested_audience` text NOT NULL, + `granted_audience` text NOT NULL, + `challenge_id` varchar(40) DEFAULT NULL, + `nid` char(36) NOT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + PRIMARY KEY (`signature`), + KEY `hydra_oauth2_access_challenge_id_idx` (`challenge_id`), + KEY `hydra_oauth2_access_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_access_client_id_fk` (`client_id`,`nid`), + KEY `hydra_oauth2_access_requested_at_idx` (`requested_at`,`nid`), + KEY `hydra_oauth2_access_client_id_subject_nid_idx` (`client_id`,`subject`,`nid`), + 
KEY `hydra_oauth2_access_request_id_idx` (`request_id`,`nid`), + CONSTRAINT `hydra_oauth2_access_challenge_id_fk` FOREIGN KEY (`challenge_id`) REFERENCES `hydra_oauth2_flow` (`consent_challenge_id`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_access_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_access_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_authentication_session`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_authentication_session` ( + `id` varchar(40) NOT NULL, + `authenticated_at` timestamp NULL DEFAULT NULL, + `subject` varchar(255) NOT NULL, + `remember` tinyint(1) NOT NULL DEFAULT '0', + `nid` char(36) NOT NULL, + `identity_provider_session_id` varchar(40) DEFAULT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + PRIMARY KEY (`id`), + KEY `hydra_oauth2_authentication_session_sub_idx` (`subject`), + KEY `hydra_oauth2_authentication_session_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_authentication_session_subject_nid_idx` (`subject`,`nid`), + CONSTRAINT `hydra_oauth2_authentication_session_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_code`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_code` ( + `signature` varchar(255) NOT NULL, + `request_id` varchar(40) NOT NULL DEFAULT '', + `requested_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `client_id` varchar(255) NOT NULL DEFAULT '', + `scope` text NOT NULL, + `granted_scope` text NOT NULL, + `form_data` text NOT NULL, + `session_data` text NOT NULL, + `subject` varchar(255) NOT NULL DEFAULT '', + `active` tinyint(1) NOT NULL DEFAULT '1', + `requested_audience` text NOT NULL, + `granted_audience` text NOT NULL, + `challenge_id` varchar(40) DEFAULT NULL, + `nid` char(36) NOT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + PRIMARY KEY (`signature`), + KEY `hydra_oauth2_code_challenge_id_idx` (`challenge_id`), + KEY `hydra_oauth2_code_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_code_client_id_fk` (`client_id`,`nid`), + CONSTRAINT `hydra_oauth2_code_challenge_id_fk` FOREIGN KEY (`challenge_id`) REFERENCES `hydra_oauth2_flow` (`consent_challenge_id`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_code_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_code_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_device_auth_codes`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_device_auth_codes` ( + `device_code_signature` varchar(255) NOT NULL, + `user_code_signature` varchar(255) NOT NULL, + `request_id` varchar(40) NOT NULL, + `requested_at` timestamp NOT NULL 
DEFAULT CURRENT_TIMESTAMP, + `client_id` varchar(255) NOT NULL, + `scope` varchar(1024) NOT NULL, + `granted_scope` varchar(1024) NOT NULL, + `form_data` varchar(4096) NOT NULL, + `session_data` text NOT NULL, + `subject` varchar(255) NOT NULL DEFAULT '', + `device_code_active` tinyint(1) NOT NULL DEFAULT '1', + `user_code_state` smallint NOT NULL DEFAULT '0', + `requested_audience` varchar(1024) NOT NULL, + `granted_audience` varchar(1024) NOT NULL, + `challenge_id` varchar(40) DEFAULT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + `nid` char(36) NOT NULL, + PRIMARY KEY (`device_code_signature`,`nid`), + UNIQUE KEY `hydra_oauth2_device_auth_codes_user_code_signature_idx` (`nid`,`user_code_signature`), + KEY `hydra_oauth2_device_auth_codes_request_id_idx` (`request_id`,`nid`), + KEY `hydra_oauth2_device_auth_codes_client_id_idx` (`client_id`,`nid`), + KEY `hydra_oauth2_device_auth_codes_challenge_id_idx` (`challenge_id`), + CONSTRAINT `hydra_oauth2_device_auth_codes_ibfk_1` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_device_auth_codes_ibfk_2` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT, + CONSTRAINT `hydra_oauth2_device_auth_codes_ibfk_3` FOREIGN KEY (`challenge_id`) REFERENCES `hydra_oauth2_flow` (`consent_challenge_id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_flow`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_flow` ( + `login_challenge` varchar(40) NOT NULL, + `login_verifier` varchar(40) DEFAULT NULL, + `login_csrf` varchar(40) DEFAULT NULL, + `subject` varchar(255) DEFAULT NULL, + `request_url` text, + `login_skip` tinyint(1) DEFAULT NULL, + `client_id` varchar(255) DEFAULT NULL, + `requested_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `login_initialized_at` timestamp NULL DEFAULT NULL, + `oidc_context` json DEFAULT NULL, + `login_session_id` varchar(40) DEFAULT NULL, + `state` smallint DEFAULT NULL, + `login_remember` tinyint(1) DEFAULT NULL, + `login_remember_for` int DEFAULT NULL, + `login_error` text, + `acr` text, + `login_authenticated_at` timestamp NULL DEFAULT NULL, + `login_was_used` tinyint(1) DEFAULT NULL, + `forced_subject_identifier` varchar(255) DEFAULT NULL, + `context` json DEFAULT NULL, + `consent_challenge_id` varchar(40) DEFAULT NULL, + `consent_skip` tinyint(1) DEFAULT NULL, + `consent_verifier` varchar(40) DEFAULT NULL, + `consent_csrf` varchar(40) DEFAULT NULL, + `consent_remember` tinyint(1) DEFAULT NULL, + `consent_remember_for` int DEFAULT NULL, + `consent_handled_at` timestamp NULL DEFAULT NULL, + `consent_error` text, + `session_access_token` json DEFAULT NULL, + `session_id_token` json DEFAULT NULL, + `consent_was_used` tinyint(1) DEFAULT NULL, + `nid` char(36) NOT NULL, + `requested_scope` json DEFAULT NULL, + `requested_at_audience` json DEFAULT NULL, + `amr` json DEFAULT NULL, + `granted_scope` json DEFAULT NULL, + `granted_at_audience` json DEFAULT NULL, + `login_extend_session_lifespan` tinyint(1) DEFAULT NULL, + `identity_provider_session_id` varchar(40) DEFAULT NULL, + `device_challenge_id` varchar(255) DEFAULT NULL, + `device_code_request_id` varchar(255) DEFAULT NULL, + `device_verifier` varchar(40) DEFAULT NULL, + `device_csrf` varchar(40) DEFAULT NULL, + `device_was_used` tinyint(1) 
DEFAULT NULL, + `device_handled_at` timestamp NULL DEFAULT NULL, + `device_error` varchar(2048) DEFAULT NULL, + `expires_at` timestamp GENERATED ALWAYS AS (if((`consent_remember_for` > 0),(`requested_at` + interval `consent_remember_for` second),NULL)) VIRTUAL NULL, + PRIMARY KEY (`login_challenge`), + UNIQUE KEY `hydra_oauth2_flow_consent_challenge_idx` (`consent_challenge_id`), + UNIQUE KEY `hydra_oauth2_flow_device_challenge_idx` (`device_challenge_id`), + KEY `hydra_oauth2_flow_login_session_id_idx` (`login_session_id`), + KEY `hydra_oauth2_flow_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_flow_client_id_subject_idx` (`client_id`,`nid`,`subject`), + KEY `hydra_oauth2_flow_sub_idx` (`subject`,`nid`), + KEY `hydra_oauth2_flow_previous_consents_idx` (`subject`,`client_id`,`nid`,`consent_skip`,`consent_error`(2),`consent_remember`), + CONSTRAINT `hydra_oauth2_flow_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_flow_login_session_id_fk` FOREIGN KEY (`login_session_id`) REFERENCES `hydra_oauth2_authentication_session` (`id`) ON DELETE SET NULL, + CONSTRAINT `hydra_oauth2_flow_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_jti_blacklist`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_jti_blacklist` ( + `signature` varchar(64) NOT NULL, + `expires_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `nid` char(36) NOT NULL, + PRIMARY KEY (`signature`,`nid`), + KEY `hydra_oauth2_jti_blacklist_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_jti_blacklist_expiry` (`expires_at`,`nid`), + CONSTRAINT `hydra_oauth2_jti_blacklist_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_logout_request`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_logout_request` ( + `challenge` varchar(36) NOT NULL, + `verifier` varchar(36) NOT NULL, + `subject` varchar(255) NOT NULL, + `sid` varchar(36) NOT NULL, + `client_id` varchar(255) DEFAULT NULL, + `request_url` text NOT NULL, + `redir_url` text NOT NULL, + `was_used` tinyint(1) NOT NULL DEFAULT '0', + `accepted` tinyint(1) NOT NULL DEFAULT '0', + `rejected` tinyint(1) NOT NULL DEFAULT '0', + `rp_initiated` tinyint(1) NOT NULL DEFAULT '0', + `nid` char(36) NOT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + `requested_at` timestamp NULL DEFAULT NULL, + PRIMARY KEY (`challenge`), + UNIQUE KEY `hydra_oauth2_logout_request_veri_idx` (`verifier`), + KEY `hydra_oauth2_logout_request_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_logout_request_client_id_fk` (`client_id`,`nid`), + CONSTRAINT `hydra_oauth2_logout_request_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_logout_request_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF 
EXISTS `hydra_oauth2_obfuscated_authentication_session`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_obfuscated_authentication_session` ( + `subject` varchar(255) NOT NULL, + `client_id` varchar(255) NOT NULL, + `subject_obfuscated` varchar(255) NOT NULL, + `nid` char(36) NOT NULL, + PRIMARY KEY (`subject`,`client_id`,`nid`), + UNIQUE KEY `hydra_oauth2_obfuscated_authentication_session_so_nid_idx` (`client_id`,`subject_obfuscated`,`nid`), + KEY `hydra_oauth2_obfuscated_authentication_session_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_obfuscated_authentication_session_client_id_fk` (`client_id`,`nid`), + CONSTRAINT `hydra_oauth2_obfuscated_authentication_session_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_obfuscated_authentication_session_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_oidc`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_oidc` ( + `signature` varchar(255) NOT NULL, + `request_id` varchar(40) NOT NULL DEFAULT '', + `requested_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `client_id` varchar(255) NOT NULL DEFAULT '', + `scope` text NOT NULL, + `granted_scope` text NOT NULL, + `form_data` text NOT NULL, + `session_data` text NOT NULL, + `subject` varchar(255) NOT NULL DEFAULT '', + `active` tinyint(1) NOT NULL DEFAULT '1', + `requested_audience` text NOT NULL, + `granted_audience` text NOT NULL, + `challenge_id` varchar(40) DEFAULT NULL, + `nid` char(36) NOT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + PRIMARY KEY (`signature`), + KEY `hydra_oauth2_oidc_client_id_idx` (`client_id`), + KEY `hydra_oauth2_oidc_challenge_id_idx` (`challenge_id`), + KEY `hydra_oauth2_oidc_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_oidc_client_id_fk` (`client_id`,`nid`), + CONSTRAINT `hydra_oauth2_oidc_challenge_id_fk` FOREIGN KEY (`challenge_id`) REFERENCES `hydra_oauth2_flow` (`consent_challenge_id`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_oidc_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_oidc_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_pkce`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_pkce` ( + `signature` varchar(255) NOT NULL, + `request_id` varchar(40) NOT NULL DEFAULT '', + `requested_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `client_id` varchar(255) NOT NULL DEFAULT '', + `scope` text NOT NULL, + `granted_scope` text NOT NULL, + `form_data` text NOT NULL, + `session_data` text NOT NULL, + `subject` varchar(255) NOT NULL, + `active` tinyint(1) NOT NULL DEFAULT '1', + `requested_audience` text NOT NULL, + `granted_audience` text NOT NULL, + `challenge_id` varchar(40) DEFAULT NULL, + `nid` char(36) NOT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + PRIMARY KEY (`signature`), + KEY 
`hydra_oauth2_pkce_client_id_idx` (`client_id`), + KEY `hydra_oauth2_pkce_challenge_id_idx` (`challenge_id`), + KEY `hydra_oauth2_pkce_nid_fk_idx` (`nid`), + KEY `hydra_oauth2_pkce_client_id_fk` (`client_id`,`nid`), + CONSTRAINT `hydra_oauth2_pkce_challenge_id_fk` FOREIGN KEY (`challenge_id`) REFERENCES `hydra_oauth2_flow` (`consent_challenge_id`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_pkce_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_pkce_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_refresh`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_refresh` ( + `signature` varchar(255) NOT NULL, + `request_id` varchar(40) NOT NULL DEFAULT '', + `requested_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `client_id` varchar(255) NOT NULL DEFAULT '', + `scope` text NOT NULL, + `granted_scope` text NOT NULL, + `form_data` text NOT NULL, + `session_data` text NOT NULL, + `subject` varchar(255) NOT NULL DEFAULT '', + `active` tinyint(1) NOT NULL DEFAULT '1', + `requested_audience` text NOT NULL, + `granted_audience` text NOT NULL, + `challenge_id` varchar(40) DEFAULT NULL, + `nid` char(36) NOT NULL, + `expires_at` timestamp NULL DEFAULT NULL, + `first_used_at` timestamp NULL DEFAULT NULL, + `access_token_signature` varchar(255) DEFAULT NULL, + `used_times` int DEFAULT NULL, + PRIMARY KEY (`signature`), + KEY `hydra_oauth2_refresh_client_id_idx` (`client_id`), + KEY `hydra_oauth2_refresh_challenge_id_idx` (`challenge_id`), + KEY `hydra_oauth2_refresh_client_id_fk` (`client_id`,`nid`), + KEY `hydra_oauth2_refresh_request_id_idx` (`request_id`), + KEY `hydra_oauth2_refresh_requested_at_idx` (`nid`,`requested_at`), + CONSTRAINT `hydra_oauth2_refresh_challenge_id_fk` FOREIGN KEY (`challenge_id`) REFERENCES `hydra_oauth2_flow` (`consent_challenge_id`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_refresh_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_refresh_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `hydra_oauth2_trusted_jwt_bearer_issuer`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `hydra_oauth2_trusted_jwt_bearer_issuer` ( + `id` varchar(36) NOT NULL, + `issuer` varchar(255) NOT NULL, + `subject` varchar(255) NOT NULL, + `scope` text NOT NULL, + `key_set` varchar(255) NOT NULL, + `key_id` varchar(255) CHARACTER SET ascii COLLATE ascii_general_ci NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `expires_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `nid` char(36) NOT NULL, + `allow_any_subject` tinyint(1) NOT NULL DEFAULT '0', + PRIMARY KEY (`id`), + UNIQUE KEY `hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx` (`nid`,`key_id`,`issuer`,`subject`), + KEY `hydra_oauth2_trusted_jwt_bearer_issuer_ibfk_1` (`key_set`,`key_id`,`nid`), + KEY `hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx` (`expires_at`), + KEY 
`hydra_oauth2_trusted_jwt_bearer_issuer_nid_idx` (`id`,`nid`), + CONSTRAINT `hydra_oauth2_trusted_jwt_bearer_issuer_ibfk_1` FOREIGN KEY (`key_set`, `key_id`, `nid`) REFERENCES `hydra_jwk` (`sid`, `kid`, `nid`) ON DELETE CASCADE, + CONSTRAINT `hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `networks`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `networks` ( + `id` char(36) NOT NULL, + `created_at` datetime NOT NULL, + `updated_at` datetime NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +DROP TABLE IF EXISTS `schema_migration`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!50503 SET character_set_client = utf8mb4 */; +CREATE TABLE `schema_migration` ( + `version` varchar(48) NOT NULL, + `version_self` int NOT NULL DEFAULT '0', + UNIQUE KEY `schema_migration_version_idx` (`version`), + KEY `schema_migration_version_self_idx` (`version_self`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; +/*!40101 SET character_set_client = @saved_cs_client */; +/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; + +/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; +/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; +/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; +/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; +/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; + diff --git a/internal/testhelpers/sql_schemas/postgres_dump.sql b/internal/testhelpers/sql_schemas/postgres_dump.sql new file mode 100644 index 00000000000..071e0691a35 --- /dev/null +++ b/internal/testhelpers/sql_schemas/postgres_dump.sql @@ -0,0 +1,603 @@ +-- migrations hash: 55b905a5cead652db5b4a5e317d60ad18bd9e6de61d4b43edcdb79244947fbf9b106783f10765f2ffa4d7952c745900c2406f522a101dbed17e5785396484e41 + + + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public; + +COMMENT ON EXTENSION "uuid-ossp" IS 'generate universally unique identifiers (UUIDs)'; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +CREATE TABLE public.hydra_client ( + id character varying(255) NOT NULL, + client_name text NOT NULL, + client_secret text NOT NULL, + scope text NOT NULL, + owner text NOT NULL, + policy_uri text NOT NULL, + tos_uri text NOT NULL, + client_uri text NOT NULL, + logo_uri text NOT NULL, + client_secret_expires_at integer DEFAULT 0 NOT NULL, + sector_identifier_uri text NOT NULL, + jwks text NOT NULL, + jwks_uri text NOT NULL, + token_endpoint_auth_method character varying(25) DEFAULT ''::character varying NOT NULL, + request_object_signing_alg character varying(10) DEFAULT ''::character varying NOT NULL, + userinfo_signed_response_alg character varying(10) DEFAULT ''::character 
varying NOT NULL, + subject_type character varying(15) DEFAULT ''::character varying NOT NULL, + pk_deprecated integer NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + updated_at timestamp without time zone DEFAULT now() NOT NULL, + frontchannel_logout_uri text DEFAULT ''::text NOT NULL, + frontchannel_logout_session_required boolean DEFAULT false NOT NULL, + backchannel_logout_uri text DEFAULT ''::text NOT NULL, + backchannel_logout_session_required boolean DEFAULT false NOT NULL, + metadata text NOT NULL, + token_endpoint_auth_signing_alg character varying(10) DEFAULT ''::character varying NOT NULL, + authorization_code_grant_access_token_lifespan bigint, + authorization_code_grant_id_token_lifespan bigint, + authorization_code_grant_refresh_token_lifespan bigint, + client_credentials_grant_access_token_lifespan bigint, + implicit_grant_access_token_lifespan bigint, + implicit_grant_id_token_lifespan bigint, + jwt_bearer_grant_access_token_lifespan bigint, + password_grant_access_token_lifespan bigint, + password_grant_refresh_token_lifespan bigint, + refresh_token_grant_id_token_lifespan bigint, + refresh_token_grant_access_token_lifespan bigint, + refresh_token_grant_refresh_token_lifespan bigint, + pk uuid, + registration_access_token_signature character varying(128) DEFAULT ''::character varying NOT NULL, + nid uuid NOT NULL, + redirect_uris jsonb NOT NULL, + grant_types jsonb NOT NULL, + response_types jsonb NOT NULL, + audience jsonb NOT NULL, + allowed_cors_origins jsonb NOT NULL, + contacts jsonb NOT NULL, + request_uris jsonb NOT NULL, + post_logout_redirect_uris jsonb DEFAULT '[]'::jsonb NOT NULL, + access_token_strategy character varying(10) DEFAULT ''::character varying NOT NULL, + skip_consent boolean DEFAULT false NOT NULL, + skip_logout_consent boolean, + device_authorization_grant_id_token_lifespan bigint, + device_authorization_grant_access_token_lifespan bigint, + device_authorization_grant_refresh_token_lifespan bigint +); + +ALTER TABLE public.hydra_client OWNER TO postgres; + +CREATE SEQUENCE public.hydra_client_pk_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.hydra_client_pk_seq OWNER TO postgres; + +ALTER SEQUENCE public.hydra_client_pk_seq OWNED BY public.hydra_client.pk_deprecated; + +CREATE TABLE public.hydra_jwk ( + sid character varying(255) NOT NULL, + kid character varying(255) NOT NULL, + version integer DEFAULT 0 NOT NULL, + keydata text NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + pk_deprecated integer NOT NULL, + pk uuid NOT NULL, + nid uuid NOT NULL +); + +ALTER TABLE public.hydra_jwk OWNER TO postgres; + +CREATE SEQUENCE public.hydra_jwk_pk_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.hydra_jwk_pk_seq OWNER TO postgres; + +ALTER SEQUENCE public.hydra_jwk_pk_seq OWNED BY public.hydra_jwk.pk_deprecated; + +CREATE TABLE public.hydra_oauth2_access ( + signature character varying(255) NOT NULL, + request_id character varying(40) NOT NULL, + requested_at timestamp without time zone DEFAULT now() NOT NULL, + client_id character varying(255) NOT NULL, + scope text NOT NULL, + granted_scope text NOT NULL, + form_data text NOT NULL, + session_data text NOT NULL, + subject character varying(255) DEFAULT ''::character varying NOT NULL, + active boolean DEFAULT true NOT NULL, + requested_audience text DEFAULT ''::text, + granted_audience text DEFAULT ''::text, + 
challenge_id character varying(40), + nid uuid NOT NULL, + expires_at timestamp without time zone +); + +ALTER TABLE public.hydra_oauth2_access OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_authentication_session ( + id character varying(40) NOT NULL, + authenticated_at timestamp without time zone, + subject character varying(255) NOT NULL, + remember boolean DEFAULT false NOT NULL, + nid uuid NOT NULL, + identity_provider_session_id character varying(40), + expires_at timestamp without time zone +); + +ALTER TABLE public.hydra_oauth2_authentication_session OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_code ( + signature character varying(255) NOT NULL, + request_id character varying(40) NOT NULL, + requested_at timestamp without time zone DEFAULT now() NOT NULL, + client_id character varying(255) NOT NULL, + scope text NOT NULL, + granted_scope text NOT NULL, + form_data text NOT NULL, + session_data text NOT NULL, + subject character varying(255) DEFAULT ''::character varying NOT NULL, + active boolean DEFAULT true NOT NULL, + requested_audience text DEFAULT ''::text, + granted_audience text DEFAULT ''::text, + challenge_id character varying(40), + nid uuid NOT NULL, + expires_at timestamp without time zone +); + +ALTER TABLE public.hydra_oauth2_code OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_device_auth_codes ( + device_code_signature character varying(255) NOT NULL, + user_code_signature character varying(255) NOT NULL, + request_id character varying(40) NOT NULL, + requested_at timestamp without time zone DEFAULT now() NOT NULL, + client_id character varying(255) NOT NULL, + scope character varying(1024) NOT NULL, + granted_scope character varying(1024) NOT NULL, + form_data character varying(4096) NOT NULL, + session_data text NOT NULL, + subject character varying(255) DEFAULT ''::character varying NOT NULL, + device_code_active boolean DEFAULT true NOT NULL, + user_code_state smallint DEFAULT 0 NOT NULL, + requested_audience character varying(1024) NOT NULL, + granted_audience character varying(1024) NOT NULL, + challenge_id character varying(40), + expires_at timestamp without time zone, + nid uuid NOT NULL +); + +ALTER TABLE public.hydra_oauth2_device_auth_codes OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_flow ( + login_challenge character varying(40) NOT NULL, + login_verifier character varying(40), + login_csrf character varying(40), + subject character varying(255), + request_url text, + login_skip boolean, + client_id character varying(255), + requested_at timestamp without time zone DEFAULT now() NOT NULL, + login_initialized_at timestamp without time zone, + oidc_context jsonb, + login_session_id character varying(40), + state integer, + login_remember boolean, + login_remember_for integer, + login_error text, + acr text, + login_authenticated_at timestamp without time zone, + login_was_used boolean, + forced_subject_identifier character varying(255), + context jsonb, + consent_challenge_id character varying(40), + consent_skip boolean, + consent_verifier character varying(40), + consent_csrf character varying(40), + consent_remember boolean, + consent_remember_for integer, + consent_handled_at timestamp without time zone, + consent_error text, + session_access_token jsonb, + session_id_token jsonb, + consent_was_used boolean, + nid uuid NOT NULL, + requested_scope jsonb, + requested_at_audience jsonb, + amr jsonb, + granted_scope jsonb, + granted_at_audience jsonb, + login_extend_session_lifespan boolean, + 
identity_provider_session_id character varying(40), + device_challenge_id character varying(255), + device_code_request_id character varying(255), + device_verifier character varying(40), + device_csrf character varying(40), + device_was_used boolean, + device_handled_at timestamp without time zone, + device_error character varying(2048), + expires_at timestamp without time zone GENERATED ALWAYS AS ( +CASE + WHEN (consent_remember_for > 0) THEN (requested_at + ((consent_remember_for)::double precision * '00:00:01'::interval)) + ELSE NULL::timestamp without time zone +END) STORED +); + +ALTER TABLE public.hydra_oauth2_flow OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_jti_blacklist ( + signature character varying(64) NOT NULL, + expires_at timestamp without time zone DEFAULT now() NOT NULL, + nid uuid NOT NULL +); + +ALTER TABLE public.hydra_oauth2_jti_blacklist OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_logout_request ( + challenge character varying(36) NOT NULL, + verifier character varying(36) NOT NULL, + subject character varying(255) NOT NULL, + sid character varying(36) NOT NULL, + client_id character varying(255), + request_url text NOT NULL, + redir_url text NOT NULL, + was_used boolean DEFAULT false NOT NULL, + accepted boolean DEFAULT false NOT NULL, + rejected boolean DEFAULT false NOT NULL, + rp_initiated boolean DEFAULT false NOT NULL, + nid uuid NOT NULL, + expires_at timestamp without time zone, + requested_at timestamp without time zone +); + +ALTER TABLE public.hydra_oauth2_logout_request OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_obfuscated_authentication_session ( + subject character varying(255) NOT NULL, + client_id character varying(255) NOT NULL, + subject_obfuscated character varying(255) NOT NULL, + nid uuid NOT NULL +); + +ALTER TABLE public.hydra_oauth2_obfuscated_authentication_session OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_oidc ( + signature character varying(255) NOT NULL, + request_id character varying(40) NOT NULL, + requested_at timestamp without time zone DEFAULT now() NOT NULL, + client_id character varying(255) NOT NULL, + scope text NOT NULL, + granted_scope text NOT NULL, + form_data text NOT NULL, + session_data text NOT NULL, + subject character varying(255) DEFAULT ''::character varying NOT NULL, + active boolean DEFAULT true NOT NULL, + requested_audience text DEFAULT ''::text, + granted_audience text DEFAULT ''::text, + challenge_id character varying(40), + nid uuid NOT NULL, + expires_at timestamp without time zone +); + +ALTER TABLE public.hydra_oauth2_oidc OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_pkce ( + signature character varying(255) NOT NULL, + request_id character varying(40) NOT NULL, + requested_at timestamp without time zone DEFAULT now() NOT NULL, + client_id character varying(255) NOT NULL, + scope text NOT NULL, + granted_scope text NOT NULL, + form_data text NOT NULL, + session_data text NOT NULL, + subject character varying(255) NOT NULL, + active boolean DEFAULT true NOT NULL, + requested_audience text DEFAULT ''::text, + granted_audience text DEFAULT ''::text, + challenge_id character varying(40), + nid uuid NOT NULL, + expires_at timestamp without time zone +); + +ALTER TABLE public.hydra_oauth2_pkce OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_refresh ( + signature character varying(255) NOT NULL, + request_id character varying(40) NOT NULL, + requested_at timestamp without time zone DEFAULT now() NOT NULL, + client_id character varying(255) NOT NULL, + 
scope text NOT NULL, + granted_scope text NOT NULL, + form_data text NOT NULL, + session_data text NOT NULL, + subject character varying(255) DEFAULT ''::character varying NOT NULL, + active boolean DEFAULT true NOT NULL, + requested_audience text DEFAULT ''::text, + granted_audience text DEFAULT ''::text, + challenge_id character varying(40), + nid uuid NOT NULL, + expires_at timestamp without time zone, + first_used_at timestamp without time zone, + access_token_signature character varying(255) DEFAULT NULL::character varying, + used_times integer +); + +ALTER TABLE public.hydra_oauth2_refresh OWNER TO postgres; + +CREATE TABLE public.hydra_oauth2_trusted_jwt_bearer_issuer ( + id uuid NOT NULL, + issuer character varying(255) NOT NULL, + subject character varying(255) NOT NULL, + scope text NOT NULL, + key_set character varying(255) NOT NULL, + key_id character varying(255) NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + expires_at timestamp without time zone DEFAULT now() NOT NULL, + nid uuid NOT NULL, + allow_any_subject boolean DEFAULT false NOT NULL +); + +ALTER TABLE public.hydra_oauth2_trusted_jwt_bearer_issuer OWNER TO postgres; + +CREATE TABLE public.networks ( + id uuid NOT NULL, + created_at timestamp without time zone NOT NULL, + updated_at timestamp without time zone NOT NULL +); + +ALTER TABLE public.networks OWNER TO postgres; + +CREATE TABLE public.schema_migration ( + version character varying(48) NOT NULL, + version_self integer DEFAULT 0 NOT NULL +); + +ALTER TABLE public.schema_migration OWNER TO postgres; + +ALTER TABLE ONLY public.hydra_client ALTER COLUMN pk_deprecated SET DEFAULT nextval('public.hydra_client_pk_seq'::regclass); + +ALTER TABLE ONLY public.hydra_jwk ALTER COLUMN pk_deprecated SET DEFAULT nextval('public.hydra_jwk_pk_seq'::regclass); + +ALTER TABLE ONLY public.hydra_client + ADD CONSTRAINT hydra_client_pkey PRIMARY KEY (id, nid); + +ALTER TABLE ONLY public.hydra_jwk + ADD CONSTRAINT hydra_jwk_pkey PRIMARY KEY (pk); + +ALTER TABLE ONLY public.hydra_oauth2_access + ADD CONSTRAINT hydra_oauth2_access_pkey PRIMARY KEY (signature); + +ALTER TABLE ONLY public.hydra_oauth2_authentication_session + ADD CONSTRAINT hydra_oauth2_authentication_session_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.hydra_oauth2_code + ADD CONSTRAINT hydra_oauth2_code_pkey PRIMARY KEY (signature); + +ALTER TABLE ONLY public.hydra_oauth2_device_auth_codes + ADD CONSTRAINT hydra_oauth2_device_auth_codes_pkey PRIMARY KEY (device_code_signature, nid); + +ALTER TABLE ONLY public.hydra_oauth2_flow + ADD CONSTRAINT hydra_oauth2_flow_pkey PRIMARY KEY (login_challenge); + +ALTER TABLE ONLY public.hydra_oauth2_jti_blacklist + ADD CONSTRAINT hydra_oauth2_jti_blacklist_pkey PRIMARY KEY (signature, nid); + +ALTER TABLE ONLY public.hydra_oauth2_logout_request + ADD CONSTRAINT hydra_oauth2_logout_request_pkey PRIMARY KEY (challenge); + +ALTER TABLE ONLY public.hydra_oauth2_obfuscated_authentication_session + ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_pkey PRIMARY KEY (subject, client_id, nid); + +ALTER TABLE ONLY public.hydra_oauth2_oidc + ADD CONSTRAINT hydra_oauth2_oidc_pkey PRIMARY KEY (signature); + +ALTER TABLE ONLY public.hydra_oauth2_pkce + ADD CONSTRAINT hydra_oauth2_pkce_pkey PRIMARY KEY (signature); + +ALTER TABLE ONLY public.hydra_oauth2_refresh + ADD CONSTRAINT hydra_oauth2_refresh_pkey PRIMARY KEY (signature); + +ALTER TABLE ONLY public.hydra_oauth2_trusted_jwt_bearer_issuer + ADD CONSTRAINT 
hydra_oauth2_trusted_jwt_bearer_issue_issuer_subject_key_id_key UNIQUE (issuer, subject, key_id, nid); + +ALTER TABLE ONLY public.hydra_oauth2_trusted_jwt_bearer_issuer + ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.networks + ADD CONSTRAINT networks_pkey PRIMARY KEY (id); + +CREATE UNIQUE INDEX hydra_client_idx_id_uq ON public.hydra_client USING btree (id, nid); + +CREATE INDEX hydra_jwk_nid_sid_created_at_idx ON public.hydra_jwk USING btree (nid, sid, created_at); + +CREATE INDEX hydra_jwk_nid_sid_kid_created_at_idx ON public.hydra_jwk USING btree (nid, sid, kid, created_at); + +CREATE UNIQUE INDEX hydra_jwk_sid_kid_nid_key ON public.hydra_jwk USING btree (sid, kid, nid); + +CREATE INDEX hydra_oauth2_access_challenge_id_idx ON public.hydra_oauth2_access USING btree (challenge_id); + +CREATE INDEX hydra_oauth2_access_client_id_idx ON public.hydra_oauth2_access USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_access_request_id_idx ON public.hydra_oauth2_access USING btree (request_id, nid); + +CREATE INDEX hydra_oauth2_access_requested_at_idx ON public.hydra_oauth2_access USING btree (requested_at, nid); + +CREATE INDEX hydra_oauth2_authentication_session_sub_idx ON public.hydra_oauth2_authentication_session USING btree (subject, nid); + +CREATE INDEX hydra_oauth2_code_challenge_id_idx ON public.hydra_oauth2_code USING btree (challenge_id, nid); + +CREATE INDEX hydra_oauth2_code_client_id_idx ON public.hydra_oauth2_code USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_device_auth_codes_challenge_id_idx ON public.hydra_oauth2_device_auth_codes USING btree (challenge_id); + +CREATE INDEX hydra_oauth2_device_auth_codes_client_id_idx ON public.hydra_oauth2_device_auth_codes USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_device_auth_codes_request_id_idx ON public.hydra_oauth2_device_auth_codes USING btree (request_id, nid); + +CREATE UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx ON public.hydra_oauth2_device_auth_codes USING btree (nid, user_code_signature); + +CREATE INDEX hydra_oauth2_flow_cid_idx ON public.hydra_oauth2_flow USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_flow_client_id_subject_idx ON public.hydra_oauth2_flow USING btree (client_id, nid, subject); + +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_challenge_idx ON public.hydra_oauth2_flow USING btree (consent_challenge_id); + +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON public.hydra_oauth2_flow USING btree (device_challenge_id); + +CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON public.hydra_oauth2_flow USING btree (login_session_id, nid); + +CREATE INDEX hydra_oauth2_flow_previous_consents_idx ON public.hydra_oauth2_flow USING btree (subject, client_id, nid, consent_skip, consent_error, consent_remember); + +CREATE INDEX hydra_oauth2_flow_sub_idx ON public.hydra_oauth2_flow USING btree (subject, nid); + +CREATE INDEX hydra_oauth2_jti_blacklist_expires_at_idx ON public.hydra_oauth2_jti_blacklist USING btree (expires_at, nid); + +CREATE INDEX hydra_oauth2_logout_request_client_id_idx ON public.hydra_oauth2_logout_request USING btree (client_id, nid); + +CREATE UNIQUE INDEX hydra_oauth2_logout_request_veri_idx ON public.hydra_oauth2_logout_request USING btree (verifier); + +CREATE UNIQUE INDEX hydra_oauth2_obfuscated_authentication_session_so_idx ON public.hydra_oauth2_obfuscated_authentication_session USING btree (client_id, subject_obfuscated, nid); + +CREATE INDEX 
hydra_oauth2_oidc_challenge_id_idx ON public.hydra_oauth2_oidc USING btree (challenge_id); + +CREATE INDEX hydra_oauth2_oidc_client_id_idx ON public.hydra_oauth2_oidc USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_pkce_challenge_id_idx ON public.hydra_oauth2_pkce USING btree (challenge_id); + +CREATE INDEX hydra_oauth2_pkce_client_id_idx ON public.hydra_oauth2_pkce USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_refresh_challenge_id_idx ON public.hydra_oauth2_refresh USING btree (challenge_id); + +CREATE INDEX hydra_oauth2_refresh_client_id_idx ON public.hydra_oauth2_refresh USING btree (client_id, nid); + +CREATE INDEX hydra_oauth2_refresh_request_id_idx ON public.hydra_oauth2_refresh USING btree (request_id); + +CREATE INDEX hydra_oauth2_refresh_requested_at_idx ON public.hydra_oauth2_refresh USING btree (nid, requested_at); + +CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx ON public.hydra_oauth2_trusted_jwt_bearer_issuer USING btree (expires_at); + +CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_idx ON public.hydra_oauth2_trusted_jwt_bearer_issuer USING btree (id, nid); + +CREATE UNIQUE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx ON public.hydra_oauth2_trusted_jwt_bearer_issuer USING btree (nid, key_id, issuer, subject); + +CREATE UNIQUE INDEX schema_migration_version_idx ON public.schema_migration USING btree (version); + +CREATE INDEX schema_migration_version_self_idx ON public.schema_migration USING btree (version_self); + +ALTER TABLE ONLY public.hydra_client + ADD CONSTRAINT hydra_client_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_jwk + ADD CONSTRAINT hydra_jwk_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_access + ADD CONSTRAINT hydra_oauth2_access_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_access + ADD CONSTRAINT hydra_oauth2_access_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_access + ADD CONSTRAINT hydra_oauth2_access_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_authentication_session + ADD CONSTRAINT hydra_oauth2_authentication_session_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_code + ADD CONSTRAINT hydra_oauth2_code_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_code + ADD CONSTRAINT hydra_oauth2_code_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_code + ADD CONSTRAINT hydra_oauth2_code_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_device_auth_codes + ADD CONSTRAINT hydra_oauth2_device_auth_codes_challenge_id_fkey FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_device_auth_codes + ADD CONSTRAINT hydra_oauth2_device_auth_codes_client_id_nid_fkey FOREIGN KEY 
(client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_device_auth_codes + ADD CONSTRAINT hydra_oauth2_device_auth_codes_nid_fkey FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_flow + ADD CONSTRAINT hydra_oauth2_flow_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_flow + ADD CONSTRAINT hydra_oauth2_flow_login_session_id_fk FOREIGN KEY (login_session_id) REFERENCES public.hydra_oauth2_authentication_session(id) ON DELETE SET NULL; + +ALTER TABLE ONLY public.hydra_oauth2_flow + ADD CONSTRAINT hydra_oauth2_flow_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_jti_blacklist + ADD CONSTRAINT hydra_oauth2_jti_blacklist_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_logout_request + ADD CONSTRAINT hydra_oauth2_logout_request_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_logout_request + ADD CONSTRAINT hydra_oauth2_logout_request_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_obfuscated_authentication_session + ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_obfuscated_authentication_session + ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_oidc + ADD CONSTRAINT hydra_oauth2_oidc_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_oidc + ADD CONSTRAINT hydra_oauth2_oidc_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_oidc + ADD CONSTRAINT hydra_oauth2_oidc_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_pkce + ADD CONSTRAINT hydra_oauth2_pkce_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_pkce + ADD CONSTRAINT hydra_oauth2_pkce_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_pkce + ADD CONSTRAINT hydra_oauth2_pkce_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_refresh + ADD CONSTRAINT hydra_oauth2_refresh_challenge_id_fk FOREIGN KEY (challenge_id) REFERENCES public.hydra_oauth2_flow(consent_challenge_id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_refresh + ADD CONSTRAINT hydra_oauth2_refresh_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES public.hydra_client(id, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_refresh + ADD CONSTRAINT hydra_oauth2_refresh_nid_fk_idx FOREIGN KEY (nid) REFERENCES 
public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_trusted_jwt_bearer_issuer + ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_key_set_fkey FOREIGN KEY (key_set, key_id, nid) REFERENCES public.hydra_jwk(sid, kid, nid) ON DELETE CASCADE; + +ALTER TABLE ONLY public.hydra_oauth2_trusted_jwt_bearer_issuer + ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx FOREIGN KEY (nid) REFERENCES public.networks(id) ON UPDATE RESTRICT ON DELETE CASCADE; + +SET search_path TO public; diff --git a/internal/testhelpers/sql_schemas/sqlite3_dump.sql b/internal/testhelpers/sql_schemas/sqlite3_dump.sql new file mode 100644 index 00000000000..76af9d2d5ab --- /dev/null +++ b/internal/testhelpers/sql_schemas/sqlite3_dump.sql @@ -0,0 +1,329 @@ +-- migrations hash: 55b905a5cead652db5b4a5e317d60ad18bd9e6de61d4b43edcdb79244947fbf9b106783f10765f2ffa4d7952c745900c2406f522a101dbed17e5785396484e41 + +CREATE TABLE "hydra_client" +( + id VARCHAR(255) NOT NULL, + client_name TEXT NOT NULL, + client_secret TEXT NOT NULL, + redirect_uris TEXT NOT NULL, + grant_types TEXT NOT NULL, + response_types TEXT NOT NULL, + scope TEXT NOT NULL, + owner TEXT NOT NULL, + policy_uri TEXT NOT NULL, + tos_uri TEXT NOT NULL, + client_uri TEXT NOT NULL, + logo_uri TEXT NOT NULL, + contacts TEXT NOT NULL, + client_secret_expires_at INTEGER NOT NULL DEFAULT 0, + sector_identifier_uri TEXT NOT NULL, + jwks TEXT NOT NULL, + jwks_uri TEXT NOT NULL, + request_uris TEXT NOT NULL, + token_endpoint_auth_method VARCHAR(25) NOT NULL DEFAULT '', + request_object_signing_alg VARCHAR(10) NOT NULL DEFAULT '', + userinfo_signed_response_alg VARCHAR(10) NOT NULL DEFAULT '', + subject_type VARCHAR(15) NOT NULL DEFAULT '', + allowed_cors_origins TEXT NOT NULL, + pk TEXT NULL, + pk_deprecated INTEGER NULL DEFAULT NULL, + audience TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + frontchannel_logout_uri TEXT NOT NULL DEFAULT '', + frontchannel_logout_session_required INTEGER NOT NULL DEFAULT false, + post_logout_redirect_uris TEXT NOT NULL DEFAULT '', + backchannel_logout_uri TEXT NOT NULL DEFAULT '', + backchannel_logout_session_required INTEGER NOT NULL DEFAULT false, + metadata TEXT NOT NULL DEFAULT '{}', + token_endpoint_auth_signing_alg VARCHAR(10) NOT NULL DEFAULT '', + registration_access_token_signature VARCHAR(128) NOT NULL DEFAULT '', + access_token_strategy VARCHAR(10) NOT NULL DEFAULT '', + authorization_code_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + authorization_code_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + authorization_code_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + client_credentials_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + implicit_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + implicit_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + jwt_bearer_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + password_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + password_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + skip_consent BOOLEAN NOT NULL DEFAULT false, + nid CHAR(36) NOT NULL, skip_logout_consent BOOLEAN NULL, device_authorization_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, 
device_authorization_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, device_authorization_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + PRIMARY KEY (id, nid) +); +CREATE TABLE "hydra_jwk" ( + sid VARCHAR(255) NOT NULL, + kid VARCHAR(255) NOT NULL, + nid CHAR(36) NOT NULL, + version INTEGER DEFAULT 0 NOT NULL, + keydata TEXT NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + pk TEXT PRIMARY KEY, + pk_deprecated INTEGER NULL DEFAULT NULL, + CHECK (nid != '00000000-0000-0000-0000-000000000000') +); +CREATE INDEX hydra_jwk_nid_sid_created_at_idx ON hydra_jwk (nid, sid, created_at); +CREATE INDEX hydra_jwk_nid_sid_kid_created_at_idx ON hydra_jwk (nid, sid, kid, created_at); +CREATE UNIQUE INDEX hydra_jwk_sid_kid_nid_key ON hydra_jwk (sid, kid, nid); +CREATE TABLE "hydra_oauth2_access" ( + signature VARCHAR(255) NOT NULL PRIMARY KEY, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope TEXT NOT NULL, + granted_scope TEXT NOT NULL, + form_data TEXT NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + active INTEGER NOT NULL DEFAULT true, + requested_audience TEXT NULL DEFAULT '', + granted_audience TEXT NULL DEFAULT '', + challenge_id VARCHAR(40) NULL REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + nid CHAR(36) NOT NULL, expires_at TIMESTAMP NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_access_challenge_id_idx ON hydra_oauth2_access (challenge_id, nid); +CREATE INDEX hydra_oauth2_access_client_id_idx ON hydra_oauth2_access (client_id, nid); +CREATE INDEX hydra_oauth2_access_request_id_idx ON hydra_oauth2_access (request_id, nid); +CREATE INDEX hydra_oauth2_access_requested_at_idx ON hydra_oauth2_access (requested_at, nid); +CREATE TABLE "hydra_oauth2_authentication_session" ( + id VARCHAR(40) NOT NULL PRIMARY KEY, + authenticated_at TIMESTAMP NULL, + subject VARCHAR(255) NOT NULL, + nid CHAR(36) NOT NULL, + remember INTEGER NOT NULL DEFAULT false, identity_provider_session_id VARCHAR(40), expires_at TIMESTAMP NULL, + CHECK (nid != '00000000-0000-0000-0000-000000000000') +); +CREATE INDEX hydra_oauth2_authentication_session_subject_idx ON hydra_oauth2_authentication_session (subject, nid); +CREATE TABLE "hydra_oauth2_code" ( + signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope TEXT NOT NULL, + granted_scope TEXT NOT NULL, + form_data TEXT NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + active INTEGER NOT NULL DEFAULT true, + requested_audience TEXT NULL DEFAULT '', + granted_audience TEXT NULL DEFAULT '', + challenge_id VARCHAR(40) NULL REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + nid CHAR(36) NOT NULL, expires_at TIMESTAMP NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_code_challenge_id_idx ON hydra_oauth2_code (challenge_id, nid); +CREATE INDEX hydra_oauth2_code_client_id_idx ON hydra_oauth2_code (client_id, nid); +CREATE TABLE hydra_oauth2_device_auth_codes +( + device_code_signature VARCHAR(255) NOT NULL, + user_code_signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope 
VARCHAR(1024) NOT NULL, + granted_scope VARCHAR(1024) NOT NULL, + form_data VARCHAR(4096) NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + device_code_active BOOL NOT NULL DEFAULT true, + user_code_state SMALLINT NOT NULL DEFAULT 0, + requested_audience VARCHAR(1024) NOT NULL, + granted_audience VARCHAR(1024) NOT NULL, + challenge_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + nid UUID NOT NULL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + FOREIGN KEY (nid) REFERENCES networks (id) ON UPDATE RESTRICT ON DELETE CASCADE, + FOREIGN KEY (challenge_id) REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + PRIMARY KEY (device_code_signature, nid) +); +CREATE INDEX hydra_oauth2_device_auth_codes_challenge_id_idx ON hydra_oauth2_device_auth_codes (challenge_id); +CREATE INDEX hydra_oauth2_device_auth_codes_client_id_idx ON hydra_oauth2_device_auth_codes (client_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_request_id_idx ON hydra_oauth2_device_auth_codes (request_id, nid); +CREATE UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx ON hydra_oauth2_device_auth_codes (nid, user_code_signature); +CREATE TABLE "hydra_oauth2_flow" ( + login_challenge VARCHAR(40) NOT NULL PRIMARY KEY, + nid CHAR(36) NOT NULL, + requested_scope TEXT NULL, + login_verifier VARCHAR(40) NULL, + login_csrf VARCHAR(40) NULL, + subject VARCHAR(255) NULL, + request_url TEXT NULL, + login_skip INTEGER NULL, + client_id VARCHAR(255) NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + oidc_context TEXT NULL, + login_session_id VARCHAR(40) NULL REFERENCES hydra_oauth2_authentication_session (id) ON DELETE SET NULL, + requested_at_audience TEXT NULL, + login_initialized_at TIMESTAMP NULL, + + state INTEGER NULL, + + login_remember INTEGER NULL, + login_remember_for INTEGER NULL, + login_error TEXT NULL, + acr TEXT NULL, + login_authenticated_at TIMESTAMP NULL, + login_was_used INTEGER NULL, + forced_subject_identifier VARCHAR(255) NULL, + context TEXT NULL, + amr TEXT NULL, + + consent_challenge_id VARCHAR(40) NULL, + consent_skip INTEGER NULL, + consent_verifier VARCHAR(40) NULL, + consent_csrf VARCHAR(40) NULL, + + granted_scope TEXT NULL, + granted_at_audience TEXT NULL, + consent_remember INTEGER NULL, + consent_remember_for INTEGER NULL, + consent_handled_at TIMESTAMP NULL, + consent_was_used INTEGER NULL, + consent_error TEXT NULL, + session_id_token TEXT NULL, + session_access_token TEXT NULL, + login_extend_session_lifespan BOOLEAN NULL, + identity_provider_session_id VARCHAR(40) NULL, + device_challenge_id VARCHAR(255) NULL, + device_code_request_id VARCHAR(255) NULL, + device_verifier VARCHAR(40) NULL, + device_csrf VARCHAR(40) NULL, + device_was_used BOOLEAN NULL, + device_handled_at TIMESTAMP NULL, + device_error VARCHAR(2048) NULL, + expires_at TIMESTAMP GENERATED ALWAYS AS (IF(consent_remember_for > 0, + datetime(requested_at, '+' || consent_remember_for || ' seconds'), + NULL)) VIRTUAL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_flow_client_id_idx ON hydra_oauth2_flow (client_id, nid); +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_challenge_id_idx ON hydra_oauth2_flow (consent_challenge_id); +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); +CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow (login_session_id); +CREATE INDEX 
hydra_oauth2_flow_previous_consents_idx ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, + consent_error, consent_remember); +CREATE INDEX hydra_oauth2_flow_subject_idx ON hydra_oauth2_flow (subject, nid); +CREATE TABLE "hydra_oauth2_jti_blacklist" ( + signature VARCHAR(64) NOT NULL, + expires_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + nid CHAR(36) NOT NULL, + CHECK (nid != '00000000-0000-0000-0000-000000000000'), + PRIMARY KEY (signature, nid) +); +CREATE INDEX hydra_oauth2_jti_blacklist_expires_at_idx ON hydra_oauth2_jti_blacklist (expires_at, nid); +CREATE TABLE "hydra_oauth2_logout_request" ( + challenge VARCHAR(36) NOT NULL PRIMARY KEY, + verifier VARCHAR(36) NOT NULL, + subject VARCHAR(255) NOT NULL, + sid VARCHAR(36) NOT NULL, + client_id VARCHAR(255) NULL, + nid CHAR(36) NOT NULL, + request_url TEXT NOT NULL, + redir_url TEXT NOT NULL, + was_used INTEGER NOT NULL DEFAULT false, + accepted INTEGER NOT NULL DEFAULT false, + rejected INTEGER NOT NULL DEFAULT false, + rp_initiated INTEGER NOT NULL DEFAULT false, expires_at timestamp NULL, requested_at timestamp NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + UNIQUE (verifier) +); +CREATE INDEX hydra_oauth2_logout_request_client_id_idx ON hydra_oauth2_logout_request (client_id, nid); +CREATE TABLE "hydra_oauth2_obfuscated_authentication_session" ( + subject VARCHAR(255) NOT NULL, + client_id VARCHAR(255) NOT NULL, + subject_obfuscated VARCHAR(255) NOT NULL, + nid CHAR(36) NOT NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + PRIMARY KEY (subject, client_id, nid) +); +CREATE UNIQUE INDEX hydra_oauth2_obfuscated_authentication_session_client_id_subject_obfuscated_idx ON hydra_oauth2_obfuscated_authentication_session (client_id, subject_obfuscated, nid); +CREATE TABLE "hydra_oauth2_oidc" ( + signature VARCHAR(255) NOT NULL PRIMARY KEY, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope TEXT NOT NULL, + granted_scope TEXT NOT NULL, + form_data TEXT NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + active INTEGER NOT NULL DEFAULT true, + requested_audience TEXT NULL DEFAULT '', + granted_audience TEXT NULL DEFAULT '', + challenge_id VARCHAR(40) NULL REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + nid CHAR(36) NOT NULL, expires_at TIMESTAMP NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_oidc_challenge_id_idx ON hydra_oauth2_oidc (challenge_id, nid); +CREATE INDEX hydra_oauth2_oidc_client_id_idx ON hydra_oauth2_oidc (client_id, nid); +CREATE TABLE "hydra_oauth2_pkce" ( + signature VARCHAR(255) NOT NULL PRIMARY KEY, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope TEXT NOT NULL, + granted_scope TEXT NOT NULL, + form_data TEXT NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL, + active INTEGER NOT NULL DEFAULT true, + requested_audience TEXT NULL DEFAULT '', + granted_audience TEXT NULL DEFAULT '', + challenge_id VARCHAR(40) NULL REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + nid CHAR(36) NOT NULL, expires_at TIMESTAMP NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_pkce_challenge_id_idx ON hydra_oauth2_pkce 
(challenge_id, nid); +CREATE INDEX hydra_oauth2_pkce_client_id_idx ON hydra_oauth2_pkce (client_id, nid); +CREATE TABLE "hydra_oauth2_refresh" ( + signature VARCHAR(255) NOT NULL PRIMARY KEY, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope TEXT NOT NULL, + granted_scope TEXT NOT NULL, + form_data TEXT NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + active INTEGER NOT NULL DEFAULT true, + requested_audience TEXT NULL DEFAULT '', + granted_audience TEXT NULL DEFAULT '', + challenge_id VARCHAR(40) NULL REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + nid CHAR(36) NOT NULL, expires_at TIMESTAMP NULL, first_used_at TIMESTAMP DEFAULT NULL, access_token_signature VARCHAR(255) DEFAULT NULL, used_times INT NULL, + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_refresh_challenge_id_idx ON hydra_oauth2_refresh (challenge_id, nid); +CREATE INDEX hydra_oauth2_refresh_client_id_idx ON hydra_oauth2_refresh (client_id, nid); +CREATE INDEX hydra_oauth2_refresh_request_id_idx ON hydra_oauth2_refresh (request_id, nid); +CREATE INDEX hydra_oauth2_refresh_requested_at_idx ON hydra_oauth2_refresh (nid, requested_at); +CREATE TABLE "hydra_oauth2_trusted_jwt_bearer_issuer" ( + id VARCHAR(36) PRIMARY KEY, + issuer VARCHAR(255) NOT NULL, + subject VARCHAR(255) NOT NULL, + scope TEXT NOT NULL, + key_set varchar(255) NOT NULL, + key_id varchar(255) NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + expires_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + nid CHAR(36) NOT NULL, allow_any_subject INTEGER NOT NULL DEFAULT FALSE, + UNIQUE (issuer, subject, key_id, nid), + FOREIGN KEY (key_set, key_id, nid) REFERENCES hydra_jwk (sid, kid, nid) ON DELETE CASCADE +); +CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (expires_at); +CREATE UNIQUE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (nid ASC, key_id ASC, issuer ASC, subject ASC); +CREATE TABLE "networks" ( + "id" TEXT PRIMARY KEY, + "created_at" DATETIME NOT NULL, + "updated_at" DATETIME NOT NULL +); +CREATE TABLE schema_migration (version VARCHAR (48) NOT NULL, version_self INT NOT NULL DEFAULT 0); +CREATE UNIQUE INDEX schema_migration_version_idx ON schema_migration (version); +CREATE INDEX schema_migration_version_self_idx ON schema_migration (version_self); diff --git a/internal/testhelpers/uuid/uuid.go b/internal/testhelpers/uuid/uuid.go index 8fae25139ab..b8800b384e5 100644 --- a/internal/testhelpers/uuid/uuid.go +++ b/internal/testhelpers/uuid/uuid.go @@ -11,7 +11,16 @@ import ( ) // AssertUUID helper requires that a UUID is non-zero, common version/variant used in Hydra. 
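// A minimal usage sketch for the generic form introduced below (illustrative
// only, not part of this diff; it assumes the gofrs uuid package implied by
// uuid.FromString, uuid.V4 and uuid.VariantRFC4122):
//
//	id := uuid.Must(uuid.NewV4())
//	AssertUUID(t, id)          // accepts a uuid.UUID directly
//	AssertUUID(t, id.String()) // accepts a string; parse failures fail the test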
-func AssertUUID(t *testing.T, id *uuid.UUID) { - require.Equal(t, id.Version(), uuid.V4) - require.Equal(t, id.Variant(), uuid.VariantRFC4122) +func AssertUUID[T string | uuid.UUID](t *testing.T, id T) { + var uid uuid.UUID + switch idt := any(id).(type) { + case uuid.UUID: + uid = idt + case string: + var err error + uid, err = uuid.FromString(idt) + require.NoError(t, err) + } + require.Equal(t, uid.Version(), uuid.V4) + require.Equal(t, uid.Variant(), uuid.VariantRFC4122) } diff --git a/jwk/aead.go b/jwk/aead.go deleted file mode 100644 index a3f34728ea6..00000000000 --- a/jwk/aead.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package jwk - -import ( - "context" - "encoding/base64" - - "github.com/ory/x/errorsx" - - "github.com/ory/hydra/driver/config" - - "github.com/gtank/cryptopasta" - "github.com/pkg/errors" -) - -type AEAD struct { - c *config.DefaultProvider -} - -func NewAEAD(c *config.DefaultProvider) *AEAD { - return &AEAD{c: c} -} - -func aeadKey(key []byte) *[32]byte { - var result [32]byte - copy(result[:], key[:32]) - return &result -} - -func (c *AEAD) Encrypt(ctx context.Context, plaintext []byte) (string, error) { - global, err := c.c.GetGlobalSecret(ctx) - if err != nil { - return "", err - } - - rotated, err := c.c.GetRotatedGlobalSecrets(ctx) - if err != nil { - return "", err - } - - keys := append([][]byte{global}, rotated...) - if len(keys) == 0 { - return "", errors.Errorf("at least one encryption key must be defined but none were") - } - - if len(keys[0]) < 32 { - return "", errors.Errorf("key must be exactly 32 long bytes, got %d bytes", len(keys[0])) - } - - ciphertext, err := cryptopasta.Encrypt(plaintext, aeadKey(keys[0])) - if err != nil { - return "", errorsx.WithStack(err) - } - - return base64.URLEncoding.EncodeToString(ciphertext), nil -} - -func (c *AEAD) Decrypt(ctx context.Context, ciphertext string) (p []byte, err error) { - global, err := c.c.GetGlobalSecret(ctx) - if err != nil { - return nil, err - } - - rotated, err := c.c.GetRotatedGlobalSecrets(ctx) - if err != nil { - return nil, err - } - - keys := append([][]byte{global}, rotated...) - if len(keys) == 0 { - return nil, errors.Errorf("at least one decryption key must be defined but none were") - } - - for _, key := range keys { - if p, err = c.decrypt(ciphertext, key); err == nil { - return p, nil - } - } - - return nil, err -} - -func (c *AEAD) decrypt(ciphertext string, key []byte) ([]byte, error) { - if len(key) != 32 { - return nil, errors.Errorf("key must be exactly 32 long bytes, got %d bytes", len(key)) - } - - raw, err := base64.URLEncoding.DecodeString(ciphertext) - if err != nil { - return nil, errorsx.WithStack(err) - } - - plaintext, err := cryptopasta.Decrypt(raw, aeadKey(key)) - if err != nil { - return nil, errorsx.WithStack(err) - } - - return plaintext, nil -} diff --git a/jwk/aead_test.go b/jwk/aead_test.go deleted file mode 100644 index 7b69a644f77..00000000000 --- a/jwk/aead_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package jwk_test - -import ( - "context" - "crypto/rand" - "fmt" - "io" - "testing" - - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - . 
"github.com/ory/hydra/jwk" - - "github.com/pborman/uuid" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func secret(t *testing.T) string { - bytes := make([]byte, 32) - _, err := io.ReadFull(rand.Reader, bytes) - require.NoError(t, err) - return fmt.Sprintf("%X", bytes) -} - -func TestAEAD(t *testing.T) { - ctx := context.Background() - c := internal.NewConfigurationWithDefaults() - t.Run("case=without-rotation", func(t *testing.T) { - c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t)}) - a := NewAEAD(c) - - plain := []byte(uuid.New()) - ct, err := a.Encrypt(ctx, plain) - assert.NoError(t, err) - - res, err := a.Decrypt(ctx, ct) - assert.NoError(t, err) - assert.Equal(t, plain, res) - }) - - t.Run("case=wrong-secret", func(t *testing.T) { - c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t)}) - a := NewAEAD(c) - - ct, err := a.Encrypt(ctx, []byte(uuid.New())) - require.NoError(t, err) - - c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t)}) - _, err = a.Decrypt(ctx, ct) - require.Error(t, err) - }) - - t.Run("case=with-rotation", func(t *testing.T) { - old := secret(t) - c.MustSet(ctx, config.KeyGetSystemSecret, []string{old}) - a := NewAEAD(c) - - plain := []byte(uuid.New()) - ct, err := a.Encrypt(ctx, plain) - require.NoError(t, err) - - // Sets the old secret as a rotated secret and creates a new one. - c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t), old}) - res, err := a.Decrypt(ctx, ct) - require.NoError(t, err) - assert.Equal(t, plain, res) - - // THis should also work when we re-encrypt the same plain text. - ct2, err := a.Encrypt(ctx, plain) - require.NoError(t, err) - assert.NotEqual(t, ct2, ct) - - res, err = a.Decrypt(ctx, ct) - require.NoError(t, err) - assert.Equal(t, plain, res) - }) - - t.Run("case=with-rotation-wrong-secret", func(t *testing.T) { - c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t)}) - a := NewAEAD(c) - - plain := []byte(uuid.New()) - ct, err := a.Encrypt(ctx, plain) - require.NoError(t, err) - - // When the secrets do not match, an error should be thrown during decryption. 
- c.MustSet(ctx, config.KeyGetSystemSecret, []string{secret(t), secret(t)}) - _, err = a.Decrypt(ctx, ct) - require.Error(t, err) - }) -} diff --git a/jwk/cast.go b/jwk/cast.go index 7fc2b2c8906..1b41b407852 100644 --- a/jwk/cast.go +++ b/jwk/cast.go @@ -8,8 +8,8 @@ import ( "github.com/ory/x/josex" + jose "github.com/go-jose/go-jose/v3" "github.com/pkg/errors" - jose "gopkg.in/square/go-jose.v2" ) func MustRSAPublic(key *jose.JSONWebKey) *rsa.PublicKey { diff --git a/jwk/cast_test.go b/jwk/cast_test.go index d55b81ba518..7f2b369b444 100644 --- a/jwk/cast_test.go +++ b/jwk/cast_test.go @@ -4,17 +4,17 @@ package jwk import ( - "context" "testing" + "github.com/go-jose/go-jose/v3" "github.com/stretchr/testify/require" - "gopkg.in/square/go-jose.v2" "github.com/stretchr/testify/assert" ) func TestMustRSAPrivate(t *testing.T) { - keys, err := GenerateJWK(context.Background(), jose.RS256, "foo", "sig") + t.Parallel() + keys, err := GenerateJWK(jose.RS256, "foo", "sig") require.NoError(t, err) priv := keys.Key("foo")[0] diff --git a/jwk/generate.go b/jwk/generate.go index e5a30592cc7..1b65de3b4e3 100644 --- a/jwk/generate.go +++ b/jwk/generate.go @@ -4,18 +4,16 @@ package jwk import ( - "context" "crypto/x509" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" - "github.com/pkg/errors" - "gopkg.in/square/go-jose.v2" "github.com/ory/x/josex" ) -func GenerateJWK(ctx context.Context, alg jose.SignatureAlgorithm, kid, use string) (*jose.JSONWebKeySet, error) { +func GenerateJWK(alg jose.SignatureAlgorithm, kid, use string) (*jose.JSONWebKeySet, error) { bits := 0 if alg == jose.RS256 || alg == jose.RS384 || alg == jose.RS512 { bits = 4096 diff --git a/jwk/generate_test.go b/jwk/generate_test.go index 01a47d4ec67..76d83dda958 100644 --- a/jwk/generate_test.go +++ b/jwk/generate_test.go @@ -4,16 +4,16 @@ package jwk import ( - "context" "testing" + "github.com/go-jose/go-jose/v3" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "gopkg.in/square/go-jose.v2" ) func TestGenerateJWK(t *testing.T) { - jwks, err := GenerateJWK(context.Background(), jose.RS256, "", "") + t.Parallel() + jwks, err := GenerateJWK(jose.RS256, "", "") require.NoError(t, err) assert.NotEmpty(t, jwks.Keys[0].KeyID) assert.EqualValues(t, jose.RS256, jwks.Keys[0].Algorithm) diff --git a/jwk/generator.go b/jwk/generator.go index 03bdbd6a82d..99522fc4349 100644 --- a/jwk/generator.go +++ b/jwk/generator.go @@ -3,7 +3,7 @@ package jwk -import jose "gopkg.in/square/go-jose.v2" +import jose "github.com/go-jose/go-jose/v3" type KeyGenerator interface { Generate(id, use string) (*jose.JSONWebKeySet, error) diff --git a/jwk/handler.go b/jwk/handler.go index 07a7667db11..b17fe73de57 100644 --- a/jwk/handler.go +++ b/jwk/handler.go @@ -6,22 +6,17 @@ package jwk import ( "encoding/json" "net/http" + "net/url" + "sync/atomic" - "github.com/ory/x/httprouterx" - - "github.com/gofrs/uuid" + "github.com/go-jose/go-jose/v3" "github.com/pkg/errors" + "golang.org/x/sync/errgroup" + "github.com/ory/herodot" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/httprouterx" "github.com/ory/x/urlx" - - "github.com/ory/x/errorsx" - - "github.com/ory/x/stringslice" - - 
"github.com/ory/hydra/x" - - "github.com/julienschmidt/httprouter" - jose "gopkg.in/square/go-jose.v2" ) const ( @@ -36,6 +31,8 @@ type Handler struct { // JSON Web Key Set // // swagger:model jsonWebKeySet +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type jsonWebKeySet struct { // List of JSON Web Keys // @@ -51,20 +48,22 @@ func NewHandler(r InternalRegistry) *Handler { return &Handler{r: r} } -func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx.RouterPublic, corsMiddleware func(http.Handler) http.Handler) { - public.Handler("OPTIONS", WellKnownKeysPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) - public.Handler("GET", WellKnownKeysPath, corsMiddleware(http.HandlerFunc(h.discoverJsonWebKeys))) +func (h *Handler) SetPublicRoutes(r *httprouterx.RouterPublic, corsMiddleware func(http.Handler) http.Handler) { + r.Handler("OPTIONS", WellKnownKeysPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) + r.Handler("GET", WellKnownKeysPath, corsMiddleware(http.HandlerFunc(h.discoverJsonWebKeys))) +} - admin.GET(KeyHandlerPath+"/:set/:key", h.getJsonWebKey) - admin.GET(KeyHandlerPath+"/:set", h.getJsonWebKeySet) +func (h *Handler) SetAdminRoutes(r *httprouterx.RouterAdmin) { + r.GET(KeyHandlerPath+"/{set}/{key}", h.getJsonWebKey) + r.GET(KeyHandlerPath+"/{set}", h.getJsonWebKeySet) - admin.POST(KeyHandlerPath+"/:set", h.createJsonWebKeySet) + r.POST(KeyHandlerPath+"/{set}", h.createJsonWebKeySet) - admin.PUT(KeyHandlerPath+"/:set/:key", h.adminUpdateJsonWebKey) - admin.PUT(KeyHandlerPath+"/:set", h.setJsonWebKeySet) + r.PUT(KeyHandlerPath+"/{set}/{key}", h.adminUpdateJsonWebKey) + r.PUT(KeyHandlerPath+"/{set}", h.setJsonWebKeySet) - admin.DELETE(KeyHandlerPath+"/:set/:key", h.deleteJsonWebKey) - admin.DELETE(KeyHandlerPath+"/:set", h.adminDeleteJsonWebKeySet) + r.DELETE(KeyHandlerPath+"/{set}/{key}", h.deleteJsonWebKey) + r.DELETE(KeyHandlerPath+"/{set}", h.adminDeleteJsonWebKeySet) } // swagger:route GET /.well-known/jwks.json wellknown discoverJsonWebKeys @@ -75,6 +74,9 @@ func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx. // if enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like // [node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others. // +// Adding custom keys requires first creating a keyset via the createJsonWebKeySet operation, +// and then configuring the webfinger.jwks.broadcast_keys configuration value to include the keyset name. +// // Consumes: // - application/json // @@ -87,25 +89,36 @@ func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx. 
// 200: jsonWebKeySet // default: errorOAuth2 func (h *Handler) discoverJsonWebKeys(w http.ResponseWriter, r *http.Request) { - var jwks jose.JSONWebKeySet - - ctx := r.Context() - for _, set := range stringslice.Unique(h.r.Config().WellKnownKeys(ctx)) { - keys, err := h.r.KeyManager().GetKeySet(ctx, set) - if errors.Is(err, x.ErrNotFound) { - h.r.Logger().Warnf("JSON Web Key Set \"%s\" does not exist yet, generating new key pair...", set) - keys, err = h.r.KeyManager().GenerateAndPersistKeySet(ctx, set, uuid.Must(uuid.NewV4()).String(), string(jose.RS256), "sig") - if err != nil { - h.r.Writer().WriteError(w, r, err) - return + eg, ctx := errgroup.WithContext(r.Context()) + wellKnownKeys := h.r.Config().WellKnownKeys(ctx) + + keys := make([]*jose.JSONWebKeySet, len(wellKnownKeys)) + nTotalKeys := atomic.Int64{} + for i, set := range wellKnownKeys { + eg.Go(func() error { + k, err := h.r.KeyManager().GetKeySet(ctx, set) + if errors.Is(err, x.ErrNotFound) { + h.r.Logger().Warnf("JSON Web Key Set %q does not exist yet, generating new key pair...", set) + k, err = h.r.KeyManager().GenerateAndPersistKeySet(ctx, set, "", string(jose.RS256), "sig") + if err != nil { + return err + } + } else if err != nil { + return err } - } else if err != nil { - h.r.Writer().WriteError(w, r, err) - return - } + keys[i] = ExcludePrivateKeys(k) + nTotalKeys.Add(int64(len(keys[i].Keys))) + return nil + }) + } + if err := eg.Wait(); err != nil { + h.r.Writer().WriteError(w, r, err) + return + } - keys = ExcludePrivateKeys(keys) - jwks.Keys = append(jwks.Keys, keys.Keys...) + jwks := jose.JSONWebKeySet{Keys: make([]jose.JSONWebKey, 0, nTotalKeys.Load())} + for _, k := range keys { + jwks.Keys = append(jwks.Keys, k.Keys...) } h.r.Writer().Write(w, r, &jwks) @@ -114,7 +127,7 @@ func (h *Handler) discoverJsonWebKeys(w http.ResponseWriter, r *http.Request) { // Get JSON Web Key Request // // swagger:parameters getJsonWebKey -type getJsonWebKey struct { +type _ struct { // JSON Web Key Set ID // // in: path @@ -145,9 +158,9 @@ type getJsonWebKey struct { // Responses: // 200: jsonWebKeySet // default: errorOAuth2 -func (h *Handler) getJsonWebKey(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var setName = ps.ByName("set") - var keyName = ps.ByName("key") +func (h *Handler) getJsonWebKey(w http.ResponseWriter, r *http.Request) { + var setName = r.PathValue("set") + var keyName = r.PathValue("key") keys, err := h.r.KeyManager().GetKey(r.Context(), setName, keyName) if err != nil { @@ -162,7 +175,7 @@ func (h *Handler) getJsonWebKey(w http.ResponseWriter, r *http.Request, ps httpr // Get JSON Web Key Set Parameters // // swagger:parameters getJsonWebKeySet -type getJsonWebKeySet struct { +type _ struct { // JSON Web Key Set ID // // in: path @@ -189,8 +202,8 @@ type getJsonWebKeySet struct { // Responses: // 200: jsonWebKeySet // default: errorOAuth2 -func (h *Handler) getJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var setName = ps.ByName("set") +func (h *Handler) getJsonWebKeySet(w http.ResponseWriter, r *http.Request) { + var setName = r.PathValue("set") keys, err := h.r.KeyManager().GetKeySet(r.Context(), setName) if err != nil { @@ -205,7 +218,7 @@ func (h *Handler) getJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps ht // Create JSON Web Key Set Request // // swagger:parameters createJsonWebKeySet -type adminCreateJsonWebKeySet struct { +type _ struct { // The JSON Web Key Set ID // // in: path @@ -249,7 +262,7 @@ type createJsonWebKeySetBody struct { // 
// # Create JSON Web Key // -// This endpoint is capable of generating JSON Web Key Sets for you. There a different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created. +// This endpoint is capable of generating JSON Web Key Sets for you. There are different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymmetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created. // // A JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well. // @@ -264,17 +277,18 @@ type createJsonWebKeySetBody struct { // Responses: // 201: jsonWebKeySet // default: errorOAuth2 -func (h *Handler) createJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) createJsonWebKeySet(w http.ResponseWriter, r *http.Request) { var keyRequest createJsonWebKeySetBody - var set = ps.ByName("set") + var set = r.PathValue("set") if err := json.NewDecoder(r.Body).Decode(&keyRequest); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) + return } if keys, err := h.r.KeyManager().GenerateAndPersistKeySet(r.Context(), set, keyRequest.KeyID, keyRequest.Algorithm, keyRequest.Use); err == nil { keys = ExcludeOpaquePrivateKeys(keys) - h.r.Writer().WriteCreated(w, r, urlx.AppendPaths(h.r.Config().IssuerURL(r.Context()), "/keys/"+set).String(), keys) + h.r.Writer().WriteCreated(w, r, urlx.AppendPaths(h.r.Config().IssuerURL(r.Context()), "keys", url.PathEscape(set)).String(), keys) } else { h.r.Writer().WriteError(w, r, err) } @@ -283,7 +297,7 @@ func (h *Handler) createJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps // Set JSON Web Key Set Request // // swagger:parameters setJsonWebKeySet -type setJsonWebKeySet struct { +type _ struct { // The JSON Web Key Set ID // // in: path @@ -313,12 +327,12 @@ type setJsonWebKeySet struct { // Responses: // 200: jsonWebKeySet // default: errorOAuth2 -func (h *Handler) setJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) setJsonWebKeySet(w http.ResponseWriter, r *http.Request) { var keySet jose.JSONWebKeySet - var set = ps.ByName("set") + var set = r.PathValue("set") if err := json.NewDecoder(r.Body).Decode(&keySet); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) return } @@ -333,7 +347,7 @@ func (h *Handler) setJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps ht // Set JSON Web Key Request // // swagger:parameters setJsonWebKey -type setJsonWebKey struct { +type _ struct { // The JSON Web Key Set ID // // in: path @@ -369,12 +383,12 @@ type setJsonWebKey struct { // Responses: // 200: jsonWebKey // default: errorOAuth2 -func (h *Handler) adminUpdateJsonWebKey(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) 
adminUpdateJsonWebKey(w http.ResponseWriter, r *http.Request) { var key jose.JSONWebKey - var set = ps.ByName("set") + var set = r.PathValue("set") if err := json.NewDecoder(r.Body).Decode(&key); err != nil { - h.r.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.r.Writer().WriteError(w, r, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode the request body: %s", err))) return } @@ -389,7 +403,7 @@ func (h *Handler) adminUpdateJsonWebKey(w http.ResponseWriter, r *http.Request, // Delete JSON Web Key Set Parameters // // swagger:parameters deleteJsonWebKeySet -type deleteJsonWebKeySet struct { +type _ struct { // The JSON Web Key Set // in: path // required: true @@ -415,8 +429,8 @@ type deleteJsonWebKeySet struct { // Responses: // 204: emptyResponse // default: errorOAuth2 -func (h *Handler) adminDeleteJsonWebKeySet(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var setName = ps.ByName("set") +func (h *Handler) adminDeleteJsonWebKeySet(w http.ResponseWriter, r *http.Request) { + var setName = r.PathValue("set") if err := h.r.KeyManager().DeleteKeySet(r.Context(), setName); err != nil { h.r.Writer().WriteError(w, r, err) @@ -429,7 +443,7 @@ func (h *Handler) adminDeleteJsonWebKeySet(w http.ResponseWriter, r *http.Reques // Delete JSON Web Key Parameters // // swagger:parameters deleteJsonWebKey -type deleteJsonWebKey struct { +type _ struct { // The JSON Web Key Set // in: path // required: true @@ -464,9 +478,8 @@ type deleteJsonWebKey struct { // Responses: // 204: emptyResponse // default: errorOAuth2 -func (h *Handler) deleteJsonWebKey(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var setName = ps.ByName("set") - var keyName = ps.ByName("key") +func (h *Handler) deleteJsonWebKey(w http.ResponseWriter, r *http.Request) { + setName, keyName := r.PathValue("set"), r.PathValue("key") if err := h.r.KeyManager().DeleteKey(r.Context(), setName, keyName); err != nil { h.r.Writer().WriteError(w, r, err) @@ -478,4 +491,4 @@ func (h *Handler) deleteJsonWebKey(w http.ResponseWriter, r *http.Request, ps ht // This function will not be called, OPTIONS request will be handled by cors // this is just a placeholder. 
-func (h *Handler) handleOptions(w http.ResponseWriter, r *http.Request) {} +func (h *Handler) handleOptions(http.ResponseWriter, *http.Request) {} diff --git a/jwk/handler_test.go b/jwk/handler_test.go index 67140e7dc25..0dfcca39cb1 100644 --- a/jwk/handler_test.go +++ b/jwk/handler_test.go @@ -10,47 +10,57 @@ import ( "net/http/httptest" "testing" - "github.com/ory/x/httprouterx" - - "github.com/ory/hydra/jwk" - "github.com/ory/x/contextx" - + "github.com/go-jose/go-jose/v3" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - jose "gopkg.in/square/go-jose.v2" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/dbal" + "github.com/ory/x/prometheusx" + "github.com/ory/x/urlx" ) func TestHandlerWellKnown(t *testing.T) { - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - conf.MustSet(context.Background(), config.KeyWellKnownKeys, []string{x.OpenIDConnectKeyName, x.OpenIDConnectKeyName}) - router := x.NewRouterPublic() - h := reg.KeyHandler() - h.SetRoutes(httprouterx.NewRouterAdminWithPrefixAndRouter(router.Router, "/admin", conf.AdminURL), router, func(h http.Handler) http.Handler { - return h - }) - testServer := httptest.NewServer(router) + t.Parallel() + JWKPath := "/.well-known/jwks.json" t.Run("Test_Handler_WellKnown/Run_public_key_With_public_prefix", func(t *testing.T) { - if conf.HSMEnabled() { + t.Parallel() + + dsn := dbal.NewSQLiteTestDatabase(t) + var testServer *httptest.Server + { + reg := testhelpers.NewRegistrySQLFromURL(t, dsn, true, true, driver.WithConfigOptions(configx.WithValue(config.KeyWellKnownKeys, []string{x.OpenIDConnectKeyName, x.OpenIDConnectKeyName}))) + router := x.NewRouterPublic(prometheusx.NewMetricsManager("", "", "", "")) + h := jwk.NewHandler(reg) + h.SetPublicRoutes(router, func(h http.Handler) http.Handler { + return h + }) + testServer = httptest.NewServer(router) + t.Cleanup(testServer.Close) + } + + reg := testhelpers.NewRegistrySQLFromURL(t, dsn, false, true, driver.WithConfigOptions(configx.WithValue(config.KeyWellKnownKeys, []string{x.OpenIDConnectKeyName, x.OpenIDConnectKeyName}))) + if reg.Config().HSMEnabled() { t.Skip("Skipping test. Not applicable when Hardware Security Module is enabled. 
Public/private keys on HSM are generated with equal key id's and are not using prefixes") } - IDKS, _ := jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-1", "sig") + IDKS, _ := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") require.NoError(t, reg.KeyManager().AddKeySet(context.TODO(), x.OpenIDConnectKeyName, IDKS)) - res, err := http.Get(testServer.URL + JWKPath) + res, err := http.Get(urlx.MustJoin(testServer.URL, JWKPath)) require.NoError(t, err, "problem in http request") - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var known jose.JSONWebKeySet err = json.NewDecoder(res.Body).Decode(&known) require.NoError(t, err, "problem in decoding response") - require.Len(t, known.Keys, 1) + require.GreaterOrEqual(t, len(known.Keys), 1) knownKey := known.Key("test-id-1")[0].Public() require.NotNil(t, knownKey, "Could not find key public") @@ -62,31 +72,47 @@ func TestHandlerWellKnown(t *testing.T) { }) t.Run("Test_Handler_WellKnown/Run_public_key_Without_public_prefix", func(t *testing.T) { + t.Parallel() + + dsn := dbal.NewSQLiteTestDatabase(t) + var testServer *httptest.Server + { + reg := testhelpers.NewRegistrySQLFromURL(t, dsn, true, true, driver.WithConfigOptions(configx.WithValue(config.KeyWellKnownKeys, []string{x.OpenIDConnectKeyName, x.OpenIDConnectKeyName}))) + router := x.NewRouterPublic(prometheusx.NewMetricsManager("", "", "", "")) + h := jwk.NewHandler(reg) + h.SetPublicRoutes(router, func(h http.Handler) http.Handler { + return h + }) + testServer = httptest.NewServer(router) + t.Cleanup(testServer.Close) + } + + reg := testhelpers.NewRegistrySQLFromURL(t, dsn, false, true, driver.WithConfigOptions(configx.WithValue(config.KeyWellKnownKeys, []string{x.OpenIDConnectKeyName, x.OpenIDConnectKeyName}))) var IDKS *jose.JSONWebKeySet - if conf.HSMEnabled() { + if reg.Config().HSMEnabled() { var err error IDKS, err = reg.KeyManager().GenerateAndPersistKeySet(context.TODO(), x.OpenIDConnectKeyName, "test-id-2", "RS256", "sig") require.NoError(t, err, "problem in generating keys") } else { var err error - IDKS, err = jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-2", "sig") + IDKS, err = jwk.GenerateJWK(jose.RS256, "test-id-2", "sig") require.NoError(t, err, "problem in generating keys") IDKS.Keys[0].KeyID = "test-id-2" require.NoError(t, reg.KeyManager().AddKeySet(context.TODO(), x.OpenIDConnectKeyName, IDKS)) } - res, err := http.Get(testServer.URL + JWKPath) + res, err := http.Get(urlx.MustJoin(testServer.URL, JWKPath)) require.NoError(t, err, "problem in http request") - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var known jose.JSONWebKeySet err = json.NewDecoder(res.Body).Decode(&known) require.NoError(t, err, "problem in decoding response") - if conf.HSMEnabled() { - require.Len(t, known.Keys, 2) + if reg.Config().HSMEnabled() { + require.GreaterOrEqual(t, len(known.Keys), 2) } else { - require.Len(t, known.Keys, 1) + require.GreaterOrEqual(t, len(known.Keys), 1) } knownKey := known.Key("test-id-2")[0] diff --git a/jwk/helper.go b/jwk/helper.go index 2506074764b..cc0627bd7a9 100644 --- a/jwk/helper.go +++ b/jwk/helper.go @@ -4,22 +4,24 @@ package jwk import ( + "bytes" "context" "crypto/ecdsa" "crypto/ed25519" "crypto/rsa" "crypto/x509" + "encoding/json" "encoding/pem" "sync" + "testing" - "github.com/ory/x/josex" - - "github.com/ory/x/errorsx" - - "github.com/ory/hydra/x" - + "github.com/go-jose/go-jose/v3" "github.com/pkg/errors" - jose 
"gopkg.in/square/go-jose.v2" + "github.com/stretchr/testify/require" + + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/josex" ) var mapLock sync.RWMutex @@ -34,19 +36,19 @@ func getLock(set string) *sync.RWMutex { return locks[set] } -func EnsureAsymmetricKeypairExists(ctx context.Context, r InternalRegistry, alg, set string) error { - _, err := GetOrGenerateKeys(ctx, r, r.KeyManager(), set, set, alg) - return err +func EnsureAsymmetricKeypairExists(t testing.TB, r InternalRegistry, alg, set string) { + _, err := GetOrGenerateKeys(t.Context(), r, set, alg) + require.NoError(t, err) } -func GetOrGenerateKeys(ctx context.Context, r InternalRegistry, m Manager, set, kid, alg string) (private *jose.JSONWebKey, err error) { +func GetOrGenerateKeys(ctx context.Context, r InternalRegistry, set, alg string) (private *jose.JSONWebKey, err error) { getLock(set).Lock() defer getLock(set).Unlock() - keys, err := m.GetKeySet(ctx, set) - if errors.Is(err, x.ErrNotFound) || keys != nil && len(keys.Keys) == 0 { - r.Logger().Warnf("JSON Web Key Set \"%s\" does not exist yet, generating new key pair...", set) - keys, err = m.GenerateAndPersistKeySet(ctx, set, kid, alg, "sig") + keys, err := r.KeyManager().GetKeySet(ctx, set) + if errors.Is(err, x.ErrNotFound) || err == nil && len(keys.Keys) == 0 { + r.Logger().Warnf("JSON Web Key Set %q does not exist yet, generating new key pair...", set) + keys, err = r.KeyManager().GenerateAndPersistKeySet(ctx, set, "", alg, "sig") if err != nil { return nil, err } @@ -57,20 +59,15 @@ func GetOrGenerateKeys(ctx context.Context, r InternalRegistry, m Manager, set, privKey, privKeyErr := FindPrivateKey(keys) if privKeyErr == nil { return privKey, nil - } else { - r.Logger().WithField("jwks", set).Warnf("JSON Web Key not found in JSON Web Key Set %s, generating new key pair...", set) - - keys, err = m.GenerateAndPersistKeySet(ctx, set, kid, alg, "sig") - if err != nil { - return nil, err - } + } + r.Logger().WithField("jwks", set).Warnf("JSON Web Key not found in JSON Web Key Set %s, generating new key pair...", set) - privKey, err := FindPrivateKey(keys) - if err != nil { - return nil, err - } - return privKey, nil + keys, err = r.KeyManager().GenerateAndPersistKeySet(ctx, set, "", alg, "sig") + if err != nil { + return nil, err } + + return FindPrivateKey(keys) } func First(keys []jose.JSONWebKey) *jose.JSONWebKey { @@ -119,9 +116,11 @@ func ExcludePrivateKeys(set *jose.JSONWebKeySet) *jose.JSONWebKeySet { func ExcludeOpaquePrivateKeys(set *jose.JSONWebKeySet) *jose.JSONWebKeySet { keys := new(jose.JSONWebKeySet) - for _, k := range set.Keys { - if _, opaque := k.Key.(jose.OpaqueSigner); !opaque { - keys.Keys = append(keys.Keys, k) + for i := range set.Keys { + if _, opaque := set.Keys[i].Key.(jose.OpaqueSigner); opaque { + keys.Keys = append(keys.Keys, josex.ToPublicKey(&set.Keys[i])) + } else { + keys.Keys = append(keys.Keys, set.Keys[i]) } } return keys @@ -134,16 +133,45 @@ func PEMBlockForKey(key interface{}) (*pem.Block, error) { case *ecdsa.PrivateKey: b, err := x509.MarshalECPrivateKey(k) if err != nil { - return nil, errorsx.WithStack(err) + return nil, errors.WithStack(err) } return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}, nil case ed25519.PrivateKey: b, err := x509.MarshalPKCS8PrivateKey(k) if err != nil { - return nil, errorsx.WithStack(err) + return nil, errors.WithStack(err) } return &pem.Block{Type: "PRIVATE KEY", Bytes: b}, nil default: return nil, 
errors.New("Invalid key type") } } + +func OnlyPublicSDKKeys(in []hydra.JsonWebKey) (out []hydra.JsonWebKey, _ error) { + var interim []jose.JSONWebKey + var b bytes.Buffer + + if err := json.NewEncoder(&b).Encode(&in); err != nil { + return nil, errors.Wrap(err, "failed to encode JSON Web Key Set") + } + + if err := json.NewDecoder(&b).Decode(&interim); err != nil { + return nil, errors.Wrap(err, "failed to encode JSON Web Key Set") + } + + for i, key := range interim { + interim[i] = key.Public() + } + + b.Reset() + if err := json.NewEncoder(&b).Encode(&interim); err != nil { + return nil, errors.Wrap(err, "failed to encode JSON Web Key Set") + } + + var keys []hydra.JsonWebKey + if err := json.NewDecoder(&b).Decode(&keys); err != nil { + return nil, errors.Wrap(err, "failed to encode JSON Web Key Set") + } + + return keys, nil +} diff --git a/jwk/helper_test.go b/jwk/helper_test.go index fa77b917cbd..561dec58c03 100644 --- a/jwk/helper_test.go +++ b/jwk/helper_test.go @@ -4,33 +4,31 @@ package jwk_test import ( - "context" "crypto" - "crypto/dsa" + "crypto/dsa" //lint:ignore SA1019 used for testing invalid key types "crypto/ecdsa" "crypto/ed25519" "crypto/rsa" "crypto/x509" + "encoding/json" "encoding/pem" "io" "strings" "testing" + "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v3/cryptosigner" "github.com/golang/mock/gomock" "github.com/pborman/uuid" "github.com/pkg/errors" - - "github.com/ory/hydra/internal" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - - "gopkg.in/square/go-jose.v2/cryptosigner" - - "gopkg.in/square/go-jose.v2" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" ) type fakeSigner struct { @@ -46,8 +44,11 @@ func (f *fakeSigner) Public() crypto.PublicKey { } func TestHandlerFindPublicKey(t *testing.T) { + t.Parallel() + t.Run("Test_Helper/Run_FindPublicKey_With_RSA", func(t *testing.T) { - RSIDKS, err := jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-1", "sig") + t.Parallel() + RSIDKS, err := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") require.NoError(t, err) keys, err := jwk.FindPublicKey(RSIDKS) require.NoError(t, err) @@ -56,7 +57,8 @@ func TestHandlerFindPublicKey(t *testing.T) { }) t.Run("Test_Helper/Run_FindPublicKey_With_Opaque", func(t *testing.T) { - key, err := jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-1", "sig") + t.Parallel() + key, err := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") RSIDKS := &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{{ Algorithm: "RS256", Use: "sig", @@ -82,7 +84,8 @@ func TestHandlerFindPublicKey(t *testing.T) { }) t.Run("Test_Helper/Run_FindPublicKey_With_ECDSA", func(t *testing.T) { - ECDSAIDKS, err := jwk.GenerateJWK(context.Background(), jose.ES256, "test-id-2", "sig") + t.Parallel() + ECDSAIDKS, err := jwk.GenerateJWK(jose.ES256, "test-id-2", "sig") require.NoError(t, err) keys, err := jwk.FindPublicKey(ECDSAIDKS) require.NoError(t, err) @@ -91,7 +94,8 @@ func TestHandlerFindPublicKey(t *testing.T) { }) t.Run("Test_Helper/Run_FindPublicKey_With_EdDSA", func(t *testing.T) { - EdDSAIDKS, err := 
jwk.GenerateJWK(context.Background(), jose.EdDSA, "test-id-3", "sig") + t.Parallel() + EdDSAIDKS, err := jwk.GenerateJWK(jose.EdDSA, "test-id-3", "sig") require.NoError(t, err) keys, err := jwk.FindPublicKey(EdDSAIDKS) require.NoError(t, err) @@ -100,6 +104,7 @@ func TestHandlerFindPublicKey(t *testing.T) { }) t.Run("Test_Helper/Run_FindPublicKey_With_KeyNotFound", func(t *testing.T) { + t.Parallel() keySet := &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{}} _, err := jwk.FindPublicKey(keySet) require.Error(t, err) @@ -108,8 +113,9 @@ func TestHandlerFindPublicKey(t *testing.T) { } func TestHandlerFindPrivateKey(t *testing.T) { + t.Parallel() t.Run("Test_Helper/Run_FindPrivateKey_With_RSA", func(t *testing.T) { - RSIDKS, _ := jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-1", "sig") + RSIDKS, _ := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") keys, err := jwk.FindPrivateKey(RSIDKS) require.NoError(t, err) assert.Equal(t, keys.KeyID, "test-id-1") @@ -117,7 +123,7 @@ func TestHandlerFindPrivateKey(t *testing.T) { }) t.Run("Test_Helper/Run_FindPrivateKey_With_ECDSA", func(t *testing.T) { - ECDSAIDKS, err := jwk.GenerateJWK(context.Background(), jose.ES256, "test-id-2", "sig") + ECDSAIDKS, err := jwk.GenerateJWK(jose.ES256, "test-id-2", "sig") require.NoError(t, err) keys, err := jwk.FindPrivateKey(ECDSAIDKS) require.NoError(t, err) @@ -126,7 +132,7 @@ func TestHandlerFindPrivateKey(t *testing.T) { }) t.Run("Test_Helper/Run_FindPrivateKey_With_EdDSA", func(t *testing.T) { - EdDSAIDKS, err := jwk.GenerateJWK(context.Background(), jose.EdDSA, "test-id-3", "sig") + EdDSAIDKS, err := jwk.GenerateJWK(jose.EdDSA, "test-id-3", "sig") require.NoError(t, err) keys, err := jwk.FindPrivateKey(EdDSAIDKS) require.NoError(t, err) @@ -143,8 +149,9 @@ func TestHandlerFindPrivateKey(t *testing.T) { } func TestPEMBlockForKey(t *testing.T) { + t.Parallel() t.Run("Test_Helper/Run_PEMBlockForKey_With_RSA", func(t *testing.T) { - RSIDKS, err := jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-1", "sig") + RSIDKS, err := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") require.NoError(t, err) key, err := jwk.FindPrivateKey(RSIDKS) require.NoError(t, err) @@ -155,7 +162,7 @@ func TestPEMBlockForKey(t *testing.T) { }) t.Run("Test_Helper/Run_PEMBlockForKey_With_ECDSA", func(t *testing.T) { - ECDSAIDKS, err := jwk.GenerateJWK(context.Background(), jose.ES256, "test-id-2", "sig") + ECDSAIDKS, err := jwk.GenerateJWK(jose.ES256, "test-id-2", "sig") require.NoError(t, err) key, err := jwk.FindPrivateKey(ECDSAIDKS) require.NoError(t, err) @@ -166,7 +173,7 @@ func TestPEMBlockForKey(t *testing.T) { }) t.Run("Test_Helper/Run_PEMBlockForKey_With_EdDSA", func(t *testing.T) { - EdDSAIDKS, err := jwk.GenerateJWK(context.Background(), jose.EdDSA, "test-id-3", "sig") + EdDSAIDKS, err := jwk.GenerateJWK(jose.EdDSA, "test-id-3", "sig") require.NoError(t, err) key, err := jwk.FindPrivateKey(EdDSAIDKS) require.NoError(t, err) @@ -185,21 +192,37 @@ func TestPEMBlockForKey(t *testing.T) { } func TestExcludeOpaquePrivateKeys(t *testing.T) { - opaqueKeys, err := jwk.GenerateJWK(context.Background(), jose.RS256, "test-id-1", "sig") + t.Parallel() + opaqueKeys, err := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") assert.NoError(t, err) require.Len(t, opaqueKeys.Keys, 1) opaqueKeys.Keys[0].Key = cryptosigner.Opaque(opaqueKeys.Keys[0].Key.(*rsa.PrivateKey)) + keys := jwk.ExcludeOpaquePrivateKeys(opaqueKeys) - require.Len(t, keys.Keys, 0) + + require.Len(t, keys.Keys, 1) + k := keys.Keys[0] + _, isPublic := 
k.Key.(*rsa.PublicKey) + assert.True(t, isPublic) } +type regWithManager struct { + *driver.RegistrySQL + km *MockManager +} + +func (r regWithManager) KeyManager() jwk.Manager { return r.km } + func TestGetOrGenerateKeys(t *testing.T) { - reg := internal.NewMockedRegistry(t, &contextx.Default{}) + t.Parallel() + reg := testhelpers.NewRegistryMemory(t) - setId := uuid.NewUUID().String() - keyId := uuid.NewUUID().String() + setID := uuid.NewUUID().String() + keyID := uuid.NewUUID().String() - keySet, _ := jwk.GenerateJWK(context.Background(), jose.RS256, keyId, "sig") + keySet, err := jwk.GenerateJWK(jose.RS256, keyID, "sig") + require.NoError(t, err) + require.Len(t, keySet.Keys, 1) keySetWithoutPrivateKey := &jose.JSONWebKeySet{ Keys: []jose.JSONWebKey{keySet.Keys[0].Public()}, } @@ -212,45 +235,62 @@ func TestGetOrGenerateKeys(t *testing.T) { t.Run("Test_Helper/Run_GetOrGenerateKeys_With_GetKeySetError", func(t *testing.T) { keyManager := km(t) - keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setId)).Return(nil, errors.New("GetKeySetError")) - privKey, err := jwk.GetOrGenerateKeys(context.TODO(), reg, keyManager, setId, keyId, "RS256") + keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setID)).Return(nil, errors.New("GetKeySetError")) + privKey, err := jwk.GetOrGenerateKeys(t.Context(), regWithManager{RegistrySQL: reg, km: keyManager}, setID, "RS256") assert.Nil(t, privKey) assert.EqualError(t, err, "GetKeySetError") }) t.Run("Test_Helper/Run_GetOrGenerateKeys_With_GenerateAndPersistKeySetError", func(t *testing.T) { keyManager := km(t) - keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setId)).Return(nil, errors.Wrap(x.ErrNotFound, "")) - keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setId), gomock.Eq(keyId), gomock.Eq("RS256"), gomock.Eq("sig")).Return(nil, errors.New("GetKeySetError")) - privKey, err := jwk.GetOrGenerateKeys(context.TODO(), reg, keyManager, setId, keyId, "RS256") + keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setID)).Return(nil, errors.Wrap(x.ErrNotFound, "")) + keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setID), gomock.Eq(""), gomock.Eq("RS256"), gomock.Eq("sig")).Return(nil, errors.New("GetKeySetError")) + privKey, err := jwk.GetOrGenerateKeys(t.Context(), regWithManager{RegistrySQL: reg, km: keyManager}, setID, "RS256") assert.Nil(t, privKey) assert.EqualError(t, err, "GetKeySetError") }) t.Run("Test_Helper/Run_GetOrGenerateKeys_With_GenerateAndPersistKeySetError", func(t *testing.T) { keyManager := km(t) - keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setId)).Return(keySetWithoutPrivateKey, nil) - keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setId), gomock.Eq(keyId), gomock.Eq("RS256"), gomock.Eq("sig")).Return(nil, errors.New("GetKeySetError")) - privKey, err := jwk.GetOrGenerateKeys(context.TODO(), reg, keyManager, setId, keyId, "RS256") + keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setID)).Return(keySetWithoutPrivateKey, nil) + keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setID), gomock.Eq(""), gomock.Eq("RS256"), gomock.Eq("sig")).Return(nil, errors.New("GetKeySetError")) + privKey, err := jwk.GetOrGenerateKeys(t.Context(), regWithManager{RegistrySQL: reg, km: keyManager}, setID, "RS256") assert.Nil(t, privKey) assert.EqualError(t, err, "GetKeySetError") }) t.Run("Test_Helper/Run_GetOrGenerateKeys_With_GetKeySet_ContainsMissingPrivateKey", func(t *testing.T) { keyManager := km(t) - 
keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setId)).Return(keySetWithoutPrivateKey, nil) - keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setId), gomock.Eq(keyId), gomock.Eq("RS256"), gomock.Eq("sig")).Return(keySet, nil) - privKey, err := jwk.GetOrGenerateKeys(context.TODO(), reg, keyManager, setId, keyId, "RS256") + keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setID)).Return(keySetWithoutPrivateKey, nil) + keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setID), gomock.Eq(""), gomock.Eq("RS256"), gomock.Eq("sig")).Return(keySet, nil) + privKey, err := jwk.GetOrGenerateKeys(t.Context(), regWithManager{RegistrySQL: reg, km: keyManager}, setID, "RS256") assert.NoError(t, err) assert.Equal(t, privKey, &keySet.Keys[0]) }) t.Run("Test_Helper/Run_GetOrGenerateKeys_With_GenerateAndPersistKeySet_ContainsMissingPrivateKey", func(t *testing.T) { keyManager := km(t) - keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setId)).Return(keySetWithoutPrivateKey, nil) - keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setId), gomock.Eq(keyId), gomock.Eq("RS256"), gomock.Eq("sig")).Return(keySetWithoutPrivateKey, nil).Times(1) - privKey, err := jwk.GetOrGenerateKeys(context.TODO(), reg, keyManager, setId, keyId, "RS256") + keyManager.EXPECT().GetKeySet(gomock.Any(), gomock.Eq(setID)).Return(keySetWithoutPrivateKey, nil) + keyManager.EXPECT().GenerateAndPersistKeySet(gomock.Any(), gomock.Eq(setID), gomock.Eq(""), gomock.Eq("RS256"), gomock.Eq("sig")).Return(keySetWithoutPrivateKey, nil).Times(1) + privKey, err := jwk.GetOrGenerateKeys(t.Context(), regWithManager{RegistrySQL: reg, km: keyManager}, setID, "RS256") assert.Nil(t, privKey) assert.EqualError(t, err, "key not found") }) } + +func TestOnlyPublicSDKKeys(t *testing.T) { + set, err := jwk.GenerateJWK(jose.RS256, "test-id-1", "sig") + require.NoError(t, err) + + out, err := json.Marshal(set.Keys) + require.NoError(t, err) + + var sdkSet []hydra.JsonWebKey + require.NoError(t, json.Unmarshal(out, &sdkSet)) + + assert.NotEmpty(t, sdkSet[0].P) + result, err := jwk.OnlyPublicSDKKeys(sdkSet) + require.NoError(t, err) + + assert.Empty(t, result[0].P) +} diff --git a/jwk/jwt_strategy.go b/jwk/jwt_strategy.go index 76792578e24..bbcc67e0ffd 100644 --- a/jwk/jwt_strategy.go +++ b/jwk/jwt_strategy.go @@ -7,40 +7,41 @@ import ( "context" "net" - "github.com/ory/x/josex" - - "github.com/gofrs/uuid" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/fosite" - "github.com/ory/hydra/driver/config" - + "github.com/go-jose/go-jose/v3" "github.com/pkg/errors" - "github.com/ory/fosite/token/jwt" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/x/josex" ) -type JWTSigner interface { - GetPublicKeyID(ctx context.Context) (string, error) - GetPublicKey(ctx context.Context) (jose.JSONWebKey, error) - jwt.Signer -} - -type DefaultJWTSigner struct { - *jwt.DefaultSigner - r InternalRegistry - c *config.DefaultProvider - setID string -} +type ( + JWTSigner interface { + GetPublicKeyID(ctx context.Context) (string, error) + GetPublicKey(ctx context.Context) (jose.JSONWebKey, error) + jwt.Signer + } + DefaultJWTSigner struct { + *jwt.DefaultSigner + r InternalRegistry + setID string + } + AccessTokenSignerProvider interface { + AccessTokenJWTSigner() JWTSigner + } + OpenIDSignerProvider interface { + 
OpenIDJWTSigner() JWTSigner + } +) -func NewDefaultJWTSigner(c *config.DefaultProvider, r InternalRegistry, setID string) *DefaultJWTSigner { - j := &DefaultJWTSigner{c: c, r: r, setID: setID, DefaultSigner: &jwt.DefaultSigner{}} +func NewDefaultJWTSigner(r InternalRegistry, setID string) *DefaultJWTSigner { + j := &DefaultJWTSigner{r: r, setID: setID, DefaultSigner: &jwt.DefaultSigner{}} j.DefaultSigner.GetPrivateKey = j.getPrivateKey return j } func (j *DefaultJWTSigner) getKeys(ctx context.Context) (private *jose.JSONWebKey, err error) { - private, err = GetOrGenerateKeys(ctx, j.r, j.r.KeyManager(), j.setID, uuid.Must(uuid.NewV4()).String(), string(jose.RS256)) + private, err = GetOrGenerateKeys(ctx, j.r, j.setID, string(jose.RS256)) if err == nil { return private, nil } @@ -53,7 +54,7 @@ func (j *DefaultJWTSigner) getKeys(ctx context.Context) (private *jose.JSONWebKe return nil, errors.WithStack(fosite.ErrServerError. WithWrap(err). - WithHintf(`Could not ensure that signing keys for "%s" exists. If you are running against a persistent SQL database this is most likely because your "secrets.system" ("SECRETS_SYSTEM" environment variable) is not set or changed. When running with an SQL database backend you need to make sure that the secret is set and stays the same, unless when doing key rotation. This may also happen when you forget to run "hydra migrate sql..`, j.setID)) + WithHintf(`Could not ensure that signing keys for "%s" exists. If you are running against a persistent SQL database this is most likely because your "secrets.system" ("SECRETS_SYSTEM" environment variable) is not set or changed. When running with an SQL database backend you need to make sure that the secret is set and stays the same, unless when doing key rotation. This may also happen when you forget to run "hydra migrate sql up -e".`, j.setID)) } func (j *DefaultJWTSigner) GetPublicKeyID(ctx context.Context) (string, error) { @@ -72,11 +73,6 @@ func (j *DefaultJWTSigner) GetPublicKey(ctx context.Context) (jose.JSONWebKey, e return josex.ToPublicKey(private), nil } -func (j *DefaultJWTSigner) getPrivateKey(ctx context.Context) (interface{}, error) { - private, err := j.getKeys(ctx) - if err != nil { - return nil, err - } - - return private, nil +func (j *DefaultJWTSigner) getPrivateKey(ctx context.Context) (any, error) { + return j.getKeys(ctx) } diff --git a/jwk/jwt_strategy_test.go b/jwk/jwt_strategy_test.go index 5a71febb89c..4beffcb6e81 100644 --- a/jwk/jwt_strategy_test.go +++ b/jwk/jwt_strategy_test.go @@ -9,32 +9,26 @@ import ( "strings" "testing" - "github.com/tidwall/gjson" - - "github.com/ory/hydra/internal" - "github.com/ory/x/contextx" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" - jwt2 "github.com/ory/fosite/token/jwt" - - "github.com/ory/fosite/token/jwt" - . "github.com/ory/hydra/jwk" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/testhelpers" + . 
"github.com/ory/hydra/v2/jwk" ) func TestJWTStrategy(t *testing.T) { for _, alg := range []string{"RS256", "ES256", "ES512"} { t.Run("case="+alg, func(t *testing.T) { - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + reg := testhelpers.NewRegistryMemory(t) m := reg.KeyManager() _, err := m.GenerateAndPersistKeySet(context.Background(), "foo-set", "foo", alg, "sig") require.NoError(t, err) - s := NewDefaultJWTSigner(conf, reg, "foo-set") - a, b, err := s.Generate(context.Background(), jwt2.MapClaims{"foo": "bar"}, &jwt.Headers{}) + s := NewDefaultJWTSigner(reg, "foo-set") + a, b, err := s.Generate(context.Background(), jwt.MapClaims{"foo": "bar"}, &jwt.Headers{}) require.NoError(t, err) assert.NotEmpty(t, a) assert.NotEmpty(t, b) @@ -52,7 +46,7 @@ func TestJWTStrategy(t *testing.T) { _, err = m.GenerateAndPersistKeySet(context.Background(), "foo-set", "bar", alg, "sig") require.NoError(t, err) - a, b, err = s.Generate(context.Background(), jwt2.MapClaims{"foo": "bar"}, &jwt.Headers{}) + a, b, err = s.Generate(context.Background(), jwt.MapClaims{"foo": "bar"}, &jwt.Headers{}) require.NoError(t, err) assert.NotEmpty(t, a) assert.NotEmpty(t, b) diff --git a/jwk/manager.go b/jwk/manager.go index 3ab298b43e5..37210ac1312 100644 --- a/jwk/manager.go +++ b/jwk/manager.go @@ -5,13 +5,17 @@ package jwk import ( "context" + "encoding/json" "net/http" "time" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" - jose "gopkg.in/square/go-jose.v2" + "github.com/pkg/errors" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" ) var ErrUnsupportedKeyAlgorithm = &fosite.RFC6749Error{ @@ -26,6 +30,12 @@ var ErrUnsupportedEllipticCurve = &fosite.RFC6749Error{ DescriptionField: "Unsupported elliptic curve", } +var ErrMinimalRsaKeyLength = &fosite.RFC6749Error{ + CodeField: http.StatusBadRequest, + ErrorField: http.StatusText(http.StatusBadRequest), + DescriptionField: "Unsupported RSA key length", +} + type ( Manager interface { GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (*jose.JSONWebKeySet, error) @@ -46,6 +56,9 @@ type ( DeleteKeySet(ctx context.Context, set string) error } + ManagerProvider interface { + KeyManager() Manager + } SQLData struct { ID uuid.UUID `db:"pk"` @@ -58,8 +71,30 @@ type ( CreatedAt time.Time `db:"created_at"` Key string `db:"keydata"` } + + SQLDataRows []SQLData ) -func (d SQLData) TableName() string { - return "hydra_jwk" +func (d SQLData) TableName() string { return "hydra_jwk" } + +func (d SQLDataRows) ToJWK(ctx context.Context, aes *aead.AESGCM) (keys *jose.JSONWebKeySet, err error) { + if len(d) == 0 { + return nil, errors.Wrap(x.ErrNotFound, "") + } + + keys = &jose.JSONWebKeySet{ + Keys: make([]jose.JSONWebKey, len(d)), + } + for i, d := range d { + key, err := aes.Decrypt(ctx, d.Key, nil) + if err != nil { + return nil, errors.WithStack(err) + } + + if err := json.Unmarshal(key, &keys.Keys[i]); err != nil { + return nil, errors.WithStack(err) + } + } + + return keys, nil } diff --git a/jwk/manager_mock_test.go b/jwk/manager_mock_test.go index 4e3dc82f03f..65627a0e595 100644 --- a/jwk/manager_mock_test.go +++ b/jwk/manager_mock_test.go @@ -11,8 +11,8 @@ import ( context "context" reflect "reflect" + jose "github.com/go-jose/go-jose/v3" gomock 
"github.com/golang/mock/gomock" - jose "gopkg.in/square/go-jose.v2" ) // MockManager is a mock of Manager interface. diff --git a/jwk/manager_strategy.go b/jwk/manager_strategy.go index 0119d9eeecf..2519ba3d151 100644 --- a/jwk/manager_strategy.go +++ b/jwk/manager_strategy.go @@ -6,15 +6,17 @@ package jwk import ( "context" + "github.com/go-jose/go-jose/v3" "github.com/pkg/errors" "go.opentelemetry.io/otel" - "gopkg.in/square/go-jose.v2" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/x" "github.com/ory/x/otelx" ) -const tracingComponent = "github.com/ory/hydra/jwk" +const tracingComponent = "github.com/ory/hydra/v2/jwk" type ManagerStrategy struct { hardwareKeyManager Manager @@ -28,72 +30,52 @@ func NewManagerStrategy(hardwareKeyManager Manager, softwareKeyManager Manager) } } -func (m ManagerStrategy) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (*jose.JSONWebKeySet, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - "kid": kid, - "alg": alg, - "use": use, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid), + attribute.String("alg", alg), + attribute.String("use", use))) + defer otelx.End(span, &err) return m.hardwareKeyManager.GenerateAndPersistKeySet(ctx, set, kid, alg, use) } -func (m ManagerStrategy) AddKey(ctx context.Context, set string, key *jose.JSONWebKey) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) AddKey(ctx context.Context, set string, key *jose.JSONWebKey) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.AddKey", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) return m.softwareKeyManager.AddKey(ctx, set, key) } -func (m ManagerStrategy) AddKeySet(ctx context.Context, set string, keys *jose.JSONWebKeySet) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) AddKeySet(ctx context.Context, set string, keys *jose.JSONWebKeySet) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.AddKeySet", trace.WithAttributes(attribute.String("set", set))) + otelx.End(span, &err) return m.softwareKeyManager.AddKeySet(ctx, set, keys) } -func (m ManagerStrategy) UpdateKey(ctx context.Context, set string, key *jose.JSONWebKey) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) 
+func (m ManagerStrategy) UpdateKey(ctx context.Context, set string, key *jose.JSONWebKey) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.UpdateKey", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) return m.softwareKeyManager.UpdateKey(ctx, set, key) } -func (m ManagerStrategy) UpdateKeySet(ctx context.Context, set string, keys *jose.JSONWebKeySet) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) UpdateKeySet(ctx context.Context, set string, keys *jose.JSONWebKeySet) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.UpdateKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) return m.softwareKeyManager.UpdateKeySet(ctx, set, keys) } -func (m ManagerStrategy) GetKey(ctx context.Context, set, kid string) (*jose.JSONWebKeySet, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - "kid": kid, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) GetKey(ctx context.Context, set, kid string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GetKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid))) + defer otelx.End(span, &err) keySet, err := m.hardwareKeyManager.GetKey(ctx, set, kid) if err != nil && !errors.Is(err, x.ErrNotFound) { @@ -105,13 +87,9 @@ func (m ManagerStrategy) GetKey(ctx context.Context, set, kid string) (*jose.JSO } } -func (m ManagerStrategy) GetKeySet(ctx context.Context, set string) (*jose.JSONWebKeySet, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) GetKeySet(ctx context.Context, set string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GetKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) keySet, err := m.hardwareKeyManager.GetKeySet(ctx, set) if err != nil && !errors.Is(err, x.ErrNotFound) { @@ -123,16 +101,14 @@ func (m ManagerStrategy) GetKeySet(ctx context.Context, set string) (*jose.JSONW } } -func (m ManagerStrategy) DeleteKey(ctx context.Context, set, kid string) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - "kid": kid, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) 
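The read and delete methods keep their existing two-tier behaviour; the unchanged else-branches are not shown in these hunks, but the surrounding error checks suggest that only x.ErrNotFound from the hardware key manager falls through to the software key manager, while any other error is returned as-is. A sketch of that control flow, under that assumption (the method name getKeySetSketch is hypothetical):

func (m ManagerStrategy) getKeySetSketch(ctx context.Context, set string) (*jose.JSONWebKeySet, error) {
	keySet, err := m.hardwareKeyManager.GetKeySet(ctx, set)
	switch {
	case err == nil:
		return keySet, nil // served by the hardware-backed key manager
	case errors.Is(err, x.ErrNotFound):
		// only "not found" falls through to the software key manager
		return m.softwareKeyManager.GetKeySet(ctx, set)
	default:
		return nil, err
	}
}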
+func (m ManagerStrategy) DeleteKey(ctx context.Context, set, kid string) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.DeleteKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid))) + defer otelx.End(span, &err) - err := m.hardwareKeyManager.DeleteKey(ctx, set, kid) + err = m.hardwareKeyManager.DeleteKey(ctx, set, kid) if err != nil && !errors.Is(err, x.ErrNotFound) { return err } else if errors.Is(err, x.ErrNotFound) { @@ -142,15 +118,11 @@ func (m ManagerStrategy) DeleteKey(ctx context.Context, set, kid string) error { } } -func (m ManagerStrategy) DeleteKeySet(ctx context.Context, set string) error { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.GenerateAndPersistKeySet") - defer span.End() - attrs := map[string]string{ - "set": set, - } - span.SetAttributes(otelx.StringAttrs(attrs)...) +func (m ManagerStrategy) DeleteKeySet(ctx context.Context, set string) (err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "jwk.DeleteKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) - err := m.hardwareKeyManager.DeleteKeySet(ctx, set) + err = m.hardwareKeyManager.DeleteKeySet(ctx, set) if err != nil && !errors.Is(err, x.ErrNotFound) { return err } else if errors.Is(err, x.ErrNotFound) { diff --git a/jwk/manager_strategy_test.go b/jwk/manager_strategy_test.go index 452273a123b..23d587988aa 100644 --- a/jwk/manager_strategy_test.go +++ b/jwk/manager_strategy_test.go @@ -7,16 +7,17 @@ import ( "context" "testing" + "github.com/go-jose/go-jose/v3" "github.com/golang/mock/gomock" "github.com/pkg/errors" "github.com/stretchr/testify/assert" - "gopkg.in/square/go-jose.v2" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" ) func TestKeyManagerStrategy(t *testing.T) { + t.Parallel() ctrl := gomock.NewController(t) softwareKeyManager := NewMockManager(ctrl) hardwareKeyManager := NewMockManager(ctrl) diff --git a/jwk/manager_test_helpers.go b/jwk/manager_test_helpers.go index 26f94cd5539..71b62252a25 100644 --- a/jwk/manager_test_helpers.go +++ b/jwk/manager_test_helpers.go @@ -10,21 +10,19 @@ import ( "testing" "time" - "github.com/google/uuid" - - "github.com/ory/x/assertx" - - "github.com/ory/x/errorsx" - + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - jose "gopkg.in/square/go-jose.v2" + + "github.com/ory/x/assertx" ) func RandomBytes(n int) ([]byte, error) { bytes := make([]byte, n) if _, err := io.ReadFull(rand.Reader, bytes); err != nil { - return []byte{}, errorsx.WithStack(err) + return []byte{}, errors.WithStack(err) } return bytes, nil } @@ -54,24 +52,26 @@ func TestHelperManagerKey(m Manager, algo string, keys *jose.JSONWebKeySet, suff } return func(t *testing.T) { - set := algo + uuid.NewString() + ctx := t.Context() + + set := algo + uuid.Must(uuid.NewV4()).String() - _, err := m.GetKey(context.TODO(), set, suffix) + _, err := m.GetKey(ctx, set, suffix) assert.NotNil(t, err) - err = m.AddKey(context.TODO(), set, First(priv)) + err = m.AddKey(ctx, set, First(priv)) require.NoError(t, err) 
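The test helpers above swap context.TODO() and google/uuid for the test-scoped context and gofrs/uuid. In isolation the new setup lines amount to the following sketch; exampleSetup is an illustrative name, and testing.T.Context assumes Go 1.24 or newer.

func exampleSetup(t *testing.T) (context.Context, string) {
	ctx := t.Context()                                // canceled automatically when the test finishes
	set := "RS256" + uuid.Must(uuid.NewV4()).String() // unique per run, so parallel tests cannot collide
	return ctx, set
}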
- got, err := m.GetKey(context.TODO(), set, suffix) + got, err := m.GetKey(ctx, set, suffix) require.NoError(t, err) assertx.EqualAsJSON(t, priv, canonicalizeThumbprints(got.Keys)) addKey := First(pub) - addKey.KeyID = uuid.NewString() - err = m.AddKey(context.TODO(), set, addKey) + addKey.KeyID = uuid.Must(uuid.NewV4()).String() + err = m.AddKey(ctx, set, addKey) require.NoError(t, err) - got, err = m.GetKey(context.TODO(), set, suffix) + got, err = m.GetKey(ctx, set, suffix) require.NoError(t, err) assertx.EqualAsJSON(t, priv, canonicalizeThumbprints(got.Keys)) @@ -81,23 +81,23 @@ func TestHelperManagerKey(m Manager, algo string, keys *jose.JSONWebKeySet, suff newKID := "new-key-id:" + suffix pub[0].KeyID = newKID pub[0].Use = "sig" - err = m.AddKey(context.TODO(), set, First(pub)) + err = m.AddKey(ctx, set, First(pub)) require.NoError(t, err) - got, err = m.GetKey(context.TODO(), set, newKID) + got, err = m.GetKey(ctx, set, newKID) require.NoError(t, err) newKey := First(got.Keys) assert.EqualValues(t, "sig", newKey.Use) newKey.Use = "enc" - err = m.UpdateKey(context.TODO(), set, newKey) + err = m.UpdateKey(ctx, set, newKey) require.NoError(t, err) - updated, err := m.GetKey(context.TODO(), set, newKID) + updated, err := m.GetKey(ctx, set, newKID) require.NoError(t, err) updatedKey := First(updated.Keys) assert.EqualValues(t, "enc", updatedKey.Use) - keys, err = m.GetKeySet(context.TODO(), set) + keys, err = m.GetKeySet(ctx, set) require.NoError(t, err) var found bool for _, k := range keys.Keys { @@ -109,13 +109,13 @@ func TestHelperManagerKey(m Manager, algo string, keys *jose.JSONWebKeySet, suff assert.True(t, found, "Key not found in key set: %s / %s\n%+v", keys, newKID) beforeDeleteKeysCount := len(keys.Keys) - err = m.DeleteKey(context.TODO(), set, suffix) + err = m.DeleteKey(ctx, set, suffix) require.NoError(t, err) - _, err = m.GetKey(context.TODO(), set, suffix) + _, err = m.GetKey(ctx, set, suffix) require.Error(t, err) - keys, err = m.GetKeySet(context.TODO(), set) + keys, err = m.GetKeySet(ctx, set) require.NoError(t, err) assert.EqualValues(t, beforeDeleteKeysCount-1, len(keys.Keys)) } @@ -123,17 +123,19 @@ func TestHelperManagerKey(m Manager, algo string, keys *jose.JSONWebKeySet, suff func TestHelperManagerKeySet(m Manager, algo string, keys *jose.JSONWebKeySet, suffix string, parallel bool) func(t *testing.T) { return func(t *testing.T) { + ctx := t.Context() + if parallel { t.Parallel() } - set := uuid.NewString() - _, err := m.GetKeySet(context.TODO(), algo+set) + set := uuid.Must(uuid.NewV4()).String() + _, err := m.GetKeySet(ctx, algo+set) require.Error(t, err) - err = m.AddKeySet(context.TODO(), algo+set, keys) + err = m.AddKeySet(ctx, algo+set, keys) require.NoError(t, err) - got, err := m.GetKeySet(context.TODO(), algo+set) + got, err := m.GetKeySet(ctx, algo+set) require.NoError(t, err) assertx.EqualAsJSON(t, canonicalizeThumbprints(keys.Key(suffix)), canonicalizeThumbprints(got.Key(suffix))) assertx.EqualAsJSON(t, canonicalizeThumbprints(keys.Key(suffix)), canonicalizeThumbprints(got.Key(suffix))) @@ -141,31 +143,33 @@ func TestHelperManagerKeySet(m Manager, algo string, keys *jose.JSONWebKeySet, s for i := range got.Keys { got.Keys[i].Use = "enc" } - err = m.UpdateKeySet(context.TODO(), algo+set, got) + err = m.UpdateKeySet(ctx, algo+set, got) require.NoError(t, err) - updated, err := m.GetKeySet(context.TODO(), algo+set) + updated, err := m.GetKeySet(ctx, algo+set) require.NoError(t, err) assert.EqualValues(t, "enc", updated.Key(suffix)[0].Public().Use) 
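GenerateJWK is now called without a context throughout these helpers and tests. A minimal usage sketch of generating a set and splitting it with the package helpers, as the helpers below do (the key ID and function name are illustrative; return types are as used here, a *jose.JSONWebKeySet plus an error, and single *jose.JSONWebKey values from the Find helpers):

func exampleGenerateAndSplit(t *testing.T) {
	set, err := GenerateJWK(jose.RS256, "example-kid", "sig")
	require.NoError(t, err)
	priv, err := FindPrivateKey(set) // the key carrying the private material
	require.NoError(t, err)
	pub, err := FindPublicKey(set) // the matching public key
	require.NoError(t, err)
	_, _ = priv, pub
}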
assert.EqualValues(t, "enc", updated.Key(suffix)[0].Use) - err = m.DeleteKeySet(context.TODO(), algo+set) + err = m.DeleteKeySet(ctx, algo+set) require.NoError(t, err) - _, err = m.GetKeySet(context.TODO(), algo+set) + _, err = m.GetKeySet(ctx, algo+set) require.Error(t, err) } } func TestHelperManagerGenerateAndPersistKeySet(m Manager, alg string, parallel bool) func(t *testing.T) { return func(t *testing.T) { + ctx := t.Context() + if parallel { t.Parallel() } - _, err := m.GetKeySet(context.TODO(), "foo") + _, err := m.GetKeySet(ctx, "foo") require.Error(t, err) - keys, err := m.GenerateAndPersistKeySet(context.TODO(), "foo", "bar", alg, "sig") + keys, err := m.GenerateAndPersistKeySet(ctx, "foo", "bar", alg, "sig") require.NoError(t, err) genPub, err := FindPublicKey(keys) require.NoError(t, err) @@ -173,7 +177,7 @@ func TestHelperManagerGenerateAndPersistKeySet(m Manager, alg string, parallel b genPriv, err := FindPrivateKey(keys) require.NoError(t, err) - got, err := m.GetKeySet(context.TODO(), "foo") + got, err := m.GetKeySet(ctx, "foo") require.NoError(t, err) gotPub, err := FindPublicKey(got) require.NoError(t, err) @@ -185,64 +189,18 @@ func TestHelperManagerGenerateAndPersistKeySet(m Manager, alg string, parallel b assert.EqualValues(t, genPriv.KeyID, gotPriv.KeyID) - err = m.DeleteKeySet(context.TODO(), "foo") - require.NoError(t, err) - - _, err = m.GetKeySet(context.TODO(), "foo") - require.Error(t, err) - } -} - -func TestHelperManagerNIDIsolationKeySet(t1 Manager, t2 Manager, alg string) func(t *testing.T) { - return func(t *testing.T) { - _, err := t1.GetKeySet(context.TODO(), "foo") - require.Error(t, err) - _, err = t2.GetKeySet(context.TODO(), "foo") - require.Error(t, err) - - _, err = t1.GenerateAndPersistKeySet(context.TODO(), "foo", "bar", alg, "sig") - require.NoError(t, err) - keys, err := t1.GetKeySet(context.TODO(), "foo") + err = m.DeleteKeySet(ctx, "foo") require.NoError(t, err) - _, err = t2.GetKeySet(context.TODO(), "foo") - require.Error(t, err) - - err = t2.DeleteKeySet(context.TODO(), "foo") - require.Error(t, err) - err = t1.DeleteKeySet(context.TODO(), "foo") - require.NoError(t, err) - _, err = t1.GetKeySet(context.TODO(), "foo") - require.Error(t, err) - - err = t1.AddKeySet(context.TODO(), "foo", keys) - require.NoError(t, err) - err = t2.DeleteKeySet(context.TODO(), "foo") - require.Error(t, err) - - for i := range keys.Keys { - keys.Keys[i].Use = "enc" - } - err = t1.UpdateKeySet(context.TODO(), "foo", keys) - for i := range keys.Keys { - keys.Keys[i].Use = "err" - } - err = t2.UpdateKeySet(context.TODO(), "foo", keys) - require.Error(t, err) - updated, err := t1.GetKeySet(context.TODO(), "foo") - require.NoError(t, err) - for i := range updated.Keys { - assert.EqualValues(t, "enc", updated.Keys[i].Use) - } - err = t1.DeleteKeySet(context.TODO(), "foo") + _, err = m.GetKeySet(ctx, "foo") require.Error(t, err) } } -func TestHelperNID(t1ValidNID Manager, t2InvalidNID Manager) func(t *testing.T) { +func TestHelperNID(t1ValidNID, t2InvalidNID Manager) func(t *testing.T) { return func(t *testing.T) { ctx := context.Background() - jwks, err := GenerateJWK(ctx, jose.RS256, "2022-03-11-ks-1-kid", "test") + jwks, err := GenerateJWK(jose.RS256, "2022-03-11-ks-1-kid", "test") require.NoError(t, err) require.Error(t, t2InvalidNID.AddKey(ctx, "2022-03-11-k-1", &jwks.Keys[0])) require.NoError(t, t1ValidNID.AddKey(ctx, "2022-03-11-k-1", &jwks.Keys[0])) diff --git a/jwk/registry.go b/jwk/registry.go index 1fa8ae42d51..b71e0a28524 100644 --- a/jwk/registry.go +++ 
b/jwk/registry.go @@ -4,8 +4,8 @@ package jwk import ( - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/x" ) type InternalRegistry interface { @@ -16,7 +16,5 @@ type InternalRegistry interface { type Registry interface { config.Provider - KeyManager() Manager - SoftwareKeyManager() Manager - KeyCipher() *AEAD + ManagerProvider } diff --git a/jwk/registry_mock_test.go b/jwk/registry_mock_test.go index 7188054e5c7..f78dec13627 100644 --- a/jwk/registry_mock_test.go +++ b/jwk/registry_mock_test.go @@ -1,4 +1,4 @@ -// Copyright © 2022 Ory Corp +// Copyright © 2025 Ory Corp // SPDX-License-Identifier: Apache-2.0 // Code generated by MockGen. DO NOT EDIT. @@ -13,8 +13,9 @@ import ( gomock "github.com/golang/mock/gomock" herodot "github.com/ory/herodot" - config "github.com/ory/hydra/driver/config" - jwk "github.com/ory/hydra/jwk" + aead "github.com/ory/hydra/v2/aead" + config "github.com/ory/hydra/v2/driver/config" + jwk "github.com/ory/hydra/v2/jwk" logrusx "github.com/ory/x/logrusx" ) @@ -70,10 +71,10 @@ func (mr *MockInternalRegistryMockRecorder) Config() *gomock.Call { } // KeyCipher mocks base method. -func (m *MockInternalRegistry) KeyCipher() *jwk.AEAD { +func (m *MockInternalRegistry) KeyCipher() *aead.AESGCM { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "KeyCipher") - ret0, _ := ret[0].(*jwk.AEAD) + ret0, _ := ret[0].(*aead.AESGCM) return ret0 } @@ -177,10 +178,10 @@ func (mr *MockRegistryMockRecorder) Config() *gomock.Call { } // KeyCipher mocks base method. -func (m *MockRegistry) KeyCipher() *jwk.AEAD { +func (m *MockRegistry) KeyCipher() *aead.AESGCM { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "KeyCipher") - ret0, _ := ret[0].(*jwk.AEAD) + ret0, _ := ret[0].(*aead.AESGCM) return ret0 } diff --git a/jwk/sdk_test.go b/jwk/sdk_test.go index b1c87c8e934..0df6f2d17b2 100644 --- a/jwk/sdk_test.go +++ b/jwk/sdk_test.go @@ -9,41 +9,38 @@ import ( "net/http/httptest" "testing" - "github.com/ory/hydra/driver/config" - - hydra "github.com/ory/hydra-client-go/v2" - - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - . "github.com/ory/hydra/jwk" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + . 
"github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/prometheusx" ) func TestJWKSDK(t *testing.T) { + t.Parallel() ctx := context.Background() - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + reg := testhelpers.NewRegistryMemory(t) - router := x.NewRouterAdmin(conf.AdminURL) + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + router := x.NewRouterAdmin(metrics) h := NewHandler(reg) - h.SetRoutes(router, x.NewRouterPublic(), func(h http.Handler) http.Handler { - return h - }) + h.SetAdminRoutes(router) server := httptest.NewServer(router) - conf.MustSet(ctx, config.KeyAdminURL, server.URL) + reg.Config().MustSet(ctx, config.KeyAdminURL, server.URL) sdk := hydra.NewAPIClient(hydra.NewConfiguration()) sdk.GetConfig().Servers = hydra.ServerConfigurations{{URL: server.URL}} expectedKid := "key-bar" t.Run("JSON Web Key", func(t *testing.T) { + t.Parallel() t.Run("CreateJwkSetKey", func(t *testing.T) { // Create a key called set-foo - resultKeys, _, err := sdk.JwkApi.CreateJsonWebKeySet(context.Background(), "set-foo").CreateJsonWebKeySet(hydra.CreateJsonWebKeySet{ + resultKeys, _, err := sdk.JwkAPI.CreateJsonWebKeySet(context.Background(), "set-foo").CreateJsonWebKeySet(hydra.CreateJsonWebKeySet{ Alg: "RS256", Kid: "key-bar", Use: "sig", @@ -57,7 +54,7 @@ func TestJWKSDK(t *testing.T) { var resultKeys *hydra.JsonWebKeySet t.Run("GetJwkSetKey after create", func(t *testing.T) { - result, _, err := sdk.JwkApi.GetJsonWebKey(ctx, "set-foo", expectedKid).Execute() + result, _, err := sdk.JwkAPI.GetJsonWebKey(ctx, "set-foo", expectedKid).Execute() require.NoError(t, err) require.Len(t, result.Keys, 1) require.Equal(t, expectedKid, result.Keys[0].Kid) @@ -67,25 +64,25 @@ func TestJWKSDK(t *testing.T) { }) t.Run("UpdateJwkSetKey", func(t *testing.T) { - if conf.HSMEnabled() { + if reg.Config().HSMEnabled() { t.Skip("Skipping test. 
Keys cannot be updated when Hardware Security Module is enabled") } require.Len(t, resultKeys.Keys, 1) resultKeys.Keys[0].Alg = "ES256" - resultKey, _, err := sdk.JwkApi.SetJsonWebKey(ctx, "set-foo", expectedKid).JsonWebKey(resultKeys.Keys[0]).Execute() + resultKey, _, err := sdk.JwkAPI.SetJsonWebKey(ctx, "set-foo", expectedKid).JsonWebKey(resultKeys.Keys[0]).Execute() require.NoError(t, err) assert.Equal(t, expectedKid, resultKey.Kid) assert.Equal(t, "ES256", resultKey.Alg) }) t.Run("DeleteJwkSetKey after delete", func(t *testing.T) { - _, err := sdk.JwkApi.DeleteJsonWebKey(ctx, "set-foo", expectedKid).Execute() + _, err := sdk.JwkAPI.DeleteJsonWebKey(ctx, "set-foo", expectedKid).Execute() require.NoError(t, err) }) t.Run("GetJwkSetKey after delete", func(t *testing.T) { - _, res, err := sdk.JwkApi.GetJsonWebKey(ctx, "set-foo", expectedKid).Execute() + _, res, err := sdk.JwkAPI.GetJsonWebKey(ctx, "set-foo", expectedKid).Execute() require.Error(t, err) assert.Equal(t, http.StatusNotFound, res.StatusCode) }) @@ -93,10 +90,12 @@ func TestJWKSDK(t *testing.T) { }) t.Run("JWK Set", func(t *testing.T) { + t.Parallel() t.Run("CreateJwkSetKey", func(t *testing.T) { - resultKeys, _, err := sdk.JwkApi.CreateJsonWebKeySet(ctx, "set-foo2").CreateJsonWebKeySet(hydra.CreateJsonWebKeySet{ + resultKeys, _, err := sdk.JwkAPI.CreateJsonWebKeySet(ctx, "set-foo2").CreateJsonWebKeySet(hydra.CreateJsonWebKeySet{ Alg: "RS256", Kid: "key-bar", + Use: "sig", }).Execute() require.NoError(t, err) require.Len(t, resultKeys.Keys, 1) @@ -104,10 +103,10 @@ func TestJWKSDK(t *testing.T) { assert.Equal(t, "RS256", resultKeys.Keys[0].Alg) }) - resultKeys, _, err := sdk.JwkApi.GetJsonWebKeySet(ctx, "set-foo2").Execute() + resultKeys, _, err := sdk.JwkAPI.GetJsonWebKeySet(ctx, "set-foo2").Execute() t.Run("GetJwkSet after create", func(t *testing.T) { require.NoError(t, err) - if conf.HSMEnabled() { + if reg.Config().HSMEnabled() { require.Len(t, resultKeys.Keys, 1) assert.Equal(t, expectedKid, resultKeys.Keys[0].Kid) assert.Equal(t, "RS256", resultKeys.Keys[0].Alg) @@ -119,13 +118,13 @@ func TestJWKSDK(t *testing.T) { }) t.Run("UpdateJwkSet", func(t *testing.T) { - if conf.HSMEnabled() { + if reg.Config().HSMEnabled() { t.Skip("Skipping test. 
Keys cannot be updated when Hardware Security Module is enabled") } require.Len(t, resultKeys.Keys, 1) resultKeys.Keys[0].Alg = "ES256" - result, _, err := sdk.JwkApi.SetJsonWebKeySet(ctx, "set-foo2").JsonWebKeySet(*resultKeys).Execute() + result, _, err := sdk.JwkAPI.SetJsonWebKeySet(ctx, "set-foo2").JsonWebKeySet(*resultKeys).Execute() require.NoError(t, err) require.Len(t, result.Keys, 1) assert.Equal(t, expectedKid, result.Keys[0].Kid) @@ -133,18 +132,18 @@ func TestJWKSDK(t *testing.T) { }) t.Run("DeleteJwkSet", func(t *testing.T) { - _, err := sdk.JwkApi.DeleteJsonWebKeySet(ctx, "set-foo2").Execute() + _, err := sdk.JwkAPI.DeleteJsonWebKeySet(ctx, "set-foo2").Execute() require.NoError(t, err) }) t.Run("GetJwkSet after delete", func(t *testing.T) { - _, res, err := sdk.JwkApi.GetJsonWebKeySet(ctx, "set-foo2").Execute() + _, res, err := sdk.JwkAPI.GetJsonWebKeySet(ctx, "set-foo2").Execute() require.Error(t, err) assert.Equal(t, http.StatusNotFound, res.StatusCode) }) t.Run("GetJwkSetKey after delete", func(t *testing.T) { - _, res, err := sdk.JwkApi.GetJsonWebKey(ctx, "set-foo2", expectedKid).Execute() + _, res, err := sdk.JwkAPI.GetJsonWebKey(ctx, "set-foo2", expectedKid).Execute() require.Error(t, err) assert.Equal(t, http.StatusNotFound, res.StatusCode) }) diff --git a/main.go b/main.go index 9697e46b2ad..f897ce57eb1 100644 --- a/main.go +++ b/main.go @@ -4,7 +4,7 @@ package main import ( - "github.com/ory/hydra/cmd" + "github.com/ory/hydra/v2/cmd" "github.com/ory/x/profilex" ) diff --git a/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=jwt-access_and_id_tokens_with_extra_claims_consent_request.json b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=jwt-access_and_id_tokens_with_extra_claims_consent_request.json new file mode 100644 index 00000000000..149180ca51d --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=jwt-access_and_id_tokens_with_extra_claims_consent_request.json @@ -0,0 +1,53 @@ +{ + "acr": "acr-value", + "amr": [ + "amr1", + "amr2" + ], + "client": { + "access_token_strategy": "jwt", + "allowed_cors_origins": [], + "audience": [ + "audience-1", + "audience-2" + ], + "client_id": "64f78bf1-f388-4eeb-9fee-e7207226c6be-jwt", + "client_name": "", + "client_secret_expires_at": 0, + "client_uri": "", + "contacts": [], + "grant_types": [ + "implicit", + "refresh_token", + "authorization_code", + "password", + "client_credentials" + ], + "jwks": {}, + "logo_uri": "", + "metadata": {}, + "owner": "", + "policy_uri": "", + "redirect_uris": [ + "https://client.ory/callback" + ], + "response_types": [ + "id_token", + "code", + "token" + ], + "scope": "hydra offline openid", + "skip_consent": false, + "subject_type": "", + "tos_uri": "" + }, + "oidc_context": {}, + "requested_access_token_audience": [], + "requested_scope": [ + "hydra", + "offline", + "openid" + ], + "skip": false, + "subject": "c6a8ee1c-e0c4-404c-bba7-6a5b8702a2e9" +} diff --git a/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=jwt-access_and_id_tokens_with_extra_claims_login_request.json b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=jwt-access_and_id_tokens_with_extra_claims_login_request.json new file mode 100644 index 00000000000..e823adef92b --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=jwt-access_and_id_tokens_with_extra_claims_login_request.json @@ -0,0 +1,48 @@ +{ + "client": { + "access_token_strategy": "jwt", + "allowed_cors_origins": [], 
+ "audience": [ + "audience-1", + "audience-2" + ], + "client_id": "64f78bf1-f388-4eeb-9fee-e7207226c6be-jwt", + "client_name": "", + "client_secret_expires_at": 0, + "client_uri": "", + "contacts": [], + "grant_types": [ + "implicit", + "refresh_token", + "authorization_code", + "password", + "client_credentials" + ], + "jwks": {}, + "logo_uri": "", + "metadata": {}, + "owner": "", + "policy_uri": "", + "redirect_uris": [ + "https://client.ory/callback" + ], + "response_types": [ + "id_token", + "code", + "token" + ], + "scope": "hydra offline openid", + "skip_consent": false, + "subject_type": "", + "tos_uri": "" + }, + "oidc_context": {}, + "requested_access_token_audience": [], + "requested_scope": [ + "hydra", + "offline", + "openid" + ], + "skip": false, + "subject": "" +} diff --git a/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=opaque-access_and_id_tokens_with_extra_claims_consent_request.json b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=opaque-access_and_id_tokens_with_extra_claims_consent_request.json new file mode 100644 index 00000000000..f297515a26f --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=opaque-access_and_id_tokens_with_extra_claims_consent_request.json @@ -0,0 +1,53 @@ +{ + "acr": "acr-value", + "amr": [ + "amr1", + "amr2" + ], + "client": { + "access_token_strategy": "opaque", + "allowed_cors_origins": [], + "audience": [ + "audience-1", + "audience-2" + ], + "client_id": "64f78bf1-f388-4eeb-9fee-e7207226c6be-opaque", + "client_name": "", + "client_secret_expires_at": 0, + "client_uri": "", + "contacts": [], + "grant_types": [ + "implicit", + "refresh_token", + "authorization_code", + "password", + "client_credentials" + ], + "jwks": {}, + "logo_uri": "", + "metadata": {}, + "owner": "", + "policy_uri": "", + "redirect_uris": [ + "https://client.ory/callback" + ], + "response_types": [ + "id_token", + "code", + "token" + ], + "scope": "hydra offline openid", + "skip_consent": false, + "subject_type": "", + "tos_uri": "" + }, + "oidc_context": {}, + "requested_access_token_audience": [], + "requested_scope": [ + "hydra", + "offline", + "openid" + ], + "skip": false, + "subject": "c6a8ee1c-e0c4-404c-bba7-6a5b8702a2e9" +} diff --git a/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=opaque-access_and_id_tokens_with_extra_claims_login_request.json b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=opaque-access_and_id_tokens_with_extra_claims_login_request.json new file mode 100644 index 00000000000..6e18cae17b0 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeFlowE2E-auth_code_flow-strategy=opaque-access_and_id_tokens_with_extra_claims_login_request.json @@ -0,0 +1,48 @@ +{ + "client": { + "access_token_strategy": "opaque", + "allowed_cors_origins": [], + "audience": [ + "audience-1", + "audience-2" + ], + "client_id": "64f78bf1-f388-4eeb-9fee-e7207226c6be-opaque", + "client_name": "", + "client_secret_expires_at": 0, + "client_uri": "", + "contacts": [], + "grant_types": [ + "implicit", + "refresh_token", + "authorization_code", + "password", + "client_credentials" + ], + "jwks": {}, + "logo_uri": "", + "metadata": {}, + "owner": "", + "policy_uri": "", + "redirect_uris": [ + "https://client.ory/callback" + ], + "response_types": [ + "id_token", + "code", + "token" + ], + "scope": "hydra offline openid", + "skip_consent": false, + "subject_type": "", + "tos_uri": "" + }, + "oidc_context": {}, + "requested_access_token_audience": [], + "requested_scope": [ + "hydra", + 
"offline", + "openid" + ], + "skip": false, + "subject": "" +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=legacy.json similarity index 92% rename from oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured.json rename to oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=legacy.json index 66fbfb5af98..b21a79b621d 100644 --- a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured.json +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=legacy.json @@ -22,14 +22,14 @@ "extra": { } }, - "username": "", "subject": "foo" }, "extra": {}, "client_id": "app-client", "consent_challenge": "", "exclude_not_before_claim": false, - "allowed_top_level_claims": [] + "allowed_top_level_claims": [], + "mirror_top_level_claims": true }, "requester": { "client_id": "app-client", diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=new.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=new.json new file mode 100644 index 00000000000..3ebfb438e65 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=new.json @@ -0,0 +1,57 @@ +{ + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "hooked": "legacy" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": { + "hooked": "legacy" + }, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "request": { + "client_id": "app-client", + "requested_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ], + "payload": { + "grant_type": [ + "refresh_token" + ] + } + } +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=legacy.json similarity index 92% rename from 
oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured.json rename to oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=legacy.json index 66fbfb5af98..b21a79b621d 100644 --- a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured.json +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=legacy.json @@ -22,14 +22,14 @@ "extra": { } }, - "username": "", "subject": "foo" }, "extra": {}, "client_id": "app-client", "consent_challenge": "", "exclude_not_before_claim": false, - "allowed_top_level_claims": [] + "allowed_top_level_claims": [], + "mirror_top_level_claims": true }, "requester": { "client_id": "app-client", diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=new.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=new.json new file mode 100644 index 00000000000..3ebfb438e65 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=new.json @@ -0,0 +1,57 @@ +{ + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "hooked": "legacy" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": { + "hooked": "legacy" + }, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "request": { + "client_id": "app-client", + "requested_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ], + "payload": { + "grant_type": [ + "refresh_token" + ] + } + } +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=legacy.json similarity index 92% rename from oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured.json rename to oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=legacy.json 
index 66fbfb5af98..b21a79b621d 100644 --- a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured.json +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=legacy.json @@ -22,14 +22,14 @@ "extra": { } }, - "username": "", "subject": "foo" }, "extra": {}, "client_id": "app-client", "consent_challenge": "", "exclude_not_before_claim": false, - "allowed_top_level_claims": [] + "allowed_top_level_claims": [], + "mirror_top_level_claims": true }, "requester": { "client_id": "app-client", diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=new.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=new.json new file mode 100644 index 00000000000..3ebfb438e65 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=new.json @@ -0,0 +1,57 @@ +{ + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "hooked": "legacy" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": { + "hooked": "legacy" + }, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "request": { + "client_id": "app-client", + "requested_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ], + "payload": { + "grant_type": [ + "refresh_token" + ] + } + } +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=legacy.json similarity index 92% rename from oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured.json rename to oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=legacy.json index 66fbfb5af98..b21a79b621d 100644 --- a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=jwt-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured.json +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=legacy.json @@ 
-22,14 +22,14 @@ "extra": { } }, - "username": "", "subject": "foo" }, "extra": {}, "client_id": "app-client", "consent_challenge": "", "exclude_not_before_claim": false, - "allowed_top_level_claims": [] + "allowed_top_level_claims": [], + "mirror_top_level_claims": true }, "requester": { "client_id": "app-client", diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=new.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=new.json new file mode 100644 index 00000000000..3ebfb438e65 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=0-description=should_pass_request_if_strategy_passes-should_call_refresh_token_hook_if_configured-hook=new.json @@ -0,0 +1,57 @@ +{ + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "hooked": "legacy" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": { + "hooked": "legacy" + }, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "request": { + "client_id": "app-client", + "requested_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ], + "payload": { + "grant_type": [ + "refresh_token" + ] + } + } +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=legacy.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=legacy.json new file mode 100644 index 00000000000..b21a79b621d --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=legacy.json @@ -0,0 +1,53 @@ +{ + "subject": "foo", + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "sid": "" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": {}, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "requester": { + "client_id": "app-client", + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ] + }, + "client_id": "app-client", + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [] +} diff --git 
a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=new.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=new.json new file mode 100644 index 00000000000..3ebfb438e65 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured-hook=new.json @@ -0,0 +1,57 @@ +{ + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "hooked": "legacy" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": { + "hooked": "legacy" + }, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "request": { + "client_id": "app-client", + "requested_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ], + "payload": { + "grant_type": [ + "refresh_token" + ] + } + } +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured.json deleted file mode 100644 index 66fbfb5af98..00000000000 --- a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=2-description=should_pass_because_prompt=none_and_max_age_is_less_than_auth_time-should_call_refresh_token_hook_if_configured.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "subject": "foo", - "session": { - "id_token": { - "id_token_claims": { - "jti": "", - "iss": "http://localhost:4444/", - "sub": "foo", - "aud": [ - "app-client" - ], - "nonce": "", - "at_hash": "", - "acr": "1", - "amr": null, - "c_hash": "", - "ext": { - "sid": "" - } - }, - "headers": { - "extra": { - } - }, - "username": "", - "subject": "foo" - }, - "extra": {}, - "client_id": "app-client", - "consent_challenge": "", - "exclude_not_before_claim": false, - "allowed_top_level_claims": [] - }, - "requester": { - "client_id": "app-client", - "granted_scopes": [ - "offline", - "openid", - "hydra.*" - ], - "granted_audience": [], - "grant_types": [ - "refresh_token" - ] - }, - "client_id": "app-client", - "granted_scopes": [ - "offline", - "openid", - "hydra.*" - ], - "granted_audience": [] -} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=legacy.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=legacy.json new file mode 100644 index 
00000000000..b21a79b621d --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=legacy.json @@ -0,0 +1,53 @@ +{ + "subject": "foo", + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "sid": "" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": {}, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "requester": { + "client_id": "app-client", + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ] + }, + "client_id": "app-client", + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [] +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=new.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=new.json new file mode 100644 index 00000000000..3ebfb438e65 --- /dev/null +++ b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured-hook=new.json @@ -0,0 +1,57 @@ +{ + "session": { + "id_token": { + "id_token_claims": { + "jti": "", + "iss": "http://localhost:4444/", + "sub": "foo", + "aud": [ + "app-client" + ], + "nonce": "", + "at_hash": "", + "acr": "1", + "amr": null, + "c_hash": "", + "ext": { + "hooked": "legacy" + } + }, + "headers": { + "extra": { + } + }, + "subject": "foo" + }, + "extra": { + "hooked": "legacy" + }, + "client_id": "app-client", + "consent_challenge": "", + "exclude_not_before_claim": false, + "allowed_top_level_claims": [], + "mirror_top_level_claims": true + }, + "request": { + "client_id": "app-client", + "requested_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_scopes": [ + "offline", + "openid", + "hydra.*" + ], + "granted_audience": [], + "grant_types": [ + "refresh_token" + ], + "payload": { + "grant_type": [ + "refresh_token" + ] + } + } +} diff --git a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured.json b/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured.json deleted file mode 100644 index 66fbfb5af98..00000000000 --- a/oauth2/.snapshots/TestAuthCodeWithMockStrategy-strategy=opaque-case=5-description=should_pass_with_prompt=login_when_authentication_time_is_recent-should_call_refresh_token_hook_if_configured.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "subject": "foo", - "session": { - "id_token": { - "id_token_claims": { - "jti": "", - "iss": "http://localhost:4444/", - "sub": "foo", - "aud": [ - "app-client" - ], - "nonce": "", - 
"at_hash": "", - "acr": "1", - "amr": null, - "c_hash": "", - "ext": { - "sid": "" - } - }, - "headers": { - "extra": { - } - }, - "username": "", - "subject": "foo" - }, - "extra": {}, - "client_id": "app-client", - "consent_challenge": "", - "exclude_not_before_claim": false, - "allowed_top_level_claims": [] - }, - "requester": { - "client_id": "app-client", - "granted_scopes": [ - "offline", - "openid", - "hydra.*" - ], - "granted_audience": [], - "grant_types": [ - "refresh_token" - ] - }, - "client_id": "app-client", - "granted_scopes": [ - "offline", - "openid", - "hydra.*" - ], - "granted_audience": [] -} diff --git a/oauth2/.snapshots/TestHandlerOauthAuthorizationServer-hsm_enabled=false.json b/oauth2/.snapshots/TestHandlerOauthAuthorizationServer-hsm_enabled=false.json new file mode 100644 index 00000000000..e6ca36d8ce7 --- /dev/null +++ b/oauth2/.snapshots/TestHandlerOauthAuthorizationServer-hsm_enabled=false.json @@ -0,0 +1,104 @@ +{ + "authorization_endpoint": "http://hydra.localhost/oauth2/auth", + "backchannel_logout_session_supported": true, + "backchannel_logout_supported": true, + "claims_parameter_supported": false, + "claims_supported": [ + "sub" + ], + "code_challenge_methods_supported": [ + "plain", + "S256" + ], + "credentials_endpoint_draft_00": "http://hydra.localhost/credentials", + "credentials_supported_draft_00": [ + { + "cryptographic_binding_methods_supported": [ + "jwk" + ], + "cryptographic_suites_supported": [ + "PS256", + "RS256", + "ES256", + "PS384", + "RS384", + "ES384", + "PS512", + "RS512", + "ES512", + "EdDSA" + ], + "format": "jwt_vc_json", + "types": [ + "VerifiableCredential", + "UserInfoCredential" + ] + } + ], + "device_authorization_endpoint": "http://hydra.localhost/oauth2/device/auth", + "end_session_endpoint": "http://hydra.localhost/oauth2/sessions/logout", + "frontchannel_logout_session_supported": true, + "frontchannel_logout_supported": true, + "grant_types_supported": [ + "authorization_code", + "implicit", + "client_credentials", + "refresh_token", + "urn:ietf:params:oauth:grant-type:device_code" + ], + "id_token_signed_response_alg": [ + "ES256" + ], + "id_token_signing_alg_values_supported": [ + "ES256" + ], + "issuer": "http://hydra.localhost", + "jwks_uri": "http://hydra.localhost/.well-known/jwks.json", + "registration_endpoint": "http://client-register/registration", + "request_object_signing_alg_values_supported": [ + "none", + "RS256", + "ES256" + ], + "request_parameter_supported": true, + "request_uri_parameter_supported": true, + "require_request_uri_registration": true, + "response_modes_supported": [ + "query", + "fragment", + "form_post" + ], + "response_types_supported": [ + "code", + "code id_token", + "id_token", + "token id_token", + "token", + "token id_token code" + ], + "revocation_endpoint": "http://hydra.localhost/oauth2/revoke", + "scopes_supported": [ + "offline_access", + "offline", + "openid" + ], + "subject_types_supported": [ + "public", + "pairwise" + ], + "token_endpoint": "http://hydra.localhost/oauth2/token", + "token_endpoint_auth_methods_supported": [ + "client_secret_post", + "client_secret_basic", + "private_key_jwt", + "none" + ], + "userinfo_endpoint": "/userinfo", + "userinfo_signed_response_alg": [ + "ES256" + ], + "userinfo_signing_alg_values_supported": [ + "none", + "ES256" + ] +} diff --git a/oauth2/.snapshots/TestHandlerOauthAuthorizationServer-hsm_enabled=true.json b/oauth2/.snapshots/TestHandlerOauthAuthorizationServer-hsm_enabled=true.json new file mode 100644 index 
00000000000..d7ec470b34a --- /dev/null +++ b/oauth2/.snapshots/TestHandlerOauthAuthorizationServer-hsm_enabled=true.json @@ -0,0 +1,91 @@ +{ + "authorization_endpoint": "http://hydra.localhost/oauth2/auth", + "backchannel_logout_session_supported": true, + "backchannel_logout_supported": true, + "claims_parameter_supported": false, + "claims_supported": [ + "sub" + ], + "code_challenge_methods_supported": [ + "plain", + "S256" + ], + "credentials_endpoint_draft_00": "http://hydra.localhost/credentials", + "credentials_supported_draft_00": [ + { + "cryptographic_binding_methods_supported": [ + "jwk" + ], + "cryptographic_suites_supported": [ + "PS256", + "RS256", + "ES256", + "PS384", + "RS384", + "ES384", + "PS512", + "RS512", + "ES512", + "EdDSA" + ], + "format": "jwt_vc_json", + "types": [ + "VerifiableCredential", + "UserInfoCredential" + ] + } + ], + "device_authorization_endpoint": "http://hydra.localhost/oauth2/device/auth", + "end_session_endpoint": "http://hydra.localhost/oauth2/sessions/logout", + "frontchannel_logout_session_supported": true, + "frontchannel_logout_supported": true, + "grant_types_supported": [ + "authorization_code", + "implicit", + "client_credentials", + "refresh_token", + "urn:ietf:params:oauth:grant-type:device_code" + ], + "issuer": "http://hydra.localhost", + "jwks_uri": "http://hydra.localhost/.well-known/jwks.json", + "registration_endpoint": "http://client-register/registration", + "request_object_signing_alg_values_supported": [ + "none", + "RS256", + "ES256" + ], + "request_parameter_supported": true, + "request_uri_parameter_supported": true, + "require_request_uri_registration": true, + "response_modes_supported": [ + "query", + "fragment", + "form_post" + ], + "response_types_supported": [ + "code", + "code id_token", + "id_token", + "token id_token", + "token", + "token id_token code" + ], + "revocation_endpoint": "http://hydra.localhost/oauth2/revoke", + "scopes_supported": [ + "offline_access", + "offline", + "openid" + ], + "subject_types_supported": [ + "public", + "pairwise" + ], + "token_endpoint": "http://hydra.localhost/oauth2/token", + "token_endpoint_auth_methods_supported": [ + "client_secret_post", + "client_secret_basic", + "private_key_jwt", + "none" + ], + "userinfo_endpoint": "/userinfo" +} diff --git a/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=false.json b/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=false.json index 330e25c43a4..e6ca36d8ce7 100644 --- a/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=false.json +++ b/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=false.json @@ -10,6 +10,32 @@ "plain", "S256" ], + "credentials_endpoint_draft_00": "http://hydra.localhost/credentials", + "credentials_supported_draft_00": [ + { + "cryptographic_binding_methods_supported": [ + "jwk" + ], + "cryptographic_suites_supported": [ + "PS256", + "RS256", + "ES256", + "PS384", + "RS384", + "ES384", + "PS512", + "RS512", + "ES512", + "EdDSA" + ], + "format": "jwt_vc_json", + "types": [ + "VerifiableCredential", + "UserInfoCredential" + ] + } + ], + "device_authorization_endpoint": "http://hydra.localhost/oauth2/device/auth", "end_session_endpoint": "http://hydra.localhost/oauth2/sessions/logout", "frontchannel_logout_session_supported": true, "frontchannel_logout_supported": true, @@ -17,13 +43,14 @@ "authorization_code", "implicit", "client_credentials", - "refresh_token" + "refresh_token", + "urn:ietf:params:oauth:grant-type:device_code" ], "id_token_signed_response_alg": [ - "RS256" + "ES256" ], 
"id_token_signing_alg_values_supported": [ - "RS256" + "ES256" ], "issuer": "http://hydra.localhost", "jwks_uri": "http://hydra.localhost/.well-known/jwks.json", @@ -38,7 +65,8 @@ "require_request_uri_registration": true, "response_modes_supported": [ "query", - "fragment" + "fragment", + "form_post" ], "response_types_supported": [ "code", @@ -55,8 +83,8 @@ "openid" ], "subject_types_supported": [ - "pairwise", - "public" + "public", + "pairwise" ], "token_endpoint": "http://hydra.localhost/oauth2/token", "token_endpoint_auth_methods_supported": [ @@ -67,10 +95,10 @@ ], "userinfo_endpoint": "/userinfo", "userinfo_signed_response_alg": [ - "RS256" + "ES256" ], "userinfo_signing_alg_values_supported": [ "none", - "RS256" + "ES256" ] } diff --git a/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=true.json b/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=true.json index 330e25c43a4..d7ec470b34a 100644 --- a/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=true.json +++ b/oauth2/.snapshots/TestHandlerWellKnown-hsm_enabled=true.json @@ -10,6 +10,32 @@ "plain", "S256" ], + "credentials_endpoint_draft_00": "http://hydra.localhost/credentials", + "credentials_supported_draft_00": [ + { + "cryptographic_binding_methods_supported": [ + "jwk" + ], + "cryptographic_suites_supported": [ + "PS256", + "RS256", + "ES256", + "PS384", + "RS384", + "ES384", + "PS512", + "RS512", + "ES512", + "EdDSA" + ], + "format": "jwt_vc_json", + "types": [ + "VerifiableCredential", + "UserInfoCredential" + ] + } + ], + "device_authorization_endpoint": "http://hydra.localhost/oauth2/device/auth", "end_session_endpoint": "http://hydra.localhost/oauth2/sessions/logout", "frontchannel_logout_session_supported": true, "frontchannel_logout_supported": true, @@ -17,13 +43,8 @@ "authorization_code", "implicit", "client_credentials", - "refresh_token" - ], - "id_token_signed_response_alg": [ - "RS256" - ], - "id_token_signing_alg_values_supported": [ - "RS256" + "refresh_token", + "urn:ietf:params:oauth:grant-type:device_code" ], "issuer": "http://hydra.localhost", "jwks_uri": "http://hydra.localhost/.well-known/jwks.json", @@ -38,7 +59,8 @@ "require_request_uri_registration": true, "response_modes_supported": [ "query", - "fragment" + "fragment", + "form_post" ], "response_types_supported": [ "code", @@ -55,8 +77,8 @@ "openid" ], "subject_types_supported": [ - "pairwise", - "public" + "public", + "pairwise" ], "token_endpoint": "http://hydra.localhost/oauth2/token", "token_endpoint_auth_methods_supported": [ @@ -65,12 +87,5 @@ "private_key_jwt", "none" ], - "userinfo_endpoint": "/userinfo", - "userinfo_signed_response_alg": [ - "RS256" - ], - "userinfo_signing_alg_values_supported": [ - "none", - "RS256" - ] + "userinfo_endpoint": "/userinfo" } diff --git a/oauth2/.snapshots/TestUnmarshalSession-v1.11.8.json b/oauth2/.snapshots/TestUnmarshalSession-v1.11.8.json index 723df624f4a..03e8881ee72 100644 --- a/oauth2/.snapshots/TestUnmarshalSession-v1.11.8.json +++ b/oauth2/.snapshots/TestUnmarshalSession-v1.11.8.json @@ -17,7 +17,8 @@ "amr": [], "c_hash": "", "ext": { - "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d" + "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d", + "timestamp": 1723546027 } }, "headers": { @@ -45,5 +46,6 @@ "market", "zone", "login_session_id" - ] + ], + "mirror_top_level_claims": false } diff --git a/oauth2/.snapshots/TestUnmarshalSession-v1.11.9.json b/oauth2/.snapshots/TestUnmarshalSession-v1.11.9.json index 723df624f4a..03e8881ee72 100644 --- a/oauth2/.snapshots/TestUnmarshalSession-v1.11.9.json +++ 
b/oauth2/.snapshots/TestUnmarshalSession-v1.11.9.json @@ -17,7 +17,8 @@ "amr": [], "c_hash": "", "ext": { - "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d" + "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d", + "timestamp": 1723546027 } }, "headers": { @@ -45,5 +46,6 @@ "market", "zone", "login_session_id" - ] + ], + "mirror_top_level_claims": false } diff --git a/oauth2/e2e_test.go b/oauth2/e2e_test.go new file mode 100644 index 00000000000..8d22b5bf60c --- /dev/null +++ b/oauth2/e2e_test.go @@ -0,0 +1,203 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + "golang.org/x/oauth2" + + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/pointerx" + "github.com/ory/x/snapshotx" + "github.com/ory/x/uuidx" +) + +func TestAuthCodeFlowE2E(t *testing.T) { + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyAccessTokenStrategy: "opaque", + config.KeyRefreshTokenHook: "", + config.KeyLoginURL: x.LoginURL, + config.KeyConsentURL: x.ConsentURL, + config.KeyAccessTokenLifespan: 10 * time.Minute, // allow to debug + config.KeyRefreshTokenLifespan: 20 * time.Minute, // allow to debug + config.KeyScopeStrategy: "exact", + config.KeyIssuerURL: "https://hydra.ory", + }))) + + jwk.EnsureAsymmetricKeypairExists(t, reg, string(jose.ES256), x.OpenIDConnectKeyName) + jwk.EnsureAsymmetricKeypairExists(t, reg, string(jose.ES256), x.OAuth2JWTKeyName) + + publicTS, adminTS := testhelpers.NewConfigurableOAuth2Server(t.Context(), t, reg) + publicClient := hydra.NewAPIClient(hydra.NewConfiguration()) + publicClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: publicTS.URL}} + adminClient := hydra.NewAPIClient(hydra.NewConfiguration()) + adminClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: adminTS.URL}} + + t.Run("auth code flow", func(t *testing.T) { + t.Run("rejects invalid audience", func(t *testing.T) { + cl := x.NewEmptyJarClient(t) + cl.CheckRedirect = func(*http.Request, []*http.Request) error { return http.ErrUseLastResponse } + _, conf := newOAuth2Client(t, reg, x.ClientCallbackURL) + loc := x.GetExpectRedirect(t, cl, conf.AuthCodeURL(uuidx.NewV4().String(), oauth2.SetAuthURLParam("audience", "invalid-audience"))) + require.Equal(t, x.ClientCallbackURL, fmt.Sprintf("%s://%s%s", loc.Scheme, loc.Host, loc.Path)) + assert.Equal(t, "invalid_request", loc.Query().Get("error")) + assert.Contains(t, loc.Query().Get("error_description"), "Requested audience 'invalid-audience' has not been whitelisted by the OAuth 2.0 Client.") + }) + + for _, accessTokenStrategy := range []string{"opaque", "jwt"} { + t.Run("strategy="+accessTokenStrategy, func(t *testing.T) { + cl, conf := newOAuth2Client(t, reg, x.ClientCallbackURL, func(c *client.Client) { + c.AccessTokenStrategy = accessTokenStrategy + 
c.Audience = []string{"audience-1", "audience-2"} + c.ID = "64f78bf1-f388-4eeb-9fee-e7207226c6be-" + accessTokenStrategy + }) + sub := "c6a8ee1c-e0c4-404c-bba7-6a5b8702a2e9" + + t.Run("access and id tokens with extra claims", func(t *testing.T) { + token := x.PerformAuthCodeFlow(t.Context(), t, nil, conf, adminClient, func(t *testing.T, req *hydra.OAuth2LoginRequest) hydra.AcceptOAuth2LoginRequest { + snapshotx.SnapshotT(t, req, + snapshotx.ExceptPaths("challenge", "client.created_at", "client.updated_at", "session_id", "request_url"), + snapshotx.WithName("login_request")) + return hydra.AcceptOAuth2LoginRequest{ + Amr: []string{"amr1", "amr2"}, + Acr: pointerx.Ptr("acr-value"), + Subject: sub, + } + }, func(t *testing.T, req *hydra.OAuth2ConsentRequest) hydra.AcceptOAuth2ConsentRequest { + snapshotx.SnapshotT(t, req, + snapshotx.ExceptPaths("challenge", "client.created_at", "client.updated_at", "consent_request_id", "login_challenge", "login_session_id", "request_url"), + snapshotx.WithName("consent_request")) + return hydra.AcceptOAuth2ConsentRequest{ + GrantScope: []string{"openid"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]any{"key_access": "extra access token value"}, + IdToken: map[string]any{"key_id": "extra id token value"}, + }, + } + }) + + // check access token + introspected := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + require.True(t, introspected.Get("active").Bool()) + testhelpers.AssertAccessToken(t, introspected, sub, cl.ID) + assert.Equal(t, "extra access token value", introspected.Get("ext.key_access").Str) + + if accessTokenStrategy == "jwt" { + dec := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, token.AccessToken)) + testhelpers.AssertAccessToken(t, dec, sub, cl.ID) + assert.Equal(t, "extra access token value", dec.Get("ext.key_access").Str) + } else { + assert.Len(t, strings.Split(token.AccessToken, "."), 2) + } + + idToken := testhelpers.DecodeIDToken(t, token) + testhelpers.AssertIDToken(t, idToken, sub, cl.ID) + assert.Equal(t, "extra id token value", idToken.Get("key_id").Str) + assert.JSONEq(t, `["amr1", "amr2"]`, idToken.Get("amr").Raw) + assert.Equal(t, "acr-value", idToken.Get("acr").Str) + }) + + t.Run("refreshed access and id tokens with extra claims", func(t *testing.T) { + token := x.PerformAuthCodeFlow(t.Context(), t, nil, conf, adminClient, func(*testing.T, *hydra.OAuth2LoginRequest) hydra.AcceptOAuth2LoginRequest { + return hydra.AcceptOAuth2LoginRequest{Subject: sub} + }, func(*testing.T, *hydra.OAuth2ConsentRequest) hydra.AcceptOAuth2ConsentRequest { + return hydra.AcceptOAuth2ConsentRequest{ + GrantScope: []string{"openid", "offline"}, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]any{"key_access": "extra access token value"}, + IdToken: map[string]any{"key_id": "extra id token value"}, + }, + } + }) + + token.Expiry = time.Now().Add(-time.Hour) + refreshed, err := conf.TokenSource(t.Context(), token).Token() + require.NoError(t, err) + require.NotEqual(t, token.AccessToken, refreshed.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshed.RefreshToken) + require.NotEqual(t, token.Extra("id_token"), refreshed.Extra("id_token")) + + // check access token + introspected := testhelpers.IntrospectToken(t, refreshed.AccessToken, adminTS) + require.True(t, introspected.Get("active").Bool()) + testhelpers.AssertAccessToken(t, introspected, sub, cl.ID) + assert.Equal(t, "extra access token value", introspected.Get("ext.key_access").Str) + + if 
accessTokenStrategy == "jwt" { + dec := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, refreshed.AccessToken)) + testhelpers.AssertAccessToken(t, dec, sub, cl.ID) + assert.Equal(t, "extra access token value", dec.Get("ext.key_access").Str) + } else { + assert.Len(t, strings.Split(refreshed.AccessToken, "."), 2) + } + + // check id token + idToken := testhelpers.DecodeIDToken(t, refreshed) + testhelpers.AssertIDToken(t, idToken, sub, cl.ID) + assert.Equal(t, "extra id token value", idToken.Get("key_id").Str) + + t.Run("original tokens are invalidated", func(t *testing.T) { + introspected := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.False(t, introspected.Get("active").Bool(), introspected.Raw) + introspected = testhelpers.IntrospectToken(t, token.RefreshToken, adminTS) + assert.False(t, introspected.Get("active").Bool(), introspected.Raw) + }) + }) + + t.Run("audience is forwarded to access token", func(t *testing.T) { + token := x.PerformAuthCodeFlow(t.Context(), t, nil, conf, adminClient, func(t *testing.T, req *hydra.OAuth2LoginRequest) hydra.AcceptOAuth2LoginRequest { + assert.EqualValues(t, cl.Audience, req.RequestedAccessTokenAudience) + return hydra.AcceptOAuth2LoginRequest{Subject: sub} + }, func(t *testing.T, req *hydra.OAuth2ConsentRequest) hydra.AcceptOAuth2ConsentRequest { + assert.EqualValues(t, cl.Audience, req.RequestedAccessTokenAudience) + return hydra.AcceptOAuth2ConsentRequest{ + GrantScope: []string{"openid"}, + GrantAccessTokenAudience: req.RequestedAccessTokenAudience, + } + }, oauth2.SetAuthURLParam("audience", strings.Join(cl.Audience, " "))) + + expectedAud, err := json.Marshal(cl.Audience) + require.NoError(t, err) + + introspected := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + require.True(t, introspected.Get("active").Bool()) + testhelpers.AssertAccessToken(t, introspected, sub, cl.ID) + assert.JSONEq(t, string(expectedAud), introspected.Get("aud").Raw) + + if accessTokenStrategy == "jwt" { + decoded := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, token.AccessToken)) + testhelpers.AssertAccessToken(t, decoded, sub, cl.ID) + assert.JSONEq(t, string(expectedAud), decoded.Get("aud").Raw) + } else { + assert.Len(t, strings.Split(token.AccessToken, "."), 2) + } + + idToken := testhelpers.DecodeIDToken(t, token) + testhelpers.AssertIDToken(t, idToken, sub, cl.ID) + require.Len(t, idToken.Get("aud").Array(), 1) + assert.Equal(t, cl.ID, idToken.Get("aud.0").Str) + }) + }) + } + }) +} diff --git a/oauth2/equalKeys_test.go b/oauth2/equalKeys_test.go deleted file mode 100644 index 13243a94bf3..00000000000 --- a/oauth2/equalKeys_test.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package oauth2 - -import "testing" - -func TestAssertObjectsAreEqualByKeys(t *testing.T) { - type foo struct { - Name string - Body int - } - a := &foo{"foo", 1} - b := &foo{"bar", 1} - c := &foo{"baz", 3} - - AssertObjectKeysEqual(t, a, a, "Name", "Body") - AssertObjectKeysNotEqual(t, a, b, "Name") - AssertObjectKeysNotEqual(t, a, c, "Name", "Body") -} diff --git a/oauth2/fixtures/v1.11.8-session.json b/oauth2/fixtures/v1.11.8-session.json index a7070d03c32..4608026d74e 100644 --- a/oauth2/fixtures/v1.11.8-session.json +++ b/oauth2/fixtures/v1.11.8-session.json @@ -15,7 +15,8 @@ "AuthenticationMethodsReferences": [], "CodeHash": "", "Extra": { - "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d" + "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d", + "timestamp": 1723546027 } }, "Headers": { diff 
--git a/oauth2/fixtures/v1.11.9-session.json b/oauth2/fixtures/v1.11.9-session.json index 2ded034a556..9636d07b8d6 100644 --- a/oauth2/fixtures/v1.11.9-session.json +++ b/oauth2/fixtures/v1.11.9-session.json @@ -15,7 +15,8 @@ "amr": [], "c_hash": "", "ext": { - "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d" + "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d", + "timestamp": 1723546027 } }, "headers": { diff --git a/oauth2/fosite_store_helpers.go b/oauth2/fosite_store_helpers.go deleted file mode 100644 index d17f5fa25ea..00000000000 --- a/oauth2/fosite_store_helpers.go +++ /dev/null @@ -1,1066 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package oauth2 - -import ( - "context" - "crypto/sha256" - "fmt" - "net/url" - "testing" - "time" - - "github.com/ory/hydra/jwk" - - "github.com/gobuffalo/pop/v6" - "github.com/pborman/uuid" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/fosite/handler/rfc7523" - - "github.com/ory/hydra/oauth2/trust" - - "github.com/ory/hydra/x" - - "github.com/ory/fosite/storage" - "github.com/ory/x/sqlxx" - - gofrsuuid "github.com/gofrs/uuid" - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/x/sqlcon" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" -) - -func signatureFromJTI(jti string) string { - return fmt.Sprintf("%x", sha256.Sum256([]byte(jti))) -} - -type BlacklistedJTI struct { - JTI string `db:"-"` - ID string `db:"signature"` - Expiry time.Time `db:"expires_at"` - NID gofrsuuid.UUID `db:"nid"` -} - -func (j *BlacklistedJTI) AfterFind(_ *pop.Connection) error { - j.Expiry = j.Expiry.UTC() - return nil -} - -func (BlacklistedJTI) TableName() string { - return "hydra_oauth2_jti_blacklist" -} - -func NewBlacklistedJTI(jti string, exp time.Time) *BlacklistedJTI { - return &BlacklistedJTI{ - JTI: jti, - ID: signatureFromJTI(jti), - // because the database timestamp types are not as accurate as time.Time we truncate to seconds (which should always work) - Expiry: exp.UTC().Truncate(time.Second), - } -} - -type AssertionJWTReader interface { - x.FositeStorer - - GetClientAssertionJWT(ctx context.Context, jti string) (*BlacklistedJTI, error) - - SetClientAssertionJWTRaw(context.Context, *BlacklistedJTI) error -} - -var defaultRequest = fosite.Request{ - ID: "blank", - RequestedAt: time.Now().UTC().Round(time.Second), - Client: &client.Client{LegacyClientID: "foobar"}, - RequestedScope: fosite.Arguments{"fa", "ba"}, - GrantedScope: fosite.Arguments{"fa", "ba"}, - RequestedAudience: fosite.Arguments{"ad1", "ad2"}, - GrantedAudience: fosite.Arguments{"ad1", "ad2"}, - Form: url.Values{"foo": []string{"bar", "baz"}}, - Session: &Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, -} - -var lifespan = time.Hour -var flushRequests = []*fosite.Request{ - { - ID: "flush-1", - RequestedAt: time.Now().Round(time.Second), - Client: &client.Client{LegacyClientID: "foobar"}, - RequestedScope: fosite.Arguments{"fa", "ba"}, - GrantedScope: fosite.Arguments{"fa", "ba"}, - Form: url.Values{"foo": []string{"bar", "baz"}}, - Session: &Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, - }, - { - ID: "flush-2", - RequestedAt: 
time.Now().Round(time.Second).Add(-(lifespan + time.Minute)), - Client: &client.Client{LegacyClientID: "foobar"}, - RequestedScope: fosite.Arguments{"fa", "ba"}, - GrantedScope: fosite.Arguments{"fa", "ba"}, - Form: url.Values{"foo": []string{"bar", "baz"}}, - Session: &Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, - }, - { - ID: "flush-3", - RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)), - Client: &client.Client{LegacyClientID: "foobar"}, - RequestedScope: fosite.Arguments{"fa", "ba"}, - GrantedScope: fosite.Arguments{"fa", "ba"}, - Form: url.Values{"foo": []string{"bar", "baz"}}, - Session: &Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, - }, -} - -func mockRequestForeignKey(t *testing.T, id string, x InternalRegistry, createClient bool) { - cl := &client.Client{LegacyClientID: "foobar"} - cr := &consent.OAuth2ConsentRequest{ - Client: cl, - OpenIDConnectContext: new(consent.OAuth2ConsentRequestOpenIDConnectContext), - LoginChallenge: sqlxx.NullString(id), - ID: id, - Verifier: id, - CSRF: id, - AuthenticatedAt: sqlxx.NullTime(time.Now()), - RequestedAt: time.Now(), - } - - if createClient { - require.NoError(t, x.ClientManager().CreateClient(context.Background(), cl)) - } - - require.NoError(t, x.ConsentManager().CreateLoginRequest(context.Background(), &consent.LoginRequest{Client: cl, OpenIDConnectContext: new(consent.OAuth2ConsentRequestOpenIDConnectContext), ID: id, Verifier: id, AuthenticatedAt: sqlxx.NullTime(time.Now()), RequestedAt: time.Now()})) - require.NoError(t, x.ConsentManager().CreateConsentRequest(context.Background(), cr)) - _, err := x.ConsentManager().HandleConsentRequest(context.Background(), &consent.AcceptOAuth2ConsentRequest{ - ConsentRequest: cr, Session: new(consent.AcceptOAuth2ConsentRequestSession), AuthenticatedAt: sqlxx.NullTime(time.Now()), - ID: id, - RequestedAt: time.Now(), - HandledAt: sqlxx.NullTime(time.Now()), - }) - require.NoError(t, err) -} - -// TestHelperRunner is used to run the database suite of tests in this package. -// KEEP EXPORTED AND AVAILABLE FOR THIRD PARTIES TO TEST PLUGINS! 
-func TestHelperRunner(t *testing.T, store InternalRegistry, k string) { - t.Helper() - if k != "memory" { - t.Run(fmt.Sprintf("case=testHelperUniqueConstraints/db=%s", k), testHelperRequestIDMultiples(store, k)) - t.Run("case=testFositeSqlStoreTransactionsCommitAccessToken", testFositeSqlStoreTransactionCommitAccessToken(store)) - t.Run("case=testFositeSqlStoreTransactionsRollbackAccessToken", testFositeSqlStoreTransactionRollbackAccessToken(store)) - t.Run("case=testFositeSqlStoreTransactionCommitRefreshToken", testFositeSqlStoreTransactionCommitRefreshToken(store)) - t.Run("case=testFositeSqlStoreTransactionRollbackRefreshToken", testFositeSqlStoreTransactionRollbackRefreshToken(store)) - t.Run("case=testFositeSqlStoreTransactionCommitAuthorizeCode", testFositeSqlStoreTransactionCommitAuthorizeCode(store)) - t.Run("case=testFositeSqlStoreTransactionRollbackAuthorizeCode", testFositeSqlStoreTransactionRollbackAuthorizeCode(store)) - t.Run("case=testFositeSqlStoreTransactionCommitPKCERequest", testFositeSqlStoreTransactionCommitPKCERequest(store)) - t.Run("case=testFositeSqlStoreTransactionRollbackPKCERequest", testFositeSqlStoreTransactionRollbackPKCERequest(store)) - t.Run("case=testFositeSqlStoreTransactionCommitOpenIdConnectSession", testFositeSqlStoreTransactionCommitOpenIdConnectSession(store)) - t.Run("case=testFositeSqlStoreTransactionRollbackOpenIdConnectSession", testFositeSqlStoreTransactionRollbackOpenIdConnectSession(store)) - - } - t.Run(fmt.Sprintf("case=testHelperCreateGetDeleteAuthorizeCodes/db=%s", k), testHelperCreateGetDeleteAuthorizeCodes(store)) - t.Run(fmt.Sprintf("case=testHelperCreateGetDeleteAccessTokenSession/db=%s", k), testHelperCreateGetDeleteAccessTokenSession(store)) - t.Run(fmt.Sprintf("case=testHelperNilAccessToken/db=%s", k), testHelperNilAccessToken(store)) - t.Run(fmt.Sprintf("case=testHelperCreateGetDeleteOpenIDConnectSession/db=%s", k), testHelperCreateGetDeleteOpenIDConnectSession(store)) - t.Run(fmt.Sprintf("case=testHelperCreateGetDeleteRefreshTokenSession/db=%s", k), testHelperCreateGetDeleteRefreshTokenSession(store)) - t.Run(fmt.Sprintf("case=testHelperRevokeRefreshToken/db=%s", k), testHelperRevokeRefreshToken(store)) - t.Run(fmt.Sprintf("case=testHelperCreateGetDeletePKCERequestSession/db=%s", k), testHelperCreateGetDeletePKCERequestSession(store)) - t.Run(fmt.Sprintf("case=testHelperFlushTokens/db=%s", k), testHelperFlushTokens(store, time.Hour)) - t.Run(fmt.Sprintf("case=testHelperFlushTokensWithLimitAndBatchSize/db=%s", k), testHelperFlushTokensWithLimitAndBatchSize(store, 3, 2)) - t.Run(fmt.Sprintf("case=testFositeStoreSetClientAssertionJWT/db=%s", k), testFositeStoreSetClientAssertionJWT(store)) - t.Run(fmt.Sprintf("case=testFositeStoreClientAssertionJWTValid/db=%s", k), testFositeStoreClientAssertionJWTValid(store)) - t.Run(fmt.Sprintf("case=testHelperDeleteAccessTokens/db=%s", k), testHelperDeleteAccessTokens(store)) - t.Run(fmt.Sprintf("case=testHelperRevokeAccessToken/db=%s", k), testHelperRevokeAccessToken(store)) - t.Run(fmt.Sprintf("case=testFositeJWTBearerGrantStorage/db=%s", k), testFositeJWTBearerGrantStorage(store)) -} - -func testHelperRequestIDMultiples(m InternalRegistry, _ string) func(t *testing.T) { - return func(t *testing.T) { - requestId := uuid.New() - mockRequestForeignKey(t, requestId, m, true) - cl := &client.Client{LegacyClientID: "foobar"} - - fositeRequest := &fosite.Request{ - ID: requestId, - Client: cl, - RequestedAt: time.Now().UTC().Round(time.Second), - Session: &Session{}, - } - - for i := 0; i < 4; i++ 
{ - signature := uuid.New() - err := m.OAuth2Storage().CreateRefreshTokenSession(context.TODO(), signature, fositeRequest) - assert.NoError(t, err) - err = m.OAuth2Storage().CreateAccessTokenSession(context.TODO(), signature, fositeRequest) - assert.NoError(t, err) - err = m.OAuth2Storage().CreateOpenIDConnectSession(context.TODO(), signature, fositeRequest) - assert.NoError(t, err) - err = m.OAuth2Storage().CreatePKCERequestSession(context.TODO(), signature, fositeRequest) - assert.NoError(t, err) - err = m.OAuth2Storage().CreateAuthorizeCodeSession(context.TODO(), signature, fositeRequest) - assert.NoError(t, err) - } - } -} - -func testHelperCreateGetDeleteOpenIDConnectSession(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - - ctx := context.Background() - _, err := m.GetOpenIDConnectSession(ctx, "4321", &fosite.Request{}) - assert.NotNil(t, err) - - err = m.CreateOpenIDConnectSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - res, err := m.GetOpenIDConnectSession(ctx, "4321", &fosite.Request{Session: &Session{}}) - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - err = m.DeleteOpenIDConnectSession(ctx, "4321") - require.NoError(t, err) - - _, err = m.GetOpenIDConnectSession(ctx, "4321", &fosite.Request{}) - assert.NotNil(t, err) - } -} - -func testHelperCreateGetDeleteRefreshTokenSession(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - - ctx := context.Background() - _, err := m.GetRefreshTokenSession(ctx, "4321", &Session{}) - assert.NotNil(t, err) - - err = m.CreateRefreshTokenSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - res, err := m.GetRefreshTokenSession(ctx, "4321", &Session{}) - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - err = m.DeleteRefreshTokenSession(ctx, "4321") - require.NoError(t, err) - - _, err = m.GetRefreshTokenSession(ctx, "4321", &Session{}) - assert.NotNil(t, err) - } -} - -func testHelperRevokeRefreshToken(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - - ctx := context.Background() - _, err := m.GetRefreshTokenSession(ctx, "1111", &Session{}) - assert.Error(t, err) - - reqIdOne := uuid.New() - reqIdTwo := uuid.New() - - mockRequestForeignKey(t, reqIdOne, x, false) - mockRequestForeignKey(t, reqIdTwo, x, false) - - err = m.CreateRefreshTokenSession(ctx, "1111", &fosite.Request{ID: reqIdOne, Client: &client.Client{LegacyClientID: "foobar"}, RequestedAt: time.Now().UTC().Round(time.Second), Session: &Session{}}) - require.NoError(t, err) - - err = m.CreateRefreshTokenSession(ctx, "1122", &fosite.Request{ID: reqIdTwo, Client: &client.Client{LegacyClientID: "foobar"}, RequestedAt: time.Now().UTC().Round(time.Second), Session: &Session{}}) - require.NoError(t, err) - - _, err = m.GetRefreshTokenSession(ctx, "1111", &Session{}) - require.NoError(t, err) - - err = m.RevokeRefreshToken(ctx, reqIdOne) - require.NoError(t, err) - - err = m.RevokeRefreshToken(ctx, reqIdTwo) - require.NoError(t, err) - - req, err := m.GetRefreshTokenSession(ctx, "1111", &Session{}) - assert.NotNil(t, req) - assert.EqualError(t, err, fosite.ErrInactiveToken.Error()) - - req, err = m.GetRefreshTokenSession(ctx, "1122", &Session{}) - assert.NotNil(t, req) - assert.EqualError(t, err, fosite.ErrInactiveToken.Error()) - - } -} - -func 
testHelperCreateGetDeleteAuthorizeCodes(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - - mockRequestForeignKey(t, "blank", x, false) - - ctx := context.Background() - res, err := m.GetAuthorizeCodeSession(ctx, "4321", &Session{}) - assert.Error(t, err) - assert.Nil(t, res) - - err = m.CreateAuthorizeCodeSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - res, err = m.GetAuthorizeCodeSession(ctx, "4321", &Session{}) - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - err = m.InvalidateAuthorizeCodeSession(ctx, "4321") - require.NoError(t, err) - - res, err = m.GetAuthorizeCodeSession(ctx, "4321", &Session{}) - require.Error(t, err) - assert.EqualError(t, err, fosite.ErrInvalidatedAuthorizeCode.Error()) - assert.NotNil(t, res) - } -} - -func testHelperNilAccessToken(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - c := &client.Client{LegacyClientID: "nil-request-client-id-123"} - require.NoError(t, x.ClientManager().CreateClient(context.Background(), c)) - err := m.CreateAccessTokenSession(context.TODO(), "nil-request-id", &fosite.Request{ - ID: "", - RequestedAt: time.Now().UTC().Round(time.Second), - Client: c, - RequestedScope: fosite.Arguments{"fa", "ba"}, - GrantedScope: fosite.Arguments{"fa", "ba"}, - RequestedAudience: fosite.Arguments{"ad1", "ad2"}, - GrantedAudience: fosite.Arguments{"ad1", "ad2"}, - Form: url.Values{"foo": []string{"bar", "baz"}}, - Session: &Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, - }) - require.NoError(t, err) - } -} - -func testHelperCreateGetDeleteAccessTokenSession(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - - ctx := context.Background() - _, err := m.GetAccessTokenSession(ctx, "4321", &Session{}) - assert.Error(t, err) - - err = m.CreateAccessTokenSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - res, err := m.GetAccessTokenSession(ctx, "4321", &Session{}) - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - err = m.DeleteAccessTokenSession(ctx, "4321") - require.NoError(t, err) - - _, err = m.GetAccessTokenSession(ctx, "4321", &Session{}) - assert.Error(t, err) - } -} - -func testHelperDeleteAccessTokens(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - ctx := context.Background() - - err := m.CreateAccessTokenSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - _, err = m.GetAccessTokenSession(ctx, "4321", &Session{}) - require.NoError(t, err) - - err = m.DeleteAccessTokens(ctx, defaultRequest.Client.GetID()) - require.NoError(t, err) - - req, err := m.GetAccessTokenSession(ctx, "4321", &Session{}) - assert.Nil(t, req) - assert.EqualError(t, err, fosite.ErrNotFound.Error()) - } -} - -func testHelperRevokeAccessToken(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - ctx := context.Background() - - err := m.CreateAccessTokenSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - _, err = m.GetAccessTokenSession(ctx, "4321", &Session{}) - require.NoError(t, err) - - err = m.RevokeAccessToken(ctx, defaultRequest.GetID()) - require.NoError(t, err) - - req, err := m.GetAccessTokenSession(ctx, "4321", &Session{}) - assert.Nil(t, req) - assert.EqualError(t, err, 
fosite.ErrNotFound.Error()) - } -} - -func testHelperCreateGetDeletePKCERequestSession(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - m := x.OAuth2Storage() - - ctx := context.Background() - _, err := m.GetPKCERequestSession(ctx, "4321", &Session{}) - assert.NotNil(t, err) - - err = m.CreatePKCERequestSession(ctx, "4321", &defaultRequest) - require.NoError(t, err) - - res, err := m.GetPKCERequestSession(ctx, "4321", &Session{}) - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - err = m.DeletePKCERequestSession(ctx, "4321") - require.NoError(t, err) - - _, err = m.GetPKCERequestSession(ctx, "4321", &Session{}) - assert.NotNil(t, err) - } -} - -func testHelperFlushTokens(x InternalRegistry, lifespan time.Duration) func(t *testing.T) { - m := x.OAuth2Storage() - ds := &Session{} - - return func(t *testing.T) { - ctx := context.Background() - for _, r := range flushRequests { - mockRequestForeignKey(t, r.ID, x, false) - require.NoError(t, m.CreateAccessTokenSession(ctx, r.ID, r)) - _, err := m.GetAccessTokenSession(ctx, r.ID, ds) - require.NoError(t, err) - } - - require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now().Add(-time.Hour*24), 100, 10)) - _, err := m.GetAccessTokenSession(ctx, "flush-1", ds) - require.NoError(t, err) - _, err = m.GetAccessTokenSession(ctx, "flush-2", ds) - require.NoError(t, err) - _, err = m.GetAccessTokenSession(ctx, "flush-3", ds) - require.NoError(t, err) - - require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now().Add(-(lifespan+time.Hour/2)), 100, 10)) - _, err = m.GetAccessTokenSession(ctx, "flush-1", ds) - require.NoError(t, err) - _, err = m.GetAccessTokenSession(ctx, "flush-2", ds) - require.NoError(t, err) - _, err = m.GetAccessTokenSession(ctx, "flush-3", ds) - require.Error(t, err) - - require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now(), 100, 10)) - _, err = m.GetAccessTokenSession(ctx, "flush-1", ds) - require.NoError(t, err) - _, err = m.GetAccessTokenSession(ctx, "flush-2", ds) - require.Error(t, err) - _, err = m.GetAccessTokenSession(ctx, "flush-3", ds) - require.Error(t, err) - require.NoError(t, m.DeleteAccessTokens(ctx, "foobar")) - } -} - -func testHelperFlushTokensWithLimitAndBatchSize(x InternalRegistry, limit int, batchSize int) func(t *testing.T) { - m := x.OAuth2Storage() - ds := &Session{} - - return func(t *testing.T) { - ctx := context.Background() - var requests []*fosite.Request - - // create five expired requests - id := uuid.New() - for i := 0; i < 5; i++ { - r := createTestRequest(fmt.Sprintf("%s-%d", id, i+1)) - r.RequestedAt = time.Now().Add(-2 * time.Hour) - mockRequestForeignKey(t, r.ID, x, false) - require.NoError(t, m.CreateAccessTokenSession(ctx, r.ID, r)) - _, err := m.GetAccessTokenSession(ctx, r.ID, ds) - require.NoError(t, err) - requests = append(requests, r) - } - - require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now(), limit, batchSize)) - for i := range requests { - _, err := m.GetAccessTokenSession(ctx, requests[i].ID, ds) - if i >= limit { - require.NoError(t, err) - } else { - require.Error(t, err) - } - } - } -} - -func testFositeSqlStoreTransactionCommitAccessToken(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - { - doTestCommit(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().RevokeAccessToken) - doTestCommit(m, t, m.OAuth2Storage().CreateAccessTokenSession, 
m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().DeleteAccessTokenSession) - } - } -} - -func testFositeSqlStoreTransactionRollbackAccessToken(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - { - doTestRollback(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().RevokeAccessToken) - doTestRollback(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().DeleteAccessTokenSession) - } - } -} - -func testFositeSqlStoreTransactionCommitRefreshToken(m InternalRegistry) func(t *testing.T) { - - return func(t *testing.T) { - doTestCommit(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().RevokeRefreshToken) - doTestCommit(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().DeleteRefreshTokenSession) - } -} - -func testFositeSqlStoreTransactionRollbackRefreshToken(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - doTestRollback(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().RevokeRefreshToken) - doTestRollback(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().DeleteRefreshTokenSession) - } -} - -func testFositeSqlStoreTransactionCommitAuthorizeCode(m InternalRegistry) func(t *testing.T) { - - return func(t *testing.T) { - doTestCommit(m, t, m.OAuth2Storage().CreateAuthorizeCodeSession, m.OAuth2Storage().GetAuthorizeCodeSession, m.OAuth2Storage().InvalidateAuthorizeCodeSession) - } -} - -func testFositeSqlStoreTransactionRollbackAuthorizeCode(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - doTestRollback(m, t, m.OAuth2Storage().CreateAuthorizeCodeSession, m.OAuth2Storage().GetAuthorizeCodeSession, m.OAuth2Storage().InvalidateAuthorizeCodeSession) - } -} - -func testFositeSqlStoreTransactionCommitPKCERequest(m InternalRegistry) func(t *testing.T) { - - return func(t *testing.T) { - doTestCommit(m, t, m.OAuth2Storage().CreatePKCERequestSession, m.OAuth2Storage().GetPKCERequestSession, m.OAuth2Storage().DeletePKCERequestSession) - } -} - -func testFositeSqlStoreTransactionRollbackPKCERequest(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - doTestRollback(m, t, m.OAuth2Storage().CreatePKCERequestSession, m.OAuth2Storage().GetPKCERequestSession, m.OAuth2Storage().DeletePKCERequestSession) - } -} - -// OpenIdConnect tests can't use the helper functions, due to the signature of GetOpenIdConnectSession being -// different from the other getter methods -func testFositeSqlStoreTransactionCommitOpenIdConnectSession(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - txnStore, ok := m.OAuth2Storage().(storage.Transactional) - require.True(t, ok) - ctx := context.Background() - ctx, err := txnStore.BeginTX(ctx) - require.NoError(t, err) - signature := uuid.New() - testRequest := createTestRequest(signature) - err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature, testRequest) - require.NoError(t, err) - err = txnStore.Commit(ctx) - require.NoError(t, err) - - // Require a new context, since the old one contains the transaction. 
- res, err := m.OAuth2Storage().GetOpenIDConnectSession(context.Background(), signature, testRequest) - // session should have been created successfully because Commit did not return an error - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - // test delete within a transaction - ctx, err = txnStore.BeginTX(context.Background()) - require.NoError(t, err) - err = m.OAuth2Storage().DeleteOpenIDConnectSession(ctx, signature) - require.NoError(t, err) - err = txnStore.Commit(ctx) - require.NoError(t, err) - - // Require a new context, since the old one contains the transaction. - _, err = m.OAuth2Storage().GetOpenIDConnectSession(context.Background(), signature, testRequest) - // Since commit worked for delete, we should get an error here. - require.Error(t, err) - } -} - -func testFositeSqlStoreTransactionRollbackOpenIdConnectSession(m InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - txnStore, ok := m.OAuth2Storage().(storage.Transactional) - require.True(t, ok) - ctx := context.Background() - ctx, err := txnStore.BeginTX(ctx) - require.NoError(t, err) - - signature := uuid.New() - testRequest := createTestRequest(signature) - err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature, testRequest) - require.NoError(t, err) - err = txnStore.Rollback(ctx) - require.NoError(t, err) - - // Require a new context, since the old one contains the transaction. - ctx = context.Background() - _, err = m.OAuth2Storage().GetOpenIDConnectSession(ctx, signature, testRequest) - // Since we rolled back above, the session should not exist and getting it should result in an error - require.Error(t, err) - - // create a new session, delete it, then rollback the delete. We should be able to then get it. 
- signature2 := uuid.New() - testRequest2 := createTestRequest(signature2) - err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature2, testRequest2) - require.NoError(t, err) - _, err = m.OAuth2Storage().GetOpenIDConnectSession(ctx, signature2, testRequest2) - require.NoError(t, err) - - ctx, err = txnStore.BeginTX(context.Background()) - require.NoError(t, err) - err = m.OAuth2Storage().DeleteOpenIDConnectSession(ctx, signature2) - require.NoError(t, err) - err = txnStore.Rollback(ctx) - - require.NoError(t, err) - _, err = m.OAuth2Storage().GetOpenIDConnectSession(context.Background(), signature2, testRequest2) - require.NoError(t, err) - } -} - -func testFositeStoreSetClientAssertionJWT(m InternalRegistry) func(*testing.T) { - return func(t *testing.T) { - t.Run("case=basic setting works", func(t *testing.T) { - store, ok := m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - jti := NewBlacklistedJTI("basic jti", time.Now().Add(time.Minute)) - - require.NoError(t, store.SetClientAssertionJWT(context.Background(), jti.JTI, jti.Expiry)) - - cmp, err := store.GetClientAssertionJWT(context.Background(), jti.JTI) - require.NotEqual(t, cmp.NID, gofrsuuid.Nil) - cmp.NID = gofrsuuid.Nil - require.NoError(t, err) - assert.Equal(t, jti, cmp) - }) - - t.Run("case=errors when the JTI is blacklisted", func(t *testing.T) { - store, ok := m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - jti := NewBlacklistedJTI("already set jti", time.Now().Add(time.Minute)) - require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) - - assert.ErrorIs(t, store.SetClientAssertionJWT(context.Background(), jti.JTI, jti.Expiry), fosite.ErrJTIKnown) - }) - - t.Run("case=deletes expired JTIs", func(t *testing.T) { - store, ok := m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - expiredJTI := NewBlacklistedJTI("expired jti", time.Now().Add(-time.Minute)) - require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), expiredJTI)) - newJTI := NewBlacklistedJTI("some new jti", time.Now().Add(time.Minute)) - - require.NoError(t, store.SetClientAssertionJWT(context.Background(), newJTI.JTI, newJTI.Expiry)) - - _, err := store.GetClientAssertionJWT(context.Background(), expiredJTI.JTI) - assert.True(t, errors.Is(err, sqlcon.ErrNoRows)) - cmp, err := store.GetClientAssertionJWT(context.Background(), newJTI.JTI) - require.NoError(t, err) - require.NotEqual(t, cmp.NID, gofrsuuid.Nil) - cmp.NID = gofrsuuid.Nil - assert.Equal(t, newJTI, cmp) - }) - - t.Run("case=inserts same JTI if expired", func(t *testing.T) { - store, ok := m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - jti := NewBlacklistedJTI("going to be reused jti", time.Now().Add(-time.Minute)) - require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) - - jti.Expiry = jti.Expiry.Add(2 * time.Minute) - assert.NoError(t, store.SetClientAssertionJWT(context.Background(), jti.JTI, jti.Expiry)) - cmp, err := store.GetClientAssertionJWT(context.Background(), jti.JTI) - assert.NoError(t, err) - assert.Equal(t, jti, cmp) - }) - } -} - -func testFositeStoreClientAssertionJWTValid(m InternalRegistry) func(*testing.T) { - return func(t *testing.T) { - t.Run("case=returns valid on unknown JTI", func(t *testing.T) { - store, ok := m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - - assert.NoError(t, store.ClientAssertionJWTValid(context.Background(), "unknown jti")) - }) - - t.Run("case=returns invalid on known JTI", func(t *testing.T) { - store, ok := 
m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - jti := NewBlacklistedJTI("known jti", time.Now().Add(time.Minute)) - - require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) - - assert.True(t, errors.Is(store.ClientAssertionJWTValid(context.Background(), jti.JTI), fosite.ErrJTIKnown)) - }) - - t.Run("case=returns valid on expired JTI", func(t *testing.T) { - store, ok := m.OAuth2Storage().(AssertionJWTReader) - require.True(t, ok) - jti := NewBlacklistedJTI("expired jti 2", time.Now().Add(-time.Minute)) - - require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) - - assert.NoError(t, store.ClientAssertionJWTValid(context.Background(), jti.JTI)) - }) - } -} - -func testFositeJWTBearerGrantStorage(x InternalRegistry) func(t *testing.T) { - return func(t *testing.T) { - grantManager := x.GrantManager() - keyManager := x.KeyManager() - grantStorage := x.OAuth2Storage().(rfc7523.RFC7523KeyStorage) - - t.Run("case=associated key added with grant", func(t *testing.T) { - keySet, err := jwk.GenerateJWK(context.Background(), jose.RS256, "token-service-key", "sig") - require.NoError(t, err) - - publicKey := keySet.Keys[0].Public() - issuer := "token-service" - subject := "bob@example.com" - grant := trust.Grant{ - ID: uuid.New(), - Issuer: issuer, - Subject: subject, - AllowAnySubject: false, - Scope: []string{"openid", "offline"}, - PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, - CreatedAt: time.Now().UTC().Round(time.Second), - ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), - } - - storedKeySet, err := grantStorage.GetPublicKeys(context.TODO(), issuer, subject) - require.NoError(t, err) - require.Len(t, storedKeySet.Keys, 0) - - err = grantManager.CreateGrant(context.TODO(), grant, publicKey) - require.NoError(t, err) - - storedKeySet, err = grantStorage.GetPublicKeys(context.TODO(), issuer, subject) - require.NoError(t, err) - assert.Len(t, storedKeySet.Keys, 1) - - storedKey, err := grantStorage.GetPublicKey(context.TODO(), issuer, subject, publicKey.KeyID) - require.NoError(t, err) - assert.Equal(t, publicKey.KeyID, storedKey.KeyID) - assert.Equal(t, publicKey.Use, storedKey.Use) - assert.Equal(t, publicKey.Key, storedKey.Key) - - storedScopes, err := grantStorage.GetPublicKeyScopes(context.TODO(), issuer, subject, publicKey.KeyID) - require.NoError(t, err) - assert.Equal(t, grant.Scope, storedScopes) - - storedKeySet, err = keyManager.GetKey(context.TODO(), issuer, publicKey.KeyID) - require.NoError(t, err) - assert.Equal(t, publicKey.KeyID, storedKeySet.Keys[0].KeyID) - assert.Equal(t, publicKey.Use, storedKeySet.Keys[0].Use) - assert.Equal(t, publicKey.Key, storedKeySet.Keys[0].Key) - }) - - t.Run("case=only associated key returns", func(t *testing.T) { - keySet, err := jwk.GenerateJWK(context.Background(), jose.RS256, "some-key", "sig") - require.NoError(t, err) - - err = keyManager.AddKeySet(context.TODO(), "some-set", keySet) - require.NoError(t, err) - - keySet, err = jwk.GenerateJWK(context.Background(), jose.RS256, "maria-key", "sig") - require.NoError(t, err) - - publicKey := keySet.Keys[0].Public() - issuer := "maria" - subject := "maria@example.com" - grant := trust.Grant{ - ID: uuid.New(), - Issuer: issuer, - Subject: subject, - AllowAnySubject: false, - Scope: []string{"openid"}, - PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, - CreatedAt: time.Now().UTC().Round(time.Second), - ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), - } - - err = 
grantManager.CreateGrant(context.TODO(), grant, publicKey) - require.NoError(t, err) - - storedKeySet, err := grantStorage.GetPublicKeys(context.TODO(), issuer, subject) - require.NoError(t, err) - assert.Len(t, storedKeySet.Keys, 1) - assert.Equal(t, publicKey.KeyID, storedKeySet.Keys[0].KeyID) - assert.Equal(t, publicKey.Use, storedKeySet.Keys[0].Use) - assert.Equal(t, publicKey.Key, storedKeySet.Keys[0].Key) - - storedKeySet, err = grantStorage.GetPublicKeys(context.TODO(), issuer, "non-existing-subject") - require.NoError(t, err) - assert.Len(t, storedKeySet.Keys, 0) - - _, err = grantStorage.GetPublicKeyScopes(context.TODO(), issuer, "non-existing-subject", publicKey.KeyID) - require.Error(t, err) - }) - - t.Run("case=associated key is deleted, when granted is deleted", func(t *testing.T) { - keySet, err := jwk.GenerateJWK(context.Background(), jose.RS256, "hackerman-key", "sig") - require.NoError(t, err) - - publicKey := keySet.Keys[0].Public() - issuer := "aeneas" - subject := "aeneas@example.com" - grant := trust.Grant{ - ID: uuid.New(), - Issuer: issuer, - Subject: subject, - AllowAnySubject: false, - Scope: []string{"openid", "offline"}, - PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, - CreatedAt: time.Now().UTC().Round(time.Second), - ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), - } - - err = grantManager.CreateGrant(context.TODO(), grant, publicKey) - require.NoError(t, err) - - _, err = grantStorage.GetPublicKey(context.TODO(), issuer, subject, grant.PublicKey.KeyID) - require.NoError(t, err) - - _, err = keyManager.GetKey(context.TODO(), issuer, publicKey.KeyID) - require.NoError(t, err) - - err = grantManager.DeleteGrant(context.TODO(), grant.ID) - require.NoError(t, err) - - _, err = grantStorage.GetPublicKey(context.TODO(), issuer, subject, publicKey.KeyID) - assert.Error(t, err) - - _, err = keyManager.GetKey(context.TODO(), issuer, publicKey.KeyID) - assert.Error(t, err) - }) - - t.Run("case=associated grant is deleted, when key is deleted", func(t *testing.T) { - keySet, err := jwk.GenerateJWK(context.Background(), jose.RS256, "vladimir-key", "sig") - require.NoError(t, err) - - publicKey := keySet.Keys[0].Public() - issuer := "vladimir" - subject := "vladimir@example.com" - grant := trust.Grant{ - ID: uuid.New(), - Issuer: issuer, - Subject: subject, - AllowAnySubject: false, - Scope: []string{"openid", "offline"}, - PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, - CreatedAt: time.Now().UTC().Round(time.Second), - ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), - } - - err = grantManager.CreateGrant(context.TODO(), grant, publicKey) - require.NoError(t, err) - - _, err = grantStorage.GetPublicKey(context.TODO(), issuer, subject, publicKey.KeyID) - require.NoError(t, err) - - _, err = keyManager.GetKey(context.TODO(), issuer, publicKey.KeyID) - require.NoError(t, err) - - err = keyManager.DeleteKey(context.TODO(), issuer, publicKey.KeyID) - require.NoError(t, err) - - _, err = keyManager.GetKey(context.TODO(), issuer, publicKey.KeyID) - assert.Error(t, err) - - _, err = grantManager.GetConcreteGrant(context.TODO(), grant.ID) - assert.Error(t, err) - }) - - t.Run("case=only returns the key when subject matches", func(t *testing.T) { - keySet, err := jwk.GenerateJWK(context.Background(), jose.RS256, "issuer-key", "sig") - require.NoError(t, err) - - publicKey := keySet.Keys[0].Public() - issuer := "limited-issuer" - subject := "jagoba" - grant := trust.Grant{ - ID: uuid.New(), - Issuer: issuer, - 
Subject: subject, - AllowAnySubject: false, - Scope: []string{"openid", "offline"}, - PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, - CreatedAt: time.Now().UTC().Round(time.Second), - ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), - } - - err = grantManager.CreateGrant(context.TODO(), grant, publicKey) - require.NoError(t, err) - - // All three get methods should only return the public key when using the valid subject - _, err = grantStorage.GetPublicKey(context.TODO(), issuer, "any-subject-1", publicKey.KeyID) - require.Error(t, err) - _, err = grantStorage.GetPublicKey(context.TODO(), issuer, subject, publicKey.KeyID) - require.NoError(t, err) - - _, err = grantStorage.GetPublicKeyScopes(context.TODO(), issuer, "any-subject-2", publicKey.KeyID) - require.Error(t, err) - _, err = grantStorage.GetPublicKeyScopes(context.TODO(), issuer, subject, publicKey.KeyID) - require.NoError(t, err) - - jwks, err := grantStorage.GetPublicKeys(context.TODO(), issuer, "any-subject-3") - require.NoError(t, err) - require.NotNil(t, jwks) - require.Empty(t, jwks.Keys) - jwks, err = grantStorage.GetPublicKeys(context.TODO(), issuer, subject) - require.NoError(t, err) - require.NotNil(t, jwks) - require.NotEmpty(t, jwks.Keys) - }) - - t.Run("case=returns the key when any subject is allowed", func(t *testing.T) { - keySet, err := jwk.GenerateJWK(context.Background(), jose.RS256, "issuer-key", "sig") - require.NoError(t, err) - - publicKey := keySet.Keys[0].Public() - issuer := "unlimited-issuer" - grant := trust.Grant{ - ID: uuid.New(), - Issuer: issuer, - Subject: "", - AllowAnySubject: true, - Scope: []string{"openid", "offline"}, - PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, - CreatedAt: time.Now().UTC().Round(time.Second), - ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), - } - - err = grantManager.CreateGrant(context.TODO(), grant, publicKey) - require.NoError(t, err) - - // All three get methods should always return the public key - _, err = grantStorage.GetPublicKey(context.TODO(), issuer, "any-subject-1", publicKey.KeyID) - require.NoError(t, err) - - _, err = grantStorage.GetPublicKeyScopes(context.TODO(), issuer, "any-subject-2", publicKey.KeyID) - require.NoError(t, err) - - jwks, err := grantStorage.GetPublicKeys(context.TODO(), issuer, "any-subject-3") - require.NoError(t, err) - require.NotNil(t, jwks) - require.NotEmpty(t, jwks.Keys) - }) - } -} - -func doTestCommit(m InternalRegistry, t *testing.T, - createFn func(context.Context, string, fosite.Requester) error, - getFn func(context.Context, string, fosite.Session) (fosite.Requester, error), - revokeFn func(context.Context, string) error, -) { - - txnStore, ok := m.OAuth2Storage().(storage.Transactional) - require.True(t, ok) - ctx := context.Background() - ctx, err := txnStore.BeginTX(ctx) - require.NoError(t, err) - signature := uuid.New() - err = createFn(ctx, signature, createTestRequest(signature)) - require.NoError(t, err) - err = txnStore.Commit(ctx) - require.NoError(t, err) - - // Require a new context, since the old one contains the transaction. 
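Aside on the transaction helpers: the "Require a new context, since the old one contains the transaction" comment above, which reappears in the rewritten helpers later in this diff, is the key detail. The helpers pass the context returned by BeginTX straight into Commit and Rollback, so the transaction travels inside the context, and every post-commit or post-rollback verification has to start from a fresh context.Background(). As a rough illustration of that pattern only, not the actual storage implementation, a context-scoped transaction wrapper could look like the sketch below; the package and function names are hypothetical and only database/sql is used.

    package txsketch // hypothetical name, illustration only

    import (
        "context"
        "database/sql"
        "errors"
    )

    type txKey struct{}

    // BeginTX opens a transaction and returns a child context carrying it.
    func BeginTX(ctx context.Context, db *sql.DB) (context.Context, error) {
        tx, err := db.BeginTx(ctx, nil)
        if err != nil {
            return nil, err
        }
        return context.WithValue(ctx, txKey{}, tx), nil
    }

    // Commit commits the transaction carried by ctx.
    func Commit(ctx context.Context) error {
        tx, ok := ctx.Value(txKey{}).(*sql.Tx)
        if !ok {
            return errors.New("no transaction in context")
        }
        return tx.Commit()
    }

    // Rollback aborts the transaction carried by ctx.
    func Rollback(ctx context.Context) error {
        tx, ok := ctx.Value(txKey{}).(*sql.Tx)
        if !ok {
            return errors.New("no transaction in context")
        }
        return tx.Rollback()
    }

Reusing the context returned by BeginTX after Commit would route further queries through a transaction that is already finished, which is exactly what these helpers avoid by rebuilding the context before each read.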
- res, err := getFn(context.Background(), signature, &Session{}) - // token should have been created successfully because Commit did not return an error - require.NoError(t, err) - AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") - - // testrevoke within a transaction - ctx, err = txnStore.BeginTX(context.Background()) - require.NoError(t, err) - err = revokeFn(ctx, signature) - require.NoError(t, err) - err = txnStore.Commit(ctx) - require.NoError(t, err) - - // Require a new context, since the old one contains the transaction. - _, err = getFn(context.Background(), signature, &Session{}) - // Since commit worked for revoke, we should get an error here. - require.Error(t, err) -} - -func doTestRollback(m InternalRegistry, t *testing.T, - createFn func(context.Context, string, fosite.Requester) error, - getFn func(context.Context, string, fosite.Session) (fosite.Requester, error), - revokeFn func(context.Context, string) error, -) { - txnStore, ok := m.OAuth2Storage().(storage.Transactional) - require.True(t, ok) - - ctx := context.Background() - ctx, err := txnStore.BeginTX(ctx) - require.NoError(t, err) - signature := uuid.New() - err = createFn(ctx, signature, createTestRequest(signature)) - require.NoError(t, err) - err = txnStore.Rollback(ctx) - require.NoError(t, err) - - // Require a new context, since the old one contains the transaction. - ctx = context.Background() - _, err = getFn(ctx, signature, &Session{}) - // Since we rolled back above, the token should not exist and getting it should result in an error - require.Error(t, err) - - // create a new token, revoke it, then rollback the revoke. We should be able to then get it successfully. - signature2 := uuid.New() - err = createFn(ctx, signature2, createTestRequest(signature2)) - require.NoError(t, err) - _, err = getFn(ctx, signature2, &Session{}) - require.NoError(t, err) - - ctx, err = txnStore.BeginTX(context.Background()) - require.NoError(t, err) - err = revokeFn(ctx, signature2) - require.NoError(t, err) - err = txnStore.Rollback(ctx) - require.NoError(t, err) - - _, err = getFn(context.Background(), signature2, &Session{}) - require.NoError(t, err) -} - -func createTestRequest(id string) *fosite.Request { - return &fosite.Request{ - ID: id, - RequestedAt: time.Now().UTC().Round(time.Second), - Client: &client.Client{LegacyClientID: "foobar"}, - RequestedScope: fosite.Arguments{"fa", "ba"}, - GrantedScope: fosite.Arguments{"fa", "ba"}, - RequestedAudience: fosite.Arguments{"ad1", "ad2"}, - GrantedAudience: fosite.Arguments{"ad1", "ad2"}, - Form: url.Values{"foo": []string{"bar", "baz"}}, - Session: &Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, - } -} diff --git a/oauth2/fosite_store_helpers_test.go b/oauth2/fosite_store_helpers_test.go new file mode 100644 index 00000000000..8c6ed0d6881 --- /dev/null +++ b/oauth2/fosite_store_helpers_test.go @@ -0,0 +1,1632 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "fmt" + "net/url" + "slices" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + 
"github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/rfc7523" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/assertx" + "github.com/ory/x/sqlcon" + "github.com/ory/x/sqlxx" +) + +var defaultIgnoreKeys = []string{ + "id", + "session", + "requested_scope", + "granted_scope", + "form", + "created_at", + "updated_at", + "client.created_at", + "client.updated_at", + "requestedAt", + "client.client_secret", +} + +func newDefaultRequest(t testing.TB, id string) *fosite.Request { + return &fosite.Request{ + ID: id, + RequestedAt: time.Now().UTC().Round(time.Second), + Client: &client.Client{ + ID: "foobar", + Contacts: []string{}, + RedirectURIs: []string{}, + Audience: []string{}, + AllowedCORSOrigins: []string{}, + ResponseTypes: []string{}, + GrantTypes: []string{}, + JSONWebKeys: &x.JoseJSONWebKeySet{}, + Metadata: sqlxx.JSONRawMessage("{}"), + }, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + RequestedAudience: fosite.Arguments{"ad1", "ad2"}, + GrantedAudience: fosite.Arguments{"ad1", "ad2"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: oauth2.NewTestSession(t, "bar"), + } +} + +// var lifespan = time.Hour +var flushRequests = []*fosite.Request{ + { + ID: "flush-1", + RequestedAt: time.Now().Round(time.Second), + Client: &client.Client{ID: "foobar"}, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, + }, + { + ID: "flush-2", + RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Minute)), + Client: &client.Client{ID: "foobar"}, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, + }, + { + ID: "flush-3", + RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)), + Client: &client.Client{ID: "foobar"}, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, + }, +} + +func mockRequestForeignKey(t *testing.T, _ string, x *driver.RegistrySQL) { + cl := &client.Client{ID: "foobar"} + if _, err := x.ClientManager().GetClient(t.Context(), cl.ID); errors.Is(err, sqlcon.ErrNoRows) { + require.NoError(t, x.ClientManager().CreateClient(t.Context(), cl)) + } +} + +func testHelperRequestIDMultiples(m *driver.RegistrySQL, _ string) func(t *testing.T) { + return func(t *testing.T) { + ctx := t.Context() + requestID := uuid.Must(uuid.NewV4()).String() + mockRequestForeignKey(t, requestID, m) + cl := &client.Client{ID: "foobar"} + + fositeRequest := &fosite.Request{ + ID: requestID, + Client: cl, + RequestedAt: time.Now().UTC().Round(time.Second), + Session: oauth2.NewTestSession(t, "bar"), + } + + for range 4 { + signature := uuid.Must(uuid.NewV4()).String() + accessSignature := uuid.Must(uuid.NewV4()).String() + err := 
m.OAuth2Storage().CreateRefreshTokenSession(ctx, signature, accessSignature, fositeRequest) + assert.NoError(t, err) + err = m.OAuth2Storage().CreateAccessTokenSession(ctx, signature, fositeRequest) + assert.NoError(t, err) + err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature, fositeRequest) + assert.NoError(t, err) + err = m.OAuth2Storage().CreatePKCERequestSession(ctx, signature, fositeRequest) + assert.NoError(t, err) + err = m.OAuth2Storage().CreateAuthorizeCodeSession(ctx, signature, fositeRequest) + assert.NoError(t, err) + } + } +} + +func testHelperCreateGetDeleteOpenIDConnectSession(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + + code := uuid.Must(uuid.NewV4()).String() + ctx := t.Context() + _, err := m.GetOpenIDConnectSession(ctx, code, &fosite.Request{Session: oauth2.NewTestSession(t, "bar")}) + assert.NotNil(t, err) + + err = m.CreateOpenIDConnectSession(ctx, code, newDefaultRequest(t, "blank")) + require.NoError(t, err) + + res, err := m.GetOpenIDConnectSession(ctx, code, &fosite.Request{Session: oauth2.NewTestSession(t, "bar")}) + require.NoError(t, err) + AssertObjectKeysEqual(t, newDefaultRequest(t, "blank"), res, "RequestedScope", "GrantedScope", "Form", "Session") + + err = m.DeleteOpenIDConnectSession(ctx, code) + require.NoError(t, err) + + _, err = m.GetOpenIDConnectSession(ctx, code, &fosite.Request{Session: oauth2.NewTestSession(t, "bar")}) + assert.NotNil(t, err) + } +} + +func testHelperCreateGetDeleteRefreshTokenSession(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + + code := uuid.Must(uuid.NewV4()).String() + ctx := t.Context() + _, err := m.GetRefreshTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.NotNil(t, err) + + err = m.CreateRefreshTokenSession(ctx, code, "", newDefaultRequest(t, "blank")) + require.NoError(t, err) + + res, err := m.GetRefreshTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + AssertObjectKeysEqual(t, newDefaultRequest(t, "blank"), res, "RequestedScope", "GrantedScope", "Form", "Session") + + err = m.DeleteRefreshTokenSession(ctx, code) + require.NoError(t, err) + + _, err = m.GetRefreshTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.NotNil(t, err) + } +} + +func testHelperRevokeRefreshToken(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + + ctx := t.Context() + _, err := m.GetRefreshTokenSession(ctx, "1111", oauth2.NewTestSession(t, "bar")) + assert.Error(t, err) + + reqIdOne := uuid.Must(uuid.NewV4()).String() + reqIdTwo := uuid.Must(uuid.NewV4()).String() + + mockRequestForeignKey(t, reqIdOne, x) + mockRequestForeignKey(t, reqIdTwo, x) + + err = m.CreateRefreshTokenSession(ctx, "1111", "", &fosite.Request{ + ID: reqIdOne, + Client: &client.Client{ID: "foobar"}, + RequestedAt: time.Now().UTC().Round(time.Second), + Session: oauth2.NewTestSession(t, "user"), + }) + require.NoError(t, err) + + err = m.CreateRefreshTokenSession(ctx, "1122", "", &fosite.Request{ + ID: reqIdTwo, + Client: &client.Client{ID: "foobar"}, + RequestedAt: time.Now().UTC().Round(time.Second), + Session: oauth2.NewTestSession(t, "user"), + }) + require.NoError(t, err) + + _, err = m.GetRefreshTokenSession(ctx, "1111", oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + + err = m.RevokeRefreshToken(ctx, reqIdOne) + require.NoError(t, err) + + err = m.RevokeRefreshToken(ctx, reqIdTwo) + require.NoError(t, err) + + req, 
err := m.GetRefreshTokenSession(ctx, "1111", oauth2.NewTestSession(t, "bar")) + assert.Nil(t, req) + assert.EqualError(t, err, fosite.ErrNotFound.Error()) + + req, err = m.GetRefreshTokenSession(ctx, "1122", oauth2.NewTestSession(t, "bar")) + assert.Nil(t, req) + assert.EqualError(t, err, fosite.ErrNotFound.Error()) + } +} + +func testHelperCreateGetDeleteAuthorizeCodes(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + + mockRequestForeignKey(t, "blank", x) + + code := uuid.Must(uuid.NewV4()).String() + + ctx := t.Context() + res, err := m.GetAuthorizeCodeSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.Error(t, err) + assert.Nil(t, res) + + err = m.CreateAuthorizeCodeSession(ctx, code, newDefaultRequest(t, "blank")) + require.NoError(t, err) + + res, err = m.GetAuthorizeCodeSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + AssertObjectKeysEqual(t, newDefaultRequest(t, "blank"), res, "RequestedScope", "GrantedScope", "Form", "Session") + + err = m.InvalidateAuthorizeCodeSession(ctx, code) + require.NoError(t, err) + + res, err = m.GetAuthorizeCodeSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.Error(t, err) + assert.EqualError(t, err, fosite.ErrInvalidatedAuthorizeCode.Error()) + assert.NotNil(t, res) + } +} + +type testHelperExpiryFieldsResult struct { + ExpiresAt time.Time `db:"expires_at"` + name string +} + +func (r testHelperExpiryFieldsResult) TableName() string { + return "hydra_oauth2_" + r.name +} + +func testHelperExpiryFields(reg *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := reg.OAuth2Storage() + t.Parallel() + + mockRequestForeignKey(t, "blank", reg) + + ctx := t.Context() + + s := oauth2.NewTestSession(t, "bar") + s.SetExpiresAt(fosite.AccessToken, time.Now().Add(time.Hour).Round(time.Minute)) + s.SetExpiresAt(fosite.RefreshToken, time.Now().Add(time.Hour*2).Round(time.Minute)) + s.SetExpiresAt(fosite.AuthorizeCode, time.Now().Add(time.Hour*3).Round(time.Minute)) + request := fosite.Request{ + ID: uuid.Must(uuid.NewV4()).String(), + RequestedAt: time.Now().UTC().Round(time.Second), + Client: &client.Client{ + ID: "foobar", + Metadata: sqlxx.JSONRawMessage("{}"), + }, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + RequestedAudience: fosite.Arguments{"ad1", "ad2"}, + GrantedAudience: fosite.Arguments{"ad1", "ad2"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: s, + } + + t.Run("case=CreateAccessTokenSession", func(t *testing.T) { + id := uuid.Must(uuid.NewV4()).String() + err := m.CreateAccessTokenSession(ctx, id, &request) + require.NoError(t, err) + + r := testHelperExpiryFieldsResult{name: "access"} + require.NoError(t, reg.Persister().Connection(ctx).Select("expires_at").Where("signature = ?", x.SignatureHash(id)).First(&r)) + + assert.EqualValues(t, s.GetExpiresAt(fosite.AccessToken).UTC(), r.ExpiresAt.UTC()) + }) + + t.Run("case=CreateRefreshTokenSession", func(t *testing.T) { + id := uuid.Must(uuid.NewV4()).String() + err := m.CreateRefreshTokenSession(ctx, id, "", &request) + require.NoError(t, err) + + r := testHelperExpiryFieldsResult{name: "refresh"} + require.NoError(t, reg.Persister().Connection(ctx).Select("expires_at").Where("signature = ?", id).First(&r)) + assert.EqualValues(t, s.GetExpiresAt(fosite.RefreshToken).UTC(), r.ExpiresAt.UTC()) + }) + + t.Run("case=CreateAuthorizeCodeSession", func(t *testing.T) { + id := uuid.Must(uuid.NewV4()).String() + err := 
m.CreateAuthorizeCodeSession(ctx, id, &request) + require.NoError(t, err) + + r := testHelperExpiryFieldsResult{name: "code"} + require.NoError(t, reg.Persister().Connection(ctx).Select("expires_at").Where("signature = ?", id).First(&r)) + assert.EqualValues(t, s.GetExpiresAt(fosite.AuthorizeCode).UTC(), r.ExpiresAt.UTC()) + }) + + t.Run("case=CreatePKCERequestSession", func(t *testing.T) { + id := uuid.Must(uuid.NewV4()).String() + err := m.CreatePKCERequestSession(ctx, id, &request) + require.NoError(t, err) + + r := testHelperExpiryFieldsResult{name: "pkce"} + require.NoError(t, reg.Persister().Connection(ctx).Select("expires_at").Where("signature = ?", id).First(&r)) + assert.EqualValues(t, s.GetExpiresAt(fosite.AuthorizeCode).UTC(), r.ExpiresAt.UTC()) + }) + + t.Run("case=CreateOpenIDConnectSession", func(t *testing.T) { + id := uuid.Must(uuid.NewV4()).String() + err := m.CreateOpenIDConnectSession(ctx, id, &request) + require.NoError(t, err) + + r := testHelperExpiryFieldsResult{name: "oidc"} + require.NoError(t, reg.Persister().Connection(ctx).Select("expires_at").Where("signature = ?", id).First(&r)) + assert.EqualValues(t, s.GetExpiresAt(fosite.AuthorizeCode).UTC(), r.ExpiresAt.UTC()) + }) + } +} + +func testHelperNilAccessToken(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + c := &client.Client{ID: uuid.Must(uuid.NewV4()).String()} + require.NoError(t, x.ClientManager().CreateClient(context.Background(), c)) + err := m.CreateAccessTokenSession(context.Background(), uuid.Must(uuid.NewV4()).String(), &fosite.Request{ + ID: "", + RequestedAt: time.Now().UTC().Round(time.Second), + Client: c, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + RequestedAudience: fosite.Arguments{"ad1", "ad2"}, + GrantedAudience: fosite.Arguments{"ad1", "ad2"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: oauth2.NewTestSession(t, "bar"), + }) + require.NoError(t, err) + } +} + +func testHelperCreateGetDeleteAccessTokenSession(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + + code := uuid.Must(uuid.NewV4()).String() + ctx := t.Context() + _, err := m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.Error(t, err) + + err = m.CreateAccessTokenSession(ctx, code, newDefaultRequest(t, "blank")) + require.NoError(t, err) + + res, err := m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + AssertObjectKeysEqual(t, newDefaultRequest(t, "blank"), res, "RequestedScope", "GrantedScope", "Form", "Session") + + err = m.DeleteAccessTokenSession(ctx, code) + require.NoError(t, err) + + _, err = m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.Error(t, err) + } +} + +func testHelperDeleteAccessTokens(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + ctx := t.Context() + + code := uuid.Must(uuid.NewV4()).String() + err := m.CreateAccessTokenSession(ctx, code, newDefaultRequest(t, "blank")) + require.NoError(t, err) + + _, err = m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + + err = m.DeleteAccessTokens(ctx, newDefaultRequest(t, "blank").Client.GetID()) + require.NoError(t, err) + + req, err := m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.Nil(t, req) + assert.EqualError(t, err, fosite.ErrNotFound.Error()) + } +} + +func 
testHelperRevokeAccessToken(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + ctx := t.Context() + + code := uuid.Must(uuid.NewV4()).String() + err := m.CreateAccessTokenSession(ctx, code, newDefaultRequest(t, "blank")) + require.NoError(t, err) + + _, err = m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + + err = m.RevokeAccessToken(ctx, newDefaultRequest(t, "blank").GetID()) + require.NoError(t, err) + + req, err := m.GetAccessTokenSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.Nil(t, req) + assert.EqualError(t, err, fosite.ErrNotFound.Error()) + } +} + +func testHelperRotateRefreshToken(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + ctx := t.Context() + + createTokens := func(t *testing.T, r *fosite.Request) (refreshTokenSession string, accessTokenSession string) { + refreshTokenSession = fmt.Sprintf("refresh_token_%s", uuid.Must(uuid.NewV4()).String()) + accessTokenSession = fmt.Sprintf("access_token_%s", uuid.Must(uuid.NewV4()).String()) + err := x.OAuth2Storage().CreateAccessTokenSession(ctx, accessTokenSession, r) + require.NoError(t, err) + + err = x.OAuth2Storage().CreateRefreshTokenSession(ctx, refreshTokenSession, accessTokenSession, r) + require.NoError(t, err) + + // Sanity check + req, err := x.OAuth2Storage().GetRefreshTokenSession(ctx, refreshTokenSession, nil) + require.NoError(t, err) + require.EqualValues(t, r.GetID(), req.GetID()) + + req, err = x.OAuth2Storage().GetAccessTokenSession(ctx, accessTokenSession, nil) + require.NoError(t, err) + require.EqualValues(t, r.GetID(), req.GetID()) + return + } + + t.Run("Revokes refresh token when grace period not configured", func(t *testing.T) { + m := x.OAuth2Storage() + r := newDefaultRequest(t, uuid.Must(uuid.NewV4()).String()) + refreshTokenSession, accessTokenSession := createTokens(t, r) + + err := m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession) + require.NoError(t, err) + + _, err = m.GetAccessTokenSession(ctx, accessTokenSession, nil) + assert.ErrorIs(t, err, fosite.ErrNotFound, "Token is no longer active because it was refreshed") + + _, err = m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + assert.ErrorIs(t, err, fosite.ErrInactiveToken, "Token is no longer active because it was refreshed") + }) + + t.Run("Rotation works when access token is already pruned", func(t *testing.T) { + // Test both with and without grace period + testCases := []struct { + name string + configureGrace bool + expectTokenActive bool + }{ + { + name: "with grace period", + configureGrace: true, + expectTokenActive: true, + }, + { + name: "without grace period", + configureGrace: false, + expectTokenActive: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + if tc.configureGrace { + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + } else { + x.Config().Delete(ctx, config.KeyRefreshTokenRotationGracePeriod) + } + t.Cleanup(func() { + x.Config().Delete(ctx, config.KeyRefreshTokenRotationGracePeriod) + }) + + m := x.OAuth2Storage() + r := newDefaultRequest(t, uuid.Must(uuid.NewV4()).String()) + + // Create tokens + refreshTokenSession := fmt.Sprintf("refresh_token_%s", uuid.Must(uuid.NewV4()).String()) + accessTokenSession := fmt.Sprintf("access_token_%s", uuid.Must(uuid.NewV4()).String()) + + // Create access token + err := m.CreateAccessTokenSession(ctx, accessTokenSession, r) + require.NoError(t, err) + + // Create refresh 
token linked to the access token + err = m.CreateRefreshTokenSession(ctx, refreshTokenSession, accessTokenSession, r) + require.NoError(t, err) + + // Verify tokens were created successfully + req, err := m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + require.NoError(t, err) + require.Equal(t, r.GetID(), req.GetID()) + + req, err = m.GetAccessTokenSession(ctx, accessTokenSession, nil) + require.NoError(t, err) + require.Equal(t, r.GetID(), req.GetID()) + + // Delete the access token (simulating it being pruned) + err = m.DeleteAccessTokenSession(ctx, accessTokenSession) + require.NoError(t, err) + + // Verify access token is gone + _, err = m.GetAccessTokenSession(ctx, accessTokenSession, nil) + assert.Error(t, err) + + // Rotation should still work even though the access token is gone + err = m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession) + require.NoError(t, err) + + // Check refresh token state based on grace period configuration + req, err = m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + if tc.expectTokenActive { + assert.NoError(t, err) + assert.Equal(t, r.GetID(), req.GetID()) + } else { + assert.ErrorIs(t, err, fosite.ErrInactiveToken, "Token should be inactive when no grace period is configured") + } + }) + } + }) + + t.Run("refresh token is valid until the grace period has ended", func(t *testing.T) { + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + + // By setting this to one hour we ensure that using the refresh token triggers the start of the grace period. + x.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1h") + t.Cleanup(func() { + x.Config().Delete(ctx, config.KeyRefreshTokenRotationGracePeriod) + }) + + m := x.OAuth2Storage() + r := newDefaultRequest(t, uuid.Must(uuid.NewV4()).String()) + refreshTokenSession, accessTokenSession1 := createTokens(t, r) + accessTokenSession2 := fmt.Sprintf("access_token_%s", uuid.Must(uuid.NewV4()).String()) + require.NoError(t, m.CreateAccessTokenSession(ctx, accessTokenSession2, r)) + + // Create a second access token + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + + _, err := m.GetAccessTokenSession(ctx, accessTokenSession1, nil) + assert.ErrorIs(t, err, fosite.ErrNotFound) + + _, err = m.GetAccessTokenSession(ctx, accessTokenSession2, nil) + assert.NoError(t, err, "The second access token is still valid.") + + req, err := m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + assert.NoError(t, err) + assert.Equal(t, r.GetID(), req.GetID()) + + // We only wait a second, meaning that the token is theoretically still within TTL, but since the + // grace period was issued, the token is still valid. + time.Sleep(time.Second * 2) + _, err = m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + assert.Error(t, err) + }) + + t.Run("the used at time does not change", func(t *testing.T) { + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + + // By setting this to one hour we ensure that using the refresh token triggers the start of the grace period. 
+ x.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1h") + t.Cleanup(func() { + x.Config().Delete(ctx, config.KeyRefreshTokenRotationGracePeriod) + }) + + m := x.OAuth2Storage() + r := newDefaultRequest(t, uuid.Must(uuid.NewV4()).String()) + + refreshTokenSession, _ := createTokens(t, r) + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + + var expected sql.OAuth2RefreshTable + require.NoError(t, x.Persister().Connection(ctx).Where("signature=?", refreshTokenSession).First(&expected)) + assert.False(t, expected.FirstUsedAt.Time.IsZero()) + assert.True(t, expected.FirstUsedAt.Valid) + + // Refresh does not change the time + time.Sleep(time.Second * 2) + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + + var actual sql.OAuth2RefreshTable + require.NoError(t, x.Persister().Connection(ctx).Where("signature=?", refreshTokenSession).First(&actual)) + assert.Equal(t, expected.FirstUsedAt.Time, actual.FirstUsedAt.Time) + }) + + t.Run("refresh token revokes all access tokens from the request if the access token signature is not found", func(t *testing.T) { + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + t.Cleanup(func() { + x.Config().Delete(ctx, config.KeyRefreshTokenRotationGracePeriod) + }) + + m := x.OAuth2Storage() + r := newDefaultRequest(t, uuid.Must(uuid.NewV4()).String()) + + refreshTokenSession := fmt.Sprintf("refresh_token_%s", uuid.Must(uuid.NewV4()).String()) + accessTokenSession1 := fmt.Sprintf("access_token_%s", uuid.Must(uuid.NewV4()).String()) + accessTokenSession2 := fmt.Sprintf("access_token_%s", uuid.Must(uuid.NewV4()).String()) + require.NoError(t, m.CreateAccessTokenSession(ctx, accessTokenSession1, r)) + require.NoError(t, m.CreateAccessTokenSession(ctx, accessTokenSession2, r)) + + require.NoError(t, m.CreateRefreshTokenSession(ctx, refreshTokenSession, "", r), + "precondition failed: could not create refresh token session") + + // ACT + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + require.NoError(t, m.RotateRefreshToken(ctx, r.GetID(), refreshTokenSession)) + + _, err := m.GetAccessTokenSession(ctx, accessTokenSession1, nil) + assert.ErrorIs(t, err, fosite.ErrNotFound) + + _, err = m.GetAccessTokenSession(ctx, accessTokenSession2, nil) + assert.ErrorIs(t, err, fosite.ErrNotFound) + + req, err := m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + assert.NoError(t, err) + assert.Equal(t, r.GetID(), req.GetID()) + + time.Sleep(time.Second * 2) + + req, err = m.GetRefreshTokenSession(ctx, refreshTokenSession, nil) + assert.Error(t, err) + }) + } +} + +func testHelperCreateGetDeletePKCERequestSession(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + m := x.OAuth2Storage() + + code := uuid.Must(uuid.NewV4()).String() + ctx := t.Context() + _, err := m.GetPKCERequestSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.NotNil(t, err) + + err = m.CreatePKCERequestSession(ctx, code, newDefaultRequest(t, "blank")) + require.NoError(t, err) + + res, err := m.GetPKCERequestSession(ctx, code, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + AssertObjectKeysEqual(t, newDefaultRequest(t, "blank"), res, "RequestedScope", "GrantedScope", "Form", "Session") + + err = m.DeletePKCERequestSession(ctx, code) + require.NoError(t, err) + + _, err = m.GetPKCERequestSession(ctx, code, oauth2.NewTestSession(t, "bar")) + assert.NotNil(t, err) 
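A note on the testHelperRotateRefreshToken cases a little further up: they pin down the refresh-token rotation grace period behaviour. With config.KeyRefreshTokenRotationGracePeriod set to 1s, a rotated refresh token keeps working until the grace period has elapsed, and the stored FirstUsedAt timestamp is written once and never moved by later rotations; with no grace period configured, rotation makes the refresh token inactive immediately (fosite.ErrInactiveToken). The standalone sketch below, with hypothetical names and no Hydra code, spells out the timing rule those assertions imply.

    package main // illustrative sketch only, not Hydra code

    import (
        "fmt"
        "time"
    )

    // rotatedRefreshTokenUsable captures the rule the rotation tests assert:
    // before rotation (firstUsedAt unset) the token is usable; after rotation
    // it stays usable only while "now" is within the configured grace period
    // of the first use. A zero grace period revokes it immediately.
    func rotatedRefreshTokenUsable(firstUsedAt time.Time, gracePeriod time.Duration, now time.Time) bool {
        if firstUsedAt.IsZero() {
            return true // never rotated yet
        }
        if gracePeriod <= 0 {
            return false // no grace period configured: rotation revokes immediately
        }
        return now.Before(firstUsedAt.Add(gracePeriod))
    }

    func main() {
        used := time.Now()
        fmt.Println(rotatedRefreshTokenUsable(used, time.Second, used.Add(500*time.Millisecond))) // within the grace window
        fmt.Println(rotatedRefreshTokenUsable(used, time.Second, used.Add(2*time.Second)))        // grace window has passed
        fmt.Println(rotatedRefreshTokenUsable(used, 0, used))                                     // no grace period configured
    }

Running it prints true, false, false: usable within the grace window, unusable once the window has passed, and unusable straight away when no grace period is configured.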
+ } +} + +func testHelperFlushTokens(x *driver.RegistrySQL, lifespan time.Duration) func(t *testing.T) { + m := x.OAuth2Storage() + ds := &oauth2.Session{} + + return func(t *testing.T) { + ctx := t.Context() + for _, r := range flushRequests { + mockRequestForeignKey(t, r.ID, x) + require.NoError(t, m.CreateAccessTokenSession(ctx, r.ID, r)) + _, err := m.GetAccessTokenSession(ctx, r.ID, ds) + require.NoError(t, err) + } + + require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now().Add(-time.Hour*24), 100, 10)) + _, err := m.GetAccessTokenSession(ctx, "flush-1", ds) + require.NoError(t, err) + _, err = m.GetAccessTokenSession(ctx, "flush-2", ds) + require.NoError(t, err) + _, err = m.GetAccessTokenSession(ctx, "flush-3", ds) + require.NoError(t, err) + + require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now().Add(-(lifespan+time.Hour/2)), 100, 10)) + _, err = m.GetAccessTokenSession(ctx, "flush-1", ds) + require.NoError(t, err) + _, err = m.GetAccessTokenSession(ctx, "flush-2", ds) + require.NoError(t, err) + _, err = m.GetAccessTokenSession(ctx, "flush-3", ds) + require.Error(t, err) + + require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now(), 100, 10)) + _, err = m.GetAccessTokenSession(ctx, "flush-1", ds) + require.NoError(t, err) + _, err = m.GetAccessTokenSession(ctx, "flush-2", ds) + require.Error(t, err) + _, err = m.GetAccessTokenSession(ctx, "flush-3", ds) + require.Error(t, err) + require.NoError(t, m.DeleteAccessTokens(ctx, "foobar")) + } +} + +func testHelperFlushTokensWithLimitAndBatchSize(x *driver.RegistrySQL, limit int, batchSize int) func(t *testing.T) { + m := x.OAuth2Storage() + ds := &oauth2.Session{} + + return func(t *testing.T) { + ctx := t.Context() + var requests []*fosite.Request + + // create five expired requests + id := uuid.Must(uuid.NewV4()).String() + totalCount := 5 + for i := 0; i < totalCount; i++ { + r := createTestRequest(fmt.Sprintf("%s-%d", id, i+1)) + r.RequestedAt = time.Now().Add(-2 * time.Hour) + mockRequestForeignKey(t, r.ID, x) + require.NoError(t, m.CreateAccessTokenSession(ctx, r.ID, r)) + _, err := m.GetAccessTokenSession(ctx, r.ID, ds) + require.NoError(t, err) + requests = append(requests, r) + } + + require.NoError(t, m.FlushInactiveAccessTokens(ctx, time.Now(), limit, batchSize)) + var notFoundCount, foundCount int + for i := range requests { + if _, err := m.GetAccessTokenSession(ctx, requests[i].ID, ds); err == nil { + foundCount++ + } else { + require.ErrorIs(t, err, fosite.ErrNotFound) + notFoundCount++ + } + } + assert.Equal(t, limit, notFoundCount, "should have deleted %d tokens", limit) + assert.Equal(t, totalCount-limit, foundCount, "should have found %d tokens", totalCount-limit) + } +} + +func testFositeSqlStoreTransactionCommitAccessToken(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + { + doTestCommit(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().RevokeAccessToken) + doTestCommit(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().DeleteAccessTokenSession) + } + } +} + +func testFositeSqlStoreTransactionRollbackAccessToken(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + { + doTestRollback(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, m.OAuth2Storage().RevokeAccessToken) + doTestRollback(m, t, m.OAuth2Storage().CreateAccessTokenSession, m.OAuth2Storage().GetAccessTokenSession, 
m.OAuth2Storage().DeleteAccessTokenSession) + } + } +} + +func testFositeSqlStoreTransactionCommitRefreshToken(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + doTestCommitRefresh(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().RevokeRefreshToken) + doTestCommitRefresh(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().DeleteRefreshTokenSession) + } +} + +func testFositeSqlStoreTransactionRollbackRefreshToken(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + doTestRollbackRefresh(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().RevokeRefreshToken) + doTestRollbackRefresh(m, t, m.OAuth2Storage().CreateRefreshTokenSession, m.OAuth2Storage().GetRefreshTokenSession, m.OAuth2Storage().DeleteRefreshTokenSession) + } +} + +func testFositeSqlStoreTransactionCommitAuthorizeCode(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + doTestCommit(m, t, m.OAuth2Storage().CreateAuthorizeCodeSession, m.OAuth2Storage().GetAuthorizeCodeSession, m.OAuth2Storage().InvalidateAuthorizeCodeSession) + } +} + +func testFositeSqlStoreTransactionRollbackAuthorizeCode(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + doTestRollback(m, t, m.OAuth2Storage().CreateAuthorizeCodeSession, m.OAuth2Storage().GetAuthorizeCodeSession, m.OAuth2Storage().InvalidateAuthorizeCodeSession) + } +} + +func testFositeSqlStoreTransactionCommitPKCERequest(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + doTestCommit(m, t, m.OAuth2Storage().CreatePKCERequestSession, m.OAuth2Storage().GetPKCERequestSession, m.OAuth2Storage().DeletePKCERequestSession) + } +} + +func testFositeSqlStoreTransactionRollbackPKCERequest(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + doTestRollback(m, t, m.OAuth2Storage().CreatePKCERequestSession, m.OAuth2Storage().GetPKCERequestSession, m.OAuth2Storage().DeletePKCERequestSession) + } +} + +// OpenIdConnect tests can't use the helper functions, due to the signature of GetOpenIdConnectSession being +// different from the other getter methods +func testFositeSqlStoreTransactionCommitOpenIdConnectSession(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + txnStore, ok := m.OAuth2Storage().(fosite.Transactional) + require.True(t, ok) + ctx := t.Context() + ctx, err := txnStore.BeginTX(ctx) + require.NoError(t, err) + signature := uuid.Must(uuid.NewV4()).String() + testRequest := createTestRequest(signature) + err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature, testRequest) + require.NoError(t, err) + err = txnStore.Commit(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. 
+ res, err := m.OAuth2Storage().GetOpenIDConnectSession(context.Background(), signature, testRequest) + // session should have been created successfully because Commit did not return an error + require.NoError(t, err) + assertx.EqualAsJSONExcept(t, newDefaultRequest(t, "blank"), res, defaultIgnoreKeys) + + // test delete within a transaction + ctx, err = txnStore.BeginTX(context.Background()) + require.NoError(t, err) + err = m.OAuth2Storage().DeleteOpenIDConnectSession(ctx, signature) + require.NoError(t, err) + err = txnStore.Commit(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + _, err = m.OAuth2Storage().GetOpenIDConnectSession(context.Background(), signature, testRequest) + // Since commit worked for delete, we should get an error here. + require.Error(t, err) + } +} + +func testFositeSqlStoreTransactionRollbackOpenIdConnectSession(m *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + txnStore, ok := m.OAuth2Storage().(fosite.Transactional) + require.True(t, ok) + ctx := t.Context() + ctx, err := txnStore.BeginTX(ctx) + require.NoError(t, err) + + signature := uuid.Must(uuid.NewV4()).String() + testRequest := createTestRequest(signature) + err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature, testRequest) + require.NoError(t, err) + err = txnStore.Rollback(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + ctx = context.Background() + _, err = m.OAuth2Storage().GetOpenIDConnectSession(ctx, signature, testRequest) + // Since we rolled back above, the session should not exist and getting it should result in an error + require.Error(t, err) + + // create a new session, delete it, then rollback the delete. We should be able to then get it. 
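Worth noting: the rewritten helpers verify stored requests with assertx.EqualAsJSONExcept against the defaultIgnoreKeys list declared near the top of the new file, instead of checking a handful of fields with AssertObjectKeysEqual, which keeps volatile values such as ids, timestamps and the hashed client secret out of the comparison. A minimal standard-library sketch of that comparison idea follows; the function is hypothetical and, unlike the real ory/x assertx helper, it only strips top-level keys, not dotted paths such as client.created_at.

    package main // illustrative sketch only

    import (
        "encoding/json"
        "fmt"
        "reflect"
    )

    // equalAsJSONExcept marshals both values to JSON objects, drops the
    // ignored top-level keys, and compares what remains.
    func equalAsJSONExcept(a, b any, ignore []string) (bool, error) {
        strip := func(v any) (map[string]any, error) {
            raw, err := json.Marshal(v)
            if err != nil {
                return nil, err
            }
            var m map[string]any
            if err := json.Unmarshal(raw, &m); err != nil {
                return nil, err
            }
            for _, k := range ignore {
                delete(m, k)
            }
            return m, nil
        }
        am, err := strip(a)
        if err != nil {
            return false, err
        }
        bm, err := strip(b)
        if err != nil {
            return false, err
        }
        return reflect.DeepEqual(am, bm), nil
    }

    func main() {
        type req struct {
            ID    string `json:"id"`
            Scope string `json:"granted_scope"`
        }
        eq, _ := equalAsJSONExcept(req{ID: "a", Scope: "openid"}, req{ID: "b", Scope: "openid"}, []string{"id"})
        fmt.Println(eq) // true: only the ignored "id" field differs
    }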
+ signature2 := uuid.Must(uuid.NewV4()).String() + testRequest2 := createTestRequest(signature2) + err = m.OAuth2Storage().CreateOpenIDConnectSession(ctx, signature2, testRequest2) + require.NoError(t, err) + _, err = m.OAuth2Storage().GetOpenIDConnectSession(ctx, signature2, testRequest2) + require.NoError(t, err) + + ctx, err = txnStore.BeginTX(context.Background()) + require.NoError(t, err) + err = m.OAuth2Storage().DeleteOpenIDConnectSession(ctx, signature2) + require.NoError(t, err) + err = txnStore.Rollback(ctx) + + require.NoError(t, err) + _, err = m.OAuth2Storage().GetOpenIDConnectSession(context.Background(), signature2, testRequest2) + require.NoError(t, err) + } +} + +func testFositeStoreSetClientAssertionJWT(m *driver.RegistrySQL) func(*testing.T) { + return func(t *testing.T) { + t.Run("case=basic setting works", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(time.Minute)) + + require.NoError(t, store.SetClientAssertionJWT(context.Background(), jti.JTI, jti.Expiry)) + + cmp, err := store.GetClientAssertionJWT(context.Background(), jti.JTI) + require.NotEqual(t, cmp.NID, uuid.Nil) + cmp.NID = uuid.Nil + require.NoError(t, err) + assert.Equal(t, jti, cmp) + }) + + t.Run("case=errors when the JTI is blacklisted", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(time.Minute)) + require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) + + assert.ErrorIs(t, store.SetClientAssertionJWT(context.Background(), jti.JTI, jti.Expiry), fosite.ErrJTIKnown) + }) + + t.Run("case=deletes expired JTIs", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + expiredJTI := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(-time.Minute)) + require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), expiredJTI)) + newJTI := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(time.Minute)) + + require.NoError(t, store.SetClientAssertionJWT(context.Background(), newJTI.JTI, newJTI.Expiry)) + + _, err := store.GetClientAssertionJWT(context.Background(), expiredJTI.JTI) + assert.True(t, errors.Is(err, sqlcon.ErrNoRows)) + cmp, err := store.GetClientAssertionJWT(context.Background(), newJTI.JTI) + require.NoError(t, err) + require.NotEqual(t, cmp.NID, uuid.Nil) + cmp.NID = uuid.Nil + assert.Equal(t, newJTI, cmp) + }) + + t.Run("case=inserts same JTI if expired", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(-time.Minute)) + require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) + + jti.Expiry = jti.Expiry.Add(2 * time.Minute) + assert.NoError(t, store.SetClientAssertionJWT(context.Background(), jti.JTI, jti.Expiry)) + cmp, err := store.GetClientAssertionJWT(context.Background(), jti.JTI) + assert.NoError(t, err) + assert.Equal(t, jti, cmp) + }) + } +} + +func testFositeStoreClientAssertionJWTValid(m *driver.RegistrySQL) func(*testing.T) { + return func(t *testing.T) { + t.Run("case=returns valid on unknown JTI", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + + assert.NoError(t, 
store.ClientAssertionJWTValid(context.Background(), uuid.Must(uuid.NewV4()).String())) + }) + + t.Run("case=returns invalid on known JTI", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(time.Minute)) + + require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) + + assert.True(t, errors.Is(store.ClientAssertionJWTValid(context.Background(), jti.JTI), fosite.ErrJTIKnown)) + }) + + t.Run("case=returns valid on expired JTI", func(t *testing.T) { + store, ok := m.OAuth2Storage().(oauth2.AssertionJWTReader) + require.True(t, ok) + jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(-time.Minute)) + + require.NoError(t, store.SetClientAssertionJWTRaw(context.Background(), jti)) + + assert.NoError(t, store.ClientAssertionJWTValid(context.Background(), jti.JTI)) + }) + } +} + +func testFositeJWTBearerGrantStorage(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + ctx := t.Context() + grantManager := x.GrantManager() + keyManager := x.KeyManager() + grantStorage := x.OAuth2Storage().(rfc7523.RFC7523KeyStorage) + + t.Run("case=associated key added with grant", func(t *testing.T) { + keySet, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + + publicKey := keySet.Keys[0].Public() + issuer := uuid.Must(uuid.NewV4()).String() + subject := "bob+" + uuid.Must(uuid.NewV4()).String() + "@example.com" + grant := trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: subject, + AllowAnySubject: false, + Scope: []string{"openid", "offline"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + } + + storedKeySet, err := grantStorage.GetPublicKeys(ctx, issuer, subject) + require.NoError(t, err) + require.Len(t, storedKeySet.Keys, 0) + + require.NoError(t, grantManager.CreateGrant(ctx, grant, publicKey)) + + storedKeySet, err = grantStorage.GetPublicKeys(ctx, issuer, subject) + require.NoError(t, err) + assert.Len(t, storedKeySet.Keys, 1) + + storedKey, err := grantStorage.GetPublicKey(ctx, issuer, subject, publicKey.KeyID) + require.NoError(t, err) + assert.Equal(t, publicKey.KeyID, storedKey.KeyID) + assert.Equal(t, publicKey.Use, storedKey.Use) + assert.Equal(t, publicKey.Key, storedKey.Key) + + storedScopes, err := grantStorage.GetPublicKeyScopes(ctx, issuer, subject, publicKey.KeyID) + require.NoError(t, err) + assert.Equal(t, grant.Scope, storedScopes) + + storedKeySet, err = keyManager.GetKey(ctx, issuer, publicKey.KeyID) + require.NoError(t, err) + assert.Equal(t, publicKey.KeyID, storedKeySet.Keys[0].KeyID) + assert.Equal(t, publicKey.Use, storedKeySet.Keys[0].Use) + assert.Equal(t, publicKey.Key, storedKeySet.Keys[0].Key) + }) + + t.Run("case=only associated key returns", func(t *testing.T) { + keySetToNotReturn, err := jwk.GenerateJWK(jose.ES256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + require.NoError(t, keyManager.AddKeySet(context.Background(), uuid.Must(uuid.NewV4()).String(), keySetToNotReturn), "adding a random key should not fail") + + issuer := uuid.Must(uuid.NewV4()).String() + subject := "maria+" + uuid.Must(uuid.NewV4()).String() + "@example.com" + + keySet1ToReturn, err := jwk.GenerateJWK(jose.ES256, uuid.Must(uuid.NewV4()).String(), "sig") + 
require.NoError(t, err) + require.NoError(t, grantManager.CreateGrant(t.Context(), trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: subject, + AllowAnySubject: false, + Scope: []string{"openid"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: keySet1ToReturn.Keys[0].Public().KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + }, keySet1ToReturn.Keys[0].Public())) + + keySet2ToReturn, err := jwk.GenerateJWK(jose.ES256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + require.NoError(t, grantManager.CreateGrant(ctx, trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: subject, + AllowAnySubject: false, + Scope: []string{"openid"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: keySet2ToReturn.Keys[0].Public().KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + }, keySet2ToReturn.Keys[0].Public())) + + storedKeySet, err := grantStorage.GetPublicKeys(context.Background(), issuer, subject) + require.NoError(t, err) + require.Len(t, storedKeySet.Keys, 2) + + // Cannot rely on sort order because the created_at timestamps may alias. + idx1 := slices.IndexFunc(storedKeySet.Keys, func(k jose.JSONWebKey) bool { + return k.KeyID == keySet1ToReturn.Keys[0].Public().KeyID + }) + require.GreaterOrEqual(t, idx1, 0) + idx2 := slices.IndexFunc(storedKeySet.Keys, func(k jose.JSONWebKey) bool { + return k.KeyID == keySet2ToReturn.Keys[0].Public().KeyID + }) + require.GreaterOrEqual(t, idx2, 0) + + assert.Equal(t, keySet1ToReturn.Keys[0].Public().KeyID, storedKeySet.Keys[idx1].KeyID) + assert.Equal(t, keySet1ToReturn.Keys[0].Public().Use, storedKeySet.Keys[idx1].Use) + assert.Equal(t, keySet1ToReturn.Keys[0].Public().Key, storedKeySet.Keys[idx1].Key) + assert.Equal(t, keySet2ToReturn.Keys[0].Public().KeyID, storedKeySet.Keys[idx2].KeyID) + assert.Equal(t, keySet2ToReturn.Keys[0].Public().Use, storedKeySet.Keys[idx2].Use) + assert.Equal(t, keySet2ToReturn.Keys[0].Public().Key, storedKeySet.Keys[idx2].Key) + + storedKeySet, err = grantStorage.GetPublicKeys(context.Background(), issuer, "non-existing-subject") + require.NoError(t, err) + assert.Len(t, storedKeySet.Keys, 0) + + _, err = grantStorage.GetPublicKeyScopes(context.Background(), issuer, "non-existing-subject", keySet2ToReturn.Keys[0].Public().KeyID) + require.Error(t, err) + }) + + t.Run("case=associated key is deleted, when granted is deleted", func(t *testing.T) { + keySet, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + + publicKey := keySet.Keys[0].Public() + issuer := uuid.Must(uuid.NewV4()).String() + subject := "aeneas+" + uuid.Must(uuid.NewV4()).String() + "@example.com" + grant := trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: subject, + AllowAnySubject: false, + Scope: []string{"openid", "offline"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + } + + require.NoError(t, grantManager.CreateGrant(ctx, grant, publicKey)) + + _, err = grantStorage.GetPublicKey(ctx, issuer, subject, grant.PublicKey.KeyID) + require.NoError(t, err) + + _, err = keyManager.GetKey(ctx, issuer, publicKey.KeyID) + require.NoError(t, err) + + err = grantManager.DeleteGrant(ctx, grant.ID) + require.NoError(t, err) + + _, err = 
grantStorage.GetPublicKey(ctx, issuer, subject, publicKey.KeyID) + assert.Error(t, err) + + _, err = keyManager.GetKey(ctx, issuer, publicKey.KeyID) + assert.Error(t, err) + }) + + t.Run("case=associated grant is deleted, when key is deleted", func(t *testing.T) { + keySet, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + + publicKey := keySet.Keys[0].Public() + issuer := uuid.Must(uuid.NewV4()).String() + subject := "vladimir+" + uuid.Must(uuid.NewV4()).String() + "@example.com" + grant := trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: subject, + AllowAnySubject: false, + Scope: []string{"openid", "offline"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + } + + require.NoError(t, grantManager.CreateGrant(ctx, grant, publicKey)) + + _, err = grantStorage.GetPublicKey(ctx, issuer, subject, publicKey.KeyID) + require.NoError(t, err) + + _, err = keyManager.GetKey(ctx, issuer, publicKey.KeyID) + require.NoError(t, err) + + err = keyManager.DeleteKey(ctx, issuer, publicKey.KeyID) + require.NoError(t, err) + + _, err = keyManager.GetKey(ctx, issuer, publicKey.KeyID) + assert.Error(t, err) + + _, err = grantManager.GetConcreteGrant(ctx, grant.ID) + assert.Error(t, err) + }) + + t.Run("case=only returns the key when subject matches", func(t *testing.T) { + keySet, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + + publicKey := keySet.Keys[0].Public() + issuer := uuid.Must(uuid.NewV4()).String() + subject := "jagoba+" + uuid.Must(uuid.NewV4()).String() + "@example.com" + grant := trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: subject, + AllowAnySubject: false, + Scope: []string{"openid", "offline"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + } + + require.NoError(t, grantManager.CreateGrant(ctx, grant, publicKey)) + + // All three get methods should only return the public key when using the valid subject + _, err = grantStorage.GetPublicKey(ctx, issuer, "any-subject-1", publicKey.KeyID) + require.Error(t, err) + _, err = grantStorage.GetPublicKey(ctx, issuer, subject, publicKey.KeyID) + require.NoError(t, err) + + _, err = grantStorage.GetPublicKeyScopes(ctx, issuer, "any-subject-2", publicKey.KeyID) + require.Error(t, err) + _, err = grantStorage.GetPublicKeyScopes(ctx, issuer, subject, publicKey.KeyID) + require.NoError(t, err) + + jwks, err := grantStorage.GetPublicKeys(ctx, issuer, "any-subject-3") + require.NoError(t, err) + require.NotNil(t, jwks) + require.Empty(t, jwks.Keys) + jwks, err = grantStorage.GetPublicKeys(ctx, issuer, subject) + require.NoError(t, err) + require.NotNil(t, jwks) + require.NotEmpty(t, jwks.Keys) + }) + + t.Run("case=returns the key when any subject is allowed", func(t *testing.T) { + keySet, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + + publicKey := keySet.Keys[0].Public() + issuer := uuid.Must(uuid.NewV4()).String() + grant := trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: "", + AllowAnySubject: true, + Scope: []string{"openid", "offline"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, + CreatedAt: 
time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(1, 0, 0), + } + + require.NoError(t, grantManager.CreateGrant(ctx, grant, publicKey)) + + // All three get methods should always return the public key + _, err = grantStorage.GetPublicKey(ctx, issuer, "any-subject-1", publicKey.KeyID) + require.NoError(t, err) + + _, err = grantStorage.GetPublicKeyScopes(ctx, issuer, "any-subject-2", publicKey.KeyID) + require.NoError(t, err) + + jwks, err := grantStorage.GetPublicKeys(ctx, issuer, "any-subject-3") + require.NoError(t, err) + require.NotNil(t, jwks) + require.NotEmpty(t, jwks.Keys) + }) + + t.Run("case=does not return expired values", func(t *testing.T) { + keySet, err := jwk.GenerateJWK(jose.RS256, uuid.Must(uuid.NewV4()).String(), "sig") + require.NoError(t, err) + + publicKey := keySet.Keys[0].Public() + issuer := uuid.Must(uuid.NewV4()).String() + grant := trust.Grant{ + ID: uuid.Must(uuid.NewV4()), + Issuer: issuer, + Subject: "", + AllowAnySubject: true, + Scope: []string{"openid", "offline"}, + PublicKey: trust.PublicKey{Set: issuer, KeyID: publicKey.KeyID}, + CreatedAt: time.Now().UTC().Round(time.Second), + ExpiresAt: time.Now().UTC().Round(time.Second).AddDate(-1, 0, 0), + } + + require.NoError(t, grantManager.CreateGrant(ctx, grant, publicKey)) + + keys, err := grantStorage.GetPublicKeys(ctx, issuer, "any-subject-3") + require.NoError(t, err) + assert.Len(t, keys.Keys, 0) + }) + } +} + +func doTestCommit(m *driver.RegistrySQL, t *testing.T, + createFn func(context.Context, string, fosite.Requester) error, + getFn func(context.Context, string, fosite.Session) (fosite.Requester, error), + revokeFn func(context.Context, string) error, +) { + txnStore, ok := m.OAuth2Storage().(fosite.Transactional) + require.True(t, ok) + ctx := t.Context() + ctx, err := txnStore.BeginTX(ctx) + require.NoError(t, err) + signature := uuid.Must(uuid.NewV4()).String() + err = createFn(ctx, signature, createTestRequest(signature)) + require.NoError(t, err) + err = txnStore.Commit(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + res, err := getFn(context.Background(), signature, oauth2.NewTestSession(t, "bar")) + // token should have been created successfully because Commit did not return an error + require.NoError(t, err) + assertx.EqualAsJSONExcept(t, newDefaultRequest(t, "blank"), res, defaultIgnoreKeys) + // AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") + + // testrevoke within a transaction + ctx, err = txnStore.BeginTX(context.Background()) + require.NoError(t, err) + err = revokeFn(ctx, signature) + require.NoError(t, err) + err = txnStore.Commit(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + _, err = getFn(context.Background(), signature, oauth2.NewTestSession(t, "bar")) + // Since commit worked for revoke, we should get an error here. 
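A minimal sketch, not part of this diff: the doTestCommit and doTestRollback helpers exercise fosite's Transactional storage interface. The pattern they verify looks roughly like the helper below; the package name, helper name, and error handling are illustrative, and in this repository the interface is exposed by the vendored fosite fork rather than github.com/ory/fosite.

package example

import (
	"context"

	"github.com/ory/fosite" // in this repo, the fork under github.com/ory/hydra/v2/fosite exposes the same interface
)

// withTransaction wraps a storage operation in BeginTX/Commit/Rollback. Writes made
// through the transactional context only become visible to other contexts after
// Commit and are discarded entirely after Rollback, which is what the helpers check.
func withTransaction(ctx context.Context, store fosite.Transactional, fn func(ctx context.Context) error) error {
	txCtx, err := store.BeginTX(ctx)
	if err != nil {
		return err
	}
	if err := fn(txCtx); err != nil {
		_ = store.Rollback(txCtx) // discard everything written via txCtx
		return err
	}
	return store.Commit(txCtx) // make the writes visible outside the transaction
}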
+ require.Error(t, err) +} + +func doTestCommitRefresh(m *driver.RegistrySQL, t *testing.T, + createFn func(context.Context, string, string, fosite.Requester) error, + getFn func(context.Context, string, fosite.Session) (fosite.Requester, error), + revokeFn func(context.Context, string) error, +) { + txnStore, ok := m.OAuth2Storage().(fosite.Transactional) + require.True(t, ok) + ctx := t.Context() + ctx, err := txnStore.BeginTX(ctx) + require.NoError(t, err) + signature := uuid.Must(uuid.NewV4()).String() + err = createFn(ctx, signature, "", createTestRequest(signature)) + require.NoError(t, err) + err = txnStore.Commit(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + res, err := getFn(context.Background(), signature, oauth2.NewTestSession(t, "bar")) + // token should have been created successfully because Commit did not return an error + require.NoError(t, err) + assertx.EqualAsJSONExcept(t, newDefaultRequest(t, "blank"), res, defaultIgnoreKeys) + // AssertObjectKeysEqual(t, &defaultRequest, res, "RequestedScope", "GrantedScope", "Form", "Session") + + // testrevoke within a transaction + ctx, err = txnStore.BeginTX(context.Background()) + require.NoError(t, err) + err = revokeFn(ctx, signature) + require.NoError(t, err) + err = txnStore.Commit(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + _, err = getFn(context.Background(), signature, oauth2.NewTestSession(t, "bar")) + // Since commit worked for revoke, we should get an error here. + require.Error(t, err) +} + +func doTestRollback(m *driver.RegistrySQL, t *testing.T, + createFn func(context.Context, string, fosite.Requester) error, + getFn func(context.Context, string, fosite.Session) (fosite.Requester, error), + revokeFn func(context.Context, string) error, +) { + txnStore, ok := m.OAuth2Storage().(fosite.Transactional) + require.True(t, ok) + + ctx := t.Context() + ctx, err := txnStore.BeginTX(ctx) + require.NoError(t, err) + signature := uuid.Must(uuid.NewV4()).String() + err = createFn(ctx, signature, createTestRequest(signature)) + require.NoError(t, err) + err = txnStore.Rollback(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + ctx = context.Background() + _, err = getFn(ctx, signature, oauth2.NewTestSession(t, "bar")) + // Since we rolled back above, the token should not exist and getting it should result in an error + require.Error(t, err) + + // create a new token, revoke it, then rollback the revoke. We should be able to then get it successfully. 
+ signature2 := uuid.Must(uuid.NewV4()).String() + err = createFn(ctx, signature2, createTestRequest(signature2)) + require.NoError(t, err) + _, err = getFn(ctx, signature2, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + + ctx, err = txnStore.BeginTX(context.Background()) + require.NoError(t, err) + err = revokeFn(ctx, signature2) + require.NoError(t, err) + err = txnStore.Rollback(ctx) + require.NoError(t, err) + + _, err = getFn(context.Background(), signature2, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) +} + +func doTestRollbackRefresh(m *driver.RegistrySQL, t *testing.T, + createFn func(context.Context, string, string, fosite.Requester) error, + getFn func(context.Context, string, fosite.Session) (fosite.Requester, error), + revokeFn func(context.Context, string) error, +) { + txnStore, ok := m.OAuth2Storage().(fosite.Transactional) + require.True(t, ok) + + ctx := t.Context() + ctx, err := txnStore.BeginTX(ctx) + require.NoError(t, err) + signature := uuid.Must(uuid.NewV4()).String() + err = createFn(ctx, signature, "", createTestRequest(signature)) + require.NoError(t, err) + err = txnStore.Rollback(ctx) + require.NoError(t, err) + + // Require a new context, since the old one contains the transaction. + ctx = context.Background() + _, err = getFn(ctx, signature, oauth2.NewTestSession(t, "bar")) + // Since we rolled back above, the token should not exist and getting it should result in an error + require.Error(t, err) + + // create a new token, revoke it, then rollback the revoke. We should be able to then get it successfully. + signature2 := uuid.Must(uuid.NewV4()).String() + err = createFn(ctx, signature2, "", createTestRequest(signature2)) + require.NoError(t, err) + _, err = getFn(ctx, signature2, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) + + ctx, err = txnStore.BeginTX(context.Background()) + require.NoError(t, err) + err = revokeFn(ctx, signature2) + require.NoError(t, err) + err = txnStore.Rollback(ctx) + require.NoError(t, err) + + _, err = getFn(context.Background(), signature2, oauth2.NewTestSession(t, "bar")) + require.NoError(t, err) +} + +func createTestRequest(id string) *fosite.Request { + return &fosite.Request{ + ID: id, + RequestedAt: time.Now().UTC().Round(time.Second), + Client: &client.Client{ID: "foobar"}, + RequestedScope: fosite.Arguments{"fa", "ba"}, + GrantedScope: fosite.Arguments{"fa", "ba"}, + RequestedAudience: fosite.Arguments{"ad1", "ad2"}, + GrantedAudience: fosite.Arguments{"ad1", "ad2"}, + Form: url.Values{"foo": []string{"bar", "baz"}}, + Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, + } +} + +func testHelperRefreshTokenExpiryUpdate(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + ctx := t.Context() + + // Create client + cl := &client.Client{ID: "refresh-expiry-client"} + require.NoError(t, x.ClientManager().CreateClient(ctx, cl)) + + // Create a request with a long expiry + initialRequest := fosite.Request{ + ID: uuid.Must(uuid.NewV4()).String(), + RequestedAt: time.Now().UTC().Round(time.Second), + Client: cl, + Session: oauth2.NewTestSession(t, "sub"), + } + + // Set a long expiry time (24 hours) + initialExpiry := time.Now().Add(24 * time.Hour) + initialRequest.Session.SetExpiresAt(fosite.RefreshToken, initialExpiry) + + t.Run("regular rotation", func(t *testing.T) { + // Create original refresh token + regularSignature := uuid.Must(uuid.NewV4()).String() + require.NoError(t, x.OAuth2Storage().CreateRefreshTokenSession(ctx, 
regularSignature, "", &initialRequest)) + + // Verify initial expiry is set correctly + originalToken, err := x.OAuth2Storage().GetRefreshTokenSession(ctx, regularSignature, oauth2.NewTestSession(t, "sub")) + require.NoError(t, err) + require.Equal(t, initialExpiry.Unix(), originalToken.GetSession().GetExpiresAt(fosite.RefreshToken).Unix()) + + // Set up a connection to directly query the database + var actualExpiresAt time.Time + require.NoError(t, x.Persister().Connection(ctx).RawQuery("SELECT expires_at FROM hydra_oauth2_refresh WHERE signature=?", regularSignature).First(&actualExpiresAt)) + require.Equal(t, initialExpiry.UTC().Round(time.Second), actualExpiresAt.UTC().Round(time.Second)) + + // Rotate the token + err = x.OAuth2Storage().RotateRefreshToken(ctx, initialRequest.ID, regularSignature) + require.NoError(t, err) + + // Check that the original token's expiry was updated to be closer to now + var revokedData struct { + ExpiresAt time.Time `db:"expires_at"` + Active bool `db:"active"` + } + require.NoError(t, x.Persister().Connection(ctx).RawQuery("SELECT expires_at, active FROM hydra_oauth2_refresh WHERE signature=?", regularSignature).First(&revokedData)) + + // Verify the token is now inactive + require.False(t, revokedData.Active) + + // Verify the expiry is updated to be closer to now than the original expiry + require.True(t, revokedData.ExpiresAt.Before(initialExpiry), "Expiry should be updated to be sooner than original") + require.True(t, revokedData.ExpiresAt.After(time.Now()), "Expiry should still be in the future") + require.True(t, time.Until(revokedData.ExpiresAt) < time.Until(initialExpiry), "New expiry should be closer to now than original expiry") + + t.Logf("Original expiry: %v, Updated expiry: %v, Now: %v", initialExpiry, revokedData.ExpiresAt, time.Now()) + }) + + t.Run("graceful rotation", func(t *testing.T) { + // Create refresh token for graceful rotation + gracefulSignature := uuid.Must(uuid.NewV4()).String() + require.NoError(t, x.OAuth2Storage().CreateRefreshTokenSession(ctx, gracefulSignature, "", &initialRequest)) + + // Set config to graceful rotation + oldPeriod := x.Config().GracefulRefreshTokenRotation(ctx).Period + t.Cleanup(func() { + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, oldPeriod) + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGraceReuseCount, 0) + }) + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, time.Minute*30) + x.Config().MustSet(ctx, config.KeyRefreshTokenRotationGraceReuseCount, 3) + + // Record time before rotation + beforeRotation := time.Now().UTC().Add(-time.Second) // Ensure we have a different timestamp for first_used_at + + // Rotate the token + err := x.OAuth2Storage().RotateRefreshToken(ctx, initialRequest.ID, gracefulSignature) + require.NoError(t, err) + + // Check the token's expiry and status + var rotatedData struct { + ExpiresAt time.Time `db:"expires_at"` + Active bool `db:"active"` + FirstUsedAt sqlxx.NullTime `db:"first_used_at"` + UsedTimes sqlxx.NullInt64 `db:"used_times"` + } + require.NoError(t, x.Persister().Connection(ctx).RawQuery("SELECT expires_at, active, first_used_at, used_times FROM hydra_oauth2_refresh WHERE signature=?", gracefulSignature).First(&rotatedData)) + + // Token is used + require.False(t, rotatedData.Active) + + // Verify first_used_at is set and reasonable + assert.True(t, time.Time(rotatedData.FirstUsedAt).After(beforeRotation) || time.Time(rotatedData.FirstUsedAt).Equal(beforeRotation), "%s should be after or equal to %s", 
time.Time(rotatedData.FirstUsedAt), beforeRotation) + + now := time.Now().UTC().Add(time.Second) + assert.True(t, time.Time(rotatedData.FirstUsedAt).Before(now) || time.Time(rotatedData.FirstUsedAt).Equal(now), "%s should be before or equal to %s", time.Time(rotatedData.FirstUsedAt), now) + + // Verify used_times was incremented + assert.True(t, rotatedData.UsedTimes.Valid) + assert.Equal(t, int64(1), rotatedData.UsedTimes.Int) + + // Verify the expiry is updated and is in the future + assert.True(t, rotatedData.ExpiresAt.Before(initialExpiry), "Expiry should be updated to be sooner than original") + assert.True(t, rotatedData.ExpiresAt.After(time.Now().UTC()), "Expiry should still be in the future") + assert.True(t, time.Until(rotatedData.ExpiresAt) < time.Until(initialExpiry), "New expiry should be closer to now than original expiry") + + t.Logf("Original expiry: %v, Updated expiry: %v, Now: %v", initialExpiry, rotatedData.ExpiresAt, time.Now()) + }) + } +} + +func testHelperAuthorizeCodeInvalidation(x *driver.RegistrySQL) func(t *testing.T) { + return func(t *testing.T) { + ctx := t.Context() + + // Create client + cl := &client.Client{ID: "auth-code-client"} + require.NoError(t, x.ClientManager().CreateClient(ctx, cl)) + + // Create a request with a long expiry + initialRequest := fosite.Request{ + ID: uuid.Must(uuid.NewV4()).String(), + RequestedAt: time.Now().UTC().Round(time.Second), + Client: cl, + Session: oauth2.NewTestSession(t, "sub"), + } + + // Set a long expiry time (1 hour) + initialExpiry := time.Now().Add(1 * time.Hour) + initialRequest.Session.SetExpiresAt(fosite.AuthorizeCode, initialExpiry) + + // Create authorize code session + authCodeSignature := uuid.Must(uuid.NewV4()).String() + require.NoError(t, x.OAuth2Storage().CreateAuthorizeCodeSession(ctx, authCodeSignature, &initialRequest)) + + // Verify initial state + originalCode, err := x.OAuth2Storage().GetAuthorizeCodeSession(ctx, authCodeSignature, oauth2.NewTestSession(t, "sub")) + require.NoError(t, err) + require.Equal(t, initialExpiry.Unix(), originalCode.GetSession().GetExpiresAt(fosite.AuthorizeCode).Unix()) + + // Check database directly + var codeData struct { + ExpiresAt time.Time `db:"expires_at"` + Active bool `db:"active"` + } + require.NoError(t, x.Persister().Connection(ctx).RawQuery( + "SELECT expires_at, active FROM hydra_oauth2_code WHERE signature=?", + authCodeSignature).First(&codeData)) + require.Equal(t, initialExpiry.UTC().Round(time.Second), codeData.ExpiresAt.UTC().Round(time.Second)) + require.True(t, codeData.Active) + + // Invalidate the code + err = x.OAuth2Storage().InvalidateAuthorizeCodeSession(ctx, authCodeSignature) + require.NoError(t, err) + + // Check that the code was invalidated but is still retrievable + invalidatedCode, err := x.OAuth2Storage().GetAuthorizeCodeSession(ctx, authCodeSignature, oauth2.NewTestSession(t, "sub")) + require.Error(t, err) + require.ErrorIs(t, err, fosite.ErrInvalidatedAuthorizeCode) + require.NotNil(t, invalidatedCode) // Should still be retrievable + + // Verify database state after invalidation + var invalidatedData struct { + ExpiresAt time.Time `db:"expires_at"` + Active bool `db:"active"` + } + require.NoError(t, x.Persister().Connection(ctx).RawQuery( + "SELECT expires_at, active FROM hydra_oauth2_code WHERE signature=?", + authCodeSignature).First(&invalidatedData)) + + // Verify the code is now inactive + require.False(t, invalidatedData.Active) + + // Verify the expiry is updated to be closer to now than the original expiry + 
require.True(t, invalidatedData.ExpiresAt.Before(initialExpiry), + "Expiry should be updated to be sooner than original") + require.True(t, invalidatedData.ExpiresAt.After(time.Now()), + "Expiry should still be in the future") + require.True(t, time.Until(invalidatedData.ExpiresAt) < time.Until(initialExpiry), + "New expiry should be closer to now than original expiry") + + t.Logf("Original expiry: %v, Updated expiry: %v, Now: %v", + initialExpiry, invalidatedData.ExpiresAt, time.Now()) + } +} diff --git a/oauth2/fosite_store_test.go b/oauth2/fosite_store_test.go index c8fb8d52618..f2d8b00eb04 100644 --- a/oauth2/fosite_store_test.go +++ b/oauth2/fosite_store_test.go @@ -4,56 +4,25 @@ package oauth2_test import ( - "context" - "flag" "testing" + "time" - "github.com/stretchr/testify/require" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - . "github.com/ory/hydra/oauth2" - "github.com/ory/x/contextx" - "github.com/ory/x/networkx" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/x/configx" "github.com/ory/x/sqlcon/dockertest" ) func TestMain(m *testing.M) { - flag.Parse() - - runner := dockertest.Register() - runner.Exit(m.Run()) -} - -var registries = make(map[string]driver.Registry) -var cleanRegistries = func(t *testing.T) { - registries["memory"] = internal.NewRegistryMemory(t, internal.NewConfigurationWithDefaults(), &contextx.Default{}) -} - -// returns clean registries that can safely be used for one test -// to reuse call cleanRegistries -func setupRegistries(t *testing.T) { - if len(registries) == 0 && !testing.Short() { - // first time called and sql tests - var cleanSQL func(*testing.T) - registries["postgres"], registries["mysql"], registries["cockroach"], cleanSQL = internal.ConnectDatabases(t, true, &contextx.Default{}) - cleanMem := cleanRegistries - cleanMem(t) - cleanRegistries = func(t *testing.T) { - cleanMem(t) - cleanSQL(t) - } - } else { - // reset all/init mem - cleanRegistries(t) - } + defer dockertest.KillAllTestDatabases() + m.Run() } func TestManagers(t *testing.T) { - ctx := context.TODO() - tests := []struct { + t.Parallel() + + for _, tc := range []struct { name string enableSessionEncrypted bool }{ @@ -65,21 +34,44 @@ func TestManagers(t *testing.T) { name: "EnableSessionEncrypted", enableSessionEncrypted: true, }, - } - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - setupRegistries(t) + } { + t.Run("suite="+tc.name, func(t *testing.T) { + for k, store := range testhelpers.ConnectDatabases(t, true, driver.WithConfigOptions(configx.WithValue(config.KeyEncryptSessionData, tc.enableSessionEncrypted))) { + t.Run("database="+k, func(t *testing.T) { + if k != "memory" { + t.Run("testHelperUniqueConstraints", testHelperRequestIDMultiples(store, k)) + t.Run("case=testFositeSqlStoreTransactionsCommitAccessToken", testFositeSqlStoreTransactionCommitAccessToken(store)) + t.Run("case=testFositeSqlStoreTransactionsRollbackAccessToken", testFositeSqlStoreTransactionRollbackAccessToken(store)) + t.Run("case=testFositeSqlStoreTransactionCommitRefreshToken", testFositeSqlStoreTransactionCommitRefreshToken(store)) + t.Run("case=testFositeSqlStoreTransactionRollbackRefreshToken", 
testFositeSqlStoreTransactionRollbackRefreshToken(store)) + t.Run("case=testFositeSqlStoreTransactionCommitAuthorizeCode", testFositeSqlStoreTransactionCommitAuthorizeCode(store)) + t.Run("case=testFositeSqlStoreTransactionRollbackAuthorizeCode", testFositeSqlStoreTransactionRollbackAuthorizeCode(store)) + t.Run("case=testFositeSqlStoreTransactionCommitPKCERequest", testFositeSqlStoreTransactionCommitPKCERequest(store)) + t.Run("case=testFositeSqlStoreTransactionRollbackPKCERequest", testFositeSqlStoreTransactionRollbackPKCERequest(store)) + t.Run("case=testFositeSqlStoreTransactionCommitOpenIdConnectSession", testFositeSqlStoreTransactionCommitOpenIdConnectSession(store)) + t.Run("case=testFositeSqlStoreTransactionRollbackOpenIdConnectSession", testFositeSqlStoreTransactionRollbackOpenIdConnectSession(store)) + } - require.NoError(t, registries["memory"].ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foobar"})) // this is a workaround because the client is not being created for memory store by test helpers. - - for k, store := range registries { - net := &networkx.Network{} - require.NoError(t, store.Persister().Connection(context.Background()).First(net)) - store.Config().MustSet(ctx, config.KeyEncryptSessionData, tc.enableSessionEncrypted) - store.WithContextualizer(&contextx.Static{NID: net.ID, C: store.Config().Source(ctx)}) - TestHelperRunner(t, store, k) + t.Run("testHelperCreateGetDeleteAuthorizeCodes", testHelperCreateGetDeleteAuthorizeCodes(store)) + t.Run("testHelperExpiryFields", testHelperExpiryFields(store)) + t.Run("testHelperCreateGetDeleteAccessTokenSession", testHelperCreateGetDeleteAccessTokenSession(store)) + t.Run("testHelperNilAccessToken", testHelperNilAccessToken(store)) + t.Run("testHelperCreateGetDeleteOpenIDConnectSession", testHelperCreateGetDeleteOpenIDConnectSession(store)) + t.Run("testHelperCreateGetDeleteRefreshTokenSession", testHelperCreateGetDeleteRefreshTokenSession(store)) + t.Run("testHelperRevokeRefreshToken", testHelperRevokeRefreshToken(store)) + t.Run("testHelperCreateGetDeletePKCERequestSession", testHelperCreateGetDeletePKCERequestSession(store)) + t.Run("testHelperFlushTokens", testHelperFlushTokens(store, time.Hour)) + t.Run("testHelperFlushTokensWithLimitAndBatchSize", testHelperFlushTokensWithLimitAndBatchSize(store, 3, 2)) + t.Run("testFositeStoreSetClientAssertionJWT", testFositeStoreSetClientAssertionJWT(store)) + t.Run("testFositeStoreClientAssertionJWTValid", testFositeStoreClientAssertionJWTValid(store)) + t.Run("testHelperDeleteAccessTokens", testHelperDeleteAccessTokens(store)) + t.Run("testHelperRevokeAccessToken", testHelperRevokeAccessToken(store)) + t.Run("testFositeJWTBearerGrantStorage", testFositeJWTBearerGrantStorage(store)) + t.Run("testHelperRotateRefreshToken", testHelperRotateRefreshToken(store)) + t.Run("testHelperRefreshTokenExpiryUpdate", testHelperRefreshTokenExpiryUpdate(store)) + t.Run("testHelperAuthorizeCodeInvalidation", testHelperAuthorizeCodeInvalidation(store)) + }) } }) - } } diff --git a/oauth2/handler.go b/oauth2/handler.go index 009ec2d7073..93de3d25be0 100644 --- a/oauth2/handler.go +++ b/oauth2/handler.go @@ -4,68 +4,87 @@ package oauth2 import ( + "cmp" + "context" + "encoding/base64" "encoding/json" "fmt" "html/template" "net/http" + "net/url" "reflect" "strings" "time" - "go.step.sm/crypto/jose" - - "github.com/ory/x/httprouterx" - + jwtV5 "github.com/golang-jwt/jwt/v5" "github.com/pborman/uuid" - - 
"github.com/ory/x/errorsx" - - "github.com/julienschmidt/httprouter" "github.com/pkg/errors" - - jwt2 "github.com/ory/fosite/token/jwt" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/token/jwt" + "github.com/tidwall/gjson" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/x" + "github.com/ory/hydra/v2/x/events" + "github.com/ory/x/httprouterx" + "github.com/ory/x/josex" + "github.com/ory/x/otelx" "github.com/ory/x/urlx" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/x" ) const ( - DefaultLoginPath = "/oauth2/fallbacks/login" - DefaultConsentPath = "/oauth2/fallbacks/consent" - DefaultPostLogoutPath = "/oauth2/fallbacks/logout/callback" - DefaultLogoutPath = "/oauth2/fallbacks/logout" - DefaultErrorPath = "/oauth2/fallbacks/error" - TokenPath = "/oauth2/token" // #nosec G101 - AuthPath = "/oauth2/auth" - LogoutPath = "/oauth2/sessions/logout" - - UserinfoPath = "/userinfo" - WellKnownPath = "/.well-known/openid-configuration" - JWKPath = "/.well-known/jwks.json" + DefaultLoginPath = "/oauth2/fallbacks/login" + DefaultConsentPath = "/oauth2/fallbacks/consent" + DefaultPostLogoutPath = "/oauth2/fallbacks/logout/callback" + DefaultDeviceVerificationPath = "/oauth2/fallbacks/device" + DefaultPostDevicePath = "/oauth2/fallbacks/device/done" + DefaultLogoutPath = "/oauth2/fallbacks/logout" + DefaultErrorPath = "/oauth2/fallbacks/error" + TokenPath = "/oauth2/token" // #nosec G101 + AuthPath = "/oauth2/auth" + LogoutPath = "/oauth2/sessions/logout" + + VerifiableCredentialsPath = "/credentials" + UserinfoPath = "/userinfo" + WellKnownPath = "/.well-known/openid-configuration" + OauthAuthorizationServerPath = "/.well-known/oauth-authorization-server" + JWKPath = "/.well-known/jwks.json" // IntrospectPath points to the OAuth2 introspection endpoint. 
IntrospectPath = "/oauth2/introspect" RevocationPath = "/oauth2/revoke" DeleteTokensPath = "/oauth2/tokens" // #nosec G101 + + DeviceAuthPath = "/oauth2/device/auth" + DeviceVerificationPath = "/oauth2/device/verify" ) +// Taken from https://github.com/ory/hydra/v2/fosite/blob/049ed1924cd0b41f12357b0fe617530c264421ac/handler/openid/flow_explicit_auth.go#L29 +var oidcParameters = []string{"grant_type", + "max_age", + "prompt", + "acr_values", + "id_token_hint", + "nonce", +} + type Handler struct { r InternalRegistry c *config.DefaultProvider } -func NewHandler(r InternalRegistry, c *config.DefaultProvider) *Handler { - return &Handler{r: r, c: c} +func NewHandler(r InternalRegistry) *Handler { + return &Handler{ + r: r, + c: r.Config(), + } } -func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx.RouterPublic, corsMiddleware func(http.Handler) http.Handler) { +func (h *Handler) SetPublicRoutes(public *httprouterx.RouterPublic, corsMiddleware func(http.Handler) http.Handler) { public.Handler("OPTIONS", TokenPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) public.Handler("POST", TokenPath, corsMiddleware(http.HandlerFunc(h.oauth2TokenExchange))) @@ -83,16 +102,33 @@ func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx. http.StatusOK, config.KeyLogoutRedirectURL, )) + public.GET(DefaultDeviceVerificationPath, h.fallbackHandler("", "", http.StatusOK, config.KeyDeviceVerificationURL)) + public.GET(DefaultPostDevicePath, h.fallbackHandler( + "You successfully authenticated on your device!", + "The Default Post Device URL is not set which is why you are seeing this fallback page. Your device login request however succeeded.", + http.StatusOK, + config.KeyDeviceDoneURL, + )) public.GET(DefaultErrorPath, h.DefaultErrorHandler) public.Handler("OPTIONS", RevocationPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) public.Handler("POST", RevocationPath, corsMiddleware(http.HandlerFunc(h.revokeOAuth2Token))) public.Handler("OPTIONS", WellKnownPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) public.Handler("GET", WellKnownPath, corsMiddleware(http.HandlerFunc(h.discoverOidcConfiguration))) + public.Handler("OPTIONS", OauthAuthorizationServerPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) + public.Handler("GET", OauthAuthorizationServerPath, corsMiddleware(http.HandlerFunc(h.discoverOidcConfiguration))) public.Handler("OPTIONS", UserinfoPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) public.Handler("GET", UserinfoPath, corsMiddleware(http.HandlerFunc(h.getOidcUserInfo))) public.Handler("POST", UserinfoPath, corsMiddleware(http.HandlerFunc(h.getOidcUserInfo))) + public.Handler("OPTIONS", VerifiableCredentialsPath, corsMiddleware(http.HandlerFunc(h.handleOptions))) + public.Handler("POST", VerifiableCredentialsPath, corsMiddleware(http.HandlerFunc(h.createVerifiableCredential))) + + public.POST(DeviceAuthPath, h.oAuth2DeviceFlow) + public.GET(DeviceVerificationPath, h.performOAuth2DeviceVerificationFlow) +} + +func (h *Handler) SetAdminRoutes(admin *httprouterx.RouterAdmin) { admin.POST(IntrospectPath, h.introspectOAuth2Token) admin.DELETE(DeleteTokensPath, h.deleteOAuth2Token) } @@ -112,10 +148,10 @@ func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin, public *httprouterx. 
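A minimal sketch, not part of this diff: SetPublicRoutes accepts any middleware with the shape func(http.Handler) http.Handler for CORS handling. The function below only illustrates that shape; it is intentionally permissive and is not Hydra's actual CORS implementation, which is driven by configuration.

package example

import "net/http"

// allowAllCORS matches the corsMiddleware parameter expected by SetPublicRoutes.
// It answers preflight requests directly and forwards everything else unchanged.
func allowAllCORS(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Access-Control-Allow-Origin", "*")
		w.Header().Set("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
		w.Header().Set("Access-Control-Allow-Headers", "Authorization, Content-Type")
		if r.Method == http.MethodOptions {
			w.WriteHeader(http.StatusNoContent)
			return
		}
		next.ServeHTTP(w, r)
	})
}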
// // Responses: // 302: emptyResponse -func (h *Handler) performOidcFrontOrBackChannelLogout(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { +func (h *Handler) performOidcFrontOrBackChannelLogout(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - handled, err := h.r.ConsentStrategy().HandleOpenIDConnectLogout(ctx, w, r) + handled, err := h.r.ConsentStrategy().HandleOpenIDConnectLogout(ctx, w, r) if errors.Is(err, consent.ErrAbortOAuth2Request) { return } else if err != nil { @@ -233,6 +269,12 @@ type oidcConfiguration struct { // example: https://playground.ory.sh/ory-hydra/public/oauth2/auth AuthURL string `json:"authorization_endpoint"` + // OAuth 2.0 Device Authorization Endpoint URL + // + // required: true + // example: https://playground.ory.sh/ory-hydra/public/oauth2/device/oauth + DeviceAuthorizationURL string `json:"device_authorization_endpoint"` + // OpenID Connect Dynamic Client Registration Endpoint URL // // example: https://playground.ory.sh/ory-hydra/admin/client @@ -404,6 +446,43 @@ type oidcConfiguration struct { // JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported // by this authorization server. CodeChallengeMethodsSupported []string `json:"code_challenge_methods_supported"` + + // OpenID Connect Verifiable Credentials Endpoint + // + // Contains the URL of the Verifiable Credentials Endpoint. + CredentialsEndpointDraft00 string `json:"credentials_endpoint_draft_00"` + + // OpenID Connect Verifiable Credentials Supported + // + // JSON array containing a list of the Verifiable Credentials supported by this authorization server. + CredentialsSupportedDraft00 []CredentialSupportedDraft00 `json:"credentials_supported_draft_00"` +} + +// Verifiable Credentials Metadata (Draft 00) +// +// Includes information about the supported verifiable credentials. +// +// swagger:model credentialSupportedDraft00 +type CredentialSupportedDraft00 struct { + // OpenID Connect Verifiable Credentials Format + // + // Contains the format that is supported by this authorization server. + Format string `json:"format"` + + // OpenID Connect Verifiable Credentials Types + // + // Contains the types of verifiable credentials supported. + Types []string `json:"types"` + + // OpenID Connect Verifiable Credentials Cryptographic Binding Methods Supported + // + // Contains a list of cryptographic binding methods supported for signing the proof. + CryptographicBindingMethodsSupported []string `json:"cryptographic_binding_methods_supported"` + + // OpenID Connect Verifiable Credentials Cryptographic Suites Supported + // + // Contains a list of cryptographic suites methods supported for signing the proof. 
+ CryptographicSuitesSupported []string `json:"cryptographic_suites_supported"` } // swagger:route GET /.well-known/openid-configuration oidc discoverOidcConfiguration @@ -424,29 +503,31 @@ type oidcConfiguration struct { // 200: oidcConfiguration // default: errorOAuth2 func (h *Handler) discoverOidcConfiguration(w http.ResponseWriter, r *http.Request) { - key, err := h.r.OpenIDJWTStrategy().GetPublicKey(r.Context()) + ctx := r.Context() + key, err := h.r.OpenIDJWTSigner().GetPublicKey(ctx) if err != nil { h.r.Writer().WriteError(w, r, err) return } h.r.Writer().Write(w, r, &oidcConfiguration{ - Issuer: h.c.IssuerURL(r.Context()).String(), - AuthURL: h.c.OAuth2AuthURL(r.Context()).String(), - TokenURL: h.c.OAuth2TokenURL(r.Context()).String(), - JWKsURI: h.c.JWKSURL(r.Context()).String(), - RevocationEndpoint: urlx.AppendPaths(h.c.IssuerURL(r.Context()), RevocationPath).String(), - RegistrationEndpoint: h.c.OAuth2ClientRegistrationURL(r.Context()).String(), - SubjectTypes: h.c.SubjectTypesSupported(r.Context()), + Issuer: h.c.IssuerURL(ctx).String(), + AuthURL: h.c.OAuth2AuthURL(ctx).String(), + DeviceAuthorizationURL: h.c.OAuth2DeviceAuthorisationURL(ctx).String(), + TokenURL: h.c.OAuth2TokenURL(ctx).String(), + JWKsURI: h.c.JWKSURL(ctx).String(), + RevocationEndpoint: urlx.AppendPaths(h.c.IssuerURL(ctx), RevocationPath).String(), + RegistrationEndpoint: h.c.OAuth2ClientRegistrationURL(ctx).String(), + SubjectTypes: h.c.SubjectTypesSupported(ctx), ResponseTypes: []string{"code", "code id_token", "id_token", "token id_token", "token", "token id_token code"}, - ClaimsSupported: h.c.OIDCDiscoverySupportedClaims(r.Context()), - ScopesSupported: h.c.OIDCDiscoverySupportedScope(r.Context()), - UserinfoEndpoint: h.c.OIDCDiscoveryUserinfoEndpoint(r.Context()).String(), + ClaimsSupported: h.c.OIDCDiscoverySupportedClaims(ctx), + ScopesSupported: h.c.OIDCDiscoverySupportedScope(ctx), + UserinfoEndpoint: h.c.OIDCDiscoveryUserinfoEndpoint(ctx).String(), TokenEndpointAuthMethodsSupported: []string{"client_secret_post", "client_secret_basic", "private_key_jwt", "none"}, IDTokenSigningAlgValuesSupported: []string{key.Algorithm}, IDTokenSignedResponseAlg: []string{key.Algorithm}, UserinfoSignedResponseAlg: []string{key.Algorithm}, - GrantTypesSupported: []string{"authorization_code", "implicit", "client_credentials", "refresh_token"}, - ResponseModesSupported: []string{"query", "fragment"}, + GrantTypesSupported: []string{"authorization_code", "implicit", "client_credentials", "refresh_token", "urn:ietf:params:oauth:grant-type:device_code"}, + ResponseModesSupported: []string{"query", "fragment", "form_post"}, UserinfoSigningAlgValuesSupported: []string{"none", key.Algorithm}, RequestParameterSupported: true, RequestURIParameterSupported: true, @@ -455,16 +536,28 @@ func (h *Handler) discoverOidcConfiguration(w http.ResponseWriter, r *http.Reque BackChannelLogoutSessionSupported: true, FrontChannelLogoutSupported: true, FrontChannelLogoutSessionSupported: true, - EndSessionEndpoint: urlx.AppendPaths(h.c.IssuerURL(r.Context()), LogoutPath).String(), - RequestObjectSigningAlgValuesSupported: []string{"none", string(jose.RS256), string(jose.ES256)}, + EndSessionEndpoint: urlx.AppendPaths(h.c.IssuerURL(ctx), LogoutPath).String(), + RequestObjectSigningAlgValuesSupported: []string{"none", "RS256", "ES256"}, CodeChallengeMethodsSupported: []string{"plain", "S256"}, + CredentialsEndpointDraft00: h.c.CredentialsEndpointURL(ctx).String(), + CredentialsSupportedDraft00: []CredentialSupportedDraft00{{ + Format: 
"jwt_vc_json", + Types: []string{"VerifiableCredential", "UserInfoCredential"}, + CryptographicBindingMethodsSupported: []string{"jwk"}, + CryptographicSuitesSupported: []string{ + "PS256", "RS256", "ES256", + "PS384", "RS384", "ES384", + "PS512", "RS512", "ES512", + "EdDSA", + }, + }}, }) } // OpenID Connect Userinfo // // swagger:model oidcUserInfo -type oidcUserInfo struct { +type _ struct { // Subject - Identifier for the End-User at the IssuerURL. Subject string `json:"sub"` @@ -547,7 +640,7 @@ type oidcUserInfo struct { // default: errorOAuth2 func (h *Handler) getOidcUserInfo(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - session := NewSessionWithCustomClaims("", h.c.AllowedTopLevelClaims(ctx)) + session := NewSessionWithCustomClaims(ctx, h.c, "") tokenType, ar, err := h.r.OAuth2Provider().IntrospectToken(ctx, fosite.AccessTokenFromRequest(r), fosite.AccessToken, session) if err != nil { rfcerr := fosite.ErrorToRFC6749Error(err) @@ -567,7 +660,7 @@ func (h *Handler) getOidcUserInfo(w http.ResponseWriter, r *http.Request) { c, ok := ar.GetClient().(*client.Client) if !ok { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrServerError.WithHint("Unable to type assert to *client.Client."))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrServerError.WithHint("Unable to type assert to *client.Client."))) return } @@ -600,13 +693,13 @@ func (h *Handler) getOidcUserInfo(w http.ResponseWriter, r *http.Request) { interim["jti"] = uuid.New() interim["iat"] = time.Now().Unix() - keyID, err := h.r.OpenIDJWTStrategy().GetPublicKeyID(r.Context()) + keyID, err := h.r.OpenIDJWTSigner().GetPublicKeyID(ctx) if err != nil { h.r.Writer().WriteError(w, r, err) return } - token, _, err := h.r.OpenIDJWTStrategy().Generate(ctx, jwt2.MapClaims(interim), &jwt.Headers{ + token, _, err := h.r.OpenIDJWTSigner().Generate(ctx, interim, &jwt.Headers{ Extra: map[string]interface{}{"kid": keyID}, }) if err != nil { @@ -619,15 +712,189 @@ func (h *Handler) getOidcUserInfo(w http.ResponseWriter, r *http.Request) { } else if c.UserinfoSignedResponseAlg == "" || c.UserinfoSignedResponseAlg == "none" { h.r.Writer().Write(w, r, interim) } else { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrServerError.WithHintf("Unsupported userinfo signing algorithm '%s'.", c.UserinfoSignedResponseAlg))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrServerError.WithHintf("Unsupported userinfo signing algorithm '%s'.", c.UserinfoSignedResponseAlg))) return } } +// swagger:route GET /oauth2/device/verify oAuth2 performOAuth2DeviceVerificationFlow +// +// # OAuth 2.0 Device Verification Endpoint +// +// This is the device user verification endpoint. The user is redirected here when trying to log in using the device flow. 
+// +// Consumes: +// - application/x-www-form-urlencoded +// +// Schemes: http, https +// +// Responses: +// 302: emptyResponse +// default: errorOAuth2 +func (h *Handler) performOAuth2DeviceVerificationFlow(w http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + err error + ) + + ctx, span := h.r.Tracer(ctx).Tracer().Start(ctx, "oauth2.handler.performOAuth2DeviceVerificationFlow") + defer otelx.End(span, &err) + + // When this endpoint is called with a valid consent_verifier (meaning that the login flow completed successfully) + // there are 3 writes happening to the database: + // - The flow is created + // - The device auth session is updated (user_code is marked as accepted) + // - The OpenID session is created + // If there were multiple flows created for the same user_code then we may end up with multiple flow objects + // persisted to the database, while only one of them was actually used to validate the user_code + // (see https://github.com/ory/hydra/pull/3851#discussion_r1843678761) + f, err := h.r.ConsentStrategy().HandleOAuth2DeviceAuthorizationRequest(ctx, w, r) + if errors.Is(err, consent.ErrAbortOAuth2Request) { + x.LogError(r, err, h.r.Logger()) + return + } else if e := &(fosite.RFC6749Error{}); errors.As(err, &e) { + x.LogError(r, err, h.r.Logger()) + h.r.Writer().WriteError(w, r, err) + return + } else if err != nil { + x.LogError(r, err, h.r.Logger()) + h.r.Writer().WriteError(w, r, err) + return + } + + req, sig, err := h.r.OAuth2Storage().GetDeviceCodeSessionByRequestID(ctx, f.DeviceCodeRequestID.String(), &Session{}) + if err != nil { + x.LogError(r, err, h.r.Logger()) + h.r.Writer().WriteError(w, r, err) + return + } + + req.SetUserCodeState(fosite.UserCodeAccepted) + session, err := h.updateSessionWithRequest(ctx, f, r, req, req.GetSession().(*Session)) + if err != nil { + x.LogError(r, err, h.r.Logger()) + h.r.Writer().WriteError(w, r, err) + return + } + + req.SetSession(session) + if err := h.r.Transaction(ctx, func(ctx context.Context) error { + // Update the device code session with + // - the claims for which the user gave consent + // - the granted scopes + // - the granted audiences + // - the user_code_state set to `accepted` + // This marks it as ready to be used for the token exchange endpoint. + if err = h.r.OAuth2Storage().UpdateDeviceCodeSessionBySignature(ctx, sig, req); err != nil { + return err + } + + // Update the OpenID Connect session if "openid" scope is granted + if req.GetGrantedScopes().Has("openid") { + if err := h.r.OAuth2Storage().CreateOpenIDConnectSession(ctx, sig, req.Sanitize(oidcParameters)); err != nil { + return err + } + } + + return nil + }); err != nil { + x.LogError(r, err, h.r.Logger()) + h.r.Writer().WriteError(w, r, err) + return + } + + redirectURL := urlx.SetQuery(h.c.DeviceDoneURL(ctx), url.Values{"client_id": {f.Client.GetID()}}).String() + http.Redirect(w, r, redirectURL, http.StatusFound) +} + +// OAuth2 Device Flow +// +// # Ory's OAuth 2.0 Device Authorization API +// +// swagger:model deviceAuthorization +type _ struct { + // The device verification code. + // + // example: ory_dc_smldfksmdfkl.mslkmlkmlk + DeviceCode string `json:"device_code"` + + // The end-user verification code. + // + // example: AAAAAA + UserCode string `json:"user_code"` + + // The end-user verification URI on the authorization + // server. The URI should be short and easy to remember as end users + // will be asked to manually type it into their user agent. 
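A rough sketch, not part of this diff: a device client drives the two new endpoints by requesting a device_code/user_code pair from DeviceAuthPath and then polling TokenPath with the urn:ietf:params:oauth:grant-type:device_code grant while the user approves at DeviceVerificationPath. The base URL, client ID, scopes, and error handling below are placeholders.

package example

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"time"
)

// runDeviceFlow is an illustrative RFC 8628 client against the endpoints above.
func runDeviceFlow() (string, error) {
	const public = "https://hydra.example.org" // placeholder public base URL
	const clientID = "my-device-client"        // placeholder client

	// Step 1: request a device_code / user_code pair from DeviceAuthPath.
	resp, err := http.PostForm(public+"/oauth2/device/auth", url.Values{
		"client_id": {clientID},
		"scope":     {"openid offline"},
	})
	if err != nil {
		return "", err
	}
	var da struct {
		DeviceCode      string `json:"device_code"`
		UserCode        string `json:"user_code"`
		VerificationURI string `json:"verification_uri"`
		Interval        int    `json:"interval"`
	}
	err = json.NewDecoder(resp.Body).Decode(&da)
	resp.Body.Close()
	if err != nil {
		return "", err
	}

	// The user now opens da.VerificationURI (DeviceVerificationPath) and enters
	// da.UserCode; Hydra then runs login and consent as shown in the handler above.

	// Step 2: poll TokenPath with the device_code grant until the user has approved.
	interval := max(da.Interval, 5)
	for {
		time.Sleep(time.Duration(interval) * time.Second)

		tokResp, err := http.PostForm(public+"/oauth2/token", url.Values{
			"grant_type":  {"urn:ietf:params:oauth:grant-type:device_code"},
			"device_code": {da.DeviceCode},
			"client_id":   {clientID},
		})
		if err != nil {
			return "", err
		}
		var tok struct {
			AccessToken string `json:"access_token"`
			Error       string `json:"error"`
		}
		err = json.NewDecoder(tokResp.Body).Decode(&tok)
		tokResp.Body.Close()
		if err != nil {
			return "", err
		}

		switch {
		case tok.AccessToken != "":
			return tok.AccessToken, nil
		case tok.Error == "slow_down":
			interval += 5 // back off as required by RFC 8628
		case tok.Error == "authorization_pending":
			// keep polling
		default:
			return "", fmt.Errorf("device authorization failed: %s", tok.Error)
		}
	}
}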
+ // + // example: https://auth.ory.sh/tv + VerificationUri string `json:"verification_uri"` + + // A verification URI that includes the "user_code" (or + // other information with the same function as the "user_code"), + // which is designed for non-textual transmission. + // + // example: https://auth.ory.sh/tv?user_code=AAAAAA + VerificationUriComplete string `json:"verification_uri_complete"` + + // The lifetime in seconds of the "device_code" and "user_code". + // + // example: 16830 + ExpiresIn int `json:"expires_in"` + + // The minimum amount of time in seconds that the client + // SHOULD wait between polling requests to the token endpoint. If no + // value is provided, clients MUST use 5 as the default. + // + // example: 5 + Interval int `json:"interval"` +} + +// swagger:route POST /oauth2/device/auth oAuth2 oAuth2DeviceFlow +// +// # The OAuth 2.0 Device Authorize Endpoint +// +// This endpoint is not documented here because you should never use your own implementation to perform OAuth2 flows. +// OAuth2 is a very popular protocol and a library for your programming language will exist. +// +// To learn more about this flow please refer to the specification: https://tools.ietf.org/html/rfc8628 +// +// Consumes: +// - application/x-www-form-urlencoded +// +// Schemes: http, https +// +// Responses: +// 200: deviceAuthorization +// default: errorOAuth2 +func (h *Handler) oAuth2DeviceFlow(w http.ResponseWriter, r *http.Request) { + var ctx = r.Context() + + request, err := h.r.OAuth2Provider().NewDeviceRequest(ctx, r) + if err != nil { + h.r.OAuth2Provider().WriteAccessError(ctx, w, request, err) + return + } + + var session = &Session{ + DefaultSession: &openid.DefaultSession{ + Headers: &jwt.Headers{}, + }, + } + + resp, err := h.r.OAuth2Provider().NewDeviceResponse(ctx, request, session) + if err != nil { + h.r.OAuth2Provider().WriteAccessError(ctx, w, request, err) + return + } + + h.r.OAuth2Provider().WriteDeviceResponse(ctx, w, request, resp) +} + // Revoke OAuth 2.0 Access or Refresh Token Request // // swagger:parameters revokeOAuth2Token -type revokeOAuth2Token struct { +type _ struct { // in: formData // required: true Token string `json:"token"` @@ -659,11 +926,15 @@ type revokeOAuth2Token struct { // 200: emptyResponse // default: errorOAuth2 func (h *Handler) revokeOAuth2Token(w http.ResponseWriter, r *http.Request) { - var ctx = r.Context() + ctx := r.Context() - err := h.r.OAuth2Provider().NewRevocationRequest(ctx, r) + err := h.r.Transaction(ctx, func(ctx context.Context) error { + return h.r.OAuth2Provider().NewRevocationRequest(ctx, r) + }) if err != nil { x.LogError(r, err, h.r.Logger()) + } else { + events.Trace(ctx, events.AccessTokenRevoked) } h.r.OAuth2Provider().WriteRevocationResponse(ctx, w, err) @@ -672,7 +943,7 @@ func (h *Handler) revokeOAuth2Token(w http.ResponseWriter, r *http.Request) { // Introspect OAuth 2.0 Access or Refresh Token Request // // swagger:parameters introspectOAuth2Token -type introspectOAuth2Token struct { +type _ struct { // The string value of the token. For access tokens, this // is the "access_token" value returned from the token endpoint // defined in OAuth 2.0. 
For refresh tokens, this is the "refresh_token" @@ -708,22 +979,17 @@ type introspectOAuth2Token struct { // Responses: // 200: introspectedOAuth2Token // default: errorOAuth2 -func (h *Handler) introspectOAuth2Token(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { - var session = NewSessionWithCustomClaims("", h.c.AllowedTopLevelClaims(r.Context())) - var ctx = r.Context() +func (h *Handler) introspectOAuth2Token(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session := NewSessionWithCustomClaims(ctx, h.c, "") - if r.Method != "POST" { - err := errorsx.WithStack(fosite.ErrInvalidRequest.WithHintf("HTTP method is \"%s\", expected \"POST\".", r.Method)) - x.LogError(r, err, h.r.Logger()) - h.r.OAuth2Provider().WriteIntrospectionError(ctx, w, err) - return - } else if err := r.ParseMultipartForm(1 << 20); err != nil && err != http.ErrNotMultipart { - err := errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithDebug(err.Error())) + if err := r.ParseMultipartForm(1 << 20); err != nil && !errors.Is(err, http.ErrNotMultipart) { + err := errors.WithStack(fosite.ErrInvalidRequest.WithHint("Unable to parse HTTP body, make sure to send a properly formatted form request body.").WithDebug(err.Error())) x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteIntrospectionError(ctx, w, err) return } else if len(r.PostForm) == 0 { - err := errorsx.WithStack(fosite.ErrInvalidRequest.WithHint("The POST body can not be empty.")) + err := errors.WithStack(fosite.ErrInvalidRequest.WithHint("The POST body can not be empty.")) x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteIntrospectionError(ctx, w, err) return @@ -735,8 +1001,8 @@ func (h *Handler) introspectOAuth2Token(w http.ResponseWriter, r *http.Request, tt, ar, err := h.r.OAuth2Provider().IntrospectToken(ctx, token, fosite.TokenType(tokenType), session, strings.Split(scope, " ")...) 
if err != nil { - x.LogAudit(r, err, h.r.Logger()) - err := errorsx.WithStack(fosite.ErrInactiveToken.WithHint("An introspection strategy indicated that the token is inactive.").WithDebug(err.Error())) + x.LogError(r, err, h.r.Logger()) + err := errors.WithStack(fosite.ErrInactiveToken.WithHint("An introspection strategy indicated that the token is inactive.").WithDebug(err.Error())) h.r.OAuth2Provider().WriteIntrospectionError(ctx, w, err) return } @@ -759,7 +1025,7 @@ func (h *Handler) introspectOAuth2Token(w http.ResponseWriter, r *http.Request, session, ok := resp.GetAccessRequester().GetSession().(*Session) if !ok { - err := errorsx.WithStack(fosite.ErrServerError.WithHint("Expected session to be of type *Session, but got another type.").WithDebug(fmt.Sprintf("Got type %s", reflect.TypeOf(resp.GetAccessRequester().GetSession())))) + err := errors.WithStack(fosite.ErrServerError.WithHint("Expected session to be of type *Session, but got another type.").WithDebug(fmt.Sprintf("Got type %s", reflect.TypeOf(resp.GetAccessRequester().GetSession())))) x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteIntrospectionError(ctx, w, err) return @@ -793,14 +1059,20 @@ func (h *Handler) introspectOAuth2Token(w http.ResponseWriter, r *http.Request, TokenUse: string(resp.GetTokenUse()), NotBefore: resp.GetAccessRequester().GetRequestedAt().Unix(), }); err != nil { - x.LogError(r, errorsx.WithStack(err), h.r.Logger()) + x.LogError(r, errors.WithStack(err), h.r.Logger()) } + + events.Trace(ctx, + events.AccessTokenInspected, + events.WithSubject(session.GetSubject()), + events.WithClientID(resp.GetAccessRequester().GetClient().GetID()), + ) } // OAuth 2.0 Token Exchange Parameters // // swagger:parameters oauth2TokenExchange -type performOAuth2TokenFlow struct { +type _ struct { // in: formData // required: true GrantType string `json:"grant_type"` @@ -821,7 +1093,7 @@ type performOAuth2TokenFlow struct { // OAuth2 Token Exchange Result // // swagger:model oAuth2TokenExchange -type oAuth2TokenExchange struct { +type _ struct { // The lifetime in seconds of the access token. For // example, the value "3600" denotes that the access token will // expire in one hour from the time the response was generated. @@ -831,7 +1103,7 @@ type oAuth2TokenExchange struct { Scope string `json:"scope"` // To retrieve a refresh token request the id_token scope. - IDToken int `json:"id_token"` + IDToken string `json:"id_token"` // The access token issued by the authorization server. AccessToken string `json:"access_token"` @@ -851,7 +1123,8 @@ type oAuth2TokenExchange struct { // Use open source libraries to perform OAuth 2.0 and OpenID Connect // available for any programming language. You can find a list of libraries here https://oauth.net/code/ // -// The Ory SDK is not yet able to this endpoint properly. +// This endpoint should not be used via the Ory SDK and is only included for technical reasons. +// Instead, use one of the libraries linked above. 
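A minimal sketch, not part of this diff: token introspection is served on the admin API, and since no standard client library covers RFC 7662, callers typically send a plain form POST like the one below. The admin base URL and path prefix are placeholders and depend on the deployment.

package example

import (
	"encoding/json"
	"net/http"
	"net/url"
)

type introspectionResult struct {
	Active   bool   `json:"active"`
	Subject  string `json:"sub"`
	ClientID string `json:"client_id"`
	Scope    string `json:"scope"`
}

// introspect posts a token to IntrospectPath on the admin service.
func introspect(token string) (*introspectionResult, error) {
	// Placeholder admin URL; the exact prefix in front of /oauth2/introspect
	// depends on how the admin API is exposed in your deployment.
	resp, err := http.PostForm("https://hydra-admin.example.org/admin/oauth2/introspect", url.Values{
		"token": {token},
	})
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	var res introspectionResult
	if err := json.NewDecoder(resp.Body).Decode(&res); err != nil {
		return nil, err
	}
	return &res, nil
}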
// // Consumes: // - application/x-www-form-urlencoded @@ -869,40 +1142,62 @@ type oAuth2TokenExchange struct { // 200: oAuth2TokenExchange // default: errorOAuth2 func (h *Handler) oauth2TokenExchange(w http.ResponseWriter, r *http.Request) { - var session = NewSessionWithCustomClaims("", h.c.AllowedTopLevelClaims(r.Context())) - var ctx = r.Context() + ctx := r.Context() + session := NewSessionWithCustomClaims(ctx, h.c, "") accessRequest, err := h.r.OAuth2Provider().NewAccessRequest(ctx, r, session) if err != nil { - h.logOrAudit(err, r) + x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteAccessError(ctx, w, accessRequest, err) + // NewAccessRequest sometimes returns the accessRequest even if an error occurs + // If that is the case, we want to log it to get information about the client + if accessRequest != nil { + events.Trace(ctx, events.TokenExchangeError, events.WithError(err), events.WithRequest(accessRequest)) + } else { + events.Trace(ctx, events.TokenExchangeError, events.WithError(err)) + } return } - if accessRequest.GetGrantTypes().ExactOne("client_credentials") || accessRequest.GetGrantTypes().ExactOne("urn:ietf:params:oauth:grant-type:jwt-bearer") { + if accessRequest.GetGrantTypes().ExactOne(string(fosite.GrantTypeClientCredentials)) || + accessRequest.GetGrantTypes().ExactOne(string(fosite.GrantTypeJWTBearer)) || + accessRequest.GetGrantTypes().ExactOne(string(fosite.GrantTypePassword)) { var accessTokenKeyID string - if h.c.AccessTokenStrategy(ctx) == "jwt" { - accessTokenKeyID, err = h.r.AccessTokenJWTStrategy().GetPublicKeyID(ctx) + if h.c.AccessTokenStrategy(ctx, client.AccessTokenStrategySource(accessRequest.GetClient())) == "jwt" { + accessTokenKeyID, err = h.r.AccessTokenJWTSigner().GetPublicKeyID(ctx) if err != nil { x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteAccessError(ctx, w, accessRequest, err) + events.Trace(ctx, events.TokenExchangeError, events.WithRequest(accessRequest), events.WithError(err)) return } } // only for client_credentials, otherwise Authentication is included in session - if accessRequest.GetGrantTypes().ExactOne("client_credentials") { + if accessRequest.GetGrantTypes().ExactOne(string(fosite.GrantTypeClientCredentials)) { session.Subject = accessRequest.GetClient().GetID() } + // only for password grant, otherwise Authentication is included in session + if accessRequest.GetGrantTypes().ExactOne(string(fosite.GrantTypePassword)) { + if sess, ok := accessRequest.GetSession().(fosite.ExtraClaimsSession); ok { + sess.GetExtraClaims()["username"] = accessRequest.GetRequestForm().Get("username") + session.DefaultSession.Username = accessRequest.GetRequestForm().Get("username") + } + + // Also add audience claims + for _, aud := range accessRequest.GetClient().GetAudience() { + accessRequest.GrantAudience(aud) + } + } session.ClientID = accessRequest.GetClient().GetID() session.KID = accessTokenKeyID - session.DefaultSession.Claims.Issuer = h.c.IssuerURL(r.Context()).String() + session.DefaultSession.Claims.Issuer = h.c.IssuerURL(ctx).String() session.DefaultSession.Claims.IssuedAt = time.Now().UTC() - var scopes = accessRequest.GetRequestedScopes() + scopes := accessRequest.GetRequestedScopes() // Added for compatibility with MITREid - if h.c.GrantAllClientCredentialsScopesPerDefault(r.Context()) && len(scopes) == 0 { + if h.c.GrantAllClientCredentialsScopesPerDefault(ctx) && len(scopes) == 0 { for _, scope := range accessRequest.GetClient().GetScopes() { accessRequest.GrantScope(scope) } @@ -915,7 +1210,7 @@ func (h 
*Handler) oauth2TokenExchange(w http.ResponseWriter, r *http.Request) { } for _, audience := range accessRequest.GetRequestedAudience() { - if h.r.AudienceStrategy()(accessRequest.GetClient().GetAudience(), []string{audience}) == nil { + if fosite.DefaultAudienceMatchingStrategy(accessRequest.GetClient().GetAudience(), []string{audience}) == nil { accessRequest.GrantAudience(audience) } } @@ -923,16 +1218,21 @@ func (h *Handler) oauth2TokenExchange(w http.ResponseWriter, r *http.Request) { for _, hook := range h.r.AccessRequestHooks() { if err := hook(ctx, accessRequest); err != nil { - h.logOrAudit(err, r) + x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteAccessError(ctx, w, accessRequest, err) + events.Trace(ctx, events.TokenExchangeError, events.WithRequest(accessRequest), events.WithError(err)) return } } - accessResponse, err := h.r.OAuth2Provider().NewAccessResponse(ctx, accessRequest) - if err != nil { - h.logOrAudit(err, r) + var accessResponse fosite.AccessResponder + if err := h.r.Transaction(ctx, func(ctx context.Context) (err error) { + accessResponse, err = h.r.OAuth2Provider().NewAccessResponse(ctx, accessRequest) + return err + }); err != nil { + x.LogError(r, err, h.r.Logger()) h.r.OAuth2Provider().WriteAccessError(ctx, w, accessRequest, err) + events.Trace(ctx, events.TokenExchangeError, events.WithRequest(accessRequest), events.WithError(err)) return } @@ -946,18 +1246,20 @@ func (h *Handler) oauth2TokenExchange(w http.ResponseWriter, r *http.Request) { // Use open source libraries to perform OAuth 2.0 and OpenID Connect // available for any programming language. You can find a list of libraries at https://oauth.net/code/ // -// The Ory SDK is not yet able to this endpoint properly. +// This endpoint should not be used via the Ory SDK and is only included for technical reasons. +// Instead, use one of the libraries linked above. 
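(Editorial sketch, not part of this change.) The same advice applies to the authorization endpoint handled by oAuth2Authorize below: a browser is sent to the authorize URL, and the returned code is exchanged at the token endpoint, which mirrors what the benchmark added later in this diff does with conf.AuthCodeURL and conf.Exchange. The client, secret, callback URL, and scopes here are placeholders.

package example

import (
    "context"

    "golang.org/x/oauth2"
)

// AuthCodeConfig describes the client side of the authorization code flow.
func AuthCodeConfig() oauth2.Config {
    return oauth2.Config{
        ClientID:     "my-client",                      // placeholder
        ClientSecret: "my-secret",                      // placeholder
        RedirectURL:  "http://127.0.0.1:5555/callback", // placeholder callback
        Scopes:       []string{"openid", "offline"},
        Endpoint: oauth2.Endpoint{
            AuthURL:  "http://127.0.0.1:4444/oauth2/auth",  // placeholder issuer
            TokenURL: "http://127.0.0.1:4444/oauth2/token", // placeholder issuer
        },
    }
}

// LoginURL returns the URL the browser is redirected to; Hydra then drives
// the login and consent UIs before redirecting back with a code.
func LoginURL(conf oauth2.Config) string {
    return conf.AuthCodeURL("some-state") // use a random state in real code
}

// Exchange trades the code received at the callback for tokens.
func Exchange(ctx context.Context, conf oauth2.Config, code string) (*oauth2.Token, error) {
    return conf.Exchange(ctx, code)
}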
// -// Consumes: -// - application/x-www-form-urlencoded +// Consumes: +// - application/x-www-form-urlencoded // -// Schemes: http, https +// Schemes: http, https // -// Responses: -// 302: emptyResponse -// default: errorOAuth2 -func (h *Handler) oAuth2Authorize(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { - var ctx = r.Context() +// Responses: +// +// 302: emptyResponse +// default: errorOAuth2 +func (h *Handler) oAuth2Authorize(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() authorizeRequest, err := h.r.OAuth2Provider().NewAuthorizeRequest(ctx, r) if err != nil { @@ -966,13 +1268,13 @@ func (h *Handler) oAuth2Authorize(w http.ResponseWriter, r *http.Request, _ http return } - session, err := h.r.ConsentStrategy().HandleOAuth2AuthorizationRequest(ctx, w, r, authorizeRequest) + fl, err := h.r.ConsentStrategy().HandleOAuth2AuthorizationRequest(ctx, w, r, authorizeRequest) if errors.Is(err, consent.ErrAbortOAuth2Request) { - x.LogAudit(r, nil, h.r.AuditLogger()) + x.LogError(r, err, h.r.Logger()) // do nothing return } else if e := &(fosite.RFC6749Error{}); errors.As(err, &e) { - x.LogAudit(r, err, h.r.AuditLogger()) + x.LogError(r, err, h.r.Logger()) h.writeAuthorizeError(w, r, authorizeRequest, err) return } else if err != nil { @@ -981,80 +1283,17 @@ func (h *Handler) oAuth2Authorize(w http.ResponseWriter, r *http.Request, _ http return } - for _, scope := range session.GrantedScope { - authorizeRequest.GrantScope(scope) - } - - for _, audience := range session.GrantedAudience { - authorizeRequest.GrantAudience(audience) - } - - openIDKeyID, err := h.r.OpenIDJWTStrategy().GetPublicKeyID(ctx) + authorizeRequest.SetID(fl.ConsentRequestID.String()) + session, err := h.updateSessionWithRequest(ctx, fl, r, authorizeRequest, nil) if err != nil { - x.LogError(r, err, h.r.Logger()) h.writeAuthorizeError(w, r, authorizeRequest, err) return } - - var accessTokenKeyID string - if h.c.AccessTokenStrategy(r.Context()) == "jwt" { - accessTokenKeyID, err = h.r.AccessTokenJWTStrategy().GetPublicKeyID(ctx) - if err != nil { - x.LogError(r, err, h.r.Logger()) - h.writeAuthorizeError(w, r, authorizeRequest, err) - return - } - } - - obfuscatedSubject, err := h.r.ConsentStrategy().ObfuscateSubjectIdentifier(ctx, authorizeRequest.GetClient(), session.ConsentRequest.Subject, session.ConsentRequest.ForceSubjectIdentifier) - if e := &(fosite.RFC6749Error{}); errors.As(err, &e) { - x.LogAudit(r, err, h.r.AuditLogger()) - h.writeAuthorizeError(w, r, authorizeRequest, err) - return - } else if err != nil { - x.LogError(r, err, h.r.Logger()) - h.writeAuthorizeError(w, r, authorizeRequest, err) - return - } - - authorizeRequest.SetID(session.ID) - claims := &jwt.IDTokenClaims{ - Subject: obfuscatedSubject, - Issuer: h.c.IssuerURL(ctx).String(), - AuthTime: time.Time(session.AuthenticatedAt), - RequestedAt: session.RequestedAt, - Extra: session.Session.IDToken, - AuthenticationContextClassReference: session.ConsentRequest.ACR, - AuthenticationMethodsReferences: session.ConsentRequest.AMR, - - // These are required for work around https://github.com/ory/fosite/issues/530 - Nonce: authorizeRequest.GetRequestForm().Get("nonce"), - Audience: []string{authorizeRequest.GetClient().GetID()}, - IssuedAt: time.Now().Truncate(time.Second).UTC(), - - // This is set by the fosite strategy - // ExpiresAt: time.Now().Add(h.IDTokenLifespan).UTC(), - } - claims.Add("sid", session.ConsentRequest.LoginSessionID) - - // done - response, err := 
h.r.OAuth2Provider().NewAuthorizeResponse(ctx, authorizeRequest, &Session{ - DefaultSession: &openid.DefaultSession{ - Claims: claims, - Headers: &jwt.Headers{Extra: map[string]interface{}{ - // required for lookup on jwk endpoint - "kid": openIDKeyID, - }}, - Subject: session.ConsentRequest.Subject, - }, - Extra: session.Session.AccessToken, - KID: accessTokenKeyID, - ClientID: authorizeRequest.GetClient().GetID(), - ConsentChallenge: session.ID, - ExcludeNotBeforeClaim: h.c.ExcludeNotBeforeClaim(ctx), - AllowedTopLevelClaims: h.c.AllowedTopLevelClaims(ctx), - }) - if err != nil { + var response fosite.AuthorizeResponder + if err := h.r.Transaction(ctx, func(ctx context.Context) (err error) { + response, err = h.r.OAuth2Provider().NewAuthorizeResponse(ctx, authorizeRequest, session) + return err + }); err != nil { x.LogError(r, err, h.r.Logger()) h.writeAuthorizeError(w, r, authorizeRequest, err) return @@ -1066,7 +1305,7 @@ func (h *Handler) oAuth2Authorize(w http.ResponseWriter, r *http.Request, _ http // Delete OAuth 2.0 Access Token Parameters // // swagger:parameters deleteOAuth2Token -type deleteOAuth2Token struct { +type _ struct { // OAuth 2.0 Client ID // // required: true @@ -1088,10 +1327,10 @@ type deleteOAuth2Token struct { // Responses: // 204: emptyResponse // default: errorOAuth2 -func (h *Handler) deleteOAuth2Token(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { +func (h *Handler) deleteOAuth2Token(w http.ResponseWriter, r *http.Request) { clientID := r.URL.Query().Get("client_id") if clientID == "" { - h.r.Writer().WriteError(w, r, errorsx.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'client_id' is not defined but it should have been.`))) + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHint(`Query parameter 'client_id' is not defined but it should have been.`))) return } @@ -1105,7 +1344,7 @@ func (h *Handler) deleteOAuth2Token(w http.ResponseWriter, r *http.Request, _ ht // This function will not be called, OPTIONS request will be handled by cors // this is just a placeholder. -func (h *Handler) handleOptions(w http.ResponseWriter, r *http.Request) {} +func (h *Handler) handleOptions(http.ResponseWriter, *http.Request) {} func (h *Handler) forwardError(w http.ResponseWriter, r *http.Request, err error) { rfcErr := fosite.ErrorToRFC6749Error(err).WithExposeDebug(h.c.GetSendDebugMessagesToClients(r.Context())) @@ -1122,10 +1361,248 @@ func (h *Handler) writeAuthorizeError(w http.ResponseWriter, r *http.Request, ar h.r.OAuth2Provider().WriteAuthorizeError(r.Context(), w, ar, err) } -func (h *Handler) logOrAudit(err error, r *http.Request) { - if errors.Is(err, fosite.ErrServerError) || errors.Is(err, fosite.ErrTemporarilyUnavailable) || errors.Is(err, fosite.ErrMisconfiguration) { +// updateSessionWithRequest takes a session and a fosite.request as input and returns a new session. +// If any errors occur, they are logged. 
+func (h *Handler) updateSessionWithRequest( + ctx context.Context, + flow *flow.Flow, + r *http.Request, + request fosite.Requester, + session *Session, +) (*Session, error) { + for _, scope := range flow.GrantedScope { + request.GrantScope(scope) + } + + for _, audience := range flow.GrantedAudience { + request.GrantAudience(audience) + } + + openIDKeyID, err := h.r.OpenIDJWTSigner().GetPublicKeyID(ctx) + if err != nil { x.LogError(r, err, h.r.Logger()) - } else { - x.LogAudit(r, err, h.r.Logger()) + return nil, err + } + + var accessTokenKeyID string + if h.c.AccessTokenStrategy(ctx, client.AccessTokenStrategySource(request.GetClient())) == "jwt" { + accessTokenKeyID, err = h.r.AccessTokenJWTSigner().GetPublicKeyID(ctx) + if err != nil { + x.LogError(r, err, h.r.Logger()) + return nil, err + } + } + + obfuscatedSubject, err := h.r.ConsentStrategy().ObfuscateSubjectIdentifier(ctx, request.GetClient(), flow.Subject, flow.ForceSubjectIdentifier) + if err != nil { + x.LogError(r, err, h.r.Logger()) + return nil, err + } + + request.SetID(flow.ConsentRequestID.String()) + claims := &jwt.IDTokenClaims{ + Subject: obfuscatedSubject, + Issuer: h.c.IssuerURL(ctx).String(), + AuthTime: time.Time(flow.LoginAuthenticatedAt), + RequestedAt: flow.RequestedAt, + Extra: flow.SessionIDToken, + AuthenticationContextClassReference: flow.ACR, + AuthenticationMethodsReferences: flow.AMR, + + // These are required for work around https://github.com/ory/hydra/v2/fosite/issues/530 + Nonce: request.GetRequestForm().Get("nonce"), + Audience: []string{request.GetClient().GetID()}, + IssuedAt: time.Now().Truncate(time.Second).UTC(), + + // This is set by the fosite strategy + // ExpiresAt: time.Now().Add(h.IDTokenLifespan).UTC(), + } + claims.Add("sid", flow.SessionID) + + if session == nil { + session = &Session{} + } + + if session.DefaultSession == nil { + session.DefaultSession = &openid.DefaultSession{} + } + session.DefaultSession.Claims = claims + session.DefaultSession.Headers = &jwt.Headers{Extra: map[string]interface{}{ + // required for lookup on jwk endpoint + "kid": openIDKeyID, + }} + session.DefaultSession.Subject = flow.Subject + session.Extra = flow.SessionAccessToken + session.KID = accessTokenKeyID + session.ClientID = request.GetClient().GetID() + session.ConsentChallenge = flow.ConsentRequestID.String() + session.ExcludeNotBeforeClaim = h.c.ExcludeNotBeforeClaim(ctx) + session.AllowedTopLevelClaims = h.c.AllowedTopLevelClaims(ctx) + session.MirrorTopLevelClaims = h.c.MirrorTopLevelClaims(ctx) + + return session, nil +} + +// swagger:route POST /credentials oidc createVerifiableCredential +// +// # Issues a Verifiable Credential +// +// This endpoint creates a verifiable credential that attests that the user +// authenticated with the provided access token owns a certain public/private key +// pair. +// +// More information can be found at +// https://openid.net/specs/openid-connect-userinfo-vc-1_0.html. 
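(Editorial sketch, not part of this change.) To complement the handler below: createVerifiableCredential expects the proof as a JWT whose header carries the holder's public key under "jwk" and whose claims carry the "nonce" obtained from the 400 priming response. The following illustrative snippet builds such a proof with golang-jwt/jwt/v5 and go-jose/v3, both of which are already imported elsewhere in this diff; the ad-hoc ES256 key is purely an example.

package example

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"

    "github.com/go-jose/go-jose/v3"
    "github.com/golang-jwt/jwt/v5"
)

// BuildProof signs the proof JWT that createVerifiableCredential verifies:
// the "jwk" header holds the public key, and the "nonce" claim holds the
// value returned by the priming response.
func BuildProof(nonce string) (string, error) {
    key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    if err != nil {
        return "", err
    }
    tok := jwt.NewWithClaims(jwt.SigningMethodES256, jwt.MapClaims{"nonce": nonce})
    tok.Header["jwk"] = jose.JSONWebKey{Key: key.Public()} // serialized as a JWK in the header
    return tok.SignedString(key)
}

The resulting string is then submitted as {"format": "jwt_vc_json", "proof": {"proof_type": "jwt", "jwt": "<proof>"}} together with the same bearer access token that requested the nonce, matching the checks in the handler code that follows.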
+// +// Consumes: +// - application/json +// +// Schemes: http, https +// +// Responses: +// 200: verifiableCredentialResponse +// 400: verifiableCredentialPrimingResponse +// default: errorOAuth2 +func (h *Handler) createVerifiableCredential(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session := NewSessionWithCustomClaims(ctx, h.c, "") + accessToken := fosite.AccessTokenFromRequest(r) + tokenType, _, err := h.r.OAuth2Provider().IntrospectToken(ctx, accessToken, fosite.AccessToken, session) + + if err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + if tokenType != fosite.AccessToken { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The provided token is not an access token."))) + return + } + + var request CreateVerifiableCredentialRequestBody + if err := json.NewDecoder(r.Body).Decode(&request); err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithWrap(err).WithHint("Unable to decode request body."))) + return + } + + if request.Format != "jwt_vc_json" { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The format %q is not supported.", request.Format))) + return + } + if request.Proof == nil { + // Handle priming request + nonceLifespan := h.r.Config().GetVerifiableCredentialsNonceLifespan(ctx) + nonceExpiresIn := time.Now().Add(nonceLifespan).UTC() + nonce, err := h.r.OAuth2Storage().NewNonce(ctx, accessToken, nonceExpiresIn) + if err != nil { + h.r.Writer().WriteError(w, r, err) + return + } + h.r.Writer().WriteCode(w, r, http.StatusBadRequest, &VerifiableCredentialPrimingResponse{ + RFC6749ErrorJson: fosite.RFC6749ErrorJson{ + Name: "missing_proof", + Description: "Could not issue a verifiable credential because the proof is missing in the request.", + }, + Format: "jwt_vc", + Nonce: nonce, + NonceExpiresIn: int64(nonceLifespan.Seconds()), + }) + return + } + if request.Proof.ProofType != "jwt" { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The proof type %q is not supported.", request.Proof.ProofType))) + return + } + + header, _, ok := strings.Cut(request.Proof.JWT, ".") + if !ok { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The JWT in the proof is malformed."))) + return + } + + rawHeader, err := jwtV5.NewParser().DecodeSegment(header) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The JWT header in the proof is malformed."))) + return + } + jwk := gjson.GetBytes(rawHeader, "jwk").String() + proofJWK, err := josex.LoadJSONWebKey([]byte(jwk), true) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The JWK in the JWT header is malformed."))) + return + } + + token, err := jwt.Parse(request.Proof.JWT, func(token *jwt.Token) (any, error) { + return proofJWK, nil + }) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The JWT was not signed with the correct key supplied in the JWK header."))) + return + } + + nonce, ok := token.Claims["nonce"].(string) + if !ok { + h.r.Writer().WriteError(w, r, errors.WithStack(fosite.ErrInvalidRequest.WithHintf(`The JWT did not contain the "nonce" claim.`))) + return + } + + if err = h.r.OAuth2Storage().IsNonceValid(ctx, accessToken, nonce); err != nil { + h.r.Writer().WriteError(w, r, err) + return } + + var response VerifiableCredentialResponse + response.Format = 
"jwt_vc_json" + + proofJWKJSON, err := json.Marshal(proofJWK) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + + // Encode ID according to https://github.com/quartzjer/did-jwk/blob/main/spec.md + vcID := fmt.Sprintf("did:jwk:%s", base64.RawURLEncoding.EncodeToString(proofJWKJSON)) + vcClaims := &VerifableCredentialClaims{ + RegisteredClaims: jwtV5.RegisteredClaims{ + Issuer: session.Claims.Issuer, + ID: cmp.Or(session.Claims.JTI, uuid.New()), + IssuedAt: jwtV5.NewNumericDate(session.Claims.IssuedAt), + NotBefore: jwtV5.NewNumericDate(session.Claims.IssuedAt), + ExpiresAt: jwtV5.NewNumericDate(session.Claims.IssuedAt.Add(1 * time.Hour)), + Subject: vcID, + }, + VerifiableCredential: VerifiableCredentialClaim{ + Context: []string{"https://www.w3.org/2018/credentials/v1"}, + Type: []string{"VerifiableCredential", "UserInfoCredential"}, + Subject: map[string]any{ + "id": vcID, + "sub": session.Claims.Subject, + }, + }, + } + if session.Claims.Extra != nil { + for claim, val := range session.Claims.Extra { + vcClaims.VerifiableCredential.Subject[claim] = val + } + } + + signingKeyID, err := h.r.OpenIDJWTSigner().GetPublicKeyID(ctx) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + headers := jwt.NewHeaders() + headers.Add("kid", signingKeyID) + mapClaims, err := vcClaims.ToMapClaims() + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + rawToken, _, err := h.r.OpenIDJWTSigner().Generate(ctx, mapClaims, headers) + if err != nil { + h.r.Writer().WriteError(w, r, errors.WithStack(err)) + return + } + + response.Credential = rawToken + h.r.Writer().Write(w, r, &response) } diff --git a/oauth2/handler_fallback_endpoints.go b/oauth2/handler_fallback_endpoints.go index 5f2830b1790..fb44d42dd79 100644 --- a/oauth2/handler_fallback_endpoints.go +++ b/oauth2/handler_fallback_endpoints.go @@ -7,12 +7,10 @@ import ( "html/template" "net/http" - "github.com/ory/hydra/driver/config" - - "github.com/julienschmidt/httprouter" + "github.com/ory/hydra/v2/driver/config" ) -func (h *Handler) fallbackHandler(title, heading string, sc int, configKey string) httprouter.Handle { +func (h *Handler) fallbackHandler(title, heading string, sc int, configKey string) func(w http.ResponseWriter, r *http.Request) { if title == "" { title = "The request could not be executed because a mandatory configuration key is missing or malformed" } @@ -21,7 +19,7 @@ func (h *Handler) fallbackHandler(title, heading string, sc int, configKey strin heading = "The request could not be executed because a mandatory configuration key is missing or malformed" } - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + return func(w http.ResponseWriter, r *http.Request) { h.r.Logger().Errorf(`A request failed because configuration key "%s" is missing or malformed.`, configKey) t, err := template.New(configKey).Parse(` @@ -58,7 +56,7 @@ func (h *Handler) fallbackHandler(title, heading string, sc int, configKey strin } } -func (h *Handler) DefaultErrorHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { +func (h *Handler) DefaultErrorHandler(w http.ResponseWriter, r *http.Request) { h.r.Logger().WithRequest(r).Error("A client requested the default error URL, environment variable URLS_ERROR is probably not set.") t, err := template.New("consent").Parse(` diff --git a/oauth2/handler_fallback_endpoints_test.go 
b/oauth2/handler_fallback_endpoints_test.go index b72f1452ab6..d0cac2dd227 100644 --- a/oauth2/handler_fallback_endpoints_test.go +++ b/oauth2/handler_fallback_endpoints_test.go @@ -4,40 +4,38 @@ package oauth2_test import ( - "context" "io" "net/http" "net/http/httptest" "testing" - "github.com/ory/x/httprouterx" - - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/oauth2" - "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/x/configx" + "github.com/ory/x/httprouterx" + "github.com/ory/x/prometheusx" ) func TestHandlerConsent(t *testing.T) { - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(context.Background(), config.KeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY") - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - - h := reg.OAuth2Handler() - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r, &httprouterx.RouterPublic{Router: r.Router}, func(h http.Handler) http.Handler { - return h - }) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValue(config.KeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY"))) + + h := oauth2.NewHandler(reg) + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + r := httprouterx.NewRouterAdminWithPrefix(metrics) + h.SetPublicRoutes(r.ToPublic(), func(h http.Handler) http.Handler { return h }) + h.SetAdminRoutes(r) ts := httptest.NewServer(r) defer ts.Close() res, err := http.Get(ts.URL + oauth2.DefaultConsentPath) assert.Nil(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck body, err := io.ReadAll(res.Body) assert.Nil(t, err) diff --git a/oauth2/handler_test.go b/oauth2/handler_test.go index bcf52de4a25..d467b1e458a 100644 --- a/oauth2/handler_test.go +++ b/oauth2/handler_test.go @@ -15,63 +15,58 @@ import ( "testing" "time" - hydra "github.com/ory/hydra-client-go/v2" - - "github.com/ory/x/httprouterx" - - "github.com/ory/x/snapshotx" - - "github.com/ory/x/contextx" - - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver" "github.com/golang/mock/gomock" "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - - jwt2 "github.com/ory/fosite/token/jwt" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/token/jwt" - "github.com/ory/hydra/client" - "github.com/ory/hydra/oauth2" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + 
"github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/httprouterx" + "github.com/ory/x/prometheusx" + "github.com/ory/x/snapshotx" ) var lifespan = time.Hour func TestHandlerDeleteHandler(t *testing.T) { - ctx := context.Background() - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(ctx, config.KeyIssuerURL, "http://hydra.localhost") - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + t.Parallel() + + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValue(config.KeyIssuerURL, "http://hydra.localhost"))) cm := reg.ClientManager() store := reg.OAuth2Storage() - h := oauth2.NewHandler(reg, conf) + h := oauth2.NewHandler(reg) deleteRequest := &fosite.Request{ ID: "del-1", RequestedAt: time.Now().Round(time.Second), - Client: &client.Client{LegacyClientID: "foobar"}, + Client: &client.Client{ID: "foobar"}, RequestedScope: fosite.Arguments{"fa", "ba"}, GrantedScope: fosite.Arguments{"fa", "ba"}, Form: url.Values{"foo": []string{"bar", "baz"}}, Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}}, } - require.NoError(t, cm.CreateClient(context.Background(), deleteRequest.Client.(*client.Client))) - require.NoError(t, store.CreateAccessTokenSession(context.Background(), deleteRequest.ID, deleteRequest)) + require.NoError(t, cm.CreateClient(ctx, deleteRequest.Client.(*client.Client))) + require.NoError(t, store.CreateAccessTokenSession(ctx, deleteRequest.ID, deleteRequest)) - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r, &httprouterx.RouterPublic{Router: r.Router}, func(h http.Handler) http.Handler { - return h - }) + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + r := httprouterx.NewRouterAdminWithPrefix(metrics) + h.SetPublicRoutes(r.ToPublic(), func(h http.Handler) http.Handler { return h }) + h.SetAdminRoutes(r) ts := httptest.NewServer(r) defer ts.Close() @@ -79,7 +74,7 @@ func TestHandlerDeleteHandler(t *testing.T) { c.GetConfig().Servers = hydra.ServerConfigurations{{URL: ts.URL}} _, err := c. - OAuth2Api.DeleteOAuth2Token(context.Background()). + OAuth2API.DeleteOAuth2Token(ctx). 
ClientId("foobar").Execute() require.NoError(t, err) @@ -89,25 +84,25 @@ func TestHandlerDeleteHandler(t *testing.T) { } func TestUserinfo(t *testing.T) { - ctx := context.Background() - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(ctx, config.KeyScopeStrategy, "") - conf.MustSet(ctx, config.KeyAuthCodeLifespan, lifespan) - conf.MustSet(ctx, config.KeyIssuerURL, "http://hydra.localhost") - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - internal.MustEnsureRegistryKeys(reg, x.OpenIDConnectKeyName) + t.Parallel() ctrl := gomock.NewController(t) op := NewMockOAuth2Provider(ctrl) - defer ctrl.Finish() - reg.WithOAuth2Provider(op) + t.Cleanup(ctrl.Finish) - h := reg.OAuth2Handler() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyScopeStrategy: "", + config.KeyAuthCodeLifespan: lifespan, + config.KeyIssuerURL: "http://hydra.localhost", + })), driver.WithOAuth2Provider(op)) + testhelpers.MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) - router := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(router, &httprouterx.RouterPublic{Router: router.Router}, func(h http.Handler) http.Handler { - return h - }) + h := oauth2.NewHandler(reg) + + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + router := httprouterx.NewRouterAdminWithPrefix(metrics) + h.SetPublicRoutes(router.ToPublic(), func(h http.Handler) http.Handler { return h }) + h.SetAdminRoutes(router) ts := httptest.NewServer(router) defer ts.Close() @@ -149,8 +144,8 @@ func TestUserinfo(t *testing.T) { setup: func(t *testing.T) { op.EXPECT(). IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()). - DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { - session = &oauth2.Session{ + DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, _ fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { + session := &oauth2.Session{ DefaultSession: &openid.DefaultSession{ Claims: &jwt.IDTokenClaims{ Subject: "alice", @@ -164,7 +159,7 @@ func TestUserinfo(t *testing.T) { return fosite.AccessToken, &fosite.AccessRequest{ Request: fosite.Request{ Client: &client.Client{ - LegacyClientID: "foobar", + ID: "foobar", }, Session: session, }, @@ -182,8 +177,8 @@ func TestUserinfo(t *testing.T) { setup: func(t *testing.T) { op.EXPECT(). IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()). - DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { - session = &oauth2.Session{ + DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, _ fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { + session := &oauth2.Session{ DefaultSession: &openid.DefaultSession{ Claims: &jwt.IDTokenClaims{ Subject: "another-alice", @@ -198,7 +193,7 @@ func TestUserinfo(t *testing.T) { return fosite.AccessToken, &fosite.AccessRequest{ Request: fosite.Request{ Client: &client.Client{ - LegacyClientID: "foobar", + ID: "foobar", }, Session: session, }, @@ -217,8 +212,8 @@ func TestUserinfo(t *testing.T) { setup: func(t *testing.T) { op.EXPECT(). IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()). 
- DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { - session = &oauth2.Session{ + DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, _ fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { + session := &oauth2.Session{ DefaultSession: &openid.DefaultSession{ Claims: &jwt.IDTokenClaims{ Subject: "alice", @@ -233,7 +228,7 @@ func TestUserinfo(t *testing.T) { return fosite.AccessToken, &fosite.AccessRequest{ Request: fosite.Request{ Client: &client.Client{ - LegacyClientID: "foobar", + ID: "foobar", UserinfoSignedResponseAlg: "none", }, Session: session, @@ -252,8 +247,8 @@ func TestUserinfo(t *testing.T) { setup: func(t *testing.T) { op.EXPECT(). IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()). - DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { - session = &oauth2.Session{ + DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, _ fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { + session := &oauth2.Session{ DefaultSession: &openid.DefaultSession{ Claims: &jwt.IDTokenClaims{ Subject: "alice", @@ -280,8 +275,8 @@ func TestUserinfo(t *testing.T) { setup: func(t *testing.T) { op.EXPECT(). IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()). - DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { - session = &oauth2.Session{ + DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, _ fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) { + session := &oauth2.Session{ DefaultSession: &openid.DefaultSession{ Claims: &jwt.IDTokenClaims{ Subject: "alice", @@ -295,7 +290,7 @@ func TestUserinfo(t *testing.T) { return fosite.AccessToken, &fosite.AccessRequest{ Request: fosite.Request{ Client: &client.Client{ - LegacyClientID: "foobar-client", + ID: "foobar-client", UserinfoSignedResponseAlg: "RS256", }, Session: session, @@ -305,11 +300,11 @@ func TestUserinfo(t *testing.T) { }, expectStatusCode: http.StatusOK, checkForSuccess: func(t *testing.T, body []byte) { - claims, err := jwt2.Parse(string(body), func(token *jwt2.Token) (interface{}, error) { - keys, err := reg.KeyManager().GetKeySet(context.Background(), x.OpenIDConnectKeyName) + claims, err := jwt.Parse(string(body), func(token *jwt.Token) (interface{}, error) { + keys, err := reg.KeyManager().GetKeySet(t.Context(), x.OpenIDConnectKeyName) require.NoError(t, err) t.Logf("%+v", keys) - key, err := jwk.FindPublicKey(keys) + key, _ := jwk.FindPublicKey(keys) return key.Key, nil }) require.NoError(t, err) @@ -327,7 +322,7 @@ func TestUserinfo(t *testing.T) { req.Header.Set("Authorization", "Bearer access-token") resp, err := http.DefaultClient.Do(req) require.NoError(t, err) - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck require.EqualValues(t, tc.expectStatusCode, resp.StatusCode) body, err := io.ReadAll(resp.Body) require.NoError(t, err) @@ -341,33 +336,92 @@ func TestUserinfo(t *testing.T) { } func TestHandlerWellKnown(t *testing.T) { - ctx := context.Background() - conf := internal.NewConfigurationWithDefaults() - t.Run(fmt.Sprintf("hsm_enabled=%v", conf.HSMEnabled()), func(t *testing.T) { - 
conf.MustSet(ctx, config.KeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY") - conf.MustSet(ctx, config.KeyIssuerURL, "http://hydra.localhost") - conf.MustSet(ctx, config.KeySubjectTypesSupported, []string{"pairwise", "public"}) - conf.MustSet(ctx, config.KeyOIDCDiscoverySupportedClaims, []string{"sub"}) - conf.MustSet(ctx, config.KeyOAuth2ClientRegistrationURL, "http://client-register/registration") - conf.MustSet(ctx, config.KeyOIDCDiscoveryUserinfoEndpoint, "/userinfo") - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - - h := oauth2.NewHandler(reg, conf) - - r := x.NewRouterAdmin(conf.AdminURL) - h.SetRoutes(r, &httprouterx.RouterPublic{Router: r.Router}, func(h http.Handler) http.Handler { - return h - }) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyScopeStrategy: "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY", + config.KeyIssuerURL: "http://hydra.localhost", + config.KeySubjectTypesSupported: []string{"pairwise", "public"}, + config.KeyOIDCDiscoverySupportedClaims: []string{"sub"}, + config.KeyOAuth2ClientRegistrationURL: "http://client-register/registration", + config.KeyOIDCDiscoveryUserinfoEndpoint: "/userinfo", + }))) + t.Run(fmt.Sprintf("hsm_enabled=%v", reg.Config().HSMEnabled()), func(t *testing.T) { + testhelpers.MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) + + h := oauth2.NewHandler(reg) + + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + r := httprouterx.NewRouterAdminWithPrefix(metrics) + h.SetPublicRoutes(r.ToPublic(), func(h http.Handler) http.Handler { return h }) + h.SetAdminRoutes(r) ts := httptest.NewServer(r) defer ts.Close() res, err := http.Get(ts.URL + "/.well-known/openid-configuration") require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck + + var wellKnownResp hydra.OidcConfiguration + err = json.NewDecoder(res.Body).Decode(&wellKnownResp) + require.NoError(t, err, "problem decoding wellknown json response: %+v", err) + + snapshotOpts := []snapshotx.Opt{} + if reg.Config().HSMEnabled() { + // The signing algorithm is not stable in the HSM tests, because the key is kept + // in the HSM and persists across test runs. + snapshotOpts = append(snapshotOpts, snapshotx.ExceptPaths( + "id_token_signed_response_alg", + "id_token_signing_alg_values_supported", + "userinfo_signed_response_alg", + "userinfo_signing_alg_values_supported", + )) + } + snapshotx.SnapshotT(t, wellKnownResp, snapshotOpts...) 
+ }) +} + +func TestHandlerOauthAuthorizationServer(t *testing.T) { + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyScopeStrategy: "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY", + config.KeyIssuerURL: "http://hydra.localhost", + config.KeySubjectTypesSupported: []string{"pairwise", "public"}, + config.KeyOIDCDiscoverySupportedClaims: []string{"sub"}, + config.KeyOAuth2ClientRegistrationURL: "http://client-register/registration", + config.KeyOIDCDiscoveryUserinfoEndpoint: "/userinfo", + }))) + t.Run(fmt.Sprintf("hsm_enabled=%v", reg.Config().HSMEnabled()), func(t *testing.T) { + testhelpers.MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) + + h := oauth2.NewHandler(reg) + + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + r := httprouterx.NewRouterAdminWithPrefix(metrics) + h.SetPublicRoutes(r.ToPublic(), func(h http.Handler) http.Handler { return h }) + h.SetAdminRoutes(r) + ts := httptest.NewServer(r) + defer ts.Close() + + res, err := http.Get(ts.URL + "/.well-known/oauth-authorization-server") + require.NoError(t, err) + defer res.Body.Close() //nolint:errcheck var wellKnownResp hydra.OidcConfiguration err = json.NewDecoder(res.Body).Decode(&wellKnownResp) require.NoError(t, err, "problem decoding wellknown json response: %+v", err) - snapshotx.SnapshotT(t, wellKnownResp) + snapshotOpts := []snapshotx.Opt{} + if reg.Config().HSMEnabled() { + // The signing algorithm is not stable in the HSM tests, because the key is kept + // in the HSM and persists across test runs. + snapshotOpts = append(snapshotOpts, snapshotx.ExceptPaths( + "id_token_signed_response_alg", + "id_token_signing_alg_values_supported", + "userinfo_signed_response_alg", + "userinfo_signing_alg_values_supported", + )) + } + snapshotx.SnapshotT(t, wellKnownResp, snapshotOpts...) 
}) } diff --git a/oauth2/helper_test.go b/oauth2/helper_test.go index 74c25bdcfb0..837e86bbf97 100644 --- a/oauth2/helper_test.go +++ b/oauth2/helper_test.go @@ -5,18 +5,43 @@ package oauth2_test import ( "context" + "testing" - "github.com/ory/fosite" - "github.com/ory/fosite/handler/oauth2" - "github.com/ory/fosite/token/hmac" + "github.com/oleiade/reflections" + "github.com/stretchr/testify/assert" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/token/hmac" ) -func Tokens(c fosite.Configurator, length int) (res [][]string) { - s := &oauth2.HMACSHAStrategy{Enigma: &hmac.HMACStrategy{Config: c}, Config: c} +func Tokens(c fosite.Configurator, length int) []struct{ sig, tok string } { + s := oauth2.NewHMACSHAStrategy(&hmac.HMACStrategy{Config: c}, c) - for i := 0; i < length; i++ { - tok, sig, _ := s.Enigma.Generate(context.Background()) - res = append(res, []string{sig, tok}) + res := make([]struct{ sig, tok string }, length) + for i := range res { + res[i].tok, res[i].sig, _ = s.Enigma.Generate(context.Background()) } return res } + +func AssertObjectKeysEqual(t *testing.T, a, b interface{}, keys ...string) { + assert.True(t, len(keys) > 0, "No keys provided.") + for _, k := range keys { + c, err := reflections.GetField(a, k) + assert.Nil(t, err) + d, err := reflections.GetField(b, k) + assert.Nil(t, err) + assert.Equal(t, c, d, "%s", k) + } +} + +func TestAssertObjectsAreEqualByKeys(t *testing.T) { + type foo struct { + Name string + Body int + } + a := &foo{"foo", 1} + + AssertObjectKeysEqual(t, a, a, "Name", "Body") +} diff --git a/oauth2/helpers.go b/oauth2/helpers.go new file mode 100644 index 00000000000..a36da7179b7 --- /dev/null +++ b/oauth2/helpers.go @@ -0,0 +1,51 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "crypto/sha256" + "fmt" + "time" + + "github.com/gofrs/uuid" + + "github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" +) + +func signatureFromJTI(jti string) string { + return fmt.Sprintf("%x", sha256.Sum256([]byte(jti))) +} + +type BlacklistedJTI struct { + JTI string `db:"-"` + ID string `db:"signature"` + Expiry time.Time `db:"expires_at"` + NID uuid.UUID `db:"nid"` +} + +func (j *BlacklistedJTI) AfterFind(_ *pop.Connection) error { + j.Expiry = j.Expiry.UTC() + return nil +} + +func (BlacklistedJTI) TableName() string { + return "hydra_oauth2_jti_blacklist" +} + +func NewBlacklistedJTI(jti string, exp time.Time) *BlacklistedJTI { + return &BlacklistedJTI{ + JTI: jti, + ID: signatureFromJTI(jti), + // because the database timestamp types are not as accurate as time.Time we truncate to seconds (which should always work) + Expiry: exp.UTC().Truncate(time.Second), + } +} + +type AssertionJWTReader interface { + x.FositeStorer + GetClientAssertionJWT(ctx context.Context, jti string) (*BlacklistedJTI, error) + SetClientAssertionJWTRaw(context.Context, *BlacklistedJTI) error +} diff --git a/oauth2/hook.go b/oauth2/hook.go deleted file mode 100644 index 10985466d98..00000000000 --- a/oauth2/hook.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package oauth2 - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - - "github.com/hashicorp/go-retryablehttp" - - "github.com/ory/hydra/x" - - 
"github.com/ory/fosite" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver/config" - "github.com/ory/x/errorsx" -) - -// AccessRequestHook is called when an access token is being refreshed. -type AccessRequestHook func(ctx context.Context, requester fosite.AccessRequester) error - -// Requester is a token endpoint's request context. -// -// swagger:ignore -type Requester struct { - // ClientID is the identifier of the OAuth 2.0 client. - ClientID string `json:"client_id"` - // GrantedScopes is the list of scopes granted to the OAuth 2.0 client. - GrantedScopes []string `json:"granted_scopes"` - // GrantedAudience is the list of audiences granted to the OAuth 2.0 client. - GrantedAudience []string `json:"granted_audience"` - // GrantTypes is the requests grant types. - GrantTypes []string `json:"grant_types"` -} - -// RefreshTokenHookRequest is the request body sent to the refresh token hook. -// -// swagger:ignore -type RefreshTokenHookRequest struct { - // Subject is the identifier of the authenticated end-user. - Subject string `json:"subject"` - // Session is the request's session.. - Session *Session `json:"session"` - // Requester is a token endpoint's request context. - Requester Requester `json:"requester"` - // ClientID is the identifier of the OAuth 2.0 client. - ClientID string `json:"client_id"` - // GrantedScopes is the list of scopes granted to the OAuth 2.0 client. - GrantedScopes []string `json:"granted_scopes"` - // GrantedAudience is the list of audiences granted to the OAuth 2.0 client. - GrantedAudience []string `json:"granted_audience"` -} - -// RefreshTokenHookResponse is the response body received from the refresh token hook. -// -// swagger:ignore -type RefreshTokenHookResponse struct { - // Session is the session data returned by the hook. - Session consent.AcceptOAuth2ConsentRequestSession `json:"session"` -} - -// RefreshTokenHook is an AccessRequestHook called for `refresh_token` grant type. -func RefreshTokenHook(reg interface { - config.Provider - x.HTTPClientProvider -}) AccessRequestHook { - return func(ctx context.Context, requester fosite.AccessRequester) error { - hookURL := reg.Config().TokenRefreshHookURL(ctx) - if hookURL == nil { - return nil - } - - if !requester.GetGrantTypes().ExactOne("refresh_token") { - return nil - } - - session, ok := requester.GetSession().(*Session) - if !ok { - return nil - } - - requesterInfo := Requester{ - ClientID: requester.GetClient().GetID(), - GrantedScopes: requester.GetGrantedScopes(), - GrantedAudience: requester.GetGrantedAudience(), - GrantTypes: requester.GetGrantTypes(), - } - - reqBody := RefreshTokenHookRequest{ - Session: session, - Requester: requesterInfo, - Subject: session.GetSubject(), - ClientID: requester.GetClient().GetID(), - GrantedScopes: requester.GetGrantedScopes(), - GrantedAudience: requester.GetGrantedAudience(), - } - reqBodyBytes, err := json.Marshal(&reqBody) - if err != nil { - return errorsx.WithStack( - fosite.ErrServerError. - WithWrap(err). - WithDescription("An error occurred while encoding the refresh token hook."). - WithDebugf("Unable to encode the refresh token hook body: %s", err), - ) - } - - req, err := retryablehttp.NewRequestWithContext(ctx, http.MethodPost, hookURL.String(), bytes.NewReader(reqBodyBytes)) - if err != nil { - return errorsx.WithStack( - fosite.ErrServerError. - WithWrap(err). - WithDescription("An error occurred while preparing the refresh token hook."). 
- WithDebugf("Unable to prepare the HTTP Request: %s", err), - ) - } - req.Header.Set("Content-Type", "application/json; charset=UTF-8") - - resp, err := reg.HTTPClient(ctx).Do(req) - if err != nil { - return errorsx.WithStack( - fosite.ErrServerError. - WithWrap(err). - WithDescription("An error occurred while executing the refresh token hook."). - WithDebugf("Unable to execute HTTP Request: %s", err), - ) - } - defer resp.Body.Close() - - switch resp.StatusCode { - case http.StatusOK: - // Token refresh permitted with new session data - case http.StatusNoContent: - // Token refresh is permitted without overriding session data - return nil - case http.StatusForbidden: - return errorsx.WithStack( - fosite.ErrAccessDenied. - WithDescription("The refresh token hook target responded with an error."). - WithDebugf("Refresh token hook responded with HTTP status code: %s", resp.Status), - ) - default: - return errorsx.WithStack( - fosite.ErrServerError. - WithDescription("The refresh token hook target responded with an error."). - WithDebugf("Refresh token hook responded with HTTP status code: %s", resp.Status), - ) - } - - var respBody RefreshTokenHookResponse - if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { - return errorsx.WithStack( - fosite.ErrServerError. - WithWrap(err). - WithDescription("The refresh token hook target responded with an error."). - WithDebugf("Response from refresh token hook could not be decoded: %s", err), - ) - } - - // Overwrite existing session data (extra claims). - session.Extra = respBody.Session.AccessToken - idTokenClaims := session.IDTokenClaims() - idTokenClaims.Extra = respBody.Session.IDToken - return nil - } -} diff --git a/oauth2/introspector.go b/oauth2/introspector.go index 11da6b75ef9..b66037dde5b 100644 --- a/oauth2/introspector.go +++ b/oauth2/introspector.go @@ -25,7 +25,7 @@ type Introspection struct { // scopes associated with this token. Scope string `json:"scope,omitempty"` - // ID is aclient identifier for the OAuth 2.0 client that + // ID is a client identifier for the OAuth 2.0 client that // requested this token. 
ClientID string `json:"client_id"` diff --git a/oauth2/introspector_test.go b/oauth2/introspector_test.go index 72fb08387d9..e279029e015 100644 --- a/oauth2/introspector_test.go +++ b/oauth2/introspector_test.go @@ -6,37 +6,36 @@ package oauth2_test import ( "context" "fmt" - "net/http" "net/http/httptest" "strings" "testing" "time" - hydra "github.com/ory/hydra-client-go/v2" - - "github.com/ory/x/httprouterx" - - "github.com/ory/hydra/x" - "github.com/ory/x/contextx" - - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/fosite" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/prometheusx" ) func TestIntrospectorSDK(t *testing.T) { - ctx := context.Background() - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(ctx, config.KeyScopeStrategy, "wildcard") - conf.MustSet(ctx, config.KeyIssuerURL, "https://foobariss") - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyScopeStrategy: "wildcard", + config.KeyIssuerURL: "https://foobariss", + }))) - internal.MustEnsureRegistryKeys(reg, x.OpenIDConnectKeyName) - internal.AddFositeExamples(reg) + testhelpers.MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) + internal.AddFositeExamples(t, reg) tokens := Tokens(reg.OAuth2ProviderConfig(), 4) @@ -45,19 +44,18 @@ func TestIntrospectorSDK(t *testing.T) { c.Scope = "fosite,openid,photos,offline,foo.*" require.NoError(t, reg.ClientManager().UpdateClient(context.TODO(), c)) - router := x.NewRouterAdmin(conf.AdminURL) - handler := reg.OAuth2Handler() - handler.SetRoutes(router, &httprouterx.RouterPublic{Router: router.Router}, func(h http.Handler) http.Handler { - return h - }) + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + router := x.NewRouterAdmin(metrics) + handler := oauth2.NewHandler(reg) + handler.SetAdminRoutes(router) server := httptest.NewServer(router) defer server.Close() now := time.Now().UTC().Round(time.Minute) - createAccessTokenSession("alice", "my-client", tokens[0][0], now.Add(time.Hour), reg.OAuth2Storage(), fosite.Arguments{"core", "foo.*"}) - createAccessTokenSession("siri", "my-client", tokens[1][0], now.Add(-time.Hour), reg.OAuth2Storage(), fosite.Arguments{"core", "foo.*"}) - createAccessTokenSession("my-client", "my-client", tokens[2][0], now.Add(time.Hour), reg.OAuth2Storage(), fosite.Arguments{"hydra.introspect"}) - createAccessTokenSessionPairwise("alice", "my-client", tokens[3][0], now.Add(time.Hour), reg.OAuth2Storage(), fosite.Arguments{"core", "foo.*"}, "alice-obfuscated") + createAccessTokenSession(t, "alice", "my-client", tokens[0].sig, now.Add(time.Hour), reg.OAuth2Storage(), fosite.Arguments{"core", "foo.*"}) + createAccessTokenSession(t, "siri", 
"my-client", tokens[1].sig, now.Add(-time.Hour), reg.OAuth2Storage(), fosite.Arguments{"core", "foo.*"}) + createAccessTokenSession(t, "my-client", "my-client", tokens[2].sig, now.Add(time.Hour), reg.OAuth2Storage(), fosite.Arguments{"hydra.introspect"}) + createAccessTokenSessionPairwise(t, "alice", "my-client", tokens[3].sig, now.Add(time.Hour), reg.OAuth2Storage(), fosite.Arguments{"core", "foo.*"}, "alice-obfuscated") t.Run("TestIntrospect", func(t *testing.T) { for k, c := range []struct { @@ -75,7 +73,7 @@ func TestIntrospectorSDK(t *testing.T) { }, { description: "should fail because token is expired", - token: tokens[1][1], + token: tokens[1].tok, expectInactive: true, }, // { @@ -83,8 +81,8 @@ func TestIntrospectorSDK(t *testing.T) { // token: tokens[0][1], // expectInactive: true, // expectCode: http.StatusUnauthorized, - // prepare: func(*testing.T) *hydra.OAuth2Api.{ - // client := hydra.Ne.OAuth2Api.ithBasePath(server.URL) + // prepare: func(*testing.T) *hydra.OAuth2API.{ + // client := hydra.Ne.OAuth2API.ithBasePath(server.URL) // client.config.Username = "foo" // client.config.Password = "foo" // return client @@ -92,18 +90,18 @@ func TestIntrospectorSDK(t *testing.T) { // }, { description: "should fail because scope `bar` was requested but only `foo` is granted", - token: tokens[0][1], + token: tokens[0].tok, expectInactive: true, scopes: []string{"bar"}, }, { description: "should pass", - token: tokens[0][1], + token: tokens[0].tok, expectInactive: false, }, { description: "should pass using bearer authorization", - token: tokens[0][1], + token: tokens[0].tok, expectInactive: false, scopes: []string{"foo.bar"}, assert: func(t *testing.T, c *hydra.IntrospectedOAuth2Token) { @@ -116,7 +114,7 @@ func TestIntrospectorSDK(t *testing.T) { }, { description: "should pass using regular authorization", - token: tokens[0][1], + token: tokens[0].tok, expectInactive: false, scopes: []string{"foo.bar"}, assert: func(t *testing.T, c *hydra.IntrospectedOAuth2Token) { @@ -130,7 +128,7 @@ func TestIntrospectorSDK(t *testing.T) { }, { description: "should pass and check for obfuscated subject", - token: tokens[3][1], + token: tokens[3].tok, expectInactive: false, scopes: []string{"foo.bar"}, assert: func(t *testing.T, c *hydra.IntrospectedOAuth2Token) { @@ -148,15 +146,11 @@ func TestIntrospectorSDK(t *testing.T) { client.GetConfig().Servers = hydra.ServerConfigurations{{URL: server.URL}} } - ctx, _, err := client.OAuth2Api.IntrospectOAuth2Token(context.Background()). + ctx, _, err := client.OAuth2API.IntrospectOAuth2Token(context.Background()). 
Token(c.token).Scope(strings.Join(c.scopes, " ")).Execute() require.NoError(t, err) - if c.expectInactive { - assert.False(t, ctx.Active) - } else { - assert.True(t, ctx.Active) - } + assert.Equal(t, c.expectInactive, !ctx.Active) if !c.expectInactive && c.assert != nil { c.assert(t, ctx) diff --git a/oauth2/oauth2_auth_code_bench_test.go b/oauth2/oauth2_auth_code_bench_test.go new file mode 100644 index 00000000000..372f89c0211 --- /dev/null +++ b/oauth2/oauth2_auth_code_bench_test.go @@ -0,0 +1,306 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "cmp" + "context" + "flag" + "net/http" + "os" + "runtime" + "runtime/pprof" + "strings" + "sync/atomic" + "testing" + "time" + + "github.com/go-jose/go-jose/v3" + "github.com/pborman/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + "go.opentelemetry.io/otel/propagation" + "go.opentelemetry.io/otel/sdk/resource" + "go.opentelemetry.io/otel/sdk/trace" + "go.opentelemetry.io/otel/sdk/trace/tracetest" + semconv "go.opentelemetry.io/otel/semconv/v1.12.0" + "golang.org/x/oauth2" + + hydra "github.com/ory/hydra-client-go/v2" + hc "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/otelx" + "github.com/ory/x/pointerx" +) + +var ( + prof = flag.String("profile", "", "write a CPU profile to this filename") + conc = flag.Int("conc", 100, "dispatch this many requests concurrently") + tracing = flag.Bool("tracing", false, "send OpenTelemetry traces to localhost:4318") +) + +func BenchmarkAuthCode(b *testing.B) { + flag.Parse() + + ctx := context.Background() + + spans := tracetest.NewSpanRecorder() + opts := []trace.TracerProviderOption{ + trace.WithSpanProcessor(spans), + trace.WithResource(resource.NewWithAttributes( + semconv.SchemaURL, attribute.String(string(semconv.ServiceNameKey), "BenchmarkAuthCode"), + )), + } + if *tracing { + exporter, err := otlptracehttp.New(ctx, otlptracehttp.WithInsecure(), otlptracehttp.WithEndpoint("localhost:4318")) + require.NoError(b, err) + opts = append(opts, trace.WithSpanProcessor(trace.NewSimpleSpanProcessor(exporter))) + } + provider := trace.NewTracerProvider(opts...) 
+ + tracer := provider.Tracer("BenchmarkAuthCode") + otel.SetTextMapPropagator(propagation.TraceContext{}) + otel.SetTracerProvider(provider) + + ctx, span := tracer.Start(ctx, "BenchmarkAuthCode") + defer span.End() + + ctx = context.WithValue(ctx, oauth2.HTTPClient, otelhttp.DefaultClient) + + dsn := cmp.Or(os.Getenv("DSN"), "postgres://postgres:secret@127.0.0.1:3445/postgres?sslmode=disable&max_conns=20&max_idle_conns=20") + // dsn := "mysql://root:secret@tcp(localhost:3444)/mysql?max_conns=16&max_idle_conns=16" + // dsn := "cockroach://root@localhost:3446/defaultdb?sslmode=disable&max_conns=16&max_idle_conns=16" + reg := testhelpers.NewRegistrySQLFromURL(b, dsn, true, true, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyLogLevel: "error", + config.KeyAccessTokenStrategy: "opaque", + config.KeyRefreshTokenHook: "", + "tracing.providers.otlp.server_url": "http://localhost:4318", + "tracing.providers.otlp.insecure": true, + })), driver.WithTracerWrapper(func(t *otelx.Tracer) *otelx.Tracer { return new(otelx.Tracer).WithOTLP(tracer) })) + oauth2Keys, err := jwk.GenerateJWK(jose.ES256, x.OAuth2JWTKeyName, "sig") + require.NoError(b, err) + oidcKeys, err := jwk.GenerateJWK(jose.ES256, x.OpenIDConnectKeyName, "sig") + require.NoError(b, err) + _, _ = oauth2Keys, oidcKeys + require.NoError(b, reg.KeyManager().UpdateKeySet(ctx, x.OAuth2JWTKeyName, oauth2Keys)) + require.NoError(b, reg.KeyManager().UpdateKeySet(ctx, x.OpenIDConnectKeyName, oidcKeys)) + _, adminTS := testhelpers.NewOAuth2Server(ctx, b, reg) + var ( + authURL = reg.Config().OAuth2AuthURL(ctx).String() + tokenURL = reg.Config().OAuth2TokenURL(ctx).String() + nonce = uuid.New() + ) + + newOAuth2Client := func(b *testing.B, cb string) (*hc.Client, *oauth2.Config) { + secret := uuid.New() + c := &hc.Client{ + Secret: secret, + RedirectURIs: []string{cb}, + ResponseTypes: []string{"id_token", "code", "token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scope: "hydra offline openid", + Audience: []string{"https://api.ory.sh/"}, + } + require.NoError(b, reg.ClientManager().CreateClient(ctx, c)) + return c, &oauth2.Config{ + ClientID: c.GetID(), + ClientSecret: secret, + Endpoint: oauth2.Endpoint{ + AuthURL: authURL, + TokenURL: tokenURL, + AuthStyle: oauth2.AuthStyleInHeader, + }, + Scopes: strings.Split(c.Scope, " "), + } + } + + cfg := hydra.NewConfiguration() + cfg.HTTPClient = otelhttp.DefaultClient + adminClient := hydra.NewAPIClient(cfg) + adminClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: adminTS.URL}} + + getAuthorizeCode := func(ctx context.Context, b *testing.B, conf *oauth2.Config, c *http.Client, params ...oauth2.AuthCodeOption) (string, *http.Response) { + if c == nil { + c = testhelpers.NewEmptyJarClient(b) + } + + state := uuid.New() + + req, err := http.NewRequestWithContext(ctx, "GET", conf.AuthCodeURL(state, params...), nil) + require.NoError(b, err) + resp, err := c.Do(req) + require.NoError(b, err) + defer resp.Body.Close() //nolint:errcheck + + q := resp.Request.URL.Query() + require.EqualValues(b, state, q.Get("state")) + return q.Get("code"), resp + } + + acceptLoginHandler := func(b *testing.B, c *hc.Client, checkRequestPayload func(request *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { + return otelhttp.NewHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + rr, _, err := 
adminClient.OAuth2API.GetOAuth2LoginRequest(ctx).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() + require.NoError(b, err) + + assert.EqualValues(b, c.GetID(), pointerx.Deref(rr.Client.ClientId)) + assert.Empty(b, pointerx.Deref(rr.Client.ClientSecret)) + assert.EqualValues(b, c.GrantTypes, rr.Client.GrantTypes) + assert.EqualValues(b, c.LogoURI, pointerx.Deref(rr.Client.LogoUri)) + assert.EqualValues(b, c.RedirectURIs, rr.Client.RedirectUris) + assert.EqualValues(b, r.URL.Query().Get("login_challenge"), rr.Challenge) + assert.EqualValues(b, []string{"hydra", "offline", "openid"}, rr.RequestedScope) + assert.Contains(b, rr.RequestUrl, authURL) + + acceptBody := hydra.AcceptOAuth2LoginRequest{ + Subject: uuid.New(), + Remember: pointerx.Ptr(!rr.Skip), + Acr: pointerx.Ptr("1"), + Amr: []string{"pwd"}, + Context: map[string]interface{}{"context": "bar"}, + } + if checkRequestPayload != nil { + if b := checkRequestPayload(rr); b != nil { + acceptBody = *b + } + } + + v, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(ctx). + LoginChallenge(r.URL.Query().Get("login_challenge")). + AcceptOAuth2LoginRequest(acceptBody). + Execute() + require.NoError(b, err) + require.NotEmpty(b, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }), "acceptLoginHandler").ServeHTTP + } + + acceptConsentHandler := func(b *testing.B, c *hc.Client, checkRequestPayload func(*hydra.OAuth2ConsentRequest)) http.HandlerFunc { + return otelhttp.NewHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + rr, _, err := adminClient.OAuth2API.GetOAuth2ConsentRequest(ctx).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() + require.NoError(b, err) + + assert.EqualValues(b, c.GetID(), pointerx.Deref(rr.Client.ClientId)) + assert.Empty(b, pointerx.Deref(rr.Client.ClientSecret)) + assert.EqualValues(b, c.GrantTypes, rr.Client.GrantTypes) + assert.EqualValues(b, c.LogoURI, pointerx.Deref(rr.Client.LogoUri)) + assert.EqualValues(b, c.RedirectURIs, rr.Client.RedirectUris) + // assert.EqualValues(b, subject, pointerx.Deref(rr.Subject)) + assert.EqualValues(b, []string{"hydra", "offline", "openid"}, rr.RequestedScope) + assert.EqualValues(b, r.URL.Query().Get("consent_challenge"), rr.Challenge) + assert.Contains(b, *rr.RequestUrl, authURL) + if checkRequestPayload != nil { + checkRequestPayload(rr) + } + + assert.Equal(b, map[string]interface{}{"context": "bar"}, rr.Context) + v, _, err := adminClient.OAuth2API.AcceptOAuth2ConsentRequest(ctx). + ConsentChallenge(r.URL.Query().Get("consent_challenge")). + AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{ + GrantScope: []string{"hydra", "offline", "openid"}, Remember: pointerx.Ptr(true), RememberFor: pointerx.Ptr[int64](0), + GrantAccessTokenAudience: rr.RequestedAccessTokenAudience, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + }). 
+ Execute() + require.NoError(b, err) + require.NotEmpty(b, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }), "acceptConsentHandler").ServeHTTP + } + + run := func(b *testing.B, strategy string) func(*testing.B) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + c, conf := newOAuth2Client(b, testhelpers.NewCallbackURL(b, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(b, reg.Config(), + acceptLoginHandler(b, c, nil), + acceptConsentHandler(b, c, nil), + ) + + return func(b *testing.B) { + code, _ := getAuthorizeCode(ctx, b, conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(b, code) + + _, err := conf.Exchange(ctx, code) + require.NoError(b, err) + } + } + + b.ResetTimer() + + b.SetParallelism(*conc / runtime.GOMAXPROCS(0)) + + b.Run("strategy=jwt", func(b *testing.B) { + initialDBSpans := dbSpans(spans) + B := run(b, "jwt") + + stop := profile(b) + defer stop() + + var totalMS int64 = 0 + b.RunParallel(func(p *testing.PB) { + defer func(t0 time.Time) { + atomic.AddInt64(&totalMS, int64(time.Since(t0).Milliseconds())) + }(time.Now()) + for p.Next() { + B(b) + } + }) + + b.ReportMetric(0, "ns/op") + b.ReportMetric(float64(atomic.LoadInt64(&totalMS))/float64(b.N), "ms/op") + b.ReportMetric((float64(dbSpans(spans)-initialDBSpans))/float64(b.N), "queries/op") + b.ReportMetric(float64(b.N)/b.Elapsed().Seconds(), "ops/s") + }) + + b.Run("strategy=opaque", func(b *testing.B) { + initialDBSpans := dbSpans(spans) + B := run(b, "opaque") + + stop := profile(b) + defer stop() + + var totalMS int64 = 0 + b.RunParallel(func(p *testing.PB) { + defer func(t0 time.Time) { + atomic.AddInt64(&totalMS, int64(time.Since(t0).Milliseconds())) + }(time.Now()) + for p.Next() { + B(b) + } + }) + + b.ReportMetric(0, "ns/op") + b.ReportMetric(float64(atomic.LoadInt64(&totalMS))/float64(b.N), "ms/op") + b.ReportMetric((float64(dbSpans(spans)-initialDBSpans))/float64(b.N), "queries/op") + b.ReportMetric(float64(b.N)/b.Elapsed().Seconds(), "ops/s") + }) +} + +func profile(t testing.TB) (stop func()) { + t.Helper() + if *prof == "" { + return func() {} // noop + } + f, err := os.Create(*prof) + require.NoError(t, err) + require.NoError(t, pprof.StartCPUProfile(f)) + return func() { + pprof.StopCPUProfile() + require.NoError(t, f.Close()) + t.Log("Wrote profile to", f.Name()) + } +} diff --git a/oauth2/oauth2_auth_code_test.go b/oauth2/oauth2_auth_code_test.go index 1a1ef0120e5..8b768126b0e 100644 --- a/oauth2/oauth2_auth_code_test.go +++ b/oauth2/oauth2_auth_code_test.go @@ -5,61 +5,154 @@ package oauth2_test import ( "bytes" + "cmp" "context" + "encoding/base64" "encoding/json" + "errors" "fmt" "io" + "math/rand" "net/http" "net/http/httptest" "net/url" - "strconv" "strings" "sync" "testing" "time" - "github.com/ory/x/ioutilx" - "github.com/ory/x/requirex" - - hydra "github.com/ory/hydra-client-go/v2" - - "github.com/ory/x/httprouterx" - - "github.com/ory/x/assertx" - + "github.com/go-jose/go-jose/v3" + "github.com/golang-jwt/jwt/v5" "github.com/pborman/uuid" - "github.com/tidwall/gjson" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/x/contextx" - - "github.com/julienschmidt/httprouter" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + "github.com/urfave/negroni" "golang.org/x/oauth2" - goauth2 "golang.org/x/oauth2" "golang.org/x/oauth2/clientcredentials" + "golang.org/x/sync/errgroup" - "github.com/ory/fosite" - hc "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - hydraoauth2 "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + hydraoauth2 "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/assertx" + "github.com/ory/x/configx" + "github.com/ory/x/httprouterx" + "github.com/ory/x/httpx" + "github.com/ory/x/ioutilx" + "github.com/ory/x/josex" "github.com/ory/x/pointerx" + "github.com/ory/x/prometheusx" "github.com/ory/x/snapshotx" ) -func noopHandler(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { +func noopHandler(*testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNotImplemented) } } -type clientCreator interface { - CreateClient(cxt context.Context, client *hc.Client) error +func getAuthorizeCode(t *testing.T, conf *oauth2.Config, c *http.Client, params ...oauth2.AuthCodeOption) (string, *http.Response) { + if c == nil { + c = testhelpers.NewEmptyJarClient(t) + } + + state := uuid.New() + resp, err := c.Get(conf.AuthCodeURL(state, params...)) + require.NoError(t, err) + defer resp.Body.Close() //nolint:errcheck + + q := resp.Request.URL.Query() + require.EqualValues(t, state, q.Get("state")) + return q.Get("code"), resp +} + +func acceptLoginHandler(t *testing.T, c *client.Client, adminClient *hydra.APIClient, reg *driver.RegistrySQL, subject string, checkRequestPayload func(request *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + rr, res, err := adminClient.OAuth2API.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() + require.NoErrorf(t, err, "%s\n%s", res.Request.URL, ioutilx.MustReadAll(res.Body)) + + assert.EqualValues(t, c.GetID(), pointerx.Deref(rr.Client.ClientId)) + assert.Empty(t, pointerx.Deref(rr.Client.ClientSecret)) + assert.EqualValues(t, c.GrantTypes, rr.Client.GrantTypes) + assert.EqualValues(t, c.LogoURI, pointerx.Deref(rr.Client.LogoUri)) + assert.EqualValues(t, c.RedirectURIs, rr.Client.RedirectUris) + assert.EqualValues(t, r.URL.Query().Get("login_challenge"), rr.Challenge) + assert.EqualValues(t, []string{"hydra", "offline", "openid"}, rr.RequestedScope) + assert.Contains(t, rr.RequestUrl, reg.Config().OAuth2AuthURL(ctx).String()) + + acceptBody := hydra.AcceptOAuth2LoginRequest{ + Subject: subject, + Remember: pointerx.Ptr(!rr.Skip), + Acr: pointerx.Ptr("1"), + Amr: 
[]string{"pwd"}, + Context: map[string]interface{}{"context": "bar"}, + } + if checkRequestPayload != nil { + if b := checkRequestPayload(rr); b != nil { + acceptBody = *b + } + } + + v, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). + LoginChallenge(r.URL.Query().Get("login_challenge")). + AcceptOAuth2LoginRequest(acceptBody). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + } +} + +func acceptConsentHandler(t *testing.T, c *client.Client, adminClient *hydra.APIClient, reg *driver.RegistrySQL, subject string, checkRequestPayload func(*hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + challenge := r.URL.Query().Get("consent_challenge") + rr, _, err := adminClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(challenge).Execute() + require.NoError(t, err) + require.Equal(t, challenge, rr.Challenge) + + assert.EqualValues(t, c.GetID(), pointerx.Deref(rr.Client.ClientId)) + assert.Empty(t, pointerx.Deref(rr.Client.ClientSecret)) + assert.EqualValues(t, c.GrantTypes, rr.Client.GrantTypes) + assert.EqualValues(t, c.LogoURI, pointerx.Deref(rr.Client.LogoUri)) + assert.EqualValues(t, c.RedirectURIs, rr.Client.RedirectUris) + assert.EqualValues(t, subject, pointerx.Deref(rr.Subject)) + assert.EqualValues(t, []string{"hydra", "offline", "openid"}, rr.RequestedScope) + assert.Contains(t, *rr.RequestUrl, reg.Config().OAuth2AuthURL(r.Context()).String()) + assert.Equal(t, map[string]interface{}{"context": "bar"}, rr.Context) + + acceptBody := hydra.AcceptOAuth2ConsentRequest{ + GrantScope: []string{"hydra", "offline", "openid"}, + GrantAccessTokenAudience: rr.RequestedAccessTokenAudience, + Remember: pointerx.Ptr(true), + RememberFor: pointerx.Ptr[int64](0), + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz", "email": "foo@bar.com"}, + }, + } + if checkRequestPayload != nil { + if b := checkRequestPayload(rr); b != nil { + acceptBody = *b + } + } + + v, _, err := adminClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()). + ConsentChallenge(challenge). + AcceptOAuth2ConsentRequest(acceptBody). 
+ Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + } } // TestAuthCodeWithDefaultStrategy runs proper integration tests against in-memory and database connectors, specifically @@ -76,614 +169,1683 @@ type clientCreator interface { // - [x] If `id_token_hint` is handled properly // - [x] What happens if `id_token_hint` does not match the value from the handled authentication request ("accept login") func TestAuthCodeWithDefaultStrategy(t *testing.T) { - ctx := context.TODO() - reg := internal.NewMockedRegistry(t, &contextx.Default{}) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") - reg.Config().MustSet(ctx, config.KeyRefreshTokenHookURL, "") - publicTS, adminTS := testhelpers.NewOAuth2Server(ctx, t, reg) - - newOAuth2Client := func(t *testing.T, cb string) (*hc.Client, *oauth2.Config) { - secret := uuid.New() - c := &hc.Client{ - Secret: secret, - RedirectURIs: []string{cb}, - ResponseTypes: []string{"id_token", "code", "token"}, - GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, - Scope: "hydra offline openid", - Audience: []string{"https://api.ory.sh/"}, - } - require.NoError(t, reg.ClientManager().CreateClient(context.TODO(), c)) - return c, &oauth2.Config{ - ClientID: c.GetID(), - ClientSecret: secret, - Endpoint: oauth2.Endpoint{ - AuthURL: reg.Config().OAuth2AuthURL(ctx).String(), - TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), - AuthStyle: oauth2.AuthStyleInHeader, - }, - Scopes: strings.Split(c.Scope, " "), - } - } + t.Parallel() + + ctx := context.Background() + + for dbName, reg := range testhelpers.ConnectDatabases(t, true, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyAccessTokenStrategy: "opaque", + config.KeyRefreshTokenHook: "", + }))) { + t.Run("registry="+dbName, func(t *testing.T) { + t.Parallel() + + rng := rand.New(rand.NewSource(time.Now().UnixNano())) + + jwk.EnsureAsymmetricKeypairExists(t, reg, string(jose.ES256), x.OpenIDConnectKeyName) + jwk.EnsureAsymmetricKeypairExists(t, reg, string(jose.ES256), x.OAuth2JWTKeyName) + + publicTS, adminTS := testhelpers.NewOAuth2Server(ctx, t, reg) + + publicClient := hydra.NewAPIClient(hydra.NewConfiguration()) + publicClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: publicTS.URL}} + adminClient := hydra.NewAPIClient(hydra.NewConfiguration()) + adminClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: adminTS.URL}} + + assertRefreshToken := func(t *testing.T, token *oauth2.Token, c *oauth2.Config, expectedExp time.Time) gjson.Result { + introspect := testhelpers.IntrospectToken(t, token.RefreshToken, adminTS) + if !expectedExp.IsZero() { + require.WithinDuration(t, expectedExp, time.Unix(introspect.Get("exp").Int(), 0), time.Second*3) + } + return introspect + } + + assertIDToken := func(t *testing.T, token *oauth2.Token, c *oauth2.Config, expectedSubject, expectedNonce string, expectedExp time.Time) gjson.Result { + idt, ok := token.Extra("id_token").(string) + require.True(t, ok) + assert.NotEmpty(t, idt) + + claims := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, idt)) + assert.Truef(t, time.Now().After(time.Unix(claims.Get("iat").Int(), 0)), "%s", claims) + assert.Truef(t, time.Now().After(time.Unix(claims.Get("nbf").Int(), 0)), "%s", claims) + assert.Truef(t, time.Now().Before(time.Unix(claims.Get("exp").Int(), 0)), "%s", claims) + if !expectedExp.IsZero() { + // 1.5s due to rounding + require.WithinDuration(t, 
expectedExp, time.Unix(claims.Get("exp").Int(), 0), 1*time.Second+500*time.Millisecond) + } + assert.NotEmptyf(t, claims.Get("jti").String(), "%s", claims) + assert.EqualValuesf(t, reg.Config().IssuerURL(ctx).String(), claims.Get("iss").String(), "%s", claims) + assert.NotEmptyf(t, claims.Get("sid").String(), "%s", claims) + assert.Equalf(t, "1", claims.Get("acr").String(), "%s", claims) + require.Lenf(t, claims.Get("amr").Array(), 1, "%s", claims) + assert.EqualValuesf(t, "pwd", claims.Get("amr.0").String(), "%s", claims) + + require.Lenf(t, claims.Get("aud").Array(), 1, "%s", claims) + assert.EqualValuesf(t, c.ClientID, claims.Get("aud.0").String(), "%s", claims) + assert.EqualValuesf(t, expectedSubject, claims.Get("sub").String(), "%s", claims) + assert.EqualValuesf(t, expectedNonce, claims.Get("nonce").String(), "%s", claims) + assert.EqualValuesf(t, `baz`, claims.Get("bar").String(), "%s", claims) + assert.EqualValuesf(t, `foo@bar.com`, claims.Get("email").String(), "%s", claims) + assert.NotEmptyf(t, claims.Get("sid").String(), "%s", claims) + + return claims + } + + introspectAccessToken := func(t *testing.T, conf *oauth2.Config, token *oauth2.Token, expectedSubject string) gjson.Result { + require.NotEmpty(t, token.AccessToken) + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.True(t, i.Get("active").Bool(), "%s", i) + assert.EqualValues(t, conf.ClientID, i.Get("client_id").String(), "%s", i) + assert.EqualValues(t, expectedSubject, i.Get("sub").String(), "%s", i) + assert.EqualValues(t, `bar`, i.Get("ext.foo").String(), "%s", i) + return i + } + + assertJWTAccessToken := func(t *testing.T, strat string, conf *oauth2.Config, token *oauth2.Token, expectedSubject string, expectedExp time.Time, scopes string) gjson.Result { + require.NotEmpty(t, token.AccessToken) + parts := strings.Split(token.AccessToken, ".") + if strat != "jwt" { + require.Len(t, parts, 2) + return gjson.Parse("null") + } + require.Len(t, parts, 3) + + i := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, token.AccessToken)) + assert.NotEmpty(t, i.Get("jti").String()) + assert.EqualValues(t, conf.ClientID, i.Get("client_id").String(), "%s", i) + assert.EqualValues(t, expectedSubject, i.Get("sub").String(), "%s", i) + assert.EqualValues(t, reg.Config().IssuerURL(ctx).String(), i.Get("iss").String(), "%s", i) + assert.True(t, time.Now().After(time.Unix(i.Get("iat").Int(), 0)), "%s", i) + assert.True(t, time.Now().After(time.Unix(i.Get("nbf").Int(), 0)), "%s", i) + assert.True(t, time.Now().Before(time.Unix(i.Get("exp").Int(), 0)), "%s", i) + require.WithinDuration(t, expectedExp, time.Unix(i.Get("exp").Int(), 0), time.Second) + assert.EqualValues(t, `bar`, i.Get("ext.foo").String(), "%s", i) + assert.EqualValues(t, scopes, i.Get("scp").Raw, "%s", i) + return i + } + + subject := "aeneas-rekkas" + nonce := uuid.New() + + t.Run("case=checks if request fails when audience does not match", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) + _, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("audience", "https://not-ory-api/")) + require.Empty(t, code) + }) + + t.Run("case=perform authorize code flow with ID token and refresh tokens", func(t *testing.T) { + run := func(t *testing.T, strategy string) { + c, conf := newOAuth2Client(t, reg, 
testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + iat := time.Now() + require.NoError(t, err) + + assert.Empty(t, token.Extra("c_nonce_draft_00"), "should not be set if not requested") + assert.Empty(t, token.Extra("c_nonce_expires_in_draft_00"), "should not be set if not requested") + introspectAccessToken(t, conf, token, subject) + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + + t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { + require.NotEmpty(t, token.RefreshToken) + token.Expiry = token.Expiry.Add(-time.Hour * 24) + iat = time.Now() + refreshedToken, err := conf.TokenSource(context.Background(), token).Token() + require.NoError(t, err) + + require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) + require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) + introspectAccessToken(t, conf, refreshedToken, subject) + + t.Run("followup=refreshed tokens contain valid tokens", func(t *testing.T) { + assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, refreshedToken, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + }) + + t.Run("followup=original access token is no longer valid", func(t *testing.T) { + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.False(t, i.Get("active").Bool(), "%s", i) + }) + + t.Run("followup=original refresh token is no longer valid", func(t *testing.T) { + _, err := conf.TokenSource(context.Background(), token).Token() + assert.Error(t, err) + }) + + t.Run("followup=but fail subsequent refresh because reuse was detected", func(t *testing.T) { + // Force golang to refresh token + refreshedToken.Expiry = refreshedToken.Expiry.Add(-time.Hour * 24) + _, err := conf.TokenSource(context.Background(), refreshedToken).Token() + require.Error(t, err) + }) + }) + } + + t.Run("strategy=jwt", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + run(t, "jwt") + }) + + t.Run("strategy=opaque", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque") + }) + }) + + t.Run("case=removing the authentication session does not cause an issue when refreshing tokens", func(t *testing.T) { + run := func(t *testing.T, strategy string) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + code, _ := getAuthorizeCode(t, 
conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + iat := time.Now() + require.NoError(t, err) + + assert.Empty(t, token.Extra("c_nonce_draft_00"), "should not be set if not requested") + assert.Empty(t, token.Extra("c_nonce_expires_in_draft_00"), "should not be set if not requested") + introspectAccessToken(t, conf, token, subject) + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + firstIdToken := assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + firstAccessToken := assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + + assert.EqualValues(t, subject, firstIdToken.Get("sub").String(), "%s", firstIdToken) + assert.NotEmpty(t, firstIdToken.Get("sid").String(), "%s", firstIdToken) + + assert.EqualValues(t, subject, firstAccessToken.Get("sub").String(), "%s", firstAccessToken) + rows, err := reg.Persister().Connection(ctx).RawQuery("DELETE FROM hydra_oauth2_authentication_session WHERE id = ?", firstIdToken.Get("sid").String()).ExecWithCount() + require.NoError(t, err) + require.EqualValues(t, 1, rows, "Expected to delete one row, but deleted %d", rows) + + t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { + require.NotEmpty(t, token.RefreshToken) + token.Expiry = token.Expiry.Add(-time.Hour * 24) + iat = time.Now() + refreshedToken, err := conf.TokenSource(context.Background(), token).Token() + require.NoError(t, err) + + secondIdToken := assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + secondAccessToken := introspectAccessToken(t, conf, refreshedToken, subject) + + assert.EqualValues(t, subject, secondIdToken.Get("sub").String(), "%s", secondIdToken) + assert.EqualValues(t, firstIdToken.Get("sid").String(), secondIdToken.Get("sid").String(), "%s", secondIdToken) + + assert.EqualValues(t, subject, secondAccessToken.Get("sub").String(), "%s", secondAccessToken) + }) + } + + t.Run("strategy=jwt", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + run(t, "jwt") + }) + + t.Run("strategy=opaque", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque") + }) + }) + + t.Run("case=perform authorize code flow with verifiable credentials", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenLifespan, "24h") + t.Cleanup(func() { + reg.Config().Delete(ctx, config.KeyAccessTokenLifespan) + }) + + // Make sure we test against all crypto suites that we advertise. + cfg, _, err := publicClient.OidcAPI.DiscoverOidcConfiguration(ctx).Execute() + require.NoError(t, err) + supportedCryptoSuites := cfg.CredentialsSupportedDraft00[0].CryptographicSuitesSupported + + run := func(t *testing.T, strategy string) { + _, conf := newOAuth2Client( + t, + reg, + testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler), + withScope("openid userinfo_credential_draft_00"), + ) + testhelpers.NewLoginConsentUI(t, reg.Config(), + func(w http.ResponseWriter, r *http.Request) { + acceptBody := hydra.AcceptOAuth2LoginRequest{ + Subject: subject, + Acr: pointerx.Ptr("1"), + Amr: []string{"pwd"}, + Context: map[string]interface{}{"context": "bar"}, + } + v, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). 
+ LoginChallenge(r.URL.Query().Get("login_challenge")). + AcceptOAuth2LoginRequest(acceptBody). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }, + func(w http.ResponseWriter, r *http.Request) { + rr, _, err := adminClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() + require.NoError(t, err) + + assert.Equal(t, map[string]interface{}{"context": "bar"}, rr.Context) + v, _, err := adminClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()). + ConsentChallenge(r.URL.Query().Get("consent_challenge")). + AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{ + GrantScope: []string{"openid", "userinfo_credential_draft_00"}, + GrantAccessTokenAudience: rr.RequestedAccessTokenAudience, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"email": "foo@bar.com", "bar": "baz"}, + }, + }). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }, + ) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("nonce", nonce), + oauth2.SetAuthURLParam("scope", "openid userinfo_credential_draft_00"), + ) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + iat := time.Now() + + vcNonce := token.Extra("c_nonce_draft_00").(string) + assert.NotEmpty(t, vcNonce) + expiry := token.Extra("c_nonce_expires_in_draft_00") + assert.NotEmpty(t, expiry) + assert.NoError(t, reg.Persister().IsNonceValid(ctx, token.AccessToken, vcNonce)) + + t.Run("followup=successfully create a verifiable credential", func(t *testing.T) { + t.Parallel() + + for _, alg := range supportedCryptoSuites { + t.Run(fmt.Sprintf("alg=%s", alg), func(t *testing.T) { + t.Parallel() + assertCreateVerifiableCredential(t, reg, vcNonce, token, jose.SignatureAlgorithm(alg)) + }) + } + }) + + t.Run("followup=get new nonce from priming request", func(t *testing.T) { + t.Parallel() + // Assert that we can fetch a verifiable credential with the nonce. + res, err := doPrimingRequest(t, reg, token, &hydraoauth2.CreateVerifiableCredentialRequestBody{ + Format: "jwt_vc_json", + Types: []string{"VerifiableCredential", "UserInfoCredential"}, + }) + assert.NoError(t, err) + + t.Run("followup=successfully create a verifiable credential from fresh nonce", func(t *testing.T) { + assertCreateVerifiableCredential(t, reg, res.Nonce, token, jose.ES256) + }) + }) + + t.Run("followup=rejects proof signed by another key", func(t *testing.T) { + t.Parallel() + for _, tc := range []struct { + name string + format string + proofType string + proof func() string + }{ + { + name: "proof=mismatching keys", + proof: func() string { + // Create mismatching public and private keys. + pubKey, _, err := josex.NewSigningKey(jose.ES256, 0) + require.NoError(t, err) + _, privKey, err := josex.NewSigningKey(jose.ES256, 0) + require.NoError(t, err) + pubKeyJWK := &jose.JSONWebKey{Key: pubKey, Algorithm: string(jose.ES256)} + return createVCProofJWT(t, pubKeyJWK, privKey, vcNonce) + }, + }, + { + name: "proof=invalid format", + format: "invalid_format", + proof: func() string { + // Create mismatching public and private keys. 
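+							// Note: unlike the case above, this keypair actually matches; the request is rejected because of the unsupported credential format declared for this test case.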
+ pubKey, privKey, err := josex.NewSigningKey(jose.ES256, 0) + require.NoError(t, err) + pubKeyJWK := &jose.JSONWebKey{Key: pubKey, Algorithm: string(jose.ES256)} + return createVCProofJWT(t, pubKeyJWK, privKey, vcNonce) + }, + }, + { + name: "proof=invalid type", + proofType: "invalid", + proof: func() string { + // Create mismatching public and private keys. + pubKey, privKey, err := josex.NewSigningKey(jose.ES256, 0) + require.NoError(t, err) + pubKeyJWK := &jose.JSONWebKey{Key: pubKey, Algorithm: string(jose.ES256)} + return createVCProofJWT(t, pubKeyJWK, privKey, vcNonce) + }, + }, + { + name: "proof=invalid nonce", + proof: func() string { + // Create mismatching public and private keys. + pubKey, privKey, err := josex.NewSigningKey(jose.ES256, 0) + require.NoError(t, err) + pubKeyJWK := &jose.JSONWebKey{Key: pubKey, Algorithm: string(jose.ES256)} + return createVCProofJWT(t, pubKeyJWK, privKey, "invalid nonce") + }, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + _, err := createVerifiableCredential(t, reg, token, &hydraoauth2.CreateVerifiableCredentialRequestBody{ + Format: cmp.Or(tc.format, "jwt_vc_json"), + Types: []string{"VerifiableCredential", "UserInfoCredential"}, + Proof: &hydraoauth2.VerifiableCredentialProof{ + ProofType: cmp.Or(tc.proofType, "jwt"), + JWT: tc.proof(), + }, + }) + require.EqualError(t, err, "invalid_request") + }) + } + + }) + + t.Run("followup=access token and id token are valid", func(t *testing.T) { + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["openid","userinfo_credential_draft_00"]`) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + }) + } + + t.Run("strategy=jwt", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + run(t, "jwt") + }) + + t.Run("strategy=opaque", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque") + }) + }) + + t.Run("suite=invalid query params", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + otherClient, _ := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + withWrongClientAfterLogin := &http.Client{ + Jar: testhelpers.NewEmptyCookieJar(t), + CheckRedirect: func(req *http.Request, _ []*http.Request) error { + if req.URL.Path != "/oauth2/auth" { + return nil + } + q := req.URL.Query() + if !q.Has("login_verifier") { + return nil + } + q.Set("client_id", otherClient.GetID()) + req.URL.RawQuery = q.Encode() + return nil + }, + } + withWrongClientAfterConsent := &http.Client{ + Jar: testhelpers.NewEmptyCookieJar(t), + CheckRedirect: func(req *http.Request, _ []*http.Request) error { + if req.URL.Path != "/oauth2/auth" { + return nil + } + q := req.URL.Query() + if !q.Has("consent_verifier") { + return nil + } + q.Set("client_id", otherClient.GetID()) + req.URL.RawQuery = q.Encode() + return nil + }, + } + + withWrongScopeAfterLogin := &http.Client{ + Jar: testhelpers.NewEmptyCookieJar(t), + CheckRedirect: func(req *http.Request, _ []*http.Request) error { + if req.URL.Path != "/oauth2/auth" { + return nil + } + q := req.URL.Query() + if !q.Has("login_verifier") { + return nil + } 
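+					// Tamper with the requested scope on the redirect back from the login UI; the table-driven flow below expects the server to answer with invalid_scope.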
+ q.Set("scope", "invalid scope") + req.URL.RawQuery = q.Encode() + return nil + }, + } + + withWrongScopeAfterConsent := &http.Client{ + Jar: testhelpers.NewEmptyCookieJar(t), + CheckRedirect: func(req *http.Request, _ []*http.Request) error { + if req.URL.Path != "/oauth2/auth" { + return nil + } + q := req.URL.Query() + if !q.Has("consent_verifier") { + return nil + } + q.Set("scope", "invalid scope") + req.URL.RawQuery = q.Encode() + return nil + }, + } + for _, tc := range []struct { + name string + client *http.Client + expectedResponse string + }{{ + name: "fails with wrong client ID after login", + client: withWrongClientAfterLogin, + expectedResponse: "invalid_client", + }, { + name: "fails with wrong client ID after consent", + client: withWrongClientAfterConsent, + expectedResponse: "invalid_client", + }, { + name: "fails with wrong scopes after login", + client: withWrongScopeAfterLogin, + expectedResponse: "invalid_scope", + }, { + name: "fails with wrong scopes after consent", + client: withWrongScopeAfterConsent, + expectedResponse: "invalid_scope", + }} { + t.Run("case="+tc.name, func(t *testing.T) { + state := uuid.New() + resp, err := tc.client.Get(conf.AuthCodeURL(state)) + require.NoError(t, err) + assert.Equal(t, tc.expectedResponse, resp.Request.URL.Query().Get("error"), "%s", resp.Request.URL.RawQuery) + resp.Body.Close() //nolint:errcheck + }) + } + }) + + t.Run("case=checks if request fails when subject is empty", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), func(w http.ResponseWriter, r *http.Request) { + _, res, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(ctx). + LoginChallenge(r.URL.Query().Get("login_challenge")). + AcceptOAuth2LoginRequest(hydra.AcceptOAuth2LoginRequest{Subject: "", Remember: pointerx.Ptr(true)}).Execute() + require.Error(t, err) // expects 400 + body := string(ioutilx.MustReadAll(res.Body)) + assert.Contains(t, body, "Field 'subject' must not be empty", "%s", body) + }, testhelpers.HTTPServerNoExpectedCallHandler(t)) + _, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + + _, err := testhelpers.NewEmptyJarClient(t).Get(conf.AuthCodeURL(uuid.New())) + require.NoError(t, err) + }) + + t.Run("case=perform flow with prompt=registration", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + + regUI := httptest.NewServer(acceptLoginHandler(t, c, adminClient, reg, subject, nil)) + t.Cleanup(regUI.Close) + reg.Config().MustSet(ctx, config.KeyRegistrationURL, regUI.URL) + + testhelpers.NewLoginConsentUI(t, reg.Config(), + nil, + acceptConsentHandler(t, c, adminClient, reg, subject, nil)) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("prompt", "registration"), + oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + + assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + }) + + t.Run("case=perform flow with audience", func(t *testing.T) { + expectAud := "https://api.ory.sh/" + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + assert.False(t, 
r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + assert.False(t, *r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + })) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("audience", "https://api.ory.sh/"), + oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + + claims := introspectAccessToken(t, conf, token, subject) + aud := claims.Get("aud").Array() + require.Len(t, aud, 1) + assert.EqualValues(t, aud[0].String(), expectAud) + + assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + }) + + t.Run("case=respects client token lifespan configuration", func(t *testing.T) { + run := func(t *testing.T, strategy string, c *client.Client, conf *oauth2.Config, expectedLifespans client.Lifespans) { + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + iat := time.Now() + require.NoError(t, err) + + body := introspectAccessToken(t, conf, token, subject) + require.WithinDuration(t, iat.Add(expectedLifespans.AuthorizationCodeGrantAccessTokenLifespan.Duration), time.Unix(body.Get("exp").Int(), 0), time.Second) + + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(expectedLifespans.AuthorizationCodeGrantAccessTokenLifespan.Duration), `["hydra","offline","openid"]`) + assertIDToken(t, token, conf, subject, nonce, iat.Add(expectedLifespans.AuthorizationCodeGrantIDTokenLifespan.Duration)) + assertRefreshToken(t, token, conf, iat.Add(expectedLifespans.AuthorizationCodeGrantRefreshTokenLifespan.Duration)) + + t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { + require.NotEmpty(t, token.RefreshToken) + token.Expiry = token.Expiry.Add(-time.Hour * 24) + refreshedToken, err := conf.TokenSource(context.Background(), token).Token() + iat = time.Now() + require.NoError(t, err) + assertRefreshToken(t, refreshedToken, conf, iat.Add(expectedLifespans.RefreshTokenGrantRefreshTokenLifespan.Duration)) + assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(expectedLifespans.RefreshTokenGrantAccessTokenLifespan.Duration), `["hydra","offline","openid"]`) + assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(expectedLifespans.RefreshTokenGrantIDTokenLifespan.Duration)) + + require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) + require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) + + body := introspectAccessToken(t, conf, refreshedToken, subject) + require.WithinDuration(t, iat.Add(expectedLifespans.RefreshTokenGrantAccessTokenLifespan.Duration), time.Unix(body.Get("exp").Int(), 0), time.Second) + + t.Run("followup=original access token is no longer valid", func(t *testing.T) { + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.False(t, i.Get("active").Bool(), "%s", i) + }) + + t.Run("followup=original refresh token is no longer 
valid", func(t *testing.T) { + _, err := conf.TokenSource(context.Background(), token).Token() + assert.Error(t, err) + }) + }) + } + + t.Run("case=custom-lifespans-active-jwt", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + ls := testhelpers.TestLifespans + ls.AuthorizationCodeGrantAccessTokenLifespan = x.NullDuration{Valid: true, Duration: 6 * time.Second} + testhelpers.UpdateClientTokenLifespans( + t, + &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, + c.GetID(), + ls, adminTS, + ) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + run(t, "jwt", c, conf, ls) + }) + + t.Run("case=custom-lifespans-active-opaque", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + ls := testhelpers.TestLifespans + ls.AuthorizationCodeGrantAccessTokenLifespan = x.NullDuration{Valid: true, Duration: 6 * time.Second} + testhelpers.UpdateClientTokenLifespans( + t, + &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, + c.GetID(), + ls, adminTS, + ) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque", c, conf, ls) + }) + + t.Run("case=custom-lifespans-unset", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.UpdateClientTokenLifespans(t, &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, c.GetID(), testhelpers.TestLifespans, adminTS) + testhelpers.UpdateClientTokenLifespans(t, &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, c.GetID(), client.Lifespans{}, adminTS) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + + //goland:noinspection GoDeprecation + expectedLifespans := client.Lifespans{ + AuthorizationCodeGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + AuthorizationCodeGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + AuthorizationCodeGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + ClientCredentialsGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + ImplicitGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + ImplicitGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + JwtBearerGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + PasswordGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + PasswordGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + RefreshTokenGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + RefreshTokenGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + RefreshTokenGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + } + run(t, "opaque", c, conf, expectedLifespans) + }) + }) + + t.Run("case=use remember feature and prompt=none", func(t *testing.T) { + c, conf := 
newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + oc := testhelpers.NewEmptyJarClient(t) + code, _ := getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("nonce", nonce), + oauth2.SetAuthURLParam("prompt", "login consent"), + oauth2.SetAuthURLParam("max_age", "1"), + ) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + introspectAccessToken(t, conf, token, subject) + + // Reset UI to check for skip values + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + require.True(t, r.Skip) + require.EqualValues(t, subject, r.Subject) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + require.True(t, *r.Skip) + require.EqualValues(t, subject, *r.Subject) + return nil + }), + ) + + t.Run("followup=checks if authenticatedAt/requestedAt is properly forwarded across the lifecycle by checking if prompt=none works", func(t *testing.T) { + // In order to check if authenticatedAt/requestedAt works, we'll sleep first in order to ensure that authenticatedAt is in the past + // if handled correctly. + time.Sleep(time.Second + time.Nanosecond) + + code, _ := getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("nonce", nonce), + oauth2.SetAuthURLParam("prompt", "none"), + oauth2.SetAuthURLParam("max_age", "60"), + ) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + original := introspectAccessToken(t, conf, token, subject) + + t.Run("followup=run the flow three more times", func(t *testing.T) { + for i := range 3 { + t.Run(fmt.Sprintf("run=%d", i), func(t *testing.T) { + code, _ := getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("nonce", nonce), + oauth2.SetAuthURLParam("prompt", "none"), + oauth2.SetAuthURLParam("max_age", "60"), + ) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + followup := introspectAccessToken(t, conf, token, subject) + assert.Equal(t, original.Get("auth_time").Int(), followup.Get("auth_time").Int()) + }) + } + }) + + t.Run("followup=fails when max age is reached and prompt is none", func(t *testing.T) { + code, _ := getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("nonce", nonce), + oauth2.SetAuthURLParam("prompt", "none"), + oauth2.SetAuthURLParam("max_age", "1"), + ) + require.Empty(t, code) + }) + + t.Run("followup=passes and resets skip when prompt=login", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + require.False(t, r.Skip) + require.Empty(t, r.Subject) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + require.True(t, *r.Skip) + require.EqualValues(t, subject, *r.Subject) + return nil + }), + ) + code, _ := getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("nonce", nonce), + oauth2.SetAuthURLParam("prompt", "login"), + oauth2.SetAuthURLParam("max_age", "1"), + ) + 
require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + introspectAccessToken(t, conf, token, subject) + assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + }) + }) + }) + + t.Run("case=should fail if prompt=none but no auth session given", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + oc := testhelpers.NewEmptyJarClient(t) + code, _ := getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("prompt", "none"), + ) + require.Empty(t, code) + }) + + t.Run("case=requires re-authentication when id_token_hint is set to a user 'patrik-neu' but the session is 'aeneas-rekkas' and then fails because the user id from the log in endpoint is 'aeneas-rekkas'", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + require.False(t, r.Skip) + require.Empty(t, r.Subject) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + oc := testhelpers.NewEmptyJarClient(t) + + // Create login session for aeneas-rekkas + code, _ := getAuthorizeCode(t, conf, oc) + require.NotEmpty(t, code) + + // Perform authentication for aeneas-rekkas which fails because id_token_hint is patrik-neu + code, _ = getAuthorizeCode(t, conf, oc, + oauth2.SetAuthURLParam("id_token_hint", testhelpers.NewIDToken(t, reg, "patrik-neu")), + ) + require.Empty(t, code) + }) + + t.Run("case=should not cause issues if max_age is very low and consent takes a long time", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + time.Sleep(time.Second * 2) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + code, _ := getAuthorizeCode(t, conf, nil) + require.NotEmpty(t, code) + }) + + t.Run("case=ensure consistent claims returned for userinfo", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + code, _ := getAuthorizeCode(t, conf, nil) + require.NotEmpty(t, code) + + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + + idClaims := assertIDToken(t, token, conf, subject, "", time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + + uiClaims := testhelpers.Userinfo(t, token, publicTS) + + for _, f := range []string{ + "sub", + "iss", + "aud", + "bar", + "auth_time", + } { + assert.NotEmpty(t, uiClaims.Get(f).Raw, "%s: %s", f, uiClaims) + assert.EqualValues(t, idClaims.Get(f).Raw, uiClaims.Get(f).Raw, "%s\nuserinfo: %s\nidtoken: %s", f, uiClaims, idClaims) + } + + for _, f := range []string{ 
+ "at_hash", + "c_hash", + "nonce", + "sid", + "jti", + } { + assert.Empty(t, uiClaims.Get(f).Raw, "%s: %s", f, uiClaims) + } + }) + + t.Run("case=add ext claims from hook if configured", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") + assert.Equal(t, r.Header.Get("Authorization"), "Bearer secret value") + + var hookReq hydraoauth2.TokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + require.NotEmpty(t, hookReq.Session) + require.Equal(t, map[string]interface{}{"foo": "bar"}, hookReq.Session.Extra) + require.NotEmpty(t, hookReq.Request) + require.ElementsMatch(t, []string{}, hookReq.Request.GrantedAudience) + require.Equal(t, map[string][]string{"grant_type": {"authorization_code"}}, hookReq.Request.Payload) + + claims := map[string]interface{}{ + "hooked": true, + } + + hookResp := hydraoauth2.TokenHookResponse{ + Session: flow.AcceptOAuth2ConsentRequestSession{ + AccessToken: claims, + IDToken: claims, + }, + } + + w.WriteHeader(http.StatusOK) + require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, &config.HookConfig{ + URL: hs.URL, + Auth: &config.Auth{ + Type: "api_key", + Config: config.AuthConfig{ + In: "header", + Name: "Authorization", + Value: "Bearer secret value", + }, + }, + }) + + t.Cleanup(func() { + reg.Config().Delete(ctx, config.KeyTokenHook) + }) + + expectAud := "https://api.ory.sh/" + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + assert.False(t, r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + assert.False(t, *r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + })) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("audience", "https://api.ory.sh/"), + oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) + + assertJWTAccessToken(t, strategy, conf, token, subject, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + + // NOTE: using introspect to cover both jwt and opaque strategies + accessTokenClaims := introspectAccessToken(t, conf, token, subject) + require.True(t, accessTokenClaims.Get("ext.hooked").Bool()) + + idTokenClaims := assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + require.True(t, idTokenClaims.Get("hooked").Bool()) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("case=fail token exchange if hook fails", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + 
w.WriteHeader(http.StatusInternalServerError) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + expectAud := "https://api.ory.sh/" + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + assert.False(t, r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + assert.False(t, *r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + })) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("audience", "https://api.ory.sh/"), + oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + + _, err := conf.Exchange(context.Background(), code) + require.Error(t, err) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("case=fail token exchange if hook denies the request", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusForbidden) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + expectAud := "https://api.ory.sh/" + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + assert.False(t, r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + assert.False(t, *r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + })) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("audience", "https://api.ory.sh/"), + oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + + _, err := conf.Exchange(context.Background(), code) + require.Error(t, err) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("case=fail token exchange if hook response is malformed", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + expectAud := "https://api.ory.sh/" + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", 
testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + assert.False(t, r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + }), + acceptConsentHandler(t, c, adminClient, reg, subject, func(r *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + assert.False(t, *r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + })) + + code, _ := getAuthorizeCode(t, conf, nil, + oauth2.SetAuthURLParam("audience", "https://api.ory.sh/"), + oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + + _, err := conf.Exchange(context.Background(), code) + require.Error(t, err) + } + } - adminClient := hydra.NewAPIClient(hydra.NewConfiguration()) - adminClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: adminTS.URL}} + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) - getAuthorizeCode := func(t *testing.T, conf *oauth2.Config, c *http.Client, params ...oauth2.AuthCodeOption) (string, *http.Response) { - if c == nil { - c = testhelpers.NewEmptyJarClient(t) - } + t.Run("case=can revoke token chains with ID obtained from consent requests", func(t *testing.T) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - state := uuid.New() - resp, err := c.Get(conf.AuthCodeURL(state, params...)) - require.NoError(t, err) - defer resp.Body.Close() + // go through an auth code flow and store the consent request id in the tokens + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, func(ocr *hydra.OAuth2ConsentRequest) *hydra.AcceptOAuth2ConsentRequest { + require.NotZero(t, ocr.Challenge) + require.NotNil(t, ocr.ConsentRequestId) + require.NotZero(t, *ocr.ConsentRequestId) + t.Logf("Consent challenge: %s", ocr.Challenge) + t.Logf("Consent request ID: %s", *ocr.ConsentRequestId) + return &hydra.AcceptOAuth2ConsentRequest{ + GrantScope: ocr.RequestedScope, + GrantAccessTokenAudience: ocr.RequestedAccessTokenAudience, + Remember: pointerx.Ptr(true), + RememberFor: pointerx.Ptr[int64](0), + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"crid": ocr.ConsentRequestId}, + IdToken: map[string]interface{}{"crid": ocr.ConsentRequestId}, + }, + } + }), + ) - q := resp.Request.URL.Query() - require.EqualValues(t, state, q.Get("state")) - return q.Get("code"), resp - } + code, _ := getAuthorizeCode(t, conf, nil) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) - acceptLoginHandler := func(t *testing.T, c *client.Client, subject string, checkRequestPayload func(request *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - rr, _, err := adminClient.OAuth2Api.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() - require.NoError(t, err) - - assert.EqualValues(t, c.GetID(), pointerx.StringR(rr.Client.ClientId)) - assert.Empty(t, pointerx.StringR(rr.Client.ClientSecret)) - assert.EqualValues(t, c.GrantTypes, rr.Client.GrantTypes) - assert.EqualValues(t, c.LogoURI, pointerx.StringR(rr.Client.LogoUri)) 
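For context on the webhook contract exercised by the token hook tests above: Hydra POSTs a hydraoauth2.TokenHookRequest to the configured URL and merges the Session claims of the returned hydraoauth2.TokenHookResponse into the issued tokens; in these tests a 403 response denies the exchange, while a 500 or a malformed body makes it fail. Below is a minimal sketch, not part of the patch, of what a standalone hook service could look like under those assumptions. The import paths, the /token-hook route, and port 4477 are illustrative only; an external service would more likely declare its own structs mirroring the JSON payload instead of importing Hydra packages.

// Minimal sketch of an external token hook service compatible with the
// contract exercised by the tests above. Illustrative only, not part of the patch.
package main

import (
	"encoding/json"
	"log"
	"net/http"

	"github.com/ory/hydra/v2/flow"
	hydraoauth2 "github.com/ory/hydra/v2/oauth2"
)

func main() {
	http.HandleFunc("/token-hook", func(w http.ResponseWriter, r *http.Request) {
		// Hydra POSTs the session and request context of the token exchange.
		var hookReq hydraoauth2.TokenHookRequest
		if err := json.NewDecoder(r.Body).Decode(&hookReq); err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}

		// Claims returned under Session end up in the "ext" claims of the
		// access token and in the ID token, which is what the assertions in
		// the test above check via "ext.hooked" and "hooked".
		resp := hydraoauth2.TokenHookResponse{
			Session: flow.AcceptOAuth2ConsentRequestSession{
				AccessToken: map[string]interface{}{"hooked": true},
				IDToken:     map[string]interface{}{"hooked": true},
			},
		}
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		_ = json.NewEncoder(w).Encode(&resp)
	})
	log.Fatal(http.ListenAndServe(":4477", nil))
}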
- assert.EqualValues(t, c.RedirectURIs, rr.Client.RedirectUris) - assert.EqualValues(t, r.URL.Query().Get("login_challenge"), rr.Challenge) - assert.EqualValues(t, []string{"hydra", "offline", "openid"}, rr.RequestedScope) - assert.Contains(t, rr.RequestUrl, reg.Config().OAuth2AuthURL(ctx).String()) - - acceptBody := hydra.AcceptOAuth2LoginRequest{ - Subject: subject, - Remember: pointerx.Bool(!rr.Skip), - Acr: pointerx.String("1"), - Amr: []string{"pwd"}, - Context: map[string]interface{}{"context": "bar"}, - } - if checkRequestPayload != nil { - if b := checkRequestPayload(rr); b != nil { - acceptBody = *b - } - } + // go through a second auth code flow and get a second set of tokens + code, _ = getAuthorizeCode(t, conf, nil) + require.NotEmpty(t, code) + token2, err := conf.Exchange(context.Background(), code) + require.NoError(t, err) - v, _, err := adminClient.OAuth2Api.AcceptOAuth2LoginRequest(context.Background()). - LoginChallenge(r.URL.Query().Get("login_challenge")). - AcceptOAuth2LoginRequest(acceptBody). - Execute() - require.NoError(t, err) - require.NotEmpty(t, v.RedirectTo) - http.Redirect(w, r, v.RedirectTo, http.StatusFound) - } - } + // Access and refresh tokens from both flows should be active + at := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.True(t, at.Get("active").Bool(), "%s", at) + rt := testhelpers.IntrospectToken(t, token.RefreshToken, adminTS) + assert.True(t, rt.Get("active").Bool(), "%s", rt) + + at2 := testhelpers.IntrospectToken(t, token2.AccessToken, adminTS) + assert.True(t, at2.Get("active").Bool(), "%s", at2) + rt2 := testhelpers.IntrospectToken(t, token2.RefreshToken, adminTS) + assert.True(t, rt2.Get("active").Bool(), "%s", rt2) + + // extract consent request id from first access token + consentRequestID := at.Get("ext.crid").Str + assert.NotZero(t, consentRequestID, "%s", at) + assert.Equal(t, consentRequestID, rt.Get("ext.crid").Str, "%s", rt) + + // second set of tokens have different consent request ids + assert.NotEqual(t, consentRequestID, at2.Get("ext.crid").Str, "%s", at2) + assert.NotEqual(t, consentRequestID, rt2.Get("ext.crid").Str, "%s", rt2) + + // revoke the first token chain by consent request id + _, err = adminClient.OAuth2API. + RevokeOAuth2ConsentSessions(context.Background()). + ConsentRequestId(consentRequestID). 
+ Execute() + require.NoError(t, err) - acceptConsentHandler := func(t *testing.T, c *client.Client, subject string, checkRequestPayload func(*hydra.OAuth2ConsentRequest)) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - rr, _, err := adminClient.OAuth2Api.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() - require.NoError(t, err) - - assert.EqualValues(t, c.GetID(), pointerx.StringR(rr.Client.ClientId)) - assert.Empty(t, pointerx.StringR(rr.Client.ClientSecret)) - assert.EqualValues(t, c.GrantTypes, rr.Client.GrantTypes) - assert.EqualValues(t, c.LogoURI, pointerx.StringR(rr.Client.LogoUri)) - assert.EqualValues(t, c.RedirectURIs, rr.Client.RedirectUris) - assert.EqualValues(t, subject, pointerx.StringR(rr.Subject)) - assert.EqualValues(t, []string{"hydra", "offline", "openid"}, rr.RequestedScope) - assert.EqualValues(t, r.URL.Query().Get("consent_challenge"), rr.Challenge) - assert.Contains(t, *rr.RequestUrl, reg.Config().OAuth2AuthURL(ctx).String()) - if checkRequestPayload != nil { - checkRequestPayload(rr) - } + // first token chain should be inactive + at = testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.False(t, at.Get("active").Bool(), "%s", at) + rt = testhelpers.IntrospectToken(t, token.RefreshToken, adminTS) + assert.False(t, rt.Get("active").Bool(), "%s", rt) + + // second token chain should still be active + at2 = testhelpers.IntrospectToken(t, token2.AccessToken, adminTS) + assert.True(t, at2.Get("active").Bool(), "%s", at2) + rt2 = testhelpers.IntrospectToken(t, token2.RefreshToken, adminTS) + assert.True(t, rt2.Get("active").Bool(), "%s", rt2) + }) - assert.Equal(t, map[string]interface{}{"context": "bar"}, rr.Context) - v, _, err := adminClient.OAuth2Api.AcceptOAuth2ConsentRequest(context.Background()). - ConsentChallenge(r.URL.Query().Get("consent_challenge")). - AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{ - GrantScope: []string{"hydra", "offline", "openid"}, Remember: pointerx.Bool(true), RememberFor: pointerx.Int64(0), - GrantAccessTokenAudience: rr.RequestedAccessTokenAudience, - Session: &hydra.AcceptOAuth2ConsentRequestSession{ - AccessToken: map[string]interface{}{"foo": "bar"}, - IdToken: map[string]interface{}{"bar": "baz"}, - }, - }). - Execute() - require.NoError(t, err) - require.NotEmpty(t, v.RedirectTo) - http.Redirect(w, r, v.RedirectTo, http.StatusFound) - } - } + t.Run("case=graceful token rotation", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "2s") + reg.Config().Delete(ctx, config.KeyTokenHook) + reg.Config().Delete(ctx, config.KeyRefreshTokenHook) + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") + reg.Config().MustSet(ctx, config.KeyAccessTokenLifespan, "1m") + t.Cleanup(func() { + reg.Config().Delete(ctx, config.KeyRefreshTokenRotationGracePeriod) + reg.Config().Delete(ctx, config.KeyRefreshTokenLifespan) + reg.Config().Delete(ctx, config.KeyAccessTokenLifespan) + }) - assertRefreshToken := func(t *testing.T, token *oauth2.Token, c *oauth2.Config, expectedExp time.Time) { - actualExp, err := strconv.ParseInt(testhelpers.IntrospectToken(t, c, token.RefreshToken, adminTS).Get("exp").String(), 10, 64) - require.NoError(t, err) - requirex.EqualTime(t, expectedExp, time.Unix(actualExp, 0), time.Second) - } + // This is an essential and complex test suite. 
We need to cover the following cases: + // + // * Graceful refresh token rotation invalidates the previous access token. + // * An expired refresh token cannot be used even if grace period is active. + // * A used refresh token cannot be re-used once the grace period ends, and it triggers re-use detection. + // * A test suite with a variety of concurrent refresh token chains. + run := func(t *testing.T, strategy string) { + c, conf := newOAuth2Client(t, reg, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) + testhelpers.NewLoginConsentUI(t, reg.Config(), + acceptLoginHandler(t, c, adminClient, reg, subject, nil), + acceptConsentHandler(t, c, adminClient, reg, subject, nil), + ) + + issueTokens := func(t *testing.T) *oauth2.Token { + code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) + require.NotEmpty(t, code) + token, err := conf.Exchange(context.Background(), code) + iat := time.Now() + require.NoError(t, err) - assertIDToken := func(t *testing.T, token *oauth2.Token, c *oauth2.Config, expectedSubject, expectedNonce string, expectedExp time.Time) gjson.Result { - idt, ok := token.Extra("id_token").(string) - require.True(t, ok) - assert.NotEmpty(t, idt) + introspectAccessToken(t, conf, token, subject) + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + return token + } - body, err := x.DecodeSegment(strings.Split(idt, ".")[1]) - require.NoError(t, err) + refreshTokens := func(t *testing.T, token *oauth2.Token) *oauth2.Token { + require.NotEmpty(t, token.RefreshToken) + token.Expiry = time.Now().Add(-time.Hour * 24) + iat := time.Now() + refreshedToken, err := conf.TokenSource(context.Background(), token).Token() + require.NoError(t, err) - claims := gjson.ParseBytes(body) - assert.True(t, time.Now().After(time.Unix(claims.Get("iat").Int(), 0)), "%s", claims) - assert.True(t, time.Now().After(time.Unix(claims.Get("nbf").Int(), 0)), "%s", claims) - assert.True(t, time.Now().Before(time.Unix(claims.Get("exp").Int(), 0)), "%s", claims) - requirex.EqualTime(t, expectedExp, time.Unix(claims.Get("exp").Int(), 0), 2*time.Second) - assert.NotEmpty(t, claims.Get("jti").String(), "%s", claims) - assert.EqualValues(t, reg.Config().IssuerURL(ctx).String(), claims.Get("iss").String(), "%s", claims) - assert.NotEmpty(t, claims.Get("sid").String(), "%s", claims) - assert.Equal(t, "1", claims.Get("acr").String(), "%s", claims) - require.Len(t, claims.Get("amr").Array(), 1, "%s", claims) - assert.EqualValues(t, "pwd", claims.Get("amr").Array()[0].String(), "%s", claims) - - require.Len(t, claims.Get("aud").Array(), 1, "%s", claims) - assert.EqualValues(t, c.ClientID, claims.Get("aud").Array()[0].String(), "%s", claims) - assert.EqualValues(t, expectedSubject, claims.Get("sub").String(), "%s", claims) - assert.EqualValues(t, expectedNonce, claims.Get("nonce").String(), "%s", claims) - assert.EqualValues(t, `baz`, claims.Get("bar").String(), "%s", claims) - - return claims - } + require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) + require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) - introspectAccessToken := func(t *testing.T, conf *oauth2.Config, token 
*oauth2.Token, expectedSubject string) gjson.Result { - require.NotEmpty(t, token.AccessToken) - i := testhelpers.IntrospectToken(t, conf, token.AccessToken, adminTS) - assert.True(t, i.Get("active").Bool(), "%s", i) - assert.EqualValues(t, conf.ClientID, i.Get("client_id").String(), "%s", i) - assert.EqualValues(t, expectedSubject, i.Get("sub").String(), "%s", i) - assert.EqualValues(t, `{"foo":"bar"}`, i.Get("ext").Raw, "%s", i) - return i - } + introspectAccessToken(t, conf, refreshedToken, subject) + assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, refreshedToken, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + return refreshedToken + } - assertJWTAccessToken := func(t *testing.T, strat string, conf *oauth2.Config, token *oauth2.Token, expectedSubject string, expectedExp time.Time) gjson.Result { - require.NotEmpty(t, token.AccessToken) - parts := strings.Split(token.AccessToken, ".") - if strat != "jwt" { - require.Len(t, parts, 2) - return gjson.Parse("null") - } - require.Len(t, parts, 3) + assertInactive := func(t *testing.T, token string, c *oauth2.Config) { + t.Helper() + at := testhelpers.IntrospectToken(t, token, adminTS) + assert.False(t, at.Get("active").Bool(), "%s", at) + } - body, err := x.DecodeSegment(parts[1]) - require.NoError(t, err) + t.Run("gracefully refreshing a token does invalidate the previous access token", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "2s") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") - i := gjson.ParseBytes(body) - assert.NotEmpty(t, i.Get("jti").String()) - assert.EqualValues(t, conf.ClientID, i.Get("client_id").String(), "%s", i) - assert.EqualValues(t, expectedSubject, i.Get("sub").String(), "%s", i) - assert.EqualValues(t, reg.Config().IssuerURL(ctx).String(), i.Get("iss").String(), "%s", i) - assert.True(t, time.Now().After(time.Unix(i.Get("iat").Int(), 0)), "%s", i) - assert.True(t, time.Now().After(time.Unix(i.Get("nbf").Int(), 0)), "%s", i) - assert.True(t, time.Now().Before(time.Unix(i.Get("exp").Int(), 0)), "%s", i) - requirex.EqualTime(t, expectedExp, time.Unix(i.Get("exp").Int(), 0), time.Second) - assert.EqualValues(t, `{"foo":"bar"}`, i.Get("ext").Raw, "%s", i) - assert.EqualValues(t, `["hydra","offline","openid"]`, i.Get("scp").Raw, "%s", i) - return i - } + token := issueTokens(t) + _ = refreshTokens(t, token) - waitForRefreshTokenExpiry := func() { - time.Sleep(reg.Config().GetRefreshTokenLifespan(ctx) + time.Second) - } + assertInactive(t, token.AccessToken, conf) // Original access token is invalid - t.Run("case=checks if request fails when audience does not match", func(t *testing.T) { - testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) - _, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("audience", "https://not-ory-api/")) - require.Empty(t, code) - }) + _ = refreshTokens(t, token) + assertInactive(t, token.AccessToken, conf) // Original access token is still invalid + }) - subject := "aeneas-rekkas" - nonce := uuid.New() - t.Run("case=perform authorize code flow with ID token and refresh tokens", 
func(t *testing.T) { - run := func(t *testing.T, strategy string) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, nil), - acceptConsentHandler(t, c, subject, nil), - ) - - code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) - require.NotEmpty(t, code) - token, err := conf.Exchange(context.Background(), code) - iat := time.Now() - require.NoError(t, err) - - introspectAccessToken(t, conf, token, subject) - assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx))) - assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) - assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) - - t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { - require.NotEmpty(t, token.RefreshToken) - token.Expiry = token.Expiry.Add(-time.Hour * 24) - iat = time.Now() - refreshedToken, err := conf.TokenSource(context.Background(), token).Token() - require.NoError(t, err) + t.Run("an expired refresh token can not be used even if we are in the grace period", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "5s") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1s") - require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) - require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) - require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) - introspectAccessToken(t, conf, refreshedToken, subject) + token := issueTokens(t) + time.Sleep(time.Second * 2) // Let token expire - we need 2 seconds to reliably be longer than TTL - t.Run("followup=refreshed tokens contain valid tokens", func(t *testing.T) { - assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx))) - assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) - assertRefreshToken(t, refreshedToken, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) - }) + token.Expiry = time.Now().Add(-time.Hour * 24) + _, err := conf.TokenSource(ctx, token).Token() + require.Error(t, err, "Rotating an expired token is not possible even when we are in the grace period") - t.Run("followup=original access token is no longer valid", func(t *testing.T) { - i := testhelpers.IntrospectToken(t, conf, token.AccessToken, adminTS) - assert.False(t, i.Get("active").Bool(), "%s", i) - }) + // The access token is still valid because using an expired refresh token has no effect on the access token. 
+ assertInactive(t, token.RefreshToken, conf) + }) - t.Run("followup=original refresh token is no longer valid", func(t *testing.T) { - _, err := conf.TokenSource(context.Background(), token).Token() - assert.Error(t, err) - }) + t.Run("a used refresh token can not be re-used once the grace period ends and it triggers re-use detection", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") - t.Run("followup=but fail subsequent refresh because expiry was reached", func(t *testing.T) { - waitForRefreshTokenExpiry() + token := issueTokens(t) + refreshed := refreshTokens(t, token) - // Force golang to refresh token - refreshedToken.Expiry = refreshedToken.Expiry.Add(-time.Hour * 24) - _, err := conf.TokenSource(context.Background(), refreshedToken).Token() - require.Error(t, err) - }) - }) - } + time.Sleep(time.Second * 2) // Wait for the grace period to end - t.Run("strategy=jwt", func(t *testing.T) { - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") - run(t, "jwt") - }) + token.Expiry = time.Now().Add(-time.Hour * 24) + _, err := conf.TokenSource(ctx, token).Token() + require.Error(t, err, "Rotating a used refresh token is not possible after the grace period") - t.Run("strategy=opaque", func(t *testing.T) { - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") - run(t, "opaque") - }) - }) + assertInactive(t, token.AccessToken, conf) + assertInactive(t, token.RefreshToken, conf) - t.Run("case=checks if request fails when subject is empty", func(t *testing.T) { - testhelpers.NewLoginConsentUI(t, reg.Config(), func(w http.ResponseWriter, r *http.Request) { - _, res, err := adminClient.OAuth2Api.AcceptOAuth2LoginRequest(ctx). - LoginChallenge(r.URL.Query().Get("login_challenge")). - AcceptOAuth2LoginRequest(hydra.AcceptOAuth2LoginRequest{Subject: "", Remember: pointerx.Bool(true)}).Execute() - require.Error(t, err) // expects 400 - body := string(ioutilx.MustReadAll(res.Body)) - assert.Contains(t, body, "Field 'subject' must not be empty", "%s", body) - }, testhelpers.HTTPServerNoExpectedCallHandler(t)) - _, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - - _, err := testhelpers.NewEmptyJarClient(t).Get(conf.AuthCodeURL(uuid.New())) - require.NoError(t, err) - }) + assertInactive(t, refreshed.AccessToken, conf) + assertInactive(t, refreshed.RefreshToken, conf) + }) - t.Run("case=perform flow with audience", func(t *testing.T) { - expectAud := "https://api.ory.sh/" - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { - assert.False(t, r.Skip) - assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) - return nil - }), - acceptConsentHandler(t, c, subject, func(r *hydra.OAuth2ConsentRequest) { - assert.False(t, *r.Skip) - assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) - })) + // This test suite covers complex scenarios where we have multiple generations of tokens and we need to ensure + // that key security mitigations are in place: + // + // - Token re-use detection clears all tokens if a refresh token is re-used after the grace period. + // - Revoking consent clears all tokens. + // - Token revocation clears all tokens. 
+ // + // The test creates 4 token generations, where each generation has twice as many tokens as the previous generation. + // The generations are created like this: + // + // - In the first scenario, all token generations are created at the same time. + // - In the second scenario, we create token generations with a delay that is longer than the grace period between them. + // + // Tokens for each generation are created in parallel to ensure we have no state leak anywhere. + t.Run("token generations", func(t *testing.T) { + gracePeriod := time.Second + aboveGracePeriod := 2 * time.Second + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, gracePeriod) + reg.Config().Delete(ctx, config.KeyTokenHook) + reg.Config().Delete(ctx, config.KeyRefreshTokenHook) + + createTokenGenerations := func(t *testing.T, count int, sleep time.Duration) [][]*oauth2.Token { + generations := make([][]*oauth2.Token, count) + generations[0] = []*oauth2.Token{issueTokens(t)} + // Start from the first generation. For every next generation, we refresh all the tokens of the previous generation twice. + for i := range len(generations) - 1 { + // Loop invariants: + // - `generations` is constant in size (it is right-sized when created), thus it is safe to index it concurrently. + // - The current generation (`generations[i]`) is constant in size, thus it is safe to iterate over it. + // - The next generation (`generations[i+1]`) is *not* constant in size. Elements are appended to it concurrently, and thus it is guarded by `mtx`. + // - Elements of the current generation *are* modified in `refreshTokens` (!), and thus are guarded by `mtx`. + // - Elements of the current and next generation are concurrently read/written inside the `gen` function, and thus are guarded by `mtx`. + generations[i+1] = make([]*oauth2.Token, 0, len(generations[i])*2) + mtx := sync.Mutex{} + + var wg sync.WaitGroup + gen := func(token *oauth2.Token) { + defer wg.Done() + + mtx.Lock() + generations[i+1] = append(generations[i+1], refreshTokens(t, token)) + mtx.Unlock() + } + + for _, token := range generations[i] { + wg.Add(2) + if dbName != "cockroach" { + // We currently only support TX retries on cockroach + gen(token) + gen(token) + } else { + go gen(token) + go gen(token) + } + } + + wg.Wait() + time.Sleep(sleep) + } + return generations + } - code, _ := getAuthorizeCode(t, conf, nil, - oauth2.SetAuthURLParam("audience", "https://api.ory.sh/"), - oauth2.SetAuthURLParam("nonce", nonce)) - require.NotEmpty(t, code) + t.Run("re-using an old graceful refresh token invalidates all tokens", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") + // This test only works if the refresh token lifespan is longer than the grace period.
+ generations := createTokenGenerations(t, 4, time.Second*2) + + generationIndex := rng.Intn(len(generations) - 1) // Exclude the last generation + tokenIndex := rng.Intn(len(generations[generationIndex])) + + token := generations[generationIndex][tokenIndex] + token.Expiry = time.Now().Add(-time.Hour * 24) + _, err := conf.TokenSource(ctx, token).Token() + require.Error(t, err) + + // Now all tokens are inactive + for i, generation := range generations { + t.Run(fmt.Sprintf("generation=%d", i), func(t *testing.T) { + for j, token := range generation { + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + assertInactive(t, token.AccessToken, conf) + assertInactive(t, token.RefreshToken, conf) + }) + } + }) + } + }) + + t.Run("re-using a graceful refresh token above the count limit invalidates all tokens", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1m") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGraceReuseCount, 2) + t.Cleanup(func() { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1s") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGraceReuseCount, 0) + }) + generations := createTokenGenerations(t, 4, time.Second*2) + + token := generations[0][0] + token.Expiry = time.Now().Add(-time.Hour * 24) + _, err := conf.TokenSource(ctx, token).Token() + require.Error(t, err) + + // Now all tokens are inactive + for i, generation := range generations { + t.Run(fmt.Sprintf("generation=%d", i), func(t *testing.T) { + for j, token := range generation { + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + assertInactive(t, token.AccessToken, conf) + assertInactive(t, token.RefreshToken, conf) + }) + } + }) + } + }) + + for _, withSleep := range []time.Duration{0, aboveGracePeriod} { + t.Run(fmt.Sprintf("withSleep=%s", withSleep), func(t *testing.T) { + createTokenGenerations := func(t *testing.T, count int) [][]*oauth2.Token { + return createTokenGenerations(t, count, withSleep) + } + + t.Run("only the most recent token generation is valid across the board", func(t *testing.T) { + generations := createTokenGenerations(t, 4) + + // All generations except the last one are no longer valid. + for i, generation := range generations[:len(generations)-1] { + t.Run(fmt.Sprintf("generation=%d", i), func(t *testing.T) { + for j, token := range generation { + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + assertInactive(t, token.AccessToken, conf) + }) + } + }) + } + + // The last generation is valid: + t.Run(fmt.Sprintf("generation=%d", len(generations)-1), func(t *testing.T) { + for j, token := range generations[len(generations)-1] { + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + introspectAccessToken(t, conf, token, subject) + assertIDToken(t, token, conf, subject, nonce, time.Time{}) + assertRefreshToken(t, token, conf, time.Time{}) + }) + } + }) + }) + + t.Run("revoking consent revokes all tokens", func(t *testing.T) { + generations := createTokenGenerations(t, 4) + + // After revoking consent, all generations are invalid.
+ err := reg.ConsentManager().RevokeSubjectConsentSession(context.Background(), subject) + require.NoError(t, err) + + for i, generation := range generations { + t.Run(fmt.Sprintf("generation=%d", i), func(t *testing.T) { + for j, token := range generation { + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + assertInactive(t, token.AccessToken, conf) + assertInactive(t, token.RefreshToken, conf) + }) + } + }) + } + }) + + t.Run("re-using a recent refresh token after the grace period has ended invalidates all tokens", func(t *testing.T) { + generations := createTokenGenerations(t, 4) + + token := generations[len(generations)-1][0] + + finalToken := refreshTokens(t, token) + time.Sleep(aboveGracePeriod) // Wait for the grace period to end + + token.Expiry = time.Now().Add(-time.Hour * 24) + _, err := conf.TokenSource(ctx, token).Token() + require.Error(t, err) + + // Now all tokens are inactive + for i, generation := range append(generations, []*oauth2.Token{finalToken}) { + t.Run(fmt.Sprintf("generation=%d", i), func(t *testing.T) { + for j, token := range generation { + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + assertInactive(t, token.AccessToken, conf) + assertInactive(t, token.RefreshToken, conf) + }) + } + }) + } + }) + + t.Run("revoking a refresh token in the chain revokes all tokens", func(t *testing.T) { + generations := createTokenGenerations(t, 4) + + testhelpers.RevokeToken(t, conf, generations[len(generations)-1][0].RefreshToken, publicTS) + + for i, generation := range generations { + t.Run(fmt.Sprintf("generation=%d", i), func(t *testing.T) { + for j, token := range generation { + token := token + t.Run(fmt.Sprintf("token=%d", j), func(t *testing.T) { + assertInactive(t, token.AccessToken, conf) + assertInactive(t, token.RefreshToken, conf) + }) + } + }) + } + }) + }) + } + }) - token, err := conf.Exchange(context.Background(), code) - require.NoError(t, err) + t.Run("it is possible to refresh tokens concurrently", func(t *testing.T) { + // SQLite can not handle concurrency + if dbName == "memory" { + t.Skip("Skipping test because SQLite can not handle concurrency") + } - claims := introspectAccessToken(t, conf, token, subject) - aud := claims.Get("aud").Array() - require.Len(t, aud, 1) - assert.EqualValues(t, aud[0].String(), expectAud) + const nRefreshes = 5 - assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) - }) + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "10s") + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGraceReuseCount, 0) - t.Run("case=respects client token lifespan configuration", func(t *testing.T) { - run := func(t *testing.T, strategy string, c *hc.Client, conf *oauth2.Config, expectedLifespans client.Lifespans) { - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, nil), - acceptConsentHandler(t, c, subject, nil), - ) - - code, _ := getAuthorizeCode(t, conf, nil, oauth2.SetAuthURLParam("nonce", nonce)) - require.NotEmpty(t, code) - token, err := conf.Exchange(context.Background(), code) - iat := time.Now() - require.NoError(t, err) - - body := introspectAccessToken(t, conf, token, subject) - requirex.EqualTime(t, iat.Add(expectedLifespans.AuthorizationCodeGrantAccessTokenLifespan.Duration), time.Unix(body.Get("exp").Int(), 0), time.Second) - - assertJWTAccessToken(t, strategy, conf, token, subject,
iat.Add(expectedLifespans.AuthorizationCodeGrantAccessTokenLifespan.Duration)) - assertIDToken(t, token, conf, subject, nonce, iat.Add(expectedLifespans.AuthorizationCodeGrantIDTokenLifespan.Duration)) - assertRefreshToken(t, token, conf, iat.Add(expectedLifespans.AuthorizationCodeGrantRefreshTokenLifespan.Duration)) - - t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { - require.NotEmpty(t, token.RefreshToken) - token.Expiry = token.Expiry.Add(-time.Hour * 24) - refreshedToken, err := conf.TokenSource(context.Background(), token).Token() - iat = time.Now() - require.NoError(t, err) - assertRefreshToken(t, refreshedToken, conf, iat.Add(expectedLifespans.RefreshTokenGrantRefreshTokenLifespan.Duration)) - assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(expectedLifespans.RefreshTokenGrantAccessTokenLifespan.Duration)) - assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(expectedLifespans.RefreshTokenGrantIDTokenLifespan.Duration)) + token := issueTokens(t) + token.Expiry = time.Now().Add(-time.Hour * 24) - require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) - require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) - require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) + eg, ctx := errgroup.WithContext(t.Context()) + refreshes := make([]*oauth2.Token, nRefreshes) + for k := range refreshes { + eg.Go(func() (err error) { + refreshes[k], err = conf.TokenSource(ctx, token).Token() + return + }) + } + require.NoError(t, eg.Wait()) + require.NotContains(t, refreshes, nil) - body := introspectAccessToken(t, conf, refreshedToken, subject) - requirex.EqualTime(t, iat.Add(expectedLifespans.RefreshTokenGrantAccessTokenLifespan.Duration), time.Unix(body.Get("exp").Int(), 0), time.Second) + // All tokens are valid. 
+ allTokens := map[string]struct{}{ + token.AccessToken: {}, + token.RefreshToken: {}, + } + for k, actual := range refreshes { + require.NotEmptyf(t, actual.RefreshToken, "token %d:\ntoken:%+v", k, actual) + require.NotEmptyf(t, actual.AccessToken, "token %d:\ntoken:%+v", k, actual) + require.NotEmptyf(t, actual.Extra("id_token"), "token %d:\ntoken:%+v", k, actual) - t.Run("followup=original access token is no longer valid", func(t *testing.T) { - i := testhelpers.IntrospectToken(t, conf, token.AccessToken, adminTS) - assert.False(t, i.Get("active").Bool(), "%s", i) - }) + allTokens[actual.AccessToken], allTokens[actual.RefreshToken] = struct{}{}, struct{}{} - t.Run("followup=original refresh token is no longer valid", func(t *testing.T) { - _, err := conf.TokenSource(context.Background(), token).Token() - assert.Error(t, err) - }) - }) - } + i := testhelpers.IntrospectToken(t, actual.AccessToken, adminTS) + assert.Truef(t, i.Get("active").Bool(), "token %d:\ntoken:%+v\nresult:%s", k, actual, i) - t.Run("case=custom-lifespans-active-jwt", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - ls := testhelpers.TestLifespans - ls.AuthorizationCodeGrantAccessTokenLifespan = x.NullDuration{Valid: true, Duration: 6 * time.Second} - testhelpers.UpdateClientTokenLifespans( - t, - &goauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, - c.GetID(), - ls, adminTS, - ) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") - run(t, "jwt", c, conf, ls) - }) + i = testhelpers.IntrospectToken(t, actual.RefreshToken, adminTS) + assert.Truef(t, i.Get("active").Bool(), "token %d:\ntoken:%+v\nresult:%s", k, actual, i) + } + assert.Len(t, allTokens, (1+nRefreshes)*2) + }) - t.Run("case=custom-lifespans-active-opaque", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - ls := testhelpers.TestLifespans - ls.AuthorizationCodeGrantAccessTokenLifespan = x.NullDuration{Valid: true, Duration: 6 * time.Second} - testhelpers.UpdateClientTokenLifespans( - t, - &goauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, - c.GetID(), - ls, adminTS, - ) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") - run(t, "opaque", c, conf, ls) - }) + t.Run("graceful refresh count limit is respected when set", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGraceReuseCount, 3) + reg.Config().MustSet(ctx, config.KeyRefreshTokenRotationGracePeriod, "1m") + reg.Config().MustSet(ctx, config.KeyRefreshTokenLifespan, "1m") - t.Run("case=custom-lifespans-unset", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.UpdateClientTokenLifespans(t, &goauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, c.GetID(), testhelpers.TestLifespans, adminTS) - testhelpers.UpdateClientTokenLifespans(t, &goauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, c.GetID(), client.Lifespans{}, adminTS) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") - - expectedLifespans := client.Lifespans{ - AuthorizationCodeGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, - AuthorizationCodeGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, - 
AuthorizationCodeGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, - ClientCredentialsGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, - ImplicitGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, - ImplicitGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, - JwtBearerGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, - PasswordGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, - PasswordGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, - RefreshTokenGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, - RefreshTokenGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, - RefreshTokenGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, - } - run(t, "opaque", c, conf, expectedLifespans) - }) - }) + token := issueTokens(t) + token.Expiry = time.Now().Add(-time.Hour * 24) - t.Run("case=use remember feature and prompt=none", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, nil), - acceptConsentHandler(t, c, subject, nil), - ) - - oc := testhelpers.NewEmptyJarClient(t) - code, _ := getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("nonce", nonce), - oauth2.SetAuthURLParam("prompt", "login consent"), - oauth2.SetAuthURLParam("max_age", "1"), - ) - require.NotEmpty(t, code) - token, err := conf.Exchange(context.Background(), code) - require.NoError(t, err) - introspectAccessToken(t, conf, token, subject) - - // Reset UI to check for skip values - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { - require.True(t, r.Skip) - require.EqualValues(t, subject, r.Subject) - return nil - }), - acceptConsentHandler(t, c, subject, func(r *hydra.OAuth2ConsentRequest) { - require.True(t, *r.Skip) - require.EqualValues(t, subject, *r.Subject) - }), - ) - - t.Run("followup=checks if authenticatedAt/requestedAt is properly forwarded across the lifecycle by checking if prompt=none works", func(t *testing.T) { - // In order to check if authenticatedAt/requestedAt works, we'll sleep first in order to ensure that authenticatedAt is in the past - // if handled correctly. 
- time.Sleep(time.Second + time.Nanosecond) - - code, _ := getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("nonce", nonce), - oauth2.SetAuthURLParam("prompt", "none"), - oauth2.SetAuthURLParam("max_age", "60"), - ) - require.NotEmpty(t, code) - token, err := conf.Exchange(context.Background(), code) - require.NoError(t, err) - original := introspectAccessToken(t, conf, token, subject) - - t.Run("followup=run the flow three more times", func(t *testing.T) { - for i := 0; i < 3; i++ { - t.Run(fmt.Sprintf("run=%d", i), func(t *testing.T) { - code, _ := getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("nonce", nonce), - oauth2.SetAuthURLParam("prompt", "none"), - oauth2.SetAuthURLParam("max_age", "60"), - ) - require.NotEmpty(t, code) - token, err := conf.Exchange(context.Background(), code) - require.NoError(t, err) - followup := introspectAccessToken(t, conf, token, subject) - assert.Equal(t, original.Get("auth_time").Int(), followup.Get("auth_time").Int()) + for range 3 { + _, err := conf.TokenSource(ctx, token).Token() + require.NoError(t, err) + } + _, err := conf.TokenSource(ctx, token).Token() + assert.Error(t, err, "Rotating a used refresh token is not possible after the limit is exceeded") }) } - }) - t.Run("followup=fails when max age is reached and prompt is none", func(t *testing.T) { - code, _ := getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("nonce", nonce), - oauth2.SetAuthURLParam("prompt", "none"), - oauth2.SetAuthURLParam("max_age", "1"), - ) - require.Empty(t, code) - }) + t.Run("strategy=jwt", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + run(t, "jwt") + }) - t.Run("followup=passes and resets skip when prompt=login", func(t *testing.T) { - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { - require.False(t, r.Skip) - require.Empty(t, r.Subject) - return nil - }), - acceptConsentHandler(t, c, subject, func(r *hydra.OAuth2ConsentRequest) { - require.True(t, *r.Skip) - require.EqualValues(t, subject, *r.Subject) - }), - ) - code, _ := getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("nonce", nonce), - oauth2.SetAuthURLParam("prompt", "login"), - oauth2.SetAuthURLParam("max_age", "1"), - ) - require.NotEmpty(t, code) - token, err := conf.Exchange(context.Background(), code) - require.NoError(t, err) - introspectAccessToken(t, conf, token, subject) - assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + t.Run("strategy=opaque", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque") + }) }) }) - }) + } +} - t.Run("case=should fail if prompt=none but no auth session given", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, nil), - acceptConsentHandler(t, c, subject, nil), - ) - - oc := testhelpers.NewEmptyJarClient(t) - code, _ := getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("prompt", "none"), - ) - require.Empty(t, code) +func assertCreateVerifiableCredential(t *testing.T, reg *driver.RegistrySQL, nonce string, accessToken *oauth2.Token, alg jose.SignatureAlgorithm) { + // Build a proof from the nonce. 
+ pubKey, privKey, err := josex.NewSigningKey(alg, 0) + require.NoError(t, err) + pubKeyJWK := &jose.JSONWebKey{Key: pubKey, Algorithm: string(alg)} + proofJWT := createVCProofJWT(t, pubKeyJWK, privKey, nonce) + + // Assert that we can fetch a verifiable credential with the nonce. + verifiableCredential, err := createVerifiableCredential(t, reg, accessToken, &hydraoauth2.CreateVerifiableCredentialRequestBody{ + Format: "jwt_vc_json", + Types: []string{"VerifiableCredential", "UserInfoCredential"}, + Proof: &hydraoauth2.VerifiableCredentialProof{ + ProofType: "jwt", + JWT: proofJWT, + }, }) + require.NoError(t, err, "Error: %+v", err) + require.NotNil(t, verifiableCredential) - t.Run("case=requires re-authentication when id_token_hint is set to a user 'patrik-neu' but the session is 'aeneas-rekkas' and then fails because the user id from the log in endpoint is 'aeneas-rekkas'", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { - require.False(t, r.Skip) - require.Empty(t, r.Subject) - return nil - }), - acceptConsentHandler(t, c, subject, nil), - ) - - oc := testhelpers.NewEmptyJarClient(t) - - // Create login session for aeneas-rekkas - code, _ := getAuthorizeCode(t, conf, oc) - require.NotEmpty(t, code) - - // Perform authentication for aeneas-rekkas which fails because id_token_hint is patrik-neu - code, _ = getAuthorizeCode(t, conf, oc, - oauth2.SetAuthURLParam("id_token_hint", testhelpers.NewIDToken(t, reg, "patrik-neu")), - ) - require.Empty(t, code) - }) + _, claims := claimsFromVCResponse(t, reg, verifiableCredential) + assertClaimsContainPublicKey(t, claims, pubKeyJWK) +} - t.Run("case=should not cause issues if max_age is very low and consent takes a long time", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { - time.Sleep(time.Second * 2) - return nil - }), - acceptConsentHandler(t, c, subject, nil), - ) - - code, _ := getAuthorizeCode(t, conf, nil) - require.NotEmpty(t, code) +func claimsFromVCResponse(t *testing.T, reg *driver.RegistrySQL, vc *hydraoauth2.VerifiableCredentialResponse) (*jwt.Token, *hydraoauth2.VerifableCredentialClaims) { + ctx := context.Background() + token, err := jwt.ParseWithClaims(vc.Credential, new(hydraoauth2.VerifableCredentialClaims), func(token *jwt.Token) (interface{}, error) { + kid, found := token.Header["kid"] + if !found { + return nil, errors.New("missing kid header") + } + openIDKey, err := reg.OpenIDJWTSigner().GetPublicKeyID(ctx) + if err != nil { + return nil, err + } + if kid != openIDKey { + return nil, errors.New("invalid kid header") + } + + return x.Must(reg.OpenIDJWTSigner().GetPublicKey(ctx)).Key, nil }) + require.NoError(t, err) - t.Run("case=ensure consistent claims returned for userinfo", func(t *testing.T) { - c, conf := newOAuth2Client(t, testhelpers.NewCallbackURL(t, "callback", testhelpers.HTTPServerNotImplementedHandler)) - testhelpers.NewLoginConsentUI(t, reg.Config(), - acceptLoginHandler(t, c, subject, nil), - acceptConsentHandler(t, c, subject, nil), - ) + return token, token.Claims.(*hydraoauth2.VerifableCredentialClaims) +} - code, _ := 
getAuthorizeCode(t, conf, nil) - require.NotEmpty(t, code) +func assertClaimsContainPublicKey(t *testing.T, claims *hydraoauth2.VerifableCredentialClaims, pubKeyJWK *jose.JSONWebKey) { + pubKeyRaw, err := pubKeyJWK.MarshalJSON() + require.NoError(t, err) + expectedID := fmt.Sprintf("did:jwk:%s", base64.RawURLEncoding.EncodeToString(pubKeyRaw)) + require.Equal(t, expectedID, claims.VerifiableCredential.Subject["id"]) +} - token, err := conf.Exchange(context.Background(), code) - require.NoError(t, err) +func createVerifiableCredential( + t *testing.T, + reg *driver.RegistrySQL, + token *oauth2.Token, + createVerifiableCredentialReq *hydraoauth2.CreateVerifiableCredentialRequestBody, +) (vcRes *hydraoauth2.VerifiableCredentialResponse, _ error) { + var ( + ctx = context.Background() + body bytes.Buffer + ) + require.NoError(t, json.NewEncoder(&body).Encode(createVerifiableCredentialReq)) + req := httpx.MustNewRequest("POST", reg.Config().CredentialsEndpointURL(ctx).String(), &body, "application/json") + req.Header.Set("Authorization", "Bearer "+token.AccessToken) + res, err := http.DefaultClient.Do(req) + require.NoError(t, err) + defer res.Body.Close() //nolint:errcheck - idClaims := assertIDToken(t, token, conf, subject, "", time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + if res.StatusCode != http.StatusOK { + var errRes fosite.RFC6749Error + require.NoError(t, json.NewDecoder(res.Body).Decode(&errRes)) + return nil, &errRes + } + require.Equal(t, http.StatusOK, res.StatusCode) + var vc hydraoauth2.VerifiableCredentialResponse + require.NoError(t, json.NewDecoder(res.Body).Decode(&vc)) - time.Sleep(time.Second) - uiClaims := testhelpers.Userinfo(t, token, publicTS) + return &vc, nil +} - for _, f := range []string{ - "sub", - "iss", - "aud", - "bar", - "auth_time", - } { - assert.NotEmpty(t, uiClaims.Get(f).Raw, "%s: %s", f, uiClaims) - assert.EqualValues(t, idClaims.Get(f).Raw, uiClaims.Get(f).Raw, "%s\nuserinfo: %s\nidtoken: %s", f, uiClaims, idClaims) - } +func doPrimingRequest( + t *testing.T, + reg *driver.RegistrySQL, + token *oauth2.Token, + createVerifiableCredentialReq *hydraoauth2.CreateVerifiableCredentialRequestBody, +) (*hydraoauth2.VerifiableCredentialPrimingResponse, error) { + var ( + ctx = context.Background() + body bytes.Buffer + ) + require.NoError(t, json.NewEncoder(&body).Encode(createVerifiableCredentialReq)) + req := httpx.MustNewRequest("POST", reg.Config().CredentialsEndpointURL(ctx).String(), &body, "application/json") + req.Header.Set("Authorization", "Bearer "+token.AccessToken) + res, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() //nolint:errcheck + require.Equal(t, http.StatusBadRequest, res.StatusCode) + var vc hydraoauth2.VerifiableCredentialPrimingResponse + require.NoError(t, json.NewDecoder(res.Body).Decode(&vc)) - for _, f := range []string{ - "at_hash", - "c_hash", - "nonce", - "sid", - "jti", - } { - assert.Empty(t, uiClaims.Get(f).Raw, "%s: %s", f, uiClaims) - } - }) + return &vc, nil +} + +func createVCProofJWT(t *testing.T, pubKey *jose.JSONWebKey, privKey any, nonce string) string { + proofToken := jwt.NewWithClaims(jwt.GetSigningMethod(pubKey.Algorithm), jwt.MapClaims{"nonce": nonce}) + proofToken.Header["jwk"] = pubKey + proofJWT, err := proofToken.SignedString(privKey) + require.NoError(t, err) + + return proofJWT } // TestAuthCodeWithMockStrategy runs the authorization_code flow against various ConsentStrategy scenarios. 
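Taken together, the helpers above cover the credential issuance flow: a request without a valid proof is expected to be answered with an HTTP 400 priming response carrying a fresh nonce, and a follow-up request with a proof JWT bound to an ephemeral key yields a credential whose subject is the did:jwk form of that key. The sketch below, which is not part of the patch, shows how the helpers might be wired together in a test and assumes the same imports as this file; extractNonce is a hypothetical placeholder because the VerifiableCredentialPrimingResponse field that carries the nonce is not visible in this diff, and whether omitting the proof is what triggers the priming response is an assumption here.

// Hypothetical wiring of the verifiable-credential helpers above; illustrative only.
func exampleVerifiableCredentialFlow(t *testing.T, reg *driver.RegistrySQL, accessToken *oauth2.Token) {
	// 1. Without a proof, the credential endpoint is expected to answer with a
	//    400 priming response that carries a fresh nonce.
	priming, err := doPrimingRequest(t, reg, accessToken, &hydraoauth2.CreateVerifiableCredentialRequestBody{
		Format: "jwt_vc_json",
		Types:  []string{"VerifiableCredential", "UserInfoCredential"},
	})
	require.NoError(t, err)
	nonce := extractNonce(t, priming)

	// 2. With the nonce, build a proof JWT bound to an ephemeral key and request
	//    the credential; the credential subject must be the did:jwk form of that key.
	assertCreateVerifiableCredential(t, reg, nonce, accessToken, jose.ES256)
}

// extractNonce is a placeholder: the VerifiableCredentialPrimingResponse field
// holding the fresh nonce is not visible in this diff.
func extractNonce(t *testing.T, _ *hydraoauth2.VerifiableCredentialPrimingResponse) string {
	t.Skip("placeholder: read the fresh nonce from the priming response here")
	return ""
}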
@@ -697,59 +1859,85 @@ func TestAuthCodeWithDefaultStrategy(t *testing.T) { // - [x] should pass with prompt=login when authentication time is recent // - [x] should fail with prompt=login when authentication time is in the past func TestAuthCodeWithMockStrategy(t *testing.T) { + t.Parallel() + ctx := context.Background() for _, strat := range []struct{ d string }{{d: "opaque"}, {d: "jwt"}} { t.Run("strategy="+strat.d, func(t *testing.T) { - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(ctx, config.KeyAccessTokenLifespan, time.Second*2) - conf.MustSet(ctx, config.KeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY") - conf.MustSet(ctx, config.KeyAccessTokenStrategy, strat.d) - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) - internal.MustEnsureRegistryKeys(reg, x.OpenIDConnectKeyName) - internal.MustEnsureRegistryKeys(reg, x.OAuth2JWTKeyName) + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyAccessTokenLifespan: time.Second * 2, + config.KeyScopeStrategy: "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY", + config.KeyAccessTokenStrategy: strat.d, + }))) + testhelpers.MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) + testhelpers.MustEnsureRegistryKeys(t, reg, x.OAuth2JWTKeyName) consentStrategy := &consentMock{} - router := x.NewRouterPublic() - ts := httptest.NewServer(router) - defer ts.Close() reg.WithConsentStrategy(consentStrategy) - handler := reg.OAuth2Handler() - handler.SetRoutes(httprouterx.NewRouterAdminWithPrefixAndRouter(router.Router, "/admin", conf.AdminURL), router, func(h http.Handler) http.Handler { - return h - }) + handler := hydraoauth2.NewHandler(reg) + var callbackHandler http.HandlerFunc - var callbackHandler *httprouter.Handle - router.GET("/callback", func(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - (*callbackHandler)(w, r, ps) - }) - var mutex sync.Mutex - - require.NoError(t, reg.ClientManager().CreateClient(context.TODO(), &hc.Client{ - LegacyClientID: "app-client", - Secret: "secret", - RedirectURIs: []string{ts.URL + "/callback"}, - ResponseTypes: []string{"id_token", "code", "token"}, - GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, - Scope: "hydra.* offline openid", + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + + var adminTs *httptest.Server + { + n := negroni.New() + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + n.UseFunc(httprouterx.AddAdminPrefixIfNotPresentNegroni) + + router := x.NewRouterAdmin(metrics) + handler.SetAdminRoutes(router) + n.UseHandler(router) + + adminTs = httptest.NewServer(n) + t.Cleanup(adminTs.Close) + reg.Config().MustSet(ctx, config.KeyAdminURL, adminTs.URL) + } + var publicTs *httptest.Server + { + n := negroni.New() + n.UseFunc(httprouterx.TrimTrailingSlashNegroni) + n.UseFunc(httprouterx.NoCacheNegroni) + + router := x.NewRouterPublic(metrics) + router.GET("/callback", func(w http.ResponseWriter, r *http.Request) { + callbackHandler(w, r) + }) + handler.SetPublicRoutes(router, func(h http.Handler) http.Handler { return h }) + n.UseHandler(router) + + publicTs = httptest.NewServer(n) + t.Cleanup(publicTs.Close) + reg.Config().MustSet(ctx, config.KeyAdminURL, publicTs.URL) + } + + require.NoError(t, reg.ClientManager().CreateClient(ctx, &client.Client{ + ID: "app-client", + Secret: "secret", + RedirectURIs: 
[]string{publicTs.URL + "/callback"}, + ResponseTypes: []string{"id_token", "code", "token"}, + GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, + Scope: "hydra.* offline openid", })) oauthConfig := &oauth2.Config{ ClientID: "app-client", ClientSecret: "secret", Endpoint: oauth2.Endpoint{ - AuthURL: ts.URL + "/oauth2/auth", - TokenURL: ts.URL + "/oauth2/token", + AuthURL: publicTs.URL + "/oauth2/auth", + TokenURL: publicTs.URL + "/oauth2/token", }, - RedirectURL: ts.URL + "/callback", - Scopes: []string{"hydra.*", "offline", "openid"}, + RedirectURL: publicTs.URL + "/callback", + Scopes: []string{"offline", "openid", "hydra.*"}, } var code string for k, tc := range []struct { cj http.CookieJar d string - cb func(t *testing.T) httprouter.Handle + cb func(t *testing.T) http.HandlerFunc authURL string shouldPassConsentStrategy bool expectOAuthAuthError bool @@ -764,11 +1952,11 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { authURL: oauthConfig.AuthCodeURL("some-foo-state"), shouldPassConsentStrategy: true, checkExpiry: true, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { code = r.URL.Query().Get("code") require.NotEmpty(t, code) - w.Write([]byte(r.URL.Query().Get("code"))) + _, _ = w.Write([]byte(r.URL.Query().Get("code"))) } }, assertAccessToken: func(t *testing.T, token string) { @@ -776,11 +1964,8 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { return } - body, err := x.DecodeSegment(strings.Split(token, ".")[1]) - require.NoError(t, err) - data := map[string]interface{}{} - require.NoError(t, json.Unmarshal(body, &data)) + require.NoError(t, json.Unmarshal(testhelpers.InsecureDecodeJWT(t, token), &data)) assert.EqualValues(t, "app-client", data["client_id"]) assert.EqualValues(t, "foo", data["sub"]) @@ -799,8 +1984,8 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { authTime: time.Now().UTC().Add(-time.Minute), requestTime: time.Now().UTC(), shouldPassConsentStrategy: true, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { code = r.URL.Query().Get("code") err := r.URL.Query().Get("error") require.Empty(t, code) @@ -815,11 +2000,11 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { authTime: time.Now().UTC().Add(-time.Minute), requestTime: time.Now().UTC(), shouldPassConsentStrategy: true, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { code = r.URL.Query().Get("code") require.NotEmpty(t, code) - w.Write([]byte(r.URL.Query().Get("code"))) + _, _ = w.Write([]byte(r.URL.Query().Get("code"))) } }, }, @@ -829,8 +2014,8 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { authTime: time.Now().UTC().Add(-time.Minute), requestTime: time.Now().UTC().Add(-time.Hour), shouldPassConsentStrategy: true, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { code = r.URL.Query().Get("code") err := r.URL.Query().Get("error") require.Empty(t, code) @@ -844,8 
+2029,8 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { authURL: oauthConfig.AuthCodeURL("some-foo-state"), expectOAuthAuthError: true, shouldPassConsentStrategy: false, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { require.Empty(t, r.URL.Query().Get("code")) assert.Equal(t, fosite.ErrRequestForbidden.Error(), r.URL.Query().Get("error")) } @@ -857,11 +2042,11 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { authTime: time.Now().UTC().Add(-time.Second), requestTime: time.Now().UTC().Add(-time.Minute), shouldPassConsentStrategy: true, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { code = r.URL.Query().Get("code") require.NotEmpty(t, code) - w.Write([]byte(r.URL.Query().Get("code"))) + _, _ = w.Write([]byte(r.URL.Query().Get("code"))) } }, }, @@ -872,8 +2057,8 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { requestTime: time.Now().UTC(), expectOAuthAuthError: true, shouldPassConsentStrategy: true, - cb: func(t *testing.T) httprouter.Handle { - return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { + cb: func(t *testing.T) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { code = r.URL.Query().Get("code") require.Empty(t, code) assert.Equal(t, fosite.ErrLoginRequired.Error(), r.URL.Query().Get("error")) @@ -882,8 +2067,6 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { }, } { t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { - mutex.Lock() - defer mutex.Unlock() if tc.cb == nil { tc.cb = noopHandler } @@ -892,8 +2075,7 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { consentStrategy.authTime = tc.authTime consentStrategy.requestTime = tc.requestTime - cb := tc.cb(t) - callbackHandler = &cb + callbackHandler = tc.cb(t) req, err := http.NewRequest("GET", tc.authURL, nil) require.NoError(t, err) @@ -903,8 +2085,8 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { } resp, err := (&http.Client{Jar: tc.cj}).Do(req) - require.NoError(t, err, tc.authURL, ts.URL) - defer resp.Body.Close() + require.NoError(t, err, tc.authURL, publicTs.URL) + defer resp.Body.Close() //nolint:errcheck if tc.expectOAuthAuthError { require.Empty(t, code) @@ -913,7 +2095,7 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { require.NotEmpty(t, code) - token, err := oauthConfig.Exchange(oauth2.NoContext, code) + token, err := oauthConfig.Exchange(ctx, code) if tc.expectOAuthTokenError { require.Error(t, err) return @@ -932,7 +2114,7 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { } var testSuccess = func(response *http.Response) { - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck require.Equal(t, http.StatusOK, resp.StatusCode) @@ -941,25 +2123,25 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { assert.Equal(t, "foo", claims["sub"]) } - req, err = http.NewRequest("GET", ts.URL+"/userinfo", nil) + req, err = http.NewRequest("GET", publicTs.URL+"/userinfo", nil) req.Header.Add("Authorization", "bearer "+token.AccessToken) testSuccess(makeRequest(req)) - req, err = http.NewRequest("POST", ts.URL+"/userinfo", nil) + req, err = http.NewRequest("POST", publicTs.URL+"/userinfo", nil) req.Header.Add("Authorization", "bearer "+token.AccessToken) 
testSuccess(makeRequest(req)) - req, err = http.NewRequest("POST", ts.URL+"/userinfo", bytes.NewBuffer([]byte("access_token="+token.AccessToken))) + req, err = http.NewRequest("POST", publicTs.URL+"/userinfo", bytes.NewBuffer([]byte("access_token="+token.AccessToken))) req.Header.Add("Content-Type", "application/x-www-form-urlencoded") testSuccess(makeRequest(req)) - req, err = http.NewRequest("GET", ts.URL+"/userinfo", nil) + req, err = http.NewRequest("GET", publicTs.URL+"/userinfo", nil) req.Header.Add("Authorization", "bearer asdfg") resp := makeRequest(req) require.Equal(t, http.StatusUnauthorized, resp.StatusCode) }) - res, err := testRefresh(t, token, ts.URL, tc.checkExpiry) + res, err := testRefresh(t, token, publicTs.URL, tc.checkExpiry) require.NoError(t, err) assert.Equal(t, http.StatusOK, res.StatusCode) @@ -978,17 +2160,11 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { t.Skip() } - body, err := x.DecodeSegment(strings.Split(token.AccessToken, ".")[1]) - require.NoError(t, err) - origPayload := map[string]interface{}{} - require.NoError(t, json.Unmarshal(body, &origPayload)) - - body, err = x.DecodeSegment(strings.Split(refreshedToken.AccessToken, ".")[1]) - require.NoError(t, err) + require.NoError(t, json.Unmarshal(testhelpers.InsecureDecodeJWT(t, token.AccessToken), &origPayload)) refreshedPayload := map[string]interface{}{} - require.NoError(t, json.Unmarshal(body, &refreshedPayload)) + require.NoError(t, json.Unmarshal(testhelpers.InsecureDecodeJWT(t, refreshedToken.AccessToken), &refreshedPayload)) if tc.checkExpiry { assert.NotEqual(t, refreshedPayload["exp"], origPayload["exp"]) @@ -1002,7 +2178,7 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) t.Run("old token should no longer be usable", func(t *testing.T) { - req, err := http.NewRequest("GET", ts.URL+"/userinfo", nil) + req, err := http.NewRequest("GET", publicTs.URL+"/userinfo", nil) require.NoError(t, err) req.Header.Add("Authorization", "bearer "+token.AccessToken) res, err := http.DefaultClient.Do(req) @@ -1011,7 +2187,7 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { }) t.Run("refreshing new refresh token should work", func(t *testing.T) { - res, err := testRefresh(t, &refreshedToken, ts.URL, false) + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) require.NoError(t, err) assert.Equal(t, http.StatusOK, res.StatusCode) @@ -1021,173 +2197,236 @@ func TestAuthCodeWithMockStrategy(t *testing.T) { }) t.Run("should call refresh token hook if configured", func(t *testing.T) { - hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") - - expectedGrantedScopes := []string{"openid", "offline", "hydra.*"} - expectedSubject := "foo" - - var hookReq hydraoauth2.RefreshTokenHookRequest - require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) - require.Equal(t, hookReq.Subject, expectedSubject) - require.ElementsMatch(t, hookReq.GrantedScopes, expectedGrantedScopes) - require.ElementsMatch(t, hookReq.GrantedAudience, []string{}) - require.Equal(t, hookReq.ClientID, oauthConfig.ClientID) - require.NotEmpty(t, hookReq.Session) - require.Equal(t, hookReq.Session.Subject, expectedSubject) - require.Equal(t, hookReq.Session.ClientID, oauthConfig.ClientID) - require.Equal(t, hookReq.Session.Extra, map[string]interface{}{}) - require.NotEmpty(t, hookReq.Requester) - require.Equal(t, hookReq.Requester.ClientID, 
oauthConfig.ClientID) - require.ElementsMatch(t, hookReq.Requester.GrantedScopes, expectedGrantedScopes) - - except := []string{ - "session.kid", - "session.id_token.expires_at", - "session.id_token.headers.extra.kid", - "session.id_token.id_token_claims.iat", - "session.id_token.id_token_claims.exp", - "session.id_token.id_token_claims.rat", - "session.id_token.id_token_claims.auth_time", - } - snapshotx.SnapshotTExcept(t, hookReq, except) - - claims := map[string]interface{}{ - "hooked": true, + run := func(hookType string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") + + expectedGrantedScopes := []string{"openid", "offline", "hydra.*"} + expectedSubject := "foo" + + exceptKeys := []string{ + "session.kid", + "session.id_token.expires_at", + "session.id_token.headers.extra.kid", + "session.id_token.id_token_claims.iat", + "session.id_token.id_token_claims.exp", + "session.id_token.id_token_claims.rat", + "session.id_token.id_token_claims.auth_time", + } + + if hookType == "legacy" { + var hookReq hydraoauth2.RefreshTokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + require.Equal(t, hookReq.Subject, expectedSubject) + require.ElementsMatch(t, hookReq.GrantedScopes, expectedGrantedScopes) + require.ElementsMatch(t, hookReq.GrantedAudience, []string{}) + require.Equal(t, hookReq.ClientID, oauthConfig.ClientID) + require.NotEmpty(t, hookReq.Session) + require.Equal(t, hookReq.Session.Subject, expectedSubject) + require.Equal(t, hookReq.Session.ClientID, oauthConfig.ClientID) + require.NotEmpty(t, hookReq.Requester) + require.Equal(t, hookReq.Requester.ClientID, oauthConfig.ClientID) + require.ElementsMatch(t, hookReq.Requester.GrantedScopes, expectedGrantedScopes) + + snapshotx.SnapshotT(t, hookReq, snapshotx.ExceptPaths(exceptKeys...)) + } else { + var hookReq hydraoauth2.TokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + require.NotEmpty(t, hookReq.Session) + require.Equal(t, hookReq.Session.Subject, expectedSubject) + require.Equal(t, hookReq.Session.ClientID, oauthConfig.ClientID) + require.NotEmpty(t, hookReq.Request) + require.Equal(t, hookReq.Request.ClientID, oauthConfig.ClientID) + require.ElementsMatch(t, hookReq.Request.GrantedScopes, expectedGrantedScopes) + require.ElementsMatch(t, hookReq.Request.GrantedAudience, []string{}) + require.Equal(t, hookReq.Request.Payload, map[string][]string{"grant_type": {"refresh_token"}}) + + snapshotx.SnapshotT(t, hookReq, snapshotx.ExceptPaths(exceptKeys...)) + } + + claims := map[string]interface{}{ + "hooked": hookType, + } + + hookResp := hydraoauth2.TokenHookResponse{ + Session: flow.AcceptOAuth2ConsentRequestSession{ + AccessToken: claims, + IDToken: claims, + }, + } + + w.WriteHeader(http.StatusOK) + require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) + })) + defer hs.Close() + + if hookType == "legacy" { + reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, nil) + + } else { + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + } + + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + + body, err := io.ReadAll(res.Body) + require.NoError(t, err) + require.NoError(t, 
json.Unmarshal(body, &refreshedToken)) + + accessTokenClaims := testhelpers.IntrospectToken(t, refreshedToken.AccessToken, adminTs) + require.Equalf(t, hookType, accessTokenClaims.Get("ext.hooked").String(), "%+v", accessTokenClaims) + + require.Equal(t, hookType, gjson.GetBytes(testhelpers.InsecureDecodeJWT(t, gjson.GetBytes(body, "id_token").Str), "hooked").String()) } - - hookResp := hydraoauth2.RefreshTokenHookResponse{ - Session: consent.AcceptOAuth2ConsentRequestSession{ - AccessToken: claims, - IDToken: claims, - }, - } - - w.WriteHeader(http.StatusOK) - require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) - })) - defer hs.Close() - - conf.MustSet(ctx, config.KeyRefreshTokenHookURL, hs.URL) - defer conf.MustSet(ctx, config.KeyRefreshTokenHookURL, nil) - - res, err := testRefresh(t, &refreshedToken, ts.URL, false) - require.NoError(t, err) - assert.Equal(t, http.StatusOK, res.StatusCode) - - body, err := io.ReadAll(res.Body) - require.NoError(t, err) - require.NoError(t, json.Unmarshal(body, &refreshedToken)) - - accessTokenClaims := testhelpers.IntrospectToken(t, oauthConfig, refreshedToken.AccessToken, ts) - require.True(t, accessTokenClaims.Get("ext.hooked").Bool()) - - idTokenBody, err := x.DecodeSegment( - strings.Split( - gjson.GetBytes(body, "id_token").String(), - ".", - )[1], - ) - require.NoError(t, err) - - require.True(t, gjson.GetBytes(idTokenBody, "hooked").Bool()) + } + t.Run("hook=legacy", run("legacy")) + t.Run("hook=new", run("new")) }) t.Run("should not override session data if token refresh hook returns no content", func(t *testing.T) { - hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNoContent) - })) - defer hs.Close() - - conf.MustSet(ctx, config.KeyRefreshTokenHookURL, hs.URL) - defer conf.MustSet(ctx, config.KeyRefreshTokenHookURL, nil) - - origAccessTokenClaims := testhelpers.IntrospectToken(t, oauthConfig, refreshedToken.AccessToken, ts) - - res, err := testRefresh(t, &refreshedToken, ts.URL, false) - require.NoError(t, err) - assert.Equal(t, http.StatusOK, res.StatusCode) - - body, err = io.ReadAll(res.Body) - require.NoError(t, err) - - require.NoError(t, json.Unmarshal(body, &refreshedToken)) - - refreshedAccessTokenClaims := testhelpers.IntrospectToken(t, oauthConfig, refreshedToken.AccessToken, ts) - assertx.EqualAsJSONExcept(t, json.RawMessage(origAccessTokenClaims.Raw), json.RawMessage(refreshedAccessTokenClaims.Raw), []string{"exp", "iat", "nbf"}) + run := func(hookType string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + defer hs.Close() + + if hookType == "legacy" { + reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, nil) + } else { + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + } + + origAccessTokenClaims := testhelpers.IntrospectToken(t, refreshedToken.AccessToken, adminTs) + + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + + body, err = io.ReadAll(res.Body) + require.NoError(t, err) + + require.NoError(t, json.Unmarshal(body, &refreshedToken)) + + refreshedAccessTokenClaims := testhelpers.IntrospectToken(t, refreshedToken.AccessToken, adminTs) + assertx.EqualAsJSONExcept(t, 
json.RawMessage(origAccessTokenClaims.Raw), json.RawMessage(refreshedAccessTokenClaims.Raw), []string{"exp", "iat", "nbf"}) + } + } + t.Run("hook=legacy", run("legacy")) + t.Run("hook=new", run("new")) }) - t.Run("should fail token refresh with `server_error` if hook fails", func(t *testing.T) { - hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - })) - defer hs.Close() - - conf.MustSet(ctx, config.KeyRefreshTokenHookURL, hs.URL) - defer conf.MustSet(ctx, config.KeyRefreshTokenHookURL, nil) - - res, err := testRefresh(t, &refreshedToken, ts.URL, false) - require.NoError(t, err) - assert.Equal(t, http.StatusInternalServerError, res.StatusCode) - - var errBody fosite.RFC6749ErrorJson - require.NoError(t, json.NewDecoder(res.Body).Decode(&errBody)) - require.Equal(t, fosite.ErrServerError.Error(), errBody.Name) - require.Equal(t, "An error occurred while executing the refresh token hook.", errBody.Description) + t.Run("should fail token refresh with `server_error` if refresh hook fails", func(t *testing.T) { + run := func(hookType string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer hs.Close() + + if hookType == "legacy" { + reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, nil) + } else { + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + } + + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) + require.NoError(t, err) + assert.Equal(t, http.StatusInternalServerError, res.StatusCode) + + var errBody fosite.RFC6749ErrorJson + require.NoError(t, json.NewDecoder(res.Body).Decode(&errBody)) + require.Equal(t, fosite.ErrServerError.Error(), errBody.Name) + require.Equal(t, "An error occurred while executing the token hook.", errBody.Description) + } + } + t.Run("hook=legacy", run("legacy")) + t.Run("hook=new", run("new")) }) - t.Run("should fail token refresh with `access_denied` if hook denied the request", func(t *testing.T) { - hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusForbidden) - })) - defer hs.Close() - - conf.MustSet(ctx, config.KeyRefreshTokenHookURL, hs.URL) - defer conf.MustSet(ctx, config.KeyRefreshTokenHookURL, nil) - - res, err := testRefresh(t, &refreshedToken, ts.URL, false) - require.NoError(t, err) - assert.Equal(t, http.StatusForbidden, res.StatusCode) - - var errBody fosite.RFC6749ErrorJson - require.NoError(t, json.NewDecoder(res.Body).Decode(&errBody)) - require.Equal(t, fosite.ErrAccessDenied.Error(), errBody.Name) - require.Equal(t, "The refresh token hook target responded with an error. Make sure that the request you are making is valid. 
Maybe the credential or request parameters you are using are limited in scope or otherwise restricted.", errBody.Description) + t.Run("should fail token refresh with `access_denied` if legacy refresh hook denied the request", func(t *testing.T) { + run := func(hookType string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusForbidden) + })) + defer hs.Close() + + if hookType == "legacy" { + reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, nil) + } else { + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + } + + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) + require.NoError(t, err) + assert.Equal(t, http.StatusForbidden, res.StatusCode) + + var errBody fosite.RFC6749ErrorJson + require.NoError(t, json.NewDecoder(res.Body).Decode(&errBody)) + require.Equal(t, fosite.ErrAccessDenied.Error(), errBody.Name) + require.Equal(t, "The token hook target responded with an error. Make sure that the request you are making is valid. Maybe the credential or request parameters you are using are limited in scope or otherwise restricted.", errBody.Description) + } + } + t.Run("hook=legacy", run("legacy")) + t.Run("hook=new", run("new")) }) - t.Run("should fail token refresh with `server_error` if hook response is malformed", func(t *testing.T) { - hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - })) - defer hs.Close() - - conf.MustSet(ctx, config.KeyRefreshTokenHookURL, hs.URL) - defer conf.MustSet(ctx, config.KeyRefreshTokenHookURL, nil) - - res, err := testRefresh(t, &refreshedToken, ts.URL, false) - require.NoError(t, err) - assert.Equal(t, http.StatusInternalServerError, res.StatusCode) - - var errBody fosite.RFC6749ErrorJson - require.NoError(t, json.NewDecoder(res.Body).Decode(&errBody)) - require.Equal(t, fosite.ErrServerError.Error(), errBody.Name) - require.Equal(t, "The refresh token hook target responded with an error.", errBody.Description) + t.Run("should fail token refresh with `server_error` if refresh hook response is malformed", func(t *testing.T) { + run := func(hookType string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + })) + defer hs.Close() + + if hookType == "legacy" { + reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyRefreshTokenHook, nil) + } else { + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + } + + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) + require.NoError(t, err) + assert.Equal(t, http.StatusInternalServerError, res.StatusCode) + + var errBody fosite.RFC6749ErrorJson + require.NoError(t, json.NewDecoder(res.Body).Decode(&errBody)) + require.Equal(t, fosite.ErrServerError.Error(), errBody.Name) + require.Equal(t, "The token hook target responded with an error.", errBody.Description) + } + } + t.Run("hook=legacy", run("legacy")) + t.Run("hook=new", run("new")) }) t.Run("refreshing old token should no longer work", func(t *testing.T) { - res, err := testRefresh(t, token, ts.URL, false) + res, err := testRefresh(t, token, publicTs.URL, false) 
require.NoError(t, err) - assert.Equal(t, http.StatusUnauthorized, res.StatusCode) + assert.Equal(t, http.StatusBadRequest, res.StatusCode) }) t.Run("attempt to refresh old token should revoke new token", func(t *testing.T) { - res, err := testRefresh(t, &refreshedToken, ts.URL, false) + res, err := testRefresh(t, &refreshedToken, publicTs.URL, false) require.NoError(t, err) - assert.Equal(t, http.StatusUnauthorized, res.StatusCode) + assert.Equal(t, http.StatusBadRequest, res.StatusCode) }) t.Run("duplicate code exchange fails", func(t *testing.T) { - token, err := oauthConfig.Exchange(oauth2.NoContext, code) + token, err := oauthConfig.Exchange(context.TODO(), code) require.Error(t, err) require.Nil(t, token) }) @@ -1222,3 +2461,54 @@ func testRefresh(t *testing.T, token *oauth2.Token, u string, sleep bool) (*http return http.DefaultClient.Do(req) } + +func withScope(scope string) func(*client.Client) { + return func(c *client.Client) { + c.Scope = scope + } +} + +func newOAuth2Client( + t *testing.T, + reg interface { + config.Provider + client.Registry + }, + callbackURL string, + opts ...func(*client.Client), +) (*client.Client, *oauth2.Config) { + ctx := t.Context() + secret := uuid.New() + c := &client.Client{ + Secret: secret, + RedirectURIs: []string{callbackURL}, + ResponseTypes: []string{"id_token", "code", "token"}, + GrantTypes: []string{ + "implicit", + "refresh_token", + "authorization_code", + "password", + "client_credentials", + }, + Scope: "hydra offline openid", + Audience: []string{"https://api.ory.sh/"}, + } + + // apply options + for _, o := range opts { + o(c) + } + + require.NoError(t, reg.ClientManager().CreateClient(ctx, c)) + return c, &oauth2.Config{ + ClientID: c.GetID(), + ClientSecret: secret, + RedirectURL: callbackURL, + Endpoint: oauth2.Endpoint{ + AuthURL: reg.Config().OAuth2AuthURL(ctx).String(), + TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), + AuthStyle: oauth2.AuthStyleInHeader, + }, + Scopes: strings.Split(c.Scope, " "), + } +} diff --git a/oauth2/oauth2_client_credentials_bench_test.go b/oauth2/oauth2_client_credentials_bench_test.go new file mode 100644 index 00000000000..dc61ec75a75 --- /dev/null +++ b/oauth2/oauth2_client_credentials_bench_test.go @@ -0,0 +1,156 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "encoding/json" + "net/url" + "strings" + "testing" + "time" + + "github.com/gofrs/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + "go.opentelemetry.io/otel/sdk/trace" + "go.opentelemetry.io/otel/sdk/trace/tracetest" + goauth2 "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" + + hc "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/x/configx" + "github.com/ory/x/otelx" +) + +func BenchmarkClientCredentials(b *testing.B) { + ctx := context.Background() + + spans := tracetest.NewSpanRecorder() + tracer := trace.NewTracerProvider(trace.WithSpanProcessor(spans)).Tracer("") + + dsn := "postgres://postgres:secret@127.0.0.1:3445/postgres?sslmode=disable" + reg := testhelpers.NewRegistrySQLFromURL(b, dsn, true, true, driver.WithTracerWrapper(func(t *otelx.Tracer) *otelx.Tracer { return new(otelx.Tracer).WithOTLP(tracer) }), 
driver.WithConfigOptions(configx.WithValue(config.KeyAccessTokenStrategy, "opaque"))) + public, admin := testhelpers.NewOAuth2Server(ctx, b, reg) + + newCustomClient := func(b *testing.B, c *hc.Client) (*hc.Client, clientcredentials.Config) { + unhashedSecret := c.Secret + require.NoError(b, reg.ClientManager().CreateClient(ctx, c)) + return c, clientcredentials.Config{ + ClientID: c.GetID(), + ClientSecret: unhashedSecret, + TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), + Scopes: strings.Split(c.Scope, " "), + EndpointParams: url.Values{"audience": c.Audience}, + } + } + + newClient := func(b *testing.B) (*hc.Client, clientcredentials.Config) { + return newCustomClient(b, &hc.Client{ + Secret: uuid.Must(uuid.NewV4()).String(), + RedirectURIs: []string{public.URL + "/callback"}, + ResponseTypes: []string{"token"}, + GrantTypes: []string{"client_credentials"}, + Scope: "foobar", + Audience: []string{"https://api.ory.sh/"}, + }) + } + + getToken := func(t *testing.B, conf clientcredentials.Config) (*goauth2.Token, error) { + conf.AuthStyle = goauth2.AuthStyleInHeader + return conf.Token(context.Background()) + } + + encodeOr := func(b *testing.B, val interface{}, or string) string { + out, err := json.Marshal(val) + require.NoError(b, err) + if string(out) == "null" { + return or + } + + return string(out) + } + + inspectToken := func(b *testing.B, token *goauth2.Token, cl *hc.Client, conf clientcredentials.Config, strategy string, expectedExp time.Time, checkExtraClaims bool) { + introspection := testhelpers.IntrospectToken(b, token.AccessToken, admin) + + check := func(res gjson.Result) { + assert.EqualValues(b, cl.GetID(), res.Get("client_id").String(), "%s", res.Raw) + assert.EqualValues(b, cl.GetID(), res.Get("sub").String(), "%s", res.Raw) + assert.EqualValues(b, reg.Config().IssuerURL(ctx).String(), res.Get("iss").String(), "%s", res.Raw) + + assert.EqualValues(b, res.Get("nbf").Int(), res.Get("iat").Int(), "%s", res.Raw) + assert.WithinDuration(b, expectedExp, time.Unix(res.Get("exp").Int(), 0), time.Second) + + assert.EqualValues(b, encodeOr(b, conf.EndpointParams["audience"], "[]"), res.Get("aud").Raw, "%s", res.Raw) + + if checkExtraClaims { + require.True(b, res.Get("ext.hooked").Bool()) + } + } + + check(introspection) + assert.True(b, introspection.Get("active").Bool()) + assert.EqualValues(b, "access_token", introspection.Get("token_use").String()) + assert.EqualValues(b, "Bearer", introspection.Get("token_type").String()) + assert.EqualValues(b, strings.Join(conf.Scopes, " "), introspection.Get("scope").String(), "%s", introspection.Raw) + + if strategy != "jwt" { + return + } + + jwtClaims := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(b, token.AccessToken)) + assert.NotEmpty(b, jwtClaims.Get("jti").String()) + assert.EqualValues(b, encodeOr(b, conf.Scopes, "[]"), jwtClaims.Get("scp").Raw, "%s", introspection.Raw) + check(jwtClaims) + } + + getAndInspectToken := func(b *testing.B, cl *hc.Client, conf clientcredentials.Config, strategy string, expectedExp time.Time, checkExtraClaims bool) { + token, err := getToken(b, conf) + require.NoError(b, err) + inspectToken(b, token, cl, conf, strategy, expectedExp, checkExtraClaims) + } + + run := func(strategy string) func(b *testing.B) { + return func(t *testing.B) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + + cl, conf := newClient(b) + getAndInspectToken(b, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), false) + } + } + + b.Run("strategy=jwt", func(b *testing.B) 
{ + initialDBSpans := dbSpans(spans) + for i := 0; i < b.N; i++ { + run("jwt")(b) + } + b.ReportMetric(0, "ns/op") + b.ReportMetric(float64(b.Elapsed().Milliseconds())/float64(b.N), "ms/op") + b.ReportMetric((float64(dbSpans(spans)-initialDBSpans))/float64(b.N), "queries/op") + }) + + b.Run("strategy=opaque", func(b *testing.B) { + initialDBSpans := dbSpans(spans) + for i := 0; i < b.N; i++ { + run("opaque")(b) + } + b.ReportMetric(0, "ns/op") + b.ReportMetric(float64(b.Elapsed().Milliseconds())/float64(b.N), "ms/op") + b.ReportMetric((float64(dbSpans(spans)-initialDBSpans))/float64(b.N), "queries/op") + }) +} + +func dbSpans(spans *tracetest.SpanRecorder) (count int) { + for _, s := range spans.Started() { + if strings.HasPrefix(s.Name(), "sql-") { + count++ + } + } + return +} diff --git a/oauth2/oauth2_client_credentials_test.go b/oauth2/oauth2_client_credentials_test.go index 5f0de59aef8..ef63b45e6a1 100644 --- a/oauth2/oauth2_client_credentials_test.go +++ b/oauth2/oauth2_client_credentials_test.go @@ -6,34 +6,34 @@ package oauth2_test import ( "context" "encoding/json" - "math" + "net/http" + "net/http/httptest" "net/url" "strings" "testing" "time" - "github.com/google/uuid" - "github.com/tidwall/gjson" - + "github.com/gofrs/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" goauth2 "golang.org/x/oauth2" "golang.org/x/oauth2/clientcredentials" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/x/contextx" - - hc "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" - "github.com/ory/x/requirex" + hc "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/internal/testhelpers" + hydraoauth2 "github.com/ory/hydra/v2/oauth2" + "github.com/ory/x/configx" ) func TestClientCredentials(t *testing.T) { + t.Parallel() + ctx := context.Background() - reg := internal.NewMockedRegistry(t, &contextx.Default{}) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValue(config.KeyAccessTokenStrategy, "opaque"))) public, admin := testhelpers.NewOAuth2Server(ctx, t, reg) var newCustomClient = func(t *testing.T, c *hc.Client) (*hc.Client, clientcredentials.Config) { @@ -49,15 +49,14 @@ func TestClientCredentials(t *testing.T) { } var newClient = func(t *testing.T) (*hc.Client, clientcredentials.Config) { - cc, config := newCustomClient(t, &hc.Client{ - Secret: uuid.New().String(), + return newCustomClient(t, &hc.Client{ + Secret: uuid.Must(uuid.NewV4()).String(), RedirectURIs: []string{public.URL + "/callback"}, ResponseTypes: []string{"token"}, GrantTypes: []string{"client_credentials"}, Scope: "foobar", Audience: []string{"https://api.ory.sh/"}, }) - return cc, config } var getToken = func(t *testing.T, conf clientcredentials.Config) (*goauth2.Token, error) { @@ -75,8 +74,8 @@ func TestClientCredentials(t *testing.T) { return string(out) } - var inspectToken = func(t *testing.T, token *goauth2.Token, cl *hc.Client, conf clientcredentials.Config, strategy string, expectedExp 
time.Time) { - introspection := testhelpers.IntrospectToken(t, &goauth2.Config{ClientID: cl.GetID(), ClientSecret: conf.ClientSecret}, token.AccessToken, admin) + var inspectToken = func(t *testing.T, token *goauth2.Token, cl *hc.Client, conf clientcredentials.Config, strategy string, expectedExp time.Time, checkExtraClaims bool) { + introspection := testhelpers.IntrospectToken(t, token.AccessToken, admin) check := func(res gjson.Result) { assert.EqualValues(t, cl.GetID(), res.Get("client_id").String(), "%s", res.Raw) @@ -84,9 +83,13 @@ func TestClientCredentials(t *testing.T) { assert.EqualValues(t, reg.Config().IssuerURL(ctx).String(), res.Get("iss").String(), "%s", res.Raw) assert.EqualValues(t, res.Get("nbf").Int(), res.Get("iat").Int(), "%s", res.Raw) - requirex.EqualTime(t, expectedExp, time.Unix(res.Get("exp").Int(), 0), time.Second) + assert.WithinDuration(t, expectedExp, time.Unix(res.Get("exp").Int(), 0), 2*time.Second) assert.EqualValues(t, encodeOr(t, conf.EndpointParams["audience"], "[]"), res.Get("aud").Raw, "%s", res.Raw) + + if checkExtraClaims { + require.True(t, res.Get("ext.hooked").Bool()) + } } check(introspection) @@ -99,19 +102,16 @@ func TestClientCredentials(t *testing.T) { return } - body, err := x.DecodeSegment(strings.Split(token.AccessToken, ".")[1]) - require.NoError(t, err) - - jwtClaims := gjson.ParseBytes(body) + jwtClaims := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, token.AccessToken)) assert.NotEmpty(t, jwtClaims.Get("jti").String()) assert.EqualValues(t, encodeOr(t, conf.Scopes, "[]"), jwtClaims.Get("scp").Raw, "%s", introspection.Raw) check(jwtClaims) } - var getAndInspectToken = func(t *testing.T, cl *hc.Client, conf clientcredentials.Config, strategy string, expectedExp time.Time) { + var getAndInspectToken = func(t *testing.T, cl *hc.Client, conf clientcredentials.Config, strategy string, expectedExp time.Time, checkExtraClaims bool) { token, err := getToken(t, conf) require.NoError(t, err) - inspectToken(t, token, cl, conf, strategy, expectedExp) + inspectToken(t, token, cl, conf, strategy, expectedExp, checkExtraClaims) } t.Run("case=should fail because audience is not allowed", func(t *testing.T) { @@ -134,7 +134,7 @@ func TestClientCredentials(t *testing.T) { reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) cl, conf := newClient(t) - getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx))) + getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), false) } } @@ -149,7 +149,7 @@ func TestClientCredentials(t *testing.T) { cl, conf := newClient(t) conf.EndpointParams = url.Values{} - getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx))) + getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), false) } } @@ -164,7 +164,7 @@ func TestClientCredentials(t *testing.T) { cl, conf := newClient(t) conf.Scopes = []string{} - getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx))) + getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), false) } } @@ -188,7 +188,7 @@ func TestClientCredentials(t *testing.T) { // We reset this so that introspectToken is going to check for the default scope. 
conf.Scopes = defaultScope - inspectToken(t, token, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx))) + inspectToken(t, token, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), false) } } @@ -201,9 +201,8 @@ func TestClientCredentials(t *testing.T) { return func(t *testing.T) { reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) - secret := uuid.New().String() cl, conf := newCustomClient(t, &hc.Client{ - Secret: secret, + Secret: uuid.Must(uuid.NewV4()).String(), RedirectURIs: []string{public.URL + "/callback"}, ResponseTypes: []string{"token"}, GrantTypes: []string{"client_credentials"}, @@ -211,7 +210,7 @@ func TestClientCredentials(t *testing.T) { Audience: []string{"https://api.ory.sh/"}, }) testhelpers.UpdateClientTokenLifespans(t, &goauth2.Config{ClientID: cl.GetID(), ClientSecret: conf.ClientSecret}, cl.GetID(), testhelpers.TestLifespans, admin) - getAndInspectToken(t, cl, conf, strategy, time.Now().Add(testhelpers.TestLifespans.ClientCredentialsGrantAccessTokenLifespan.Duration)) + getAndInspectToken(t, cl, conf, strategy, time.Now().Add(testhelpers.TestLifespans.ClientCredentialsGrantAccessTokenLifespan.Duration), false) } } @@ -226,15 +225,158 @@ func TestClientCredentials(t *testing.T) { run := func(strategy string) func(t *testing.T) { return func(t *testing.T) { reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) - cl, conf := newClient(t) + _, conf := newClient(t) conf.Scopes = []string{} token, err := getToken(t, conf) require.NoError(t, err) + expected := time.Now().Add(duration) + assert.WithinDuration(t, expected, token.Expiry, 5*time.Second) + introspection := testhelpers.IntrospectToken(t, token.AccessToken, admin) + assert.WithinDuration(t, expected, time.Unix(introspection.Get("exp").Int(), 0), 5*time.Second) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should call token hook if configured", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + scope := "foobar" + audience := []string{"https://api.ory.sh/"} + + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") + assert.Equal(t, r.Header.Get("Authorization"), "Bearer secret value") + + expectedGrantedScopes := []string{"foobar"} + expectedGrantedAudience := []string{"https://api.ory.sh/"} + + var hookReq hydraoauth2.TokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + require.NotEmpty(t, hookReq.Session) + require.Equal(t, hookReq.Session.Extra, map[string]interface{}{}) + require.NotEmpty(t, hookReq.Request) + require.ElementsMatch(t, hookReq.Request.GrantedScopes, expectedGrantedScopes) + require.ElementsMatch(t, hookReq.Request.GrantedAudience, expectedGrantedAudience) + require.Equal(t, hookReq.Request.Payload, map[string][]string{ + "grant_type": {"client_credentials"}, + "scope": {"foobar"}, + }) + + claims := map[string]interface{}{ + "hooked": true, + } + + hookResp := hydraoauth2.TokenHookResponse{ + Session: flow.AcceptOAuth2ConsentRequestSession{ + AccessToken: claims, + IDToken: claims, + }, + } + + w.WriteHeader(http.StatusOK) + require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, &config.HookConfig{ + URL: hs.URL, + 
Auth: &config.Auth{ + Type: "api_key", + Config: config.AuthConfig{ + In: "header", + Name: "Authorization", + Value: "Bearer secret value", + }, + }, + }) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + cl, conf := newCustomClient(t, &hc.Client{ + Secret: uuid.Must(uuid.NewV4()).String(), + RedirectURIs: []string{public.URL + "/callback"}, + ResponseTypes: []string{"token"}, + GrantTypes: []string{"client_credentials"}, + Scope: scope, + Audience: audience, + }) + getAndInspectToken(t, cl, conf, strategy, time.Now().Add(reg.Config().GetAccessTokenLifespan(ctx)), true) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should fail token if hook fails", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + _, conf := newClient(t) + + _, err := getToken(t, conf) + require.Error(t, err) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should fail token if hook denied the request", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusForbidden) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + _, conf := newClient(t) + + _, err := getToken(t, conf) + require.Error(t, err) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should fail token if hook response is malformed", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) - assert.True(t, math.Abs(float64(time.Now().Add(duration).Round(time.Minute).Unix())-float64(token.Expiry.Round(time.Minute).Unix())) < 5) + _, conf := newClient(t) - introspection := testhelpers.IntrospectToken(t, &goauth2.Config{ClientID: cl.GetID(), ClientSecret: conf.ClientSecret}, token.AccessToken, admin) - assert.EqualValues(t, time.Now().Add(duration).Round(time.Minute), time.Unix(introspection.Get("exp").Int(), 0).Round(time.Minute)) + _, err := getToken(t, conf) + require.Error(t, err) } } diff --git a/oauth2/oauth2_device_code_test.go b/oauth2/oauth2_device_code_test.go new file mode 100644 index 00000000000..a5241be4adc --- /dev/null +++ b/oauth2/oauth2_device_code_test.go @@ -0,0 +1,875 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "net/http" + "strings" + "testing" + "time" + + "github.com/pborman/uuid" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + "golang.org/x/oauth2" + + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/testhelpers" + hydraoauth2 "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/pointerx" +) + +func TestDeviceAuthRequest(t *testing.T) { + t.Parallel() + + ctx := context.Background() + reg := testhelpers.NewRegistryMemory(t) + testhelpers.NewOAuth2Server(ctx, t, reg) + + secret := uuid.New() + c := &client.Client{ + ID: "device-client", + Secret: secret, + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:device_code"}, + Scope: "hydra offline openid", + Audience: []string{"https://api.ory.sh/"}, + TokenEndpointAuthMethod: "client_secret_post", + } + require.NoError(t, reg.ClientManager().CreateClient(ctx, c)) + + oauthClient := &oauth2.Config{ + ClientID: c.GetID(), + ClientSecret: secret, + Endpoint: oauth2.Endpoint{ + DeviceAuthURL: reg.Config().OAuth2DeviceAuthorisationURL(ctx).String(), + TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), + AuthStyle: oauth2.AuthStyleInParams, + }, + Scopes: strings.Split(c.Scope, " "), + } + + testCases := []struct { + description string + setUp func() + check func(t *testing.T, resp *oauth2.DeviceAuthResponse, err error) + cleanUp func() + }{ + { + description: "should pass", + check: func(t *testing.T, resp *oauth2.DeviceAuthResponse, _ error) { + assert.NotEmpty(t, resp.DeviceCode) + assert.NotEmpty(t, resp.UserCode) + assert.NotEmpty(t, resp.Interval) + assert.NotEmpty(t, resp.VerificationURI) + assert.NotEmpty(t, resp.VerificationURIComplete) + }, + }, + } + + for _, testCase := range testCases { + t.Run("case="+testCase.description, func(t *testing.T) { + if testCase.setUp != nil { + testCase.setUp() + } + + resp, err := oauthClient.DeviceAuth(context.Background(), []oauth2.AuthCodeOption{oauth2.SetAuthURLParam("client_secret", secret)}...) 
+ + if testCase.check != nil { + testCase.check(t, resp, err) + } + + if testCase.cleanUp != nil { + testCase.cleanUp() + } + }) + } +} + +func TestDeviceTokenRequest(t *testing.T) { + t.Parallel() + + ctx := context.Background() + reg := testhelpers.NewRegistryMemory(t) + testhelpers.NewOAuth2Server(ctx, t, reg) + + secret := uuid.New() + c := &client.Client{ + ID: "device-client", + Secret: secret, + GrantTypes: []string{ + string(fosite.GrantTypeDeviceCode), + string(fosite.GrantTypeRefreshToken), + }, + Scope: "hydra offline openid", + Audience: []string{"https://api.ory.sh/"}, + } + require.NoError(t, reg.ClientManager().CreateClient(ctx, c)) + + oauthClient := &oauth2.Config{ + ClientID: c.GetID(), + ClientSecret: secret, + Endpoint: oauth2.Endpoint{ + DeviceAuthURL: reg.Config().OAuth2DeviceAuthorisationURL(ctx).String(), + TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), + AuthStyle: oauth2.AuthStyleInHeader, + }, + Scopes: strings.Split(c.Scope, " "), + } + + testCases := []struct { + description string + setUp func(signature, userCodeSignature string) + check func(t *testing.T, token *oauth2.Token, err error) + cleanUp func() + }{ + { + description: "should pass with refresh token", + setUp: func(signature, userCodeSignature string) { + authreq := &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: c.GetID(), + GrantTypes: []string{string(fosite.GrantTypeDeviceCode)}, + }, + RequestedScope: []string{"hydra", "offline"}, + GrantedScope: []string{"hydra", "offline"}, + Session: &hydraoauth2.Session{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "hydra", + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.DeviceCode: time.Now().Add(time.Hour).UTC(), + }, + }, + }, + RequestedAt: time.Now(), + }, + } + + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + }, + check: func(t *testing.T, token *oauth2.Token, err error) { + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.RefreshToken) + }, + }, + { + description: "should pass with ID token", + setUp: func(signature, userCodeSignature string) { + authreq := &fosite.DeviceRequest{ + UserCodeState: fosite.UserCodeAccepted, + Request: fosite.Request{ + Client: &fosite.DefaultClient{ + ID: c.GetID(), + GrantTypes: []string{string(fosite.GrantTypeDeviceCode)}, + }, + RequestedScope: []string{"hydra", "offline", "openid"}, + GrantedScope: []string{"hydra", "offline", "openid"}, + Session: &hydraoauth2.Session{ + DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ + Subject: "hydra", + }, + ExpiresAt: map[fosite.TokenType]time.Time{ + fosite.DeviceCode: time.Now().Add(time.Hour).UTC(), + }, + }, + }, + RequestedAt: time.Now(), + }, + } + + require.NoError(t, reg.OAuth2Storage().CreateDeviceAuthSession(context.TODO(), signature, userCodeSignature, authreq)) + require.NoError(t, reg.OAuth2Storage().CreateOpenIDConnectSession(context.TODO(), signature, authreq)) + }, + check: func(t *testing.T, token *oauth2.Token, err error) { + assert.NotEmpty(t, token.AccessToken) + assert.NotEmpty(t, token.RefreshToken) + assert.NotEmpty(t, token.Extra("id_token")) + }, + }, + } + + for _, testCase := range testCases { + t.Run("case="+testCase.description, func(t *testing.T) { + code, signature, err := reg.DeviceCodeStrategy().GenerateDeviceCode(context.TODO()) + require.NoError(t, err) + _, userCodeSignature, err := 
reg.UserCodeStrategy().GenerateUserCode(context.TODO()) + require.NoError(t, err) + + if testCase.setUp != nil { + testCase.setUp(signature, userCodeSignature) + } + + var token *oauth2.Token + token, err = oauthClient.DeviceAccessToken(context.Background(), &oauth2.DeviceAuthResponse{DeviceCode: code}) + + if testCase.check != nil { + testCase.check(t, token, err) + } + + if testCase.cleanUp != nil { + testCase.cleanUp() + } + }) + } +} + +func TestDeviceCodeWithDefaultStrategy(t *testing.T) { + t.Parallel() + + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeyAccessTokenStrategy: "opaque", + config.KeyRefreshTokenHook: "", + }))) + publicTS, adminTS := testhelpers.NewOAuth2Server(ctx, t, reg) + + publicClient := hydra.NewAPIClient(hydra.NewConfiguration()) + publicClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: publicTS.URL}} + adminClient := hydra.NewAPIClient(hydra.NewConfiguration()) + adminClient.GetConfig().Servers = hydra.ServerConfigurations{{URL: adminTS.URL}} + + getDeviceCode := func(t *testing.T, conf *oauth2.Config, c *http.Client, params ...oauth2.AuthCodeOption) (*oauth2.DeviceAuthResponse, error) { + return conf.DeviceAuth(ctx, params...) + } + + acceptUserCode := func(t *testing.T, conf *oauth2.Config, c *http.Client, devResp *oauth2.DeviceAuthResponse) *http.Response { + if c == nil { + c = testhelpers.NewEmptyJarClient(t) + } + + resp, err := c.Get(devResp.VerificationURIComplete) + require.NoError(t, err) + require.Contains(t, reg.Config().DeviceDoneURL(ctx).String(), resp.Request.URL.Path, "did not end up in post device URL") + require.Equal(t, resp.Request.URL.Query().Get("client_id"), conf.ClientID) + + return resp + } + + acceptDeviceHandler := func(t *testing.T, c *client.Client) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + userCode := r.URL.Query().Get("user_code") + payload := hydra.AcceptDeviceUserCodeRequest{ + UserCode: &userCode, + } + + v, _, err := adminClient.OAuth2API.AcceptUserCodeRequest(context.Background()). + DeviceChallenge(r.URL.Query().Get("device_challenge")). + AcceptDeviceUserCodeRequest(payload). 
+ Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + } + } + + acceptLoginHandler := func(t *testing.T, c *client.Client, subject string, scopes []string, checkRequestPayload func(request *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + rr, _, err := adminClient.OAuth2API.GetOAuth2LoginRequest(context.Background()).LoginChallenge(r.URL.Query().Get("login_challenge")).Execute() + require.NoError(t, err) + + assert.EqualValues(t, c.GetID(), pointerx.Deref(rr.Client.ClientId)) + assert.Empty(t, pointerx.Deref(rr.Client.ClientSecret)) + assert.EqualValues(t, c.GrantTypes, rr.Client.GrantTypes) + assert.EqualValues(t, c.LogoURI, pointerx.Deref(rr.Client.LogoUri)) + assert.EqualValues(t, r.URL.Query().Get("login_challenge"), rr.Challenge) + assert.EqualValues(t, scopes, rr.RequestedScope) + assert.Contains(t, rr.RequestUrl, hydraoauth2.DeviceVerificationPath) + + acceptBody := hydra.AcceptOAuth2LoginRequest{ + Subject: subject, + Remember: pointerx.Ptr(!rr.Skip), + Acr: pointerx.Ptr("1"), + Amr: []string{"pwd"}, + Context: map[string]interface{}{"context": "bar"}, + } + if checkRequestPayload != nil { + if b := checkRequestPayload(rr); b != nil { + acceptBody = *b + } + } + + v, _, err := adminClient.OAuth2API.AcceptOAuth2LoginRequest(context.Background()). + LoginChallenge(r.URL.Query().Get("login_challenge")). + AcceptOAuth2LoginRequest(acceptBody). + Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + } + } + + acceptConsentHandler := func(t *testing.T, c *client.Client, subject string, scopes []string, checkRequestPayload func(*hydra.OAuth2ConsentRequest)) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + rr, _, err := adminClient.OAuth2API.GetOAuth2ConsentRequest(context.Background()).ConsentChallenge(r.URL.Query().Get("consent_challenge")).Execute() + require.NoError(t, err) + + assert.EqualValues(t, c.GetID(), pointerx.Deref(rr.Client.ClientId)) + assert.Empty(t, pointerx.Deref(rr.Client.ClientSecret)) + assert.EqualValues(t, c.GrantTypes, rr.Client.GrantTypes) + assert.EqualValues(t, c.LogoURI, pointerx.Deref(rr.Client.LogoUri)) + assert.EqualValues(t, subject, pointerx.Deref(rr.Subject)) + assert.EqualValues(t, scopes, rr.RequestedScope) + assert.Contains(t, *rr.RequestUrl, hydraoauth2.DeviceVerificationPath) + if checkRequestPayload != nil { + checkRequestPayload(rr) + } + + assert.Equal(t, map[string]interface{}{"context": "bar"}, rr.Context) + v, _, err := adminClient.OAuth2API.AcceptOAuth2ConsentRequest(context.Background()). + ConsentChallenge(r.URL.Query().Get("consent_challenge")). + AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{ + GrantScope: scopes, Remember: pointerx.Ptr(true), RememberFor: pointerx.Ptr[int64](0), + GrantAccessTokenAudience: rr.RequestedAccessTokenAudience, + Session: &hydra.AcceptOAuth2ConsentRequestSession{ + AccessToken: map[string]interface{}{"foo": "bar"}, + IdToken: map[string]interface{}{"bar": "baz"}, + }, + }). 
+ Execute() + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + } + } + + assertRefreshToken := func(t *testing.T, token *oauth2.Token, c *oauth2.Config, expectedExp time.Time) { + actualExp := testhelpers.IntrospectToken(t, token.RefreshToken, adminTS).Get("exp").Int() + assert.WithinDuration(t, expectedExp, time.Unix(actualExp, 0), time.Second) + } + + assertIDToken := func(t *testing.T, token *oauth2.Token, c *oauth2.Config, expectedSubject, expectedNonce string, expectedExp time.Time) gjson.Result { + idt, ok := token.Extra("id_token").(string) + require.True(t, ok) + assert.NotEmpty(t, idt) + + claims := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, idt)) + assert.True(t, time.Now().After(time.Unix(claims.Get("iat").Int(), 0)), "%s", claims) + assert.True(t, time.Now().After(time.Unix(claims.Get("nbf").Int(), 0)), "%s", claims) + assert.True(t, time.Now().Before(time.Unix(claims.Get("exp").Int(), 0)), "%s", claims) + assert.WithinDuration(t, expectedExp, time.Unix(claims.Get("exp").Int(), 0), 2*time.Second) + assert.NotEmpty(t, claims.Get("jti").String(), "%s", claims) + assert.EqualValues(t, reg.Config().IssuerURL(ctx).String(), claims.Get("iss").String(), "%s", claims) + assert.NotEmpty(t, claims.Get("sid").String(), "%s", claims) + assert.Equal(t, "1", claims.Get("acr").String(), "%s", claims) + require.Len(t, claims.Get("amr").Array(), 1, "%s", claims) + assert.EqualValues(t, "pwd", claims.Get("amr").Array()[0].String(), "%s", claims) + + require.Len(t, claims.Get("aud").Array(), 1, "%s", claims) + assert.EqualValues(t, c.ClientID, claims.Get("aud").Array()[0].String(), "%s", claims) + assert.EqualValues(t, expectedSubject, claims.Get("sub").String(), "%s", claims) + assert.EqualValues(t, `baz`, claims.Get("bar").String(), "%s", claims) + + return claims + } + + introspectAccessToken := func(t *testing.T, conf *oauth2.Config, token *oauth2.Token, expectedSubject string) gjson.Result { + require.NotEmpty(t, token.AccessToken) + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.True(t, i.Get("active").Bool(), "%s", i) + assert.EqualValues(t, conf.ClientID, i.Get("client_id").String(), "%s", i) + assert.EqualValues(t, expectedSubject, i.Get("sub").String(), "%s", i) + assert.EqualValues(t, `bar`, i.Get("ext.foo").String(), "%s", i) + return i + } + + assertJWTAccessToken := func(t *testing.T, strat string, conf *oauth2.Config, token *oauth2.Token, expectedSubject string, expectedExp time.Time, scopes string) gjson.Result { + require.NotEmpty(t, token.AccessToken) + parts := strings.Split(token.AccessToken, ".") + if strat != "jwt" { + require.Len(t, parts, 2) + return gjson.Parse("null") + } + require.Len(t, parts, 3) + + i := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, token.AccessToken)) + assert.NotEmpty(t, i.Get("jti").String()) + assert.EqualValues(t, conf.ClientID, i.Get("client_id").String(), "%s", i) + assert.EqualValues(t, expectedSubject, i.Get("sub").String(), "%s", i) + assert.EqualValues(t, reg.Config().IssuerURL(ctx).String(), i.Get("iss").String(), "%s", i) + assert.True(t, time.Now().After(time.Unix(i.Get("iat").Int(), 0)), "%s", i) + assert.True(t, time.Now().After(time.Unix(i.Get("nbf").Int(), 0)), "%s", i) + assert.True(t, time.Now().Before(time.Unix(i.Get("exp").Int(), 0)), "%s", i) + assert.WithinDuration(t, expectedExp, time.Unix(i.Get("exp").Int(), 0), time.Second) + assert.EqualValues(t, `bar`, i.Get("ext.foo").String(), "%s", i) + assert.EqualValues(t, scopes, 
i.Get("scp").Raw, "%s", i) + return i + } + + waitForRefreshTokenExpiry := func() { + time.Sleep(reg.Config().GetRefreshTokenLifespan(ctx) + time.Second) + } + + t.Run("case=checks if request fails when audience does not match", func(t *testing.T) { + testhelpers.NewLoginConsentUI(t, reg.Config(), testhelpers.HTTPServerNoExpectedCallHandler(t), testhelpers.HTTPServerNoExpectedCallHandler(t)) + _, conf := newDeviceClient(t, reg) + resp, err := conf.DeviceAuth(ctx, oauth2.SetAuthURLParam("audience", "https://not-ory-api/")) + require.Error(t, err) + var devErr *oauth2.RetrieveError + require.ErrorAs(t, err, &devErr) + require.Nil(t, resp) + require.Equal(t, devErr.Response.StatusCode, http.StatusBadRequest) + }) + + subject := "aeneas-rekkas" + nonce := uuid.New() + t.Run("case=perform device flow without ID and refresh tokens", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + conf.Scopes = []string{"hydra"} + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + acceptDeviceHandler(t, c), + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + token, err := conf.DeviceAccessToken(context.Background(), resp) + require.NoError(t, err) + + assert.Empty(t, token.Extra("c_nonce_draft_00"), "should not be set if not requested") + assert.Empty(t, token.Extra("c_nonce_expires_in_draft_00"), "should not be set if not requested") + introspectAccessToken(t, conf, token, subject) + assert.Empty(t, token.Extra("id_token")) + assert.Empty(t, token.RefreshToken) + }) + t.Run("case=perform device flow with ID token", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + conf.Scopes = []string{"openid", "hydra"} + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + acceptDeviceHandler(t, c), + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + token, err := conf.DeviceAccessToken(context.Background(), resp) + iat := time.Now() + require.NoError(t, err) + + assert.Empty(t, token.Extra("c_nonce_draft_00"), "should not be set if not requested") + assert.Empty(t, token.Extra("c_nonce_expires_in_draft_00"), "should not be set if not requested") + introspectAccessToken(t, conf, token, subject) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assert.Empty(t, token.RefreshToken) + }) + t.Run("case=perform device flow with refresh token", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + conf.Scopes = []string{"hydra", "offline"} + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + acceptDeviceHandler(t, c), + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + token, err := conf.DeviceAccessToken(context.Background(), resp) + iat := time.Now() + 
require.NoError(t, err) + + assert.Empty(t, token.Extra("c_nonce_draft_00"), "should not be set if not requested") + assert.Empty(t, token.Extra("c_nonce_expires_in_draft_00"), "should not be set if not requested") + introspectAccessToken(t, conf, token, subject) + assert.Empty(t, token.Extra("id_token")) + assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + }) + t.Run("case=perform device flow with ID token and refresh tokens", func(t *testing.T) { + run := func(t *testing.T, strategy string) { + c, conf := newDeviceClient(t, reg) + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + acceptDeviceHandler(t, c), + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + token, err := conf.DeviceAccessToken(context.Background(), resp) + iat := time.Now() + require.NoError(t, err) + + assert.Empty(t, token.Extra("c_nonce_draft_00"), "should not be set if not requested") + assert.Empty(t, token.Extra("c_nonce_expires_in_draft_00"), "should not be set if not requested") + introspectAccessToken(t, conf, token, subject) + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + + t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { + require.NotEmpty(t, token.RefreshToken) + token.Expiry = token.Expiry.Add(-time.Hour * 24) + iat = time.Now() + refreshedToken, err := conf.TokenSource(context.Background(), token).Token() + require.NoError(t, err) + + require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) + require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) + introspectAccessToken(t, conf, refreshedToken, subject) + + t.Run("followup=refreshed tokens contain valid tokens", func(t *testing.T) { + assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(reg.Config().GetAccessTokenLifespan(ctx)), `["hydra","offline","openid"]`) + assertIDToken(t, refreshedToken, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, refreshedToken, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + }) + + t.Run("followup=original access token is no longer valid", func(t *testing.T) { + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.False(t, i.Get("active").Bool(), "%s", i) + }) + + t.Run("followup=original refresh token is no longer valid", func(t *testing.T) { + _, err := conf.TokenSource(context.Background(), token).Token() + assert.Error(t, err) + }) + + t.Run("followup=but fail subsequent refresh because expiry was reached", func(t *testing.T) { + waitForRefreshTokenExpiry() + + // Force golang to refresh token + refreshedToken.Expiry = refreshedToken.Expiry.Add(-time.Hour * 24) + _, err := conf.TokenSource(context.Background(), refreshedToken).Token() + require.Error(t, err) + }) + }) + } + + t.Run("strategy=jwt", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + 
run(t, "jwt") + }) + + t.Run("strategy=opaque", func(t *testing.T) { + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque") + }) + }) + t.Run("case=perform flow with audience", func(t *testing.T) { + expectAud := "https://api.ory.sh/" + c, conf := newDeviceClient(t, reg) + testhelpers.NewDeviceLoginConsentUI( + t, + reg.Config(), + acceptDeviceHandler(t, c), + acceptLoginHandler(t, c, subject, conf.Scopes, func(r *hydra.OAuth2LoginRequest) *hydra.AcceptOAuth2LoginRequest { + assert.False(t, r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + return nil + }), + acceptConsentHandler(t, c, subject, conf.Scopes, func(r *hydra.OAuth2ConsentRequest) { + assert.False(t, *r.Skip) + assert.EqualValues(t, []string{expectAud}, r.RequestedAccessTokenAudience) + }), + ) + + resp, err := conf.DeviceAuth(ctx, oauth2.SetAuthURLParam("audience", "https://api.ory.sh/")) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + + token, err := conf.DeviceAccessToken(context.Background(), resp) + require.NoError(t, err) + + claims := introspectAccessToken(t, conf, token, subject) + aud := claims.Get("aud").Array() + require.Len(t, aud, 1) + assert.EqualValues(t, aud[0].String(), expectAud) + + assertIDToken(t, token, conf, subject, nonce, time.Now().Add(reg.Config().GetIDTokenLifespan(ctx))) + }) + + t.Run("case=respects client token lifespan configuration", func(t *testing.T) { + run := func(t *testing.T, strategy string, c *client.Client, conf *oauth2.Config, expectedLifespans client.Lifespans) { + testhelpers.NewDeviceLoginConsentUI( + t, + reg.Config(), + acceptDeviceHandler(t, c), + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + + token, err := conf.DeviceAccessToken(context.Background(), resp) + iat := time.Now() + require.NoError(t, err) + + body := introspectAccessToken(t, conf, token, subject) + assert.WithinDuration(t, iat.Add(expectedLifespans.DeviceAuthorizationGrantAccessTokenLifespan.Duration), time.Unix(body.Get("exp").Int(), 0), time.Second) + + assertJWTAccessToken(t, strategy, conf, token, subject, iat.Add(expectedLifespans.DeviceAuthorizationGrantAccessTokenLifespan.Duration), `["hydra","offline","openid"]`) + assertIDToken(t, token, conf, subject, nonce, iat.Add(expectedLifespans.DeviceAuthorizationGrantIDTokenLifespan.Duration)) + assertRefreshToken(t, token, conf, iat.Add(expectedLifespans.DeviceAuthorizationGrantRefreshTokenLifespan.Duration)) + + t.Run("followup=successfully perform refresh token flow", func(t *testing.T) { + require.NotEmpty(t, token.RefreshToken) + token.Expiry = token.Expiry.Add(-time.Hour * 24) + refreshedToken, err := conf.TokenSource(context.Background(), token).Token() + iat = time.Now() + require.NoError(t, err) + assertRefreshToken(t, refreshedToken, conf, iat.Add(expectedLifespans.RefreshTokenGrantRefreshTokenLifespan.Duration)) + assertJWTAccessToken(t, strategy, conf, refreshedToken, subject, iat.Add(expectedLifespans.RefreshTokenGrantAccessTokenLifespan.Duration), `["hydra","offline","openid"]`) + assertIDToken(t, refreshedToken, conf, subject, nonce, 
iat.Add(expectedLifespans.RefreshTokenGrantIDTokenLifespan.Duration)) + + require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) + require.NotEqual(t, token.Extra("id_token"), refreshedToken.Extra("id_token")) + + body := introspectAccessToken(t, conf, refreshedToken, subject) + assert.WithinDuration(t, iat.Add(expectedLifespans.RefreshTokenGrantAccessTokenLifespan.Duration), time.Unix(body.Get("exp").Int(), 0), time.Second) + + t.Run("followup=original access token is no longer valid", func(t *testing.T) { + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.False(t, i.Get("active").Bool(), "%s", i) + }) + + t.Run("followup=original refresh token is no longer valid", func(t *testing.T) { + _, err := conf.TokenSource(context.Background(), token).Token() + assert.Error(t, err) + }) + }) + } + + t.Run("case=custom-lifespans-active-jwt", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + ls := testhelpers.TestLifespans + ls.DeviceAuthorizationGrantAccessTokenLifespan = x.NullDuration{Valid: true, Duration: 6 * time.Second} + testhelpers.UpdateClientTokenLifespans( + t, + &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, + c.GetID(), + ls, adminTS, + ) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + run(t, "jwt", c, conf, ls) + }) + + t.Run("case=custom-lifespans-active-opaque", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + ls := testhelpers.TestLifespans + ls.DeviceAuthorizationGrantAccessTokenLifespan = x.NullDuration{Valid: true, Duration: 6 * time.Second} + testhelpers.UpdateClientTokenLifespans( + t, + &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, + c.GetID(), + ls, adminTS, + ) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + run(t, "opaque", c, conf, ls) + }) + + t.Run("case=custom-lifespans-unset", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + testhelpers.UpdateClientTokenLifespans(t, &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, c.GetID(), testhelpers.TestLifespans, adminTS) + testhelpers.UpdateClientTokenLifespans(t, &oauth2.Config{ClientID: c.GetID(), ClientSecret: conf.ClientSecret}, c.GetID(), client.Lifespans{}, adminTS) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + + //goland:noinspection GoDeprecation + expectedLifespans := client.Lifespans{ + AuthorizationCodeGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + AuthorizationCodeGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + AuthorizationCodeGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + ClientCredentialsGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + ImplicitGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + ImplicitGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + JwtBearerGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + PasswordGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + PasswordGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + 
RefreshTokenGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + RefreshTokenGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + RefreshTokenGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + DeviceAuthorizationGrantIDTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetIDTokenLifespan(ctx)}, + DeviceAuthorizationGrantAccessTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetAccessTokenLifespan(ctx)}, + DeviceAuthorizationGrantRefreshTokenLifespan: x.NullDuration{Valid: true, Duration: reg.Config().GetRefreshTokenLifespan(ctx)}, + } + run(t, "opaque", c, conf, expectedLifespans) + }) + }) + t.Run("case=cannot reuse user_code", func(t *testing.T) { + c, conf := newDeviceClient(t, reg) + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + func(w http.ResponseWriter, r *http.Request) { + userCode := r.URL.Query().Get("user_code") + payload := hydra.AcceptDeviceUserCodeRequest{ + UserCode: &userCode, + } + + v, _, err := adminClient.OAuth2API.AcceptUserCodeRequest(context.Background()). + DeviceChallenge(r.URL.Query().Get("device_challenge")). + AcceptDeviceUserCodeRequest(payload). + Execute() + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return + } + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }, + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + loginFlowResp := acceptUserCode(t, conf, nil, resp) + require.NotNil(t, loginFlowResp) + token, err := conf.DeviceAccessToken(context.Background(), resp) + iat := time.Now() + require.NoError(t, err) + + introspectAccessToken(t, conf, token, subject) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + + hc := testhelpers.NewEmptyJarClient(t) + + loginFlowResp2, err := hc.Get(resp.VerificationURIComplete) + require.NoError(t, err) + require.Equal(t, loginFlowResp2.StatusCode, http.StatusBadRequest) + }) + t.Run("case=cannot reuse device_challenge", func(t *testing.T) { + var deviceChallenge string + c, conf := newDeviceClient(t, reg) + testhelpers.NewDeviceLoginConsentUI(t, reg.Config(), + func(w http.ResponseWriter, r *http.Request) { + userCode := r.URL.Query().Get("user_code") + payload := hydra.AcceptDeviceUserCodeRequest{ + UserCode: &userCode, + } + + if deviceChallenge == "" { + deviceChallenge = r.URL.Query().Get("device_challenge") + } + v, _, err := adminClient.OAuth2API.AcceptUserCodeRequest(context.Background()). + DeviceChallenge(deviceChallenge). + AcceptDeviceUserCodeRequest(payload). 
+ Execute() + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return + } + require.NoError(t, err) + require.NotEmpty(t, v.RedirectTo) + http.Redirect(w, r, v.RedirectTo, http.StatusFound) + }, + acceptLoginHandler(t, c, subject, conf.Scopes, nil), + acceptConsentHandler(t, c, subject, conf.Scopes, nil), + ) + + resp, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp.DeviceCode) + require.NotEmpty(t, resp.UserCode) + + hc := testhelpers.NewEmptyJarClient(t) + loginFlowResp := acceptUserCode(t, conf, hc, resp) + require.NoError(t, err) + require.Contains(t, reg.Config().DeviceDoneURL(ctx).String(), loginFlowResp.Request.URL.Path, "did not end up in post device URL") + require.Equal(t, loginFlowResp.Request.URL.Query().Get("client_id"), conf.ClientID) + + require.NotNil(t, loginFlowResp) + token, err := conf.DeviceAccessToken(context.Background(), resp) + iat := time.Now() + require.NoError(t, err) + + introspectAccessToken(t, conf, token, subject) + assertIDToken(t, token, conf, subject, nonce, iat.Add(reg.Config().GetIDTokenLifespan(ctx))) + assertRefreshToken(t, token, conf, iat.Add(reg.Config().GetRefreshTokenLifespan(ctx))) + + resp2, err := getDeviceCode(t, conf, nil) + require.NoError(t, err) + require.NotEmpty(t, resp2.DeviceCode) + require.NotEmpty(t, resp2.UserCode) + + payload := hydra.AcceptDeviceUserCodeRequest{ + UserCode: &resp2.UserCode, + } + + acceptResp, _, err := adminClient.OAuth2API.AcceptUserCodeRequest(context.Background()). + DeviceChallenge(deviceChallenge). + AcceptDeviceUserCodeRequest(payload). + Execute() + require.NoError(t, err) + + loginFlowResp2, err := hc.Get(acceptResp.RedirectTo) + require.NoError(t, err) + require.Equalf(t, http.StatusForbidden, loginFlowResp2.StatusCode, "requested %q", acceptResp.RedirectTo) + }) +} + +func newDeviceClient( + t *testing.T, + reg interface { + config.Provider + client.Registry + }, + opts ...func(*client.Client), +) (*client.Client, *oauth2.Config) { + ctx := context.Background() + c := &client.Client{ + GrantTypes: []string{ + "refresh_token", + "urn:ietf:params:oauth:grant-type:device_code", + }, + Scope: "hydra offline openid", + Audience: []string{"https://api.ory.sh/"}, + TokenEndpointAuthMethod: "none", + } + + // apply options + for _, o := range opts { + o(c) + } + + require.NoError(t, reg.ClientManager().CreateClient(ctx, c)) + return c, &oauth2.Config{ + ClientID: c.GetID(), + Endpoint: oauth2.Endpoint{ + DeviceAuthURL: reg.Config().OAuth2DeviceAuthorisationURL(ctx).String(), + TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), + AuthStyle: oauth2.AuthStyleInHeader, + }, + Scopes: strings.Split(c.Scope, " "), + } +} diff --git a/oauth2/oauth2_helper_test.go b/oauth2/oauth2_helper_test.go index a1d4f5ad9bb..a5438df6dc1 100644 --- a/oauth2/oauth2_helper_test.go +++ b/oauth2/oauth2_helper_test.go @@ -10,11 +10,12 @@ import ( "github.com/pkg/errors" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" "github.com/ory/x/sqlxx" - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" ) var _ consent.Strategy = new(consentMock) @@ -25,27 +26,44 @@ type consentMock struct { requestTime time.Time } -func (c *consentMock) HandleOAuth2AuthorizationRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, req 
fosite.AuthorizeRequester) (*consent.AcceptOAuth2ConsentRequest, error) { +func (c *consentMock) HandleOAuth2AuthorizationRequest(ctx context.Context, w http.ResponseWriter, r *http.Request, req fosite.AuthorizeRequester) (*flow.Flow, error) { if c.deny { return nil, fosite.ErrRequestForbidden } - return &consent.AcceptOAuth2ConsentRequest{ - ConsentRequest: &consent.OAuth2ConsentRequest{ - Subject: "foo", - ACR: "1", - }, - AuthenticatedAt: sqlxx.NullTime(c.authTime), - GrantedScope: []string{"offline", "openid", "hydra.*"}, - Session: &consent.AcceptOAuth2ConsentRequestSession{ - AccessToken: map[string]interface{}{}, - IDToken: map[string]interface{}{}, - }, - RequestedAt: c.requestTime, + return &flow.Flow{ + Subject: "foo", + ACR: "1", + LoginAuthenticatedAt: sqlxx.NullTime(c.authTime), + GrantedScope: []string{"offline", "openid", "hydra.*"}, + SessionAccessToken: map[string]interface{}{}, + SessionIDToken: map[string]interface{}{}, + RequestedAt: c.requestTime, }, nil } -func (c *consentMock) HandleOpenIDConnectLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*consent.LogoutResult, error) { +func (c *consentMock) HandleOAuth2DeviceAuthorizationRequest(ctx context.Context, w http.ResponseWriter, r *http.Request) (*flow.Flow, error) { + if c.deny { + return nil, fosite.ErrRequestForbidden + } + + return &flow.Flow{ + Subject: "foo", + ACR: "1", + DeviceChallengeID: "12345", + LoginAuthenticatedAt: sqlxx.NullTime(c.authTime), + GrantedScope: []string{"offline", "openid", "hydra.*"}, + SessionAccessToken: map[string]interface{}{}, + SessionIDToken: map[string]interface{}{}, + RequestedAt: c.requestTime, + }, nil +} + +func (c *consentMock) HandleOpenIDConnectLogout(ctx context.Context, w http.ResponseWriter, r *http.Request) (*flow.LogoutResult, error) { + panic("not implemented") +} + +func (c *consentMock) HandleHeadlessLogout(ctx context.Context, w http.ResponseWriter, r *http.Request, sid string) error { panic("not implemented") } diff --git a/oauth2/oauth2_jwt_bearer_test.go b/oauth2/oauth2_jwt_bearer_test.go index 755dc883a36..48984ff2e6d 100644 --- a/oauth2/oauth2_jwt_bearer_test.go +++ b/oauth2/oauth2_jwt_bearer_test.go @@ -5,44 +5,45 @@ package oauth2_test import ( "context" + "encoding/base64" "encoding/json" "fmt" "io" "net/http" + "net/http/httptest" "net/url" "strings" "testing" "time" - "github.com/google/uuid" - "github.com/tidwall/gjson" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/fosite/token/jwt" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2/trust" - + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" goauth2 "golang.org/x/oauth2" "golang.org/x/oauth2/clientcredentials" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/x/contextx" - - hc "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" + hc "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/testhelpers" 
+ "github.com/ory/hydra/v2/jwk" + hydraoauth2 "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/x/configx" ) func TestJWTBearer(t *testing.T) { - ctx := context.Background() - reg := internal.NewMockedRegistry(t, &contextx.Default{}) - reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "opaque") + t.Parallel() + + ctx := t.Context() + reg := testhelpers.NewRegistryMemory(t, driver.WithConfigOptions(configx.WithValue(config.KeyAccessTokenStrategy, "opaque"))) _, admin := testhelpers.NewOAuth2Server(ctx, t, reg) - secret := uuid.New().String() + secret := uuid.Must(uuid.NewV4()).String() client := &hc.Client{ Secret: secret, GrantTypes: []string{"client_credentials", "urn:ietf:params:oauth:grant-type:jwt-bearer"}, @@ -61,12 +62,14 @@ func TestJWTBearer(t *testing.T) { } var getToken = func(t *testing.T, conf *clientcredentials.Config) (*goauth2.Token, error) { - conf.AuthStyle = goauth2.AuthStyleInHeader + if conf.AuthStyle == goauth2.AuthStyleAutoDetect { + conf.AuthStyle = goauth2.AuthStyleInHeader + } return conf.Token(context.Background()) } - var inspectToken = func(t *testing.T, token *goauth2.Token, cl *hc.Client, strategy string, grant trust.Grant) { - introspection := testhelpers.IntrospectToken(t, &goauth2.Config{ClientID: cl.GetID(), ClientSecret: cl.Secret}, token.AccessToken, admin) + var inspectToken = func(t *testing.T, token *goauth2.Token, cl *hc.Client, strategy string, grant trust.Grant, checkExtraClaims bool) { + introspection := testhelpers.IntrospectToken(t, token.AccessToken, admin) check := func(res gjson.Result) { assert.EqualValues(t, cl.GetID(), res.Get("client_id").String(), "%s", res.Raw) @@ -77,6 +80,10 @@ func TestJWTBearer(t *testing.T) { assert.True(t, res.Get("exp").Int() >= res.Get("iat").Int()+int64(reg.Config().GetAccessTokenLifespan(ctx).Seconds()), "%s", res.Raw) assert.EqualValues(t, fmt.Sprintf(`["%s"]`, reg.Config().OAuth2TokenURL(ctx).String()), res.Get("aud").Raw, "%s", res.Raw) + + if checkExtraClaims { + require.True(t, res.Get("ext.hooked").Bool()) + } } check(introspection) @@ -89,15 +96,13 @@ func TestJWTBearer(t *testing.T) { return } - body, err := x.DecodeSegment(strings.Split(token.AccessToken, ".")[1]) - require.NoError(t, err) - jwtClaims := gjson.ParseBytes(body) + jwtClaims := gjson.ParseBytes(testhelpers.InsecureDecodeJWT(t, token.AccessToken)) assert.NotEmpty(t, jwtClaims.Get("jti").String()) assert.NotEmpty(t, jwtClaims.Get("iss").String()) assert.NotEmpty(t, jwtClaims.Get("client_id").String()) assert.EqualValues(t, "offline_access", introspection.Get("scope").String(), "%s", introspection.Raw) - header, err := x.DecodeSegment(strings.Split(token.AccessToken, ".")[0]) + header, err := base64.RawURLEncoding.DecodeString(strings.Split(token.AccessToken, ".")[0]) require.NoError(t, err) jwtHeader := gjson.ParseBytes(header) assert.NotEmpty(t, jwtHeader.Get("kid").String()) @@ -129,29 +134,29 @@ func TestJWTBearer(t *testing.T) { assert.Contains(t, err.Error(), "urn:ietf:params:oauth:grant-type:jwt-bearer") }) - set, kid := uuid.NewString(), uuid.NewString() - keys, err := jwk.GenerateJWK(ctx, jose.RS256, kid, "sig") + set, kid := uuid.Must(uuid.NewV4()).String(), uuid.Must(uuid.NewV4()).String() + keys, err := jwk.GenerateJWK(jose.RS256, kid, "sig") require.NoError(t, err) trustGrant := trust.Grant{ - ID: uuid.NewString(), + ID: uuid.Must(uuid.NewV4()), Issuer: set, - Subject: uuid.NewString(), + Subject: 
uuid.Must(uuid.NewV4()).String(), AllowAnySubject: false, Scope: []string{"offline_access"}, ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: set, KeyID: kid}, } require.NoError(t, reg.GrantManager().CreateGrant(ctx, trustGrant, keys.Keys[0].Public())) - signer := jwk.NewDefaultJWTSigner(reg.Config(), reg, set) + signer := jwk.NewDefaultJWTSigner(reg, set) signer.GetPrivateKey = func(ctx context.Context) (interface{}, error) { return keys.Keys[0], nil } t.Run("case=unable to exchange token with a non-allowed subject", func(t *testing.T) { token, _, err := signer.Generate(ctx, jwt.MapClaims{ - "jti": uuid.NewString(), + "jti": uuid.Must(uuid.NewV4()).String(), "iss": trustGrant.Issuer, - "sub": uuid.NewString(), + "sub": uuid.Must(uuid.NewV4()).String(), "aud": reg.Config().OAuth2TokenURL(ctx).String(), "exp": time.Now().Add(time.Hour).Unix(), "iat": time.Now().Add(-time.Minute).Unix(), @@ -167,7 +172,7 @@ func TestJWTBearer(t *testing.T) { t.Run("case=unable to exchange token with non-allowed scope", func(t *testing.T) { token, _, err := signer.Generate(ctx, jwt.MapClaims{ - "jti": uuid.NewString(), + "jti": uuid.Must(uuid.NewV4()).String(), "iss": trustGrant.Issuer, "sub": trustGrant.Subject, "aud": reg.Config().OAuth2TokenURL(ctx).String(), @@ -186,13 +191,13 @@ func TestJWTBearer(t *testing.T) { t.Run("case=unable to exchange token with an unknown kid", func(t *testing.T) { token, _, err := signer.Generate(ctx, jwt.MapClaims{ - "jti": uuid.NewString(), + "jti": uuid.Must(uuid.NewV4()).String(), "iss": trustGrant.Issuer, "sub": trustGrant.Subject, "aud": reg.Config().OAuth2TokenURL(ctx).String(), "exp": time.Now().Add(time.Hour).Unix(), "iat": time.Now().Add(-time.Minute).Unix(), - }, &jwt.Headers{Extra: map[string]interface{}{"kid": uuid.NewString()}}) + }, &jwt.Headers{Extra: map[string]interface{}{"kid": uuid.Must(uuid.NewV4()).String()}}) require.NoError(t, err) conf := newConf(client) @@ -203,15 +208,15 @@ func TestJWTBearer(t *testing.T) { }) t.Run("case=unable to exchange token with an invalid key", func(t *testing.T) { - keys, err := jwk.GenerateJWK(ctx, jose.RS256, kid, "sig") + keys, err := jwk.GenerateJWK(jose.RS256, kid, "sig") require.NoError(t, err) - signer := jwk.NewDefaultJWTSigner(reg.Config(), reg, set) + signer := jwk.NewDefaultJWTSigner(reg, set) signer.GetPrivateKey = func(ctx context.Context) (interface{}, error) { return keys.Keys[0], nil } token, _, err := signer.Generate(ctx, jwt.MapClaims{ - "jti": uuid.NewString(), + "jti": uuid.Must(uuid.NewV4()).String(), "iss": trustGrant.Issuer, "sub": trustGrant.Subject, "aud": reg.Config().OAuth2TokenURL(ctx).String(), @@ -233,7 +238,7 @@ func TestJWTBearer(t *testing.T) { reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) token, _, err := signer.Generate(ctx, jwt.MapClaims{ - "jti": uuid.NewString(), + "jti": uuid.Must(uuid.NewV4()).String(), "iss": trustGrant.Issuer, "sub": trustGrant.Subject, "aud": reg.Config().OAuth2TokenURL(ctx).String(), @@ -248,7 +253,7 @@ func TestJWTBearer(t *testing.T) { result, err := getToken(t, conf) require.NoError(t, err) - inspectToken(t, result, client, strategy, trustGrant) + inspectToken(t, result, client, strategy, trustGrant, false) } } @@ -265,7 +270,7 @@ func TestJWTBearer(t *testing.T) { reg.Config().MustSet(ctx, "config.KeyOAuth2GrantJWTClientAuthOptional", true) token, _, err := signer.Generate(ctx, jwt.MapClaims{ - "jti": uuid.NewString(), + "jti": uuid.Must(uuid.NewV4()).String(), "iss": trustGrant.Issuer, "sub": trustGrant.Subject, "aud": 
reg.Config().OAuth2TokenURL(ctx).String(), @@ -279,7 +284,7 @@ func TestJWTBearer(t *testing.T) { "assertion": {token}, }) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck body, err := io.ReadAll(res.Body) require.NoError(t, err) require.EqualValues(t, http.StatusOK, res.StatusCode, "%s", body) @@ -288,7 +293,264 @@ func TestJWTBearer(t *testing.T) { require.NoError(t, json.Unmarshal(body, &result)) assert.NotEmpty(t, result.AccessToken, "%s", body) - inspectToken(t, &result, client, strategy, trustGrant) + inspectToken(t, &result, client, strategy, trustGrant, false) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should call token hook if configured", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + audience := reg.Config().OAuth2TokenURL(ctx).String() + grantType := "urn:ietf:params:oauth:grant-type:jwt-bearer" + + token, _, err := signer.Generate(ctx, jwt.MapClaims{ + "jti": uuid.Must(uuid.NewV4()).String(), + "iss": trustGrant.Issuer, + "sub": trustGrant.Subject, + "aud": audience, + "exp": time.Now().Add(time.Hour).Unix(), + "iat": time.Now().Add(-time.Minute).Unix(), + }, &jwt.Headers{Extra: map[string]interface{}{"kid": kid}}) + require.NoError(t, err) + + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") + + expectedGrantedScopes := []string{client.Scope} + expectedGrantedAudience := []string{audience} + expectedPayload := map[string][]string{ + "assertion": {token}, + "grant_type": {"urn:ietf:params:oauth:grant-type:jwt-bearer"}, + "scope": {"offline_access"}, + } + + var hookReq hydraoauth2.TokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + require.NotEmpty(t, hookReq.Session) + require.Equal(t, hookReq.Session.Extra, map[string]interface{}{}) + require.NotEmpty(t, hookReq.Request) + require.ElementsMatch(t, hookReq.Request.GrantedScopes, expectedGrantedScopes) + require.ElementsMatch(t, hookReq.Request.GrantedAudience, expectedGrantedAudience) + require.Equal(t, expectedPayload, hookReq.Request.Payload) + + claims := map[string]interface{}{ + "hooked": true, + } + + hookResp := hydraoauth2.TokenHookResponse{ + Session: flow.AcceptOAuth2ConsentRequestSession{ + AccessToken: claims, + IDToken: claims, + }, + } + + w.WriteHeader(http.StatusOK) + require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + conf := newConf(client) + conf.EndpointParams = url.Values{"grant_type": {grantType}, "assertion": {token}} + + result, err := getToken(t, conf) + require.NoError(t, err) + + inspectToken(t, result, client, strategy, trustGrant, true) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should call token hook if configured and omit client_secret from payload", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + audience := reg.Config().OAuth2TokenURL(ctx).String() + grantType := "urn:ietf:params:oauth:grant-type:jwt-bearer" + + token, _, err := signer.Generate(ctx, jwt.MapClaims{ + "jti": uuid.Must(uuid.NewV4()).String(), + "iss": trustGrant.Issuer, + "sub": 
trustGrant.Subject, + "aud": audience, + "exp": time.Now().Add(time.Hour).Unix(), + "iat": time.Now().Add(-time.Minute).Unix(), + }, &jwt.Headers{Extra: map[string]interface{}{"kid": kid}}) + require.NoError(t, err) + + client := &hc.Client{ + Secret: secret, + GrantTypes: []string{"urn:ietf:params:oauth:grant-type:jwt-bearer"}, + Scope: "offline_access", + TokenEndpointAuthMethod: "client_secret_post", + } + require.NoError(t, reg.ClientManager().CreateClient(ctx, client)) + + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") + + expectedGrantedScopes := []string{client.Scope} + expectedGrantedAudience := []string{audience} + expectedPayload := map[string][]string{ + "assertion": {token}, + "client_id": {client.GetID()}, + "grant_type": {"urn:ietf:params:oauth:grant-type:jwt-bearer"}, + "scope": {"offline_access"}, + } + + var hookReq hydraoauth2.TokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + require.NotEmpty(t, hookReq.Session) + require.Equal(t, hookReq.Session.Extra, map[string]interface{}{}) + require.NotEmpty(t, hookReq.Request) + require.ElementsMatch(t, hookReq.Request.GrantedScopes, expectedGrantedScopes) + require.ElementsMatch(t, hookReq.Request.GrantedAudience, expectedGrantedAudience) + require.Equal(t, hookReq.Request.Payload, expectedPayload) + + claims := map[string]interface{}{ + "hooked": true, + } + + hookResp := hydraoauth2.TokenHookResponse{ + Session: flow.AcceptOAuth2ConsentRequestSession{ + AccessToken: claims, + IDToken: claims, + }, + } + + w.WriteHeader(http.StatusOK) + require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + conf := newConf(client) + conf.AuthStyle = goauth2.AuthStyleInParams + conf.EndpointParams = url.Values{"grant_type": {grantType}, "assertion": {token}} + + result, err := getToken(t, conf) + require.NoError(t, err) + + inspectToken(t, result, client, strategy, trustGrant, true) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should fail token if hook fails", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + token, _, err := signer.Generate(ctx, jwt.MapClaims{ + "jti": uuid.Must(uuid.NewV4()).String(), + "iss": trustGrant.Issuer, + "sub": trustGrant.Subject, + "aud": reg.Config().OAuth2TokenURL(ctx).String(), + "exp": time.Now().Add(time.Hour).Unix(), + "iat": time.Now().Add(-time.Minute).Unix(), + }, &jwt.Headers{Extra: map[string]interface{}{"kid": kid}}) + require.NoError(t, err) + + conf := newConf(client) + conf.EndpointParams = url.Values{"grant_type": {"urn:ietf:params:oauth:grant-type:jwt-bearer"}, "assertion": {token}} + + _, tokenError := getToken(t, conf) + require.Error(t, tokenError) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should fail 
token if hook denied the request", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusForbidden) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + token, _, err := signer.Generate(ctx, jwt.MapClaims{ + "jti": uuid.Must(uuid.NewV4()).String(), + "iss": trustGrant.Issuer, + "sub": trustGrant.Subject, + "aud": reg.Config().OAuth2TokenURL(ctx).String(), + "exp": time.Now().Add(time.Hour).Unix(), + "iat": time.Now().Add(-time.Minute).Unix(), + }, &jwt.Headers{Extra: map[string]interface{}{"kid": kid}}) + require.NoError(t, err) + + conf := newConf(client) + conf.EndpointParams = url.Values{"grant_type": {"urn:ietf:params:oauth:grant-type:jwt-bearer"}, "assertion": {token}} + + _, tokenError := getToken(t, conf) + require.Error(t, tokenError) + } + } + + t.Run("strategy=opaque", run("opaque")) + t.Run("strategy=jwt", run("jwt")) + }) + + t.Run("should fail token if hook response is malformed", func(t *testing.T) { + run := func(strategy string) func(t *testing.T) { + return func(t *testing.T) { + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, strategy) + reg.Config().MustSet(ctx, config.KeyTokenHook, hs.URL) + + defer reg.Config().MustSet(ctx, config.KeyTokenHook, nil) + + token, _, err := signer.Generate(ctx, jwt.MapClaims{ + "jti": uuid.Must(uuid.NewV4()).String(), + "iss": trustGrant.Issuer, + "sub": trustGrant.Subject, + "aud": reg.Config().OAuth2TokenURL(ctx).String(), + "exp": time.Now().Add(time.Hour).Unix(), + "iat": time.Now().Add(-time.Minute).Unix(), + }, &jwt.Headers{Extra: map[string]interface{}{"kid": kid}}) + require.NoError(t, err) + + conf := newConf(client) + conf.EndpointParams = url.Values{"grant_type": {"urn:ietf:params:oauth:grant-type:jwt-bearer"}, "assertion": {token}} + + _, tokenError := getToken(t, conf) + require.Error(t, tokenError) } } diff --git a/oauth2/oauth2_provider_mock_test.go b/oauth2/oauth2_provider_mock_test.go index 83d584eb12f..5f18d5864c9 100644 --- a/oauth2/oauth2_provider_mock_test.go +++ b/oauth2/oauth2_provider_mock_test.go @@ -1,8 +1,8 @@ -// Copyright © 2022 Ory Corp +// Copyright © 2025 Ory Corp // SPDX-License-Identifier: Apache-2.0 // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/ory/fosite (interfaces: OAuth2Provider) +// Source: github.com/ory/hydra/v2/fosite (interfaces: OAuth2Provider) // Package oauth2_test is a generated GoMock package. package oauth2_test @@ -14,7 +14,7 @@ import ( gomock "github.com/golang/mock/gomock" - fosite "github.com/ory/fosite" + fosite "github.com/ory/hydra/v2/fosite" ) // MockOAuth2Provider is a mock of OAuth2Provider interface. @@ -121,6 +121,36 @@ func (mr *MockOAuth2ProviderMockRecorder) NewAuthorizeResponse(arg0, arg1, arg2 return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewAuthorizeResponse", reflect.TypeOf((*MockOAuth2Provider)(nil).NewAuthorizeResponse), arg0, arg1, arg2) } +// NewDeviceRequest mocks base method. 
+func (m *MockOAuth2Provider) NewDeviceRequest(arg0 context.Context, arg1 *http.Request) (fosite.DeviceRequester, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "NewDeviceRequest", arg0, arg1) + ret0, _ := ret[0].(fosite.DeviceRequester) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// NewDeviceRequest indicates an expected call of NewDeviceRequest. +func (mr *MockOAuth2ProviderMockRecorder) NewDeviceRequest(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewDeviceRequest", reflect.TypeOf((*MockOAuth2Provider)(nil).NewDeviceRequest), arg0, arg1) +} + +// NewDeviceResponse mocks base method. +func (m *MockOAuth2Provider) NewDeviceResponse(arg0 context.Context, arg1 fosite.DeviceRequester, arg2 fosite.Session) (fosite.DeviceResponder, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "NewDeviceResponse", arg0, arg1, arg2) + ret0, _ := ret[0].(fosite.DeviceResponder) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// NewDeviceResponse indicates an expected call of NewDeviceResponse. +func (mr *MockOAuth2ProviderMockRecorder) NewDeviceResponse(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewDeviceResponse", reflect.TypeOf((*MockOAuth2Provider)(nil).NewDeviceResponse), arg0, arg1, arg2) +} + // NewIntrospectionRequest mocks base method. func (m *MockOAuth2Provider) NewIntrospectionRequest(arg0 context.Context, arg1 *http.Request, arg2 fosite.Session) (fosite.IntrospectionResponder, error) { m.ctrl.T.Helper() @@ -181,7 +211,7 @@ func (mr *MockOAuth2ProviderMockRecorder) NewRevocationRequest(arg0, arg1 interf } // WriteAccessError mocks base method. -func (m *MockOAuth2Provider) WriteAccessError(arg0 context.Context, arg1 http.ResponseWriter, arg2 fosite.AccessRequester, arg3 error) { +func (m *MockOAuth2Provider) WriteAccessError(arg0 context.Context, arg1 http.ResponseWriter, arg2 fosite.Requester, arg3 error) { m.ctrl.T.Helper() m.ctrl.Call(m, "WriteAccessError", arg0, arg1, arg2, arg3) } @@ -228,6 +258,18 @@ func (mr *MockOAuth2ProviderMockRecorder) WriteAuthorizeResponse(arg0, arg1, arg return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WriteAuthorizeResponse", reflect.TypeOf((*MockOAuth2Provider)(nil).WriteAuthorizeResponse), arg0, arg1, arg2, arg3) } +// WriteDeviceResponse mocks base method. +func (m *MockOAuth2Provider) WriteDeviceResponse(arg0 context.Context, arg1 http.ResponseWriter, arg2 fosite.DeviceRequester, arg3 fosite.DeviceResponder) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "WriteDeviceResponse", arg0, arg1, arg2, arg3) +} + +// WriteDeviceResponse indicates an expected call of WriteDeviceResponse. +func (mr *MockOAuth2ProviderMockRecorder) WriteDeviceResponse(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WriteDeviceResponse", reflect.TypeOf((*MockOAuth2Provider)(nil).WriteDeviceResponse), arg0, arg1, arg2, arg3) +} + // WriteIntrospectionError mocks base method. 
func (m *MockOAuth2Provider) WriteIntrospectionError(arg0 context.Context, arg1 http.ResponseWriter, arg2 error) { m.ctrl.T.Helper() diff --git a/oauth2/oauth2_refresh_token_test.go b/oauth2/oauth2_refresh_token_test.go deleted file mode 100644 index 1d40e0af4d1..00000000000 --- a/oauth2/oauth2_refresh_token_test.go +++ /dev/null @@ -1,270 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package oauth2_test - -import ( - "context" - "errors" - "fmt" - "math/rand" - "net/url" - "strings" - "sync" - "testing" - "time" - - "github.com/gofrs/uuid" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/ory/fosite" - hc "github.com/ory/hydra/client" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/oauth2" - "github.com/ory/x/contextx" - "github.com/ory/x/dbal" - "github.com/ory/x/networkx" -) - -// TestCreateRefreshTokenSessionStress is a sanity test to verify the fix for https://github.com/ory/hydra/issues/1719 & -// https://github.com/ory/hydra/issues/1735. -// It currently only deals with Postgres as that was what the issue was based on due to the default isolation level used -// by the storage engine. -func TestCreateRefreshTokenSessionStress(t *testing.T) { - if testing.Short() { - return - } - - // number of iterations this test will make to ensure everything is working as expected. This test is aiming to - // prove correct behaviour when the handler is getting hit with the same refresh token in concurrent requests. Given - // that problems that may occur in this scenario are "racey" in nature, it is important to run this test several times - // so to minimize the probability were we pass due to sheer luck. - testRuns := 5 - // number of workers that will concurrently hit the 'CreateRefreshTokenSession' method using the same refresh token. - // don't set this value to be too high as it will result in connection failures to the DB instance. The test is designed such that - // it will retry in the event we get unlucky and a transaction completes successfully prior to other requests getting past the - // first read. 
- workers := 10 - - token := "234c678fed33c1d2025537ae464a1ebf7d23fc4a" - tokenSignature := "4c7c7e8b3a77ad0c3ec846a21653c48b45dbfa31" - testClient := hc.Client{ - ID: uuid.Must(uuid.NewV4()), - Secret: "secret", - ResponseTypes: []string{"id_token", "code", "token"}, - GrantTypes: []string{"implicit", "refresh_token", "authorization_code", "password", "client_credentials"}, - Scope: "hydra offline openid", - Audience: []string{"https://api.ory.sh/"}, - } - - request := &fosite.AccessRequest{ - GrantTypes: []string{ - "refresh_token", - }, - Request: fosite.Request{ - RequestedAt: time.Now(), - ID: uuid.Must(uuid.NewV4()).String(), - Client: &hc.Client{ - ID: uuid.FromStringOrNil(testClient.GetID()), - }, - RequestedScope: []string{"offline"}, - GrantedScope: []string{"offline"}, - Session: oauth2.NewSession(""), - Form: url.Values{ - "refresh_token": []string{fmt.Sprintf("%s.%s", token, tokenSignature)}, - }, - }, - } - - setupRegistries(t) - - for dbName, dbRegistry := range registries { - if dbName == "memory" { - // todo check why sqlite fails with "no such table: hydra_oauth2_refresh \n sqlite create" - // should be fine though as nobody should use sqlite in production - continue - } - net := &networkx.Network{} - require.NoError(t, dbRegistry.Persister().Connection(context.Background()).First(net)) - dbRegistry.WithContextualizer(&contextx.Static{NID: net.ID, C: internal.NewConfigurationWithDefaults().Source(context.Background())}) - - ctx, _ := context.WithDeadline(context.Background(), time.Now().Add(30*time.Second)) - require.NoError(t, dbRegistry.OAuth2Storage().(clientCreator).CreateClient(ctx, &testClient)) - require.NoError(t, dbRegistry.OAuth2Storage().CreateRefreshTokenSession(ctx, tokenSignature, request)) - _, err := dbRegistry.OAuth2Storage().GetRefreshTokenSession(ctx, tokenSignature, nil) - require.NoError(t, err) - provider := dbRegistry.OAuth2Provider() - storageVersion := dbVersion(t, ctx, dbRegistry) - - var wg sync.WaitGroup - for run := 0; run < testRuns; run++ { - barrier := make(chan struct{}) - errorsCh := make(chan error, workers) - - go func() { - for w := 0; w < workers; w++ { - wg.Add(1) - go func(run, worker int) { - defer wg.Done() - ctx, _ := context.WithDeadline(context.Background(), time.Now().Add(5*time.Second)) - time.Sleep(time.Duration(rand.Intn(100)) * time.Millisecond) - // all workers will block here until the for loop above has launched all the worker go-routines - // this is to ensure we fire all the workers off at the same - <-barrier - _, err := provider.NewAccessResponse(ctx, request) - errorsCh <- err - }(run, w) - } - - // wait until all workers have completed their work - wg.Wait() - close(errorsCh) - }() - - // let the race begin! - // all worker go-routines will now attempt to hit the "NewAccessResponse" method - close(barrier) - - // process worker results - - // successCount is the number of workers that were able to call "NewAccessResponse" without receiving an error. - // if the successCount at the end of a test run is bigger than one, it means that multiple access/refresh tokens - // were issued using the same refresh token! - https://knowyourmeme.com/memes/scared-hamster - var successCount int - for err := range errorsCh { - if err != nil { - if e := (&fosite.RFC6749Error{}); errors.As(err, &e) { - switch e.ErrorField { - - // change logic below when the refresh handler starts returning 'fosite.ErrInvalidRequest' for other reasons. 
- // as of now, this error is only returned due to concurrent transactions competing to refresh using the same token. - - case fosite.ErrInvalidRequest.ErrorField, fosite.ErrServerError.ErrorField: - // the error description copy is defined by RFC 6749 and should not be different regardless of - // the underlying transactional aware storage backend used by hydra - assert.Contains(t, []string{fosite.ErrInvalidRequest.DescriptionField, fosite.ErrServerError.DescriptionField}, e.DescriptionField) - // the database error debug copy will be different depending on the underlying database used - switch dbName { - case dbal.DriverMySQL: - case dbal.DriverPostgreSQL, dbal.DriverCockroachDB: - var matched bool - for _, errSubstr := range []string{ - // both postgreSQL & cockroachDB return error code 40001 for consistency errors as a result of - // using the REPEATABLE_READ isolation level - "SQLSTATE 40001", - // possible if one worker starts the transaction AFTER another worker has successfully - // refreshed the token and committed the transaction - "not_found", - // postgres: duplicate key value violates unique constraint "hydra_oauth2_access_request_id_idx": Unable to insert or update resource because a resource with that value exists already: The request could not be completed due to concurrent access - "duplicate key", - // cockroach: restart transaction: TransactionRetryWithProtoRefreshError: TransactionRetryError: retry txn (RETRY_WRITE_TOO_OLD - WriteTooOld flag converted to WriteTooOldError): "sql txn" meta={id=7f069400 key=/Table/62/2/"02a55d6e-509b-4d7a-8458-5828b2f831a1"/0 pri=0.00598277 epo=0 ts=1600955431.566576173,2 min=1600955431.566576173,0 seq=6} lock=true stat=PENDING rts=1600955431.566576173,2 wto=false max=1600955431.566576173,0: Unable to serialize access due to a concurrent update in another session: The request could not be completed due to concurrent access - "RETRY_WRITE_TOO_OLD", - // postgres: pq: deadlock detected - "deadlock detected", - // postgres: pq: could not serialize access due to concurrent update: Unable to serialize access due to a concurrent update in another session: The request could not be completed due to concurrent access - "concurrent update", - // cockroach: this happens when there is an error with the storage - "RETRY_WRITE_TOO_OLD", - // refresh token reuse detection - "token_inactive", - } { - if strings.Contains(e.DebugField, errSubstr) { - matched = true - break - } - } - - assert.True(t, matched, "received an unexpected kind of `%s`\n"+ - "DB version: %s\n"+ - "Error description: %s\n"+ - "Error debug: %s\n"+ - "Error hint: %s\n"+ - "Raw error: %T %+v\n"+ - "Raw cause: %T %+v", - e.ErrorField, - storageVersion, - e.DescriptionField, - e.DebugField, - e.HintField, - err, err, - e, e) - } - default: - // unfortunately, MySQL does not offer the same behaviour under the "REPEATABLE_READ" isolation - // level so we have to relax this assertion just for MySQL for the time being as server_errors - // resembling the following can be returned: - // - // Error 1213: Deadlock found when trying to get lock; try restarting transaction - if dbName != dbal.DriverMySQL { - t.Errorf("an unexpected RFC6749 error with the name %q was returned.\n"+ - "Hint: has the refresh token error handling changed in fosite? 
If so, you need to add further "+ - "assertions here to cover the additional errors that are being returned by the handler.\n"+ - "DB version: %s\n"+ - "Error description: %s\n"+ - "Error debug: %s\n"+ - "Error hint: %s\n"+ - "Raw error: %+v", - e.ErrorField, - storageVersion, - e.DescriptionField, - e.DebugField, - e.HintField, - err) - } - } - } else { - t.Errorf("expected underlying error to be of type '*fosite.RFC6749Error', but it was "+ - "actually of type %T: %+v - DB version: %s", err, err, storageVersion) - } - } else { - successCount++ - } - } - - // IMPORTANT - skip consistency check for MySQL :( - // - // different DBMS's provide different consistency guarantees when using the "REPEATABLE_READ" isolation level - // Currently, MySQL's implementation of "REPEATABLE_READ" makes it possible for multiple concurrent requests - // to successfully utilize the same refresh token. Therefore, we skip the assertion below. - // - // TODO: this needs to be addressed by making it possible to use different isolation levels for various authorization - // flows depending on the underlying hydra storage backend. For example, if using MySQL, hydra should force - // the transaction isolation level to be "Serializable" when a request to the token handler is received. - - switch dbName { - case dbal.DriverMySQL: - case dbal.DriverPostgreSQL, dbal.DriverCockroachDB: - require.Equal(t, 1, successCount, "CRITICAL: in test iteration %d, %d out of %d workers "+ - "were able to use the refresh token. Exactly ONE was expected to be have been successful.", - run, - successCount, - workers) - } - - // reset state for the next test iteration - assert.NoError(t, dbRegistry.OAuth2Storage().DeleteRefreshTokenSession(ctx, tokenSignature)) - assert.NoError(t, dbRegistry.OAuth2Storage().CreateRefreshTokenSession(ctx, tokenSignature, request)) - } - } -} - -type version struct { - Version string `db:"version"` -} - -func dbVersion(t *testing.T, ctx context.Context, registry driver.Registry) string { - var v version - - versionFunc := "version()" - c := registry.Persister().Connection(ctx) - if c.Dialect.Name() == "sqlite3" { - versionFunc = "sqlite_version()" - } - /* #nosec G201 - versionFunc is an enum */ - require.NoError(t, registry.Persister().Connection(ctx).RawQuery(fmt.Sprintf("select %s as version", versionFunc)).First(&v)) - - return v.Version -} diff --git a/oauth2/oauth2_rop_test.go b/oauth2/oauth2_rop_test.go new file mode 100644 index 00000000000..c5cafa1c4aa --- /dev/null +++ b/oauth2/oauth2_rop_test.go @@ -0,0 +1,162 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2_test + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/gofrs/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/oauth2" + + hydra "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite/compose" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/hydra/v2/internal/testhelpers" + hydraoauth2 "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/sqlxx" +) + +func TestResourceOwnerPasswordGrant(t 
*testing.T) { + t.Parallel() + + ctx := context.Background() + fakeKratos := kratos.NewFake() + reg := testhelpers.NewRegistryMemory(t, + driver.WithKratosClient(fakeKratos), + driver.WithExtraFositeFactories(compose.OAuth2ResourceOwnerPasswordCredentialsFactory), + ) + publicTS, adminTS := testhelpers.NewOAuth2Server(t.Context(), t, reg) + + secret := uuid.Must(uuid.NewV4()).String() + audience := sqlxx.StringSliceJSONFormat{"https://aud.example.com"} + client := &hydra.Client{ + Secret: secret, + GrantTypes: []string{"password", "refresh_token"}, + Scope: "offline", + Audience: audience, + Lifespans: hydra.Lifespans{ + PasswordGrantAccessTokenLifespan: x.NullDuration{Duration: 1 * time.Hour, Valid: true}, + PasswordGrantRefreshTokenLifespan: x.NullDuration{Duration: 1 * time.Hour, Valid: true}, + }, + } + require.NoError(t, reg.ClientManager().CreateClient(ctx, client)) + + oauth2Config := &oauth2.Config{ + ClientID: client.GetID(), + ClientSecret: secret, + Endpoint: oauth2.Endpoint{ + AuthURL: reg.Config().OAuth2AuthURL(ctx).String(), + TokenURL: reg.Config().OAuth2TokenURL(ctx).String(), + AuthStyle: oauth2.AuthStyleInHeader, + }, + Scopes: []string{"offline"}, + } + + hs := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, r.Header.Get("Content-Type"), "application/json; charset=UTF-8") + assert.Equal(t, r.Header.Get("Authorization"), "Bearer secret value") + + var hookReq hydraoauth2.TokenHookRequest + require.NoError(t, json.NewDecoder(r.Body).Decode(&hookReq)) + assert.NotEmpty(t, hookReq.Session) + assert.NotEmpty(t, hookReq.Request) + + claims := hookReq.Session.Extra + claims["hooked"] = true + if hookReq.Request.GrantTypes[0] == "refresh_token" { + claims["refreshed"] = true + } + + hookResp := hydraoauth2.TokenHookResponse{ + Session: flow.AcceptOAuth2ConsentRequestSession{ + AccessToken: claims, + IDToken: claims, + }, + } + + w.WriteHeader(http.StatusOK) + require.NoError(t, json.NewEncoder(w).Encode(&hookResp)) + })) + defer hs.Close() + + reg.Config().MustSet(ctx, config.KeyTokenHook, &config.HookConfig{ + URL: hs.URL, + Auth: &config.Auth{ + Type: "api_key", + Config: config.AuthConfig{ + In: "header", + Name: "Authorization", + Value: "Bearer secret value", + }, + }, + }) + reg.Config().MustSet(ctx, config.KeyAccessTokenStrategy, "jwt") + + t.Run("case=get ROP grant token with valid username and password", func(t *testing.T) { + token, err := oauth2Config.PasswordCredentialsToken(ctx, kratos.FakeUsername, kratos.FakePassword) + require.NoError(t, err) + require.NotEmpty(t, token.AccessToken) + + // Access token should have hook and identity_id claims + jwtAT, err := jwt.Parse(token.AccessToken, func(token *jwt.Token) (interface{}, error) { + return reg.AccessTokenJWTSigner().GetPublicKey(ctx) + }) + require.NoError(t, err) + assert.Equal(t, kratos.FakeUsername, jwtAT.Claims["ext"].(map[string]any)["username"]) + assert.Equal(t, kratos.FakeIdentityID, jwtAT.Claims["sub"]) + assert.Equal(t, publicTS.URL, jwtAT.Claims["iss"]) + assert.True(t, jwtAT.Claims["ext"].(map[string]any)["hooked"].(bool)) + assert.ElementsMatch(t, audience, jwtAT.Claims["aud"]) + + t.Run("case=introspect token", func(t *testing.T) { + // Introspected token should have hook and identity_id claims + i := testhelpers.IntrospectToken(t, token.AccessToken, adminTS) + assert.True(t, i.Get("active").Bool(), "%s", i) + assert.Equal(t, kratos.FakeUsername, i.Get("ext.username").String(), "%s", i) + assert.Equal(t, kratos.FakeIdentityID, 
i.Get("sub").String(), "%s", i) + assert.True(t, i.Get("ext.hooked").Bool(), "%s", i) + assert.EqualValues(t, oauth2Config.ClientID, i.Get("client_id").String(), "%s", i) + }) + + t.Run("case=refresh token", func(t *testing.T) { + // Refreshed access token should have hook and identity_id claims + require.NotEmpty(t, token.RefreshToken) + token.Expiry = token.Expiry.Add(-time.Hour * 24) + refreshedToken, err := oauth2Config.TokenSource(context.Background(), token).Token() + require.NoError(t, err) + + require.NotEqual(t, token.AccessToken, refreshedToken.AccessToken) + require.NotEqual(t, token.RefreshToken, refreshedToken.RefreshToken) + + jwtAT, err := jwt.Parse(refreshedToken.AccessToken, func(token *jwt.Token) (interface{}, error) { + return reg.AccessTokenJWTSigner().GetPublicKey(ctx) + }) + require.NoError(t, err) + assert.Equal(t, kratos.FakeIdentityID, jwtAT.Claims["sub"]) + assert.Equal(t, kratos.FakeUsername, jwtAT.Claims["ext"].(map[string]any)["username"]) + assert.True(t, jwtAT.Claims["ext"].(map[string]any)["hooked"].(bool)) + assert.True(t, jwtAT.Claims["ext"].(map[string]any)["refreshed"].(bool)) + }) + }) + + t.Run("case=access denied for invalid password", func(t *testing.T) { + _, err := oauth2Config.PasswordCredentialsToken(ctx, kratos.FakeUsername, "invalid") + retrieveError := new(oauth2.RetrieveError) + require.Error(t, err) + require.ErrorAs(t, err, &retrieveError) + assert.Contains(t, retrieveError.ErrorDescription, "Unable to authenticate the provided username and password credentials") + }) +} diff --git a/oauth2/refresh_hook.go b/oauth2/refresh_hook.go new file mode 100644 index 00000000000..6d075a77863 --- /dev/null +++ b/oauth2/refresh_hook.go @@ -0,0 +1,103 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "context" + "encoding/json" + + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/x" + + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" +) + +// Requester is a token endpoint's request context. +// +// swagger:ignore +type Requester struct { + // ClientID is the identifier of the OAuth 2.0 client. + ClientID string `json:"client_id"` + // GrantedScopes is the list of scopes granted to the OAuth 2.0 client. + GrantedScopes []string `json:"granted_scopes"` + // GrantedAudience is the list of audiences granted to the OAuth 2.0 client. + GrantedAudience []string `json:"granted_audience"` + // GrantTypes is the request's grant types. + GrantTypes []string `json:"grant_types"` +} + +// RefreshTokenHookRequest is the request body sent to the refresh token hook. +// +// swagger:ignore +type RefreshTokenHookRequest struct { + // Subject is the identifier of the authenticated end-user. + Subject string `json:"subject"` + // Session is the request's session. + Session *Session `json:"session"` + // Requester is a token endpoint's request context. + Requester Requester `json:"requester"` + // ClientID is the identifier of the OAuth 2.0 client. + ClientID string `json:"client_id"` + // GrantedScopes is the list of scopes granted to the OAuth 2.0 client. + GrantedScopes []string `json:"granted_scopes"` + // GrantedAudience is the list of audiences granted to the OAuth 2.0 client. + GrantedAudience []string `json:"granted_audience"` +} + +// RefreshTokenHook is an AccessRequestHook called for `refresh_token` grant type.
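+//
+// As a rough, non-normative illustration (the concrete values below are made up,
+// not part of this change), the hook receives a POST request whose JSON body
+// follows RefreshTokenHookRequest above:
+//
+//	{
+//	  "subject": "some-subject",
+//	  "client_id": "some-client",
+//	  "granted_scopes": ["offline"],
+//	  "granted_audience": [],
+//	  "requester": {
+//	    "client_id": "some-client",
+//	    "granted_scopes": ["offline"],
+//	    "granted_audience": [],
+//	    "grant_types": ["refresh_token"]
+//	  },
+//	  "session": { ... }
+//	}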
+func RefreshTokenHook(reg interface { + config.Provider + x.HTTPClientProvider +}) AccessRequestHook { + return func(ctx context.Context, requester fosite.AccessRequester) error { + hookConfig := reg.Config().TokenRefreshHookConfig(ctx) + if hookConfig == nil { + return nil + } + + if !requester.GetGrantTypes().ExactOne("refresh_token") { + return nil + } + + session, ok := requester.GetSession().(*Session) + if !ok { + return nil + } + + requesterInfo := Requester{ + ClientID: requester.GetClient().GetID(), + GrantedScopes: requester.GetGrantedScopes(), + GrantedAudience: requester.GetGrantedAudience(), + GrantTypes: requester.GetGrantTypes(), + } + + reqBody := RefreshTokenHookRequest{ + Session: session, + Requester: requesterInfo, + Subject: session.GetSubject(), + ClientID: requester.GetClient().GetID(), + GrantedScopes: requester.GetGrantedScopes(), + GrantedAudience: requester.GetGrantedAudience(), + } + + reqBodyBytes, err := json.Marshal(&reqBody) + if err != nil { + return errors.WithStack( + fosite.ErrServerError. + WithWrap(err). + WithDescription("An error occurred while encoding the token hook."). + WithDebugf("Unable to encode the token hook body: %s", err), + ) + } + + err = executeHookAndUpdateSession(ctx, reg, hookConfig, reqBodyBytes, session) + if err != nil { + return err + } + + return nil + } +} diff --git a/oauth2/registry.go b/oauth2/registry.go index 53a9e84cf80..ffb9267eeaa 100644 --- a/oauth2/registry.go +++ b/oauth2/registry.go @@ -4,13 +4,15 @@ package oauth2 import ( - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2/trust" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/x" ) type InternalRegistry interface { @@ -19,16 +21,21 @@ type InternalRegistry interface { trust.Registry x.RegistryWriter x.RegistryLogger + x.TracingProvider + x.Transactor consent.Registry Registry + FlowCipher() *aead.XChaCha20Poly1305 } type Registry interface { OAuth2Storage() x.FositeStorer OAuth2Provider() fosite.OAuth2Provider - AudienceStrategy() fosite.AudienceMatchingStrategy - AccessTokenJWTStrategy() jwk.JWTSigner + AccessTokenJWTSigner() jwk.JWTSigner OpenIDConnectRequestValidator() *openid.OpenIDConnectRequestValidator AccessRequestHooks() []AccessRequestHook OAuth2ProviderConfig() fosite.Configurator + rfc8628.DeviceRateLimitStrategyProvider + rfc8628.DeviceCodeStrategyProvider + rfc8628.UserCodeStrategyProvider } diff --git a/oauth2/revocator_test.go b/oauth2/revocator_test.go index 62ca3754b06..33a4df4a473 100644 --- a/oauth2/revocator_test.go +++ b/oauth2/revocator_test.go @@ -6,36 +6,34 @@ package oauth2_test import ( "context" "fmt" + "io" "net/http" "net/http/httptest" "testing" "time" - "github.com/gobuffalo/pop/v6" - - "github.com/ory/x/httprouterx" - - "github.com/ory/hydra/persistence/sql" - "github.com/ory/x/contextx" - - hydra 
"github.com/ory/hydra-client-go/v2" - - "github.com/ory/hydra/internal" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/fosite" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" + "github.com/ory/x/httprouterx" + "github.com/ory/x/prometheusx" ) -func createAccessTokenSession(subject, client string, token string, expiresAt time.Time, fs x.FositeStorer, scopes fosite.Arguments) { - createAccessTokenSessionPairwise(subject, client, token, expiresAt, fs, scopes, "") +func createAccessTokenSession(t testing.TB, subject, client, token string, expiresAt time.Time, fs x.FositeStorer, scopes fosite.Arguments) { + createAccessTokenSessionPairwise(t, subject, client, token, expiresAt, fs, scopes, "") } -func createAccessTokenSessionPairwise(subject, client string, token string, expiresAt time.Time, fs x.FositeStorer, scopes fosite.Arguments, obfuscated string) { - ar := fosite.NewAccessRequest(oauth2.NewSession(subject)) +func createAccessTokenSessionPairwise(t testing.TB, subject, client, token string, expiresAt time.Time, fs x.FositeStorer, scopes fosite.Arguments, obfuscated string) { + ar := fosite.NewAccessRequest(oauth2.NewTestSession(t, subject)) ar.GrantedScope = fosite.Arguments{"core"} if scopes != nil { ar.GrantedScope = scopes @@ -60,27 +58,28 @@ func countAccessTokens(t *testing.T, c *pop.Connection) int { } func TestRevoke(t *testing.T) { - conf := internal.NewConfigurationWithDefaults() - reg := internal.NewRegistryMemory(t, conf, &contextx.Default{}) + t.Parallel() + + reg := testhelpers.NewRegistryMemory(t) - internal.MustEnsureRegistryKeys(reg, x.OpenIDConnectKeyName) - internal.AddFositeExamples(reg) + testhelpers.MustEnsureRegistryKeys(t, reg, x.OpenIDConnectKeyName) + internal.AddFositeExamples(t, reg) tokens := Tokens(reg.OAuth2ProviderConfig(), 4) now := time.Now().UTC().Round(time.Second) - handler := reg.OAuth2Handler() - router := x.NewRouterAdmin(conf.AdminURL) - handler.SetRoutes(router, &httprouterx.RouterPublic{Router: router.Router}, func(h http.Handler) http.Handler { - return h - }) + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + handler := oauth2.NewHandler(reg) + router := httprouterx.NewRouterAdminWithPrefix(metrics) + handler.SetPublicRoutes(router.ToPublic(), func(h http.Handler) http.Handler { return h }) + handler.SetAdminRoutes(router) server := httptest.NewServer(router) defer server.Close() - createAccessTokenSession("alice", "my-client", tokens[0][0], now.Add(time.Hour), reg.OAuth2Storage(), nil) - createAccessTokenSession("siri", "my-client", tokens[1][0], now.Add(time.Hour), reg.OAuth2Storage(), nil) - createAccessTokenSession("siri", "my-client", tokens[2][0], now.Add(-time.Hour), reg.OAuth2Storage(), nil) - createAccessTokenSession("siri", "encoded:client", tokens[3][0], now.Add(-time.Hour), reg.OAuth2Storage(), nil) + 
createAccessTokenSession(t, "alice", "my-client", tokens[0].sig, now.Add(time.Hour), reg.OAuth2Storage(), nil) + createAccessTokenSession(t, "siri", "my-client", tokens[1].sig, now.Add(time.Hour), reg.OAuth2Storage(), nil) + createAccessTokenSession(t, "siri", "my-client", tokens[2].sig, now.Add(-time.Hour), reg.OAuth2Storage(), nil) + createAccessTokenSession(t, "siri", "encoded:client", tokens[3].sig, now.Add(-time.Hour), reg.OAuth2Storage(), nil) require.Equal(t, 4, countAccessTokens(t, reg.Persister().Connection(context.Background()))) client := hydra.NewAPIClient(hydra.NewConfiguration()) @@ -96,42 +95,43 @@ func TestRevoke(t *testing.T) { }, }, { - token: tokens[3][1], + token: tokens[3].tok, assert: func(t *testing.T) { assert.Equal(t, 4, countAccessTokens(t, reg.Persister().Connection(context.Background()))) }, }, { - token: tokens[0][1], + token: tokens[0].tok, assert: func(t *testing.T) { - t.Logf("Tried to delete: %s %s", tokens[0][0], tokens[0][1]) + t.Logf("Tried to delete: %s %s", tokens[0].sig, tokens[0].tok) assert.Equal(t, 3, countAccessTokens(t, reg.Persister().Connection(context.Background()))) }, }, { - token: tokens[0][1], + token: tokens[0].tok, }, { - token: tokens[2][1], + token: tokens[2].tok, assert: func(t *testing.T) { assert.Equal(t, 2, countAccessTokens(t, reg.Persister().Connection(context.Background()))) }, }, { - token: tokens[1][1], + token: tokens[1].tok, assert: func(t *testing.T) { assert.Equal(t, 1, countAccessTokens(t, reg.Persister().Connection(context.Background()))) }, }, } { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { - _, err := client.OAuth2Api.RevokeOAuth2Token( + resp, err := client.OAuth2API.RevokeOAuth2Token( context.WithValue( context.Background(), hydra.ContextBasicAuth, hydra.BasicAuth{UserName: "my-client", Password: "foobar"}, )).Token(c.token).Execute() - require.NoError(t, err) + body, _ := io.ReadAll(resp.Body) + require.NoErrorf(t, err, "body: %s", body) if c.assert != nil { c.assert(t) diff --git a/oauth2/session.go b/oauth2/session.go index e543a1e123f..dda8ab948da 100644 --- a/oauth2/session.go +++ b/oauth2/session.go @@ -4,20 +4,23 @@ package oauth2 import ( - "encoding/json" + "bytes" + "context" + "slices" + "testing" "time" + jjson "github.com/go-jose/go-jose/v3/json" + "github.com/mohae/deepcopy" "github.com/pkg/errors" "github.com/tidwall/gjson" "github.com/tidwall/sjson" - "github.com/mohae/deepcopy" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/token/jwt" - - "github.com/ory/x/stringslice" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" + "github.com/ory/x/logrusx" ) // swagger:ignore @@ -29,50 +32,60 @@ type Session struct { ConsentChallenge string `json:"consent_challenge"` ExcludeNotBeforeClaim bool `json:"exclude_not_before_claim"` AllowedTopLevelClaims []string `json:"allowed_top_level_claims"` + MirrorTopLevelClaims bool `json:"mirror_top_level_claims"` } -func NewSession(subject string) *Session { - return NewSessionWithCustomClaims(subject, nil) +func NewTestSession(t testing.TB, subject string) *Session { + provider := config.MustNew(t, logrusx.New("", "")) + return NewSessionWithCustomClaims(t.Context(), provider, subject) } 
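+
+// Illustrative usage of the new signature (a sketch, not part of this change): callers
+// now hand in the configuration provider, which supplies the allowed top-level claims,
+// claim mirroring, and nbf-exclusion settings instead of an explicit claim allow-list:
+//
+//	sess := NewSessionWithCustomClaims(ctx, reg.Config(), "some-subject")
+//	sess.Extra["foo"] = "bar" // mirrored to the top level only if "foo" is allow-listed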
-func NewSessionWithCustomClaims(subject string, allowedTopLevelClaims []string) *Session { +func NewSessionWithCustomClaims(ctx context.Context, p *config.DefaultProvider, subject string) *Session { return &Session{ DefaultSession: &openid.DefaultSession{ - Claims: new(jwt.IDTokenClaims), - Headers: new(jwt.Headers), - Subject: subject, + Claims: new(jwt.IDTokenClaims), + Headers: new(jwt.Headers), + Subject: subject, + ExpiresAt: make(map[fosite.TokenType]time.Time), }, Extra: map[string]interface{}{}, - AllowedTopLevelClaims: allowedTopLevelClaims, + AllowedTopLevelClaims: p.AllowedTopLevelClaims(ctx), + MirrorTopLevelClaims: p.MirrorTopLevelClaims(ctx), + ExcludeNotBeforeClaim: p.ExcludeNotBeforeClaim(ctx), } } func (s *Session) GetJWTClaims() jwt.JWTClaimsContainer { - //a slice of claims that are reserved and should not be overridden - var reservedClaims = []string{"iss", "sub", "aud", "exp", "nbf", "iat", "jti", "client_id", "scp", "ext"} - - //remove any reserved claims from the custom claims - allowedClaimsFromConfigWithoutReserved := stringslice.Filter(s.AllowedTopLevelClaims, func(s string) bool { - return stringslice.Has(reservedClaims, s) + // remove any reserved claims from the custom claims + allowedClaimsFromConfigWithoutReserved := slices.DeleteFunc(s.AllowedTopLevelClaims, func(s string) bool { + switch s { + // these claims are reserved and should not be overridden + case "iss", "sub", "aud", "exp", "nbf", "iat", "jti", "client_id", "scp", "ext": + return true + } + return false }) - //our new extra map which will be added to the jwt - var topLevelExtraWithMirrorExt = map[string]interface{}{} + // our new extra map which will be added to the jwt + topLevelExtraWithMirrorExt := make(map[string]interface{}, len(allowedClaimsFromConfigWithoutReserved)+2) + topLevelExtraWithMirrorExt["client_id"] = s.ClientID - //setting every allowed claim top level in jwt with respective value + // setting every allowed claim top level in jwt with respective value for _, allowedClaim := range allowedClaimsFromConfigWithoutReserved { if cl, ok := s.Extra[allowedClaim]; ok { topLevelExtraWithMirrorExt[allowedClaim] = cl } } - //for every other claim that was already reserved and for mirroring, add original extra under "ext" - topLevelExtraWithMirrorExt["ext"] = s.Extra + // for every other claim that was already reserved and for mirroring, add original extra under "ext" + if s.MirrorTopLevelClaims { + topLevelExtraWithMirrorExt["ext"] = s.Extra + } claims := &jwt.JWTClaims{ Subject: s.Subject, Issuer: s.DefaultSession.Claims.Issuer, - //set our custom extra map as claims.Extra + // set our custom extra map as claims.Extra Extra: topLevelExtraWithMirrorExt, ExpiresAt: s.GetExpiresAt(fosite.AccessToken), IssuedAt: time.Now(), @@ -94,11 +107,6 @@ func (s *Session) GetJWTClaims() jwt.JWTClaimsContainer { claims.NotBefore = claims.IssuedAt } - if claims.Extra == nil { - claims.Extra = map[string]interface{}{} - } - - claims.Extra["client_id"] = s.ClientID return claims } @@ -171,10 +179,27 @@ func (s *Session) UnmarshalJSON(original []byte) (err error) { } } + // https://github.com/go-jose/go-jose/issues/144 + dec := jjson.NewDecoder(bytes.NewReader(transformed)) + dec.SetNumberType(jjson.UnmarshalIntOrFloat) type t Session - if err := json.Unmarshal(transformed, (*t)(s)); err != nil { + if err := dec.Decode((*t)(s)); err != nil { return errors.WithStack(err) } return nil } + +// GetExtraClaims implements ExtraClaimsSession for Session. 
+// The returned value can be modified in-place. +func (s *Session) GetExtraClaims() map[string]interface{} { + if s == nil { + return nil + } + + if s.Extra == nil { + s.Extra = make(map[string]interface{}) + } + + return s.Extra +} diff --git a/oauth2/session_custom_claims_test.go b/oauth2/session_custom_claims_test.go index c9b72af534d..74c086c45e1 100644 --- a/oauth2/session_custom_claims_test.go +++ b/oauth2/session_custom_claims_test.go @@ -1,252 +1,135 @@ // Copyright © 2022 Ory Corp // SPDX-License-Identifier: Apache-2.0 -package oauth2_test +package oauth2 import ( - "context" "testing" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/token/jwt" - - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/oauth2" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/token/jwt" ) -func createSessionWithCustomClaims(extra map[string]interface{}, allowedTopLevelClaims []string) oauth2.Session { - session := &oauth2.Session{ - DefaultSession: &openid.DefaultSession{ - Claims: &jwt.IDTokenClaims{ - Subject: "alice", - Issuer: "hydra.localhost", - }, - Headers: new(jwt.Headers), +func TestCustomClaimsInSession(t *testing.T) { + t.Parallel() + + session := Session{DefaultSession: &openid.DefaultSession{ + Claims: &jwt.IDTokenClaims{ Subject: "alice", + Issuer: "hydra.localhost", + }, + Headers: new(jwt.Headers), + Subject: "alice", + }} + + for _, tc := range []struct { + name string + extra, expectedClaims map[string]any + allowedTopLevelClaims, expectNotSet []string + mirrorTopLevelClaims, excludeNotBeforeClaim bool + }{{ + name: "no custom claims", + extra: map[string]any{}, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + }, + expectNotSet: []string{"ext"}, + }, { + name: "top level mirrored", + extra: map[string]any{"foo": "bar"}, + allowedTopLevelClaims: []string{"foo"}, + mirrorTopLevelClaims: true, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + "foo": "bar", + "ext": map[string]any{"foo": "bar"}, + }, + }, { + name: "top level mirrored with reserved", + extra: map[string]any{ + "foo": "bar", + "iss": "hydra.remote", + "sub": "another-alice", + }, + allowedTopLevelClaims: []string{"foo", "iss", "sub"}, + mirrorTopLevelClaims: true, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + "foo": "bar", + "ext": map[string]any{ + "foo": "bar", + "iss": "hydra.remote", + "sub": "another-alice", + }, }, - Extra: extra, - AllowedTopLevelClaims: allowedTopLevelClaims, + }, { + name: "with disallowed top level mirrored", + extra: map[string]any{ + "foo": "bar", + "baz": "qux", + }, + allowedTopLevelClaims: []string{"foo"}, + mirrorTopLevelClaims: true, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + "foo": "bar", + "ext": map[string]any{ + "foo": "bar", + "baz": "qux", + }, + }, + expectNotSet: []string{"baz"}, + }, { + name: "mirrored top level claims with other keys", + extra: map[string]any{"foo": "bar"}, + allowedTopLevelClaims: []string{"foo", "bar"}, + mirrorTopLevelClaims: true, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + "foo": "bar", + "ext": map[string]any{"foo": "bar"}, + }, + expectNotSet: []string{"bar"}, + }, { + name: 
"disabled mirror top level claims", + extra: map[string]any{"foo": "bar"}, + allowedTopLevelClaims: []string{"foo"}, + mirrorTopLevelClaims: false, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + "foo": "bar", + }, + expectNotSet: []string{"ext"}, + }, { + name: "exclude not before claim", + extra: map[string]any{}, + excludeNotBeforeClaim: true, + expectedClaims: map[string]any{ + "sub": "alice", + "iss": "hydra.localhost", + }, + expectNotSet: []string{"nbf"}, + }} { + t.Run(tc.name, func(t *testing.T) { + sess := session + sess.Extra = tc.extra + sess.AllowedTopLevelClaims = tc.allowedTopLevelClaims + sess.MirrorTopLevelClaims = tc.mirrorTopLevelClaims + sess.ExcludeNotBeforeClaim = tc.excludeNotBeforeClaim + + claims := sess.GetJWTClaims().ToMapClaims() + assert.Subset(t, claims, tc.expectedClaims) + for _, key := range tc.expectNotSet { + assert.NotContains(t, claims, key) + } + }) } - return *session -} - -func TestCustomClaimsInSession(t *testing.T) { - ctx := context.Background() - c := internal.NewConfigurationWithDefaults() - - t.Run("no_custom_claims", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{}) - - session := createSessionWithCustomClaims(nil, c.AllowedTopLevelClaims(ctx)) - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - - assert.Empty(t, claims["ext"]) - }) - t.Run("custom_claim_gets_mirrored", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{"foo"}) - extra := map[string]interface{}{"foo": "bar"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - - require.Contains(t, claims, "foo") - assert.EqualValues(t, "bar", claims["foo"]) - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "foo") - assert.EqualValues(t, "bar", extClaims["foo"]) - }) - t.Run("only_non_reserved_claims_get_mirrored", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{"foo", "iss", "sub"}) - extra := map[string]interface{}{"foo": "bar", "iss": "hydra.remote", "sub": "another-alice"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - assert.NotEqual(t, "hydra.remote", claims["iss"]) - - require.Contains(t, claims, "foo") - assert.EqualValues(t, "bar", claims["foo"]) - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "foo") - assert.EqualValues(t, "bar", extClaims["foo"]) - - require.Contains(t, extClaims, "iss") - assert.EqualValues(t, "hydra.remote", extClaims["iss"]) - - require.Contains(t, extClaims, "sub") - assert.EqualValues(t, "another-alice", extClaims["sub"]) - }) - t.Run("no_custom_claims_in_config", func(t 
*testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{}) - extra := map[string]interface{}{"foo": "bar", "iss": "hydra.remote", "sub": "another-alice"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - - assert.NotContains(t, claims, "foo") - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "foo") - assert.EqualValues(t, "bar", extClaims["foo"]) - - require.Contains(t, extClaims, "sub") - assert.EqualValues(t, "another-alice", extClaims["sub"]) - - require.Contains(t, extClaims, "iss") - assert.EqualValues(t, "hydra.remote", extClaims["iss"]) - }) - t.Run("more_config_claims_than_given", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{"foo", "baz", "bar", "iss"}) - extra := map[string]interface{}{"foo": "foo_value", "sub": "another-alice"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - assert.NotEqual(t, "hydra.remote", claims["iss"]) - - require.Contains(t, claims, "foo") - assert.EqualValues(t, "foo_value", claims["foo"]) - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "foo") - assert.EqualValues(t, "foo_value", extClaims["foo"]) - - require.Contains(t, extClaims, "sub") - assert.EqualValues(t, "another-alice", extClaims["sub"]) - }) - t.Run("less_config_claims_than_given", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{"foo", "sub"}) - extra := map[string]interface{}{"foo": "foo_value", "bar": "bar_value", "baz": "baz_value", "sub": "another-alice"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - - require.Contains(t, claims, "foo") - assert.EqualValues(t, "foo_value", claims["foo"]) - - assert.NotContains(t, claims, "bar") - assert.NotContains(t, claims, "baz") - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "foo") - assert.EqualValues(t, "foo_value", extClaims["foo"]) - - require.Contains(t, extClaims, "sub") - assert.EqualValues(t, "another-alice", extClaims["sub"]) - }) - t.Run("unused_config_claims", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{"foo", "bar"}) - extra := map[string]interface{}{"foo": "foo_value", "baz": "baz_value", "sub": "another-alice"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, 
claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - - require.Contains(t, claims, "foo") - assert.EqualValues(t, "foo_value", claims["foo"]) - - assert.NotContains(t, claims, "bar") - assert.NotContains(t, claims, "baz") - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "foo") - assert.EqualValues(t, "foo_value", extClaims["foo"]) - - require.Contains(t, extClaims, "sub") - assert.EqualValues(t, "another-alice", extClaims["sub"]) - }) - t.Run("config_claims_contain_reserved_claims", func(t *testing.T) { - c.MustSet(ctx, config.KeyAllowedTopLevelClaims, []string{"iss", "sub"}) - extra := map[string]interface{}{"iss": "hydra.remote", "sub": "another-alice"} - - session := createSessionWithCustomClaims(extra, c.AllowedTopLevelClaims(ctx)) - - claims := session.GetJWTClaims().ToMapClaims() - - assert.EqualValues(t, "alice", claims["sub"]) - assert.NotEqual(t, "another-alice", claims["sub"]) - - require.Contains(t, claims, "iss") - assert.EqualValues(t, "hydra.localhost", claims["iss"]) - assert.NotEqualValues(t, "hydra.remote", claims["iss"]) - - require.Contains(t, claims, "ext") - extClaims, ok := claims["ext"].(map[string]interface{}) - require.True(t, ok) - - require.Contains(t, extClaims, "sub") - assert.EqualValues(t, "another-alice", extClaims["sub"]) - - require.Contains(t, extClaims, "iss") - assert.EqualValues(t, "hydra.remote", extClaims["iss"]) - }) } diff --git a/oauth2/session_test.go b/oauth2/session_test.go deleted file mode 100644 index 146f018db7b..00000000000 --- a/oauth2/session_test.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package oauth2 - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/ory/fosite" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/token/jwt" - "github.com/ory/x/assertx" - "github.com/ory/x/snapshotx" - - _ "embed" -) - -//go:embed fixtures/v1.11.8-session.json -var v1118Session []byte - -//go:embed fixtures/v1.11.9-session.json -var v1119Session []byte - -func parseTime(t *testing.T, ts string) time.Time { - out, err := time.Parse(time.RFC3339Nano, ts) - require.NoError(t, err) - return out -} - -func TestUnmarshalSession(t *testing.T) { - expect := &Session{ - DefaultSession: &openid.DefaultSession{ - Claims: &jwt.IDTokenClaims{ - JTI: "", - Issuer: "http://127.0.0.1:4444/", - Subject: "foo@bar.com", - Audience: []string{"auth-code-client"}, - Nonce: "mbxojlzlkefzmlecvrzfkmpm", - ExpiresAt: parseTime(t, "0001-01-01T00:00:00Z"), - IssuedAt: parseTime(t, "2022-08-25T09:21:04Z"), - RequestedAt: parseTime(t, "2022-08-25T09:20:54Z"), - AuthTime: parseTime(t, "2022-08-25T09:21:01Z"), - AccessTokenHash: "", - AuthenticationContextClassReference: "0", - AuthenticationMethodsReferences: []string{}, - CodeHash: "", - Extra: map[string]interface{}{ - "sid": "177e1f44-a1e9-415c-bfa3-8b62280b182d", - }, - }, - Headers: &jwt.Headers{Extra: map[string]interface{}{ - "kid": "public:hydra.openid.id-token", - }}, - ExpiresAt: map[fosite.TokenType]time.Time{ - fosite.AccessToken: parseTime(t, "2022-08-25T09:26:05Z"), - fosite.AuthorizeCode: parseTime(t, "2022-08-25T09:23:04.432089764Z"), - fosite.RefreshToken: parseTime(t, "2022-08-26T09:21:05Z"), - }, - Username: "", - Subject: "foo@bar.com", - }, - Extra: 
map[string]interface{}{}, - KID: "public:hydra.jwt.access-token", - ClientID: "auth-code-client", - ConsentChallenge: "2261efbd447044a1b2f76b05c6aca164", - ExcludeNotBeforeClaim: false, - AllowedTopLevelClaims: []string{ - "persona_id", - "persona_krn", - "grantType", - "market", - "zone", - "login_session_id", - }, - } - - t.Run("v1.11.8", func(t *testing.T) { - var actual Session - require.NoError(t, json.Unmarshal(v1118Session, &actual)) - assertx.EqualAsJSON(t, expect, &actual) - snapshotx.SnapshotTExcept(t, &actual, nil) - }) - - t.Run("v1.11.9", func(t *testing.T) { - var actual Session - require.NoError(t, json.Unmarshal(v1119Session, &actual)) - assertx.EqualAsJSON(t, expect, &actual) - snapshotx.SnapshotTExcept(t, &actual, nil) - }) -} diff --git a/oauth2/token_hook.go b/oauth2/token_hook.go new file mode 100644 index 00000000000..3c3a12182c3 --- /dev/null +++ b/oauth2/token_hook.go @@ -0,0 +1,209 @@ +// Copyright © 2022 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/reqlog" + + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" +) + +// AccessRequestHook is called when an access token request is performed. +type AccessRequestHook func(ctx context.Context, requester fosite.AccessRequester) error + +// Request is a token endpoint's request context. +// +// swagger:ignore +type Request struct { + // ClientID is the identifier of the OAuth 2.0 client. + ClientID string `json:"client_id"` + // RequestedScopes is the list of scopes requested to the OAuth 2.0 client. + RequestedScopes []string `json:"requested_scopes"` + // GrantedScopes is the list of scopes granted to the OAuth 2.0 client. + GrantedScopes []string `json:"granted_scopes"` + // GrantedAudience is the list of audiences granted to the OAuth 2.0 client. + GrantedAudience []string `json:"granted_audience"` + // GrantTypes is the requests grant types. + GrantTypes []string `json:"grant_types"` + // Payload is the requests payload. + Payload map[string][]string `json:"payload"` +} + +// TokenHookRequest is the request body sent to the token hook. +// +// swagger:ignore +type TokenHookRequest struct { + // Session is the request's session.. + Session *Session `json:"session"` + // Requester is a token endpoint's request context. + Request Request `json:"request"` +} + +// TokenHookResponse is the response body received from the token hook. +// +// swagger:ignore +type TokenHookResponse struct { + // Session is the session data returned by the hook. 
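+	// When the hook answers 200 OK, the access token claims returned here replace the
+	// session's extra claims and the ID token claims replace the ID token's extra claims
+	// (see executeHookAndUpdateSession). A 204 No Content response leaves the session
+	// unchanged, and 403 Forbidden denies the token request.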
+ Session flow.AcceptOAuth2ConsentRequestSession `json:"session"` +} + +type APIKeyAuthConfig struct { + In string `json:"in"` + Name string `json:"name"` + Value string `json:"value"` +} + +func applyAuth(req *retryablehttp.Request, auth *config.Auth) error { + if auth == nil { + return nil + } + + switch auth.Type { + case "api_key": + switch auth.Config.In { + case "header": + req.Header.Set(auth.Config.Name, auth.Config.Value) + case "cookie": + req.AddCookie(&http.Cookie{Name: auth.Config.Name, Value: auth.Config.Value}) + } + default: + return errors.Errorf("unsupported auth type %q", auth.Type) + } + return nil +} + +func executeHookAndUpdateSession(ctx context.Context, reg x.HTTPClientProvider, hookConfig *config.HookConfig, reqBodyBytes []byte, session *Session) error { + req, err := retryablehttp.NewRequestWithContext(ctx, http.MethodPost, hookConfig.URL, bytes.NewReader(reqBodyBytes)) + if err != nil { + return errors.WithStack( + fosite.ErrServerError. + WithWrap(err). + WithDescription("An error occurred while preparing the token hook."). + WithDebugf("Unable to prepare the HTTP Request: %s", err), + ) + } + if err := applyAuth(req, hookConfig.Auth); err != nil { + return errors.WithStack( + fosite.ErrServerError. + WithWrap(err). + WithDescription("An error occurred while applying the token hook authentication."). + WithDebugf("Unable to apply the token hook authentication: %s", err)) + } + req.Header.Set("Content-Type", "application/json; charset=UTF-8") + + t0 := time.Now() + resp, err := reg.HTTPClient(ctx).Do(req) + if err != nil { + return errors.WithStack( + fosite.ErrServerError. + WithWrap(err). + WithDescription("An error occurred while executing the token hook."). + WithDebugf("Unable to execute HTTP Request: %s", err), + ) + } + defer resp.Body.Close() //nolint:errcheck + resp.Body = io.NopCloser(io.LimitReader(resp.Body, 5<<20 /* 5 MiB */)) + + switch resp.StatusCode { + case http.StatusOK: + // Token permitted with new session data + case http.StatusNoContent: + // Token is permitted without overriding session data + return nil + case http.StatusForbidden: + return errors.WithStack( + fosite.ErrAccessDenied. + WithDescription("The token hook target responded with an error."). + WithDebugf("Token hook responded with HTTP status code: %s", resp.Status), + ) + default: + return errors.WithStack( + fosite.ErrServerError. + WithDescription("The token hook target responded with an error."). + WithDebugf("Token hook responded with HTTP status code: %s", resp.Status), + ) + } + + var respBody TokenHookResponse + if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { + return errors.WithStack( + fosite.ErrServerError. + WithWrap(err). + WithDescription("The token hook target responded with an error."). + WithDebugf("Response from token hook could not be decoded: %s", err), + ) + } + + reqlog.AccumulateExternalLatency(ctx, time.Since(t0)) // body read + + // Overwrite existing session data (extra claims). + session.Extra = respBody.Session.AccessToken + idTokenClaims := session.IDTokenClaims() + idTokenClaims.Extra = respBody.Session.IDToken + return nil +} + +// TokenHook is an AccessRequestHook called for all grant types. 
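+//
+// A minimal hook endpoint might look like the sketch below (illustrative only,
+// mirroring the handler used in the tests; it reuses TokenHookRequest and
+// TokenHookResponse together with the flow package and is not part of this change):
+//
+//	http.HandleFunc("/token-hook", func(w http.ResponseWriter, r *http.Request) {
+//		var req TokenHookRequest
+//		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+//			http.Error(w, err.Error(), http.StatusBadRequest)
+//			return
+//		}
+//		claims := req.Session.Extra // claims Hydra has assembled so far
+//		claims["hooked"] = true
+//		resp := TokenHookResponse{Session: flow.AcceptOAuth2ConsentRequestSession{
+//			AccessToken: claims,
+//			IDToken:     claims,
+//		}}
+//		w.WriteHeader(http.StatusOK)
+//		_ = json.NewEncoder(w).Encode(&resp)
+//	})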
+func TokenHook(reg interface { + config.Provider + x.HTTPClientProvider +}, +) AccessRequestHook { + return func(ctx context.Context, requester fosite.AccessRequester) error { + hookConfig := reg.Config().TokenHookConfig(ctx) + if hookConfig == nil { + return nil + } + + session, ok := requester.GetSession().(*Session) + if !ok { + return nil + } + + request := Request{ + ClientID: requester.GetClient().GetID(), + RequestedScopes: requester.GetRequestedScopes(), + GrantedScopes: requester.GetGrantedScopes(), + GrantedAudience: requester.GetGrantedAudience(), + GrantTypes: requester.GetGrantTypes(), + Payload: requester.Sanitize([]string{"assertion"}).GetRequestForm(), + } + + reqBody := TokenHookRequest{ + Session: session, + Request: request, + } + + reqBodyBytes, err := json.Marshal(&reqBody) + if err != nil { + return errors.WithStack( + fosite.ErrServerError. + WithWrap(err). + WithDescription("An error occurred while encoding the token hook."). + WithDebugf("Unable to encode the token hook body: %s", err), + ) + } + + err = executeHookAndUpdateSession(ctx, reg, hookConfig, reqBodyBytes, session) + if err != nil { + return err + } + + return nil + } +} diff --git a/oauth2/trust/doc.go b/oauth2/trust/doc.go index c30e9521ac0..16de4977dd3 100644 --- a/oauth2/trust/doc.go +++ b/oauth2/trust/doc.go @@ -14,11 +14,15 @@ import ( // OAuth2 JWT Bearer Grant Type Issuer Trust Relationships // // swagger:model trustedOAuth2JwtGrantIssuers +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type trustedOAuth2JwtGrantIssuers []trustedOAuth2JwtGrantIssuer // OAuth2 JWT Bearer Grant Type Issuer Trust Relationship // // swagger:model trustedOAuth2JwtGrantIssuer +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type trustedOAuth2JwtGrantIssuer struct { // example: 9edc811f-4e28-453c-9b46-4de65f00217f ID string `json:"id"` @@ -51,6 +55,8 @@ type trustedOAuth2JwtGrantIssuer struct { // OAuth2 JWT Bearer Grant Type Issuer Trusted JSON Web Key // // swagger:model trustedOAuth2JwtGrantJsonWebKey +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type trustedOAuth2JwtGrantJsonWebKey struct { // The "set" is basically a name for a group(set) of keys. Will be the same as "issuer" in grant. // example: https://jwt-idp.example.com diff --git a/oauth2/trust/error.go b/oauth2/trust/error.go index eee9d0e5fb1..c0607e319db 100644 --- a/oauth2/trust/error.go +++ b/oauth2/trust/error.go @@ -6,7 +6,7 @@ package trust import ( "net/http" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" ) var ErrMissingRequiredParameter = &fosite.RFC6749Error{ diff --git a/oauth2/trust/grant.go b/oauth2/trust/grant.go index 9d5986415ab..62eafbc4685 100644 --- a/oauth2/trust/grant.go +++ b/oauth2/trust/grant.go @@ -5,10 +5,12 @@ package trust import ( "time" + + "github.com/gofrs/uuid" ) type Grant struct { - ID string `json:"id"` + ID uuid.UUID `json:"id"` // Issuer identifies the principal that issued the JWT assertion (same as iss claim in jwt). 
Issuer string `json:"issuer"` diff --git a/oauth2/trust/handler.go b/oauth2/trust/handler.go index 7b236eaef13..c675a0e5541 100644 --- a/oauth2/trust/handler.go +++ b/oauth2/trust/handler.go @@ -6,19 +6,18 @@ package trust import ( "encoding/json" "net/http" + "net/url" "time" - "github.com/ory/x/pagination/tokenpagination" - - "github.com/ory/hydra/x" + "github.com/gofrs/uuid" + "github.com/pkg/errors" + "github.com/ory/herodot" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" "github.com/ory/x/httprouterx" - - "github.com/google/uuid" - - "github.com/julienschmidt/httprouter" - - "github.com/ory/x/errorsx" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" + "github.com/ory/x/urlx" ) const ( @@ -34,15 +33,17 @@ func NewHandler(r InternalRegistry) *Handler { } func (h *Handler) SetRoutes(admin *httprouterx.RouterAdmin) { - admin.GET(grantJWTBearerPath+"/:id", h.getTrustedOAuth2JwtGrantIssuer) + admin.GET(grantJWTBearerPath+"/{id}", h.getTrustedOAuth2JwtGrantIssuer) admin.GET(grantJWTBearerPath, h.adminListTrustedOAuth2JwtGrantIssuers) admin.POST(grantJWTBearerPath, h.trustOAuth2JwtGrantIssuer) - admin.DELETE(grantJWTBearerPath+"/:id", h.deleteTrustedOAuth2JwtGrantIssuer) + admin.DELETE(grantJWTBearerPath+"/{id}", h.deleteTrustedOAuth2JwtGrantIssuer) } // Trust OAuth2 JWT Bearer Grant Type Issuer Request Body // // swagger:model trustOAuth2JwtGrantIssuer +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type trustOAuth2JwtGrantIssuerBody struct { // The "issuer" identifies the principal that issued the JWT assertion (same as "iss" claim in JWT). 
// @@ -78,6 +79,8 @@ type trustOAuth2JwtGrantIssuerBody struct { // Trust OAuth2 JWT Bearer Grant Type Issuer Request // // swagger:parameters trustOAuth2JwtGrantIssuer +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type trustOAuth2JwtGrantIssuer struct { // in: body Body trustOAuth2JwtGrantIssuerBody @@ -102,21 +105,26 @@ type trustOAuth2JwtGrantIssuer struct { // Responses: // 201: trustedOAuth2JwtGrantIssuer // default: genericError -func (h *Handler) trustOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { +func (h *Handler) trustOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Request) { var grantRequest createGrantRequest if err := json.NewDecoder(r.Body).Decode(&grantRequest); err != nil { - h.registry.Writer().WriteError(w, r, errorsx.WithStack(err)) + h.registry.Writer().WriteError(w, r, + errors.WithStack(&fosite.RFC6749Error{ + ErrorField: "error", + DescriptionField: err.Error(), + CodeField: http.StatusBadRequest, + })) return } - if err := h.registry.GrantValidator().Validate(grantRequest); err != nil { + if err := validateGrant(grantRequest); err != nil { h.registry.Writer().WriteError(w, r, err) return } grant := Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: grantRequest.Issuer, Subject: grantRequest.Subject, AllowAnySubject: grantRequest.AllowAnySubject, @@ -134,12 +142,14 @@ func (h *Handler) trustOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Reque return } - h.registry.Writer().WriteCreated(w, r, grantJWTBearerPath+"/"+grant.ID, &grant) + h.registry.Writer().WriteCreated(w, r, urlx.MustJoin(grantJWTBearerPath, url.PathEscape(grant.ID.String())), &grant) } // Get Trusted OAuth2 JWT Bearer Grant Type Issuer Request // // swagger:parameters getTrustedOAuth2JwtGrantIssuer +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type getTrustedOAuth2JwtGrantIssuer struct { // The id of the desired grant // @@ -166,8 +176,14 @@ type getTrustedOAuth2JwtGrantIssuer struct { // Responses: // 200: trustedOAuth2JwtGrantIssuer // default: genericError -func (h *Handler) getTrustedOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var id = ps.ByName("id") +func (h *Handler) getTrustedOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Request) { + rawID := r.PathValue("id") + id, err := uuid.FromString(rawID) + if err != nil { + h.registry.Writer().WriteError(w, r, + errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to parse parameter id: %v", err))) + return + } grant, err := h.registry.GrantManager().GetConcreteGrant(r.Context(), id) if err != nil { @@ -181,6 +197,8 @@ func (h *Handler) getTrustedOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http. 
// Delete Trusted OAuth2 JWT Bearer Grant Type Issuer Request // // swagger:parameters deleteTrustedOAuth2JwtGrantIssuer +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type deleteTrustedOAuth2JwtGrantIssuer struct { // The id of the desired grant // in: path @@ -209,8 +227,14 @@ type deleteTrustedOAuth2JwtGrantIssuer struct { // Responses: // 204: emptyResponse // default: genericError -func (h *Handler) deleteTrustedOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - var id = ps.ByName("id") +func (h *Handler) deleteTrustedOAuth2JwtGrantIssuer(w http.ResponseWriter, r *http.Request) { + rawID := r.PathValue("id") + id, err := uuid.FromString(rawID) + if err != nil { + h.registry.Writer().WriteError(w, r, + errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to parse parameter id: %v", err))) + return + } if err := h.registry.GrantManager().DeleteGrant(r.Context(), id); err != nil { h.registry.Writer().WriteError(w, r, err) @@ -223,6 +247,8 @@ func (h *Handler) deleteTrustedOAuth2JwtGrantIssuer(w http.ResponseWriter, r *ht // List Trusted OAuth2 JWT Bearer Grant Type Issuers Request // // swagger:parameters listTrustedOAuth2JwtGrantIssuers +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type listTrustedOAuth2JwtGrantIssuers struct { // If optional "issuer" is supplied, only jwt-bearer grants with this issuer will be returned. // @@ -230,7 +256,7 @@ type listTrustedOAuth2JwtGrantIssuers struct { // required: false Issuer string `json:"issuer"` - tokenpagination.TokenPaginator + keysetpagination.RequestParameters } // swagger:route GET /admin/trust/grants/jwt-bearer/issuers oAuth2 listTrustedOAuth2JwtGrantIssuers @@ -250,26 +276,26 @@ type listTrustedOAuth2JwtGrantIssuers struct { // Responses: // 200: trustedOAuth2JwtGrantIssuers // default: genericError -func (h *Handler) adminListTrustedOAuth2JwtGrantIssuers(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - page, itemsPerPage := x.ParsePagination(r) +func (h *Handler) adminListTrustedOAuth2JwtGrantIssuers(w http.ResponseWriter, r *http.Request) { optionalIssuer := r.URL.Query().Get("issuer") - grants, err := h.registry.GrantManager().GetGrants(r.Context(), itemsPerPage, page*itemsPerPage, optionalIssuer) + pageKeys := h.registry.Config().GetPaginationEncryptionKeys(r.Context()) + pageOpts, err := keysetpagination.ParseQueryParams(pageKeys, r.URL.Query()) if err != nil { - h.registry.Writer().WriteError(w, r, err) + h.registry.Writer().WriteError(w, r, + errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to parse pagination parameters: %v", err))) return } - n, err := h.registry.GrantManager().CountGrants(r.Context()) + grants, nextPage, err := h.registry.GrantManager().GetGrants(r.Context(), optionalIssuer, pageOpts...) 
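+	// nextPage carries the keyset-pagination cursor for the following page; it is exposed
+	// to API clients via the HTTP Link header (keysetpagination.SetLinkHeader below)
+	// rather than the removed offset/limit pagination headers.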
if err != nil { h.registry.Writer().WriteError(w, r, err) return } - - x.PaginationHeader(w, r.URL, int64(n), page, itemsPerPage) if grants == nil { grants = []Grant{} } + keysetpagination.SetLinkHeader(w, pageKeys, r.URL, nextPage) h.registry.Writer().Write(w, r, grants) } diff --git a/oauth2/trust/handler_test.go b/oauth2/trust/handler_test.go index 929b9840b57..fb6db9f4af1 100644 --- a/oauth2/trust/handler_test.go +++ b/oauth2/trust/handler_test.go @@ -9,30 +9,28 @@ import ( "crypto/rand" "crypto/rsa" "encoding/json" + "io" "net/http" "net/http/httptest" "testing" "time" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/x/pointerx" - + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/stretchr/testify/assert" - - "github.com/ory/hydra/oauth2/trust" - "github.com/ory/x/contextx" - - "github.com/google/uuid" "github.com/stretchr/testify/suite" - - "github.com/ory/hydra/driver" - "github.com/ory/hydra/jwk" + "github.com/tidwall/gjson" hydra "github.com/ory/hydra-client-go/v2" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/configx" + "github.com/ory/x/pointerx" + "github.com/ory/x/prometheusx" ) // Define the suite, and absorb the built-in basic suite @@ -40,26 +38,25 @@ import ( // returns the current testing context. type HandlerTestSuite struct { suite.Suite - registry driver.Registry + registry *driver.RegistrySQL server *httptest.Server hydraClient *hydra.APIClient publicKey *rsa.PublicKey } // Setup will run before the tests in the suite are run. -func (s *HandlerTestSuite) SetupSuite() { - conf := internal.NewConfigurationWithDefaults() - conf.MustSet(context.Background(), config.KeySubjectTypesSupported, []string{"public"}) - conf.MustSet(context.Background(), config.KeyDefaultClientScope, []string{"foo", "bar"}) - s.registry = internal.NewRegistryMemory(s.T(), conf, &contextx.Default{}) +func (s *HandlerTestSuite) SetupTest() { + s.registry = testhelpers.NewRegistryMemory(s.T(), driver.WithConfigOptions(configx.WithValues(map[string]any{ + config.KeySubjectTypesSupported: []string{"public"}, + config.KeyDefaultClientScope: []string{"foo", "bar"}, + }))) - router := x.NewRouterAdmin(conf.AdminURL) + metrics := prometheusx.NewMetricsManagerWithPrefix("hydra", prometheusx.HTTPMetrics, config.Version, config.Commit, config.Date) + router := x.NewRouterAdmin(metrics) handler := trust.NewHandler(s.registry) handler.SetRoutes(router) jwkHandler := jwk.NewHandler(s.registry) - jwkHandler.SetRoutes(router, x.NewRouterPublic(), func(h http.Handler) http.Handler { - return h - }) + jwkHandler.SetAdminRoutes(router) s.server = httptest.NewServer(router) c := hydra.NewAPIClient(hydra.NewConfiguration()) @@ -69,17 +66,13 @@ func (s *HandlerTestSuite) SetupSuite() { } // Setup before each test. -func (s *HandlerTestSuite) SetupTest() { -} +func (s *HandlerTestSuite) SetupSuite() {} // Will run after all the tests in the suite have been run. 
-func (s *HandlerTestSuite) TearDownSuite() { -} +func (s *HandlerTestSuite) TearDownSuite() {} // Will run after each test in the suite. -func (s *HandlerTestSuite) TearDownTest() { - internal.CleanAndMigrate(s.registry)(s.T()) -} +func (s *HandlerTestSuite) TearDownTest() {} // In order for 'go test' to run this suite, we need to create // a normal test function and pass our suite to suite.Run. @@ -98,7 +91,7 @@ func (s *HandlerTestSuite) TestGrantCanBeCreatedAndFetched() { model := createRequestParams ctx := context.Background() - createResult, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + createResult, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().NoError(err, "no errors expected on grant creation") s.NotEmpty(createResult.Id, " grant id expected to be non-empty") s.Equal(model.Issuer, *createResult.Issuer, "issuer must match") @@ -108,7 +101,7 @@ func (s *HandlerTestSuite) TestGrantCanBeCreatedAndFetched() { s.Equal(model.Jwk.Kid, *createResult.PublicKey.Kid, "public key id must match") s.Equal(model.ExpiresAt.Round(time.Second).UTC().String(), createResult.ExpiresAt.Round(time.Second).UTC().String(), "expiration date must match") - getResult, _, err := s.hydraClient.OAuth2Api.GetTrustedOAuth2JwtGrantIssuer(ctx, *createResult.Id).Execute() + getResult, _, err := s.hydraClient.OAuth2API.GetTrustedOAuth2JwtGrantIssuer(ctx, *createResult.Id).Execute() s.Require().NoError(err, "no errors expected on grant fetching") s.Equal(*createResult.Id, *getResult.Id, " grant id must match") s.Equal(model.Issuer, *getResult.Issuer, "issuer must match") @@ -129,15 +122,14 @@ func (s *HandlerTestSuite) TestGrantCanNotBeCreatedWithSameIssuerSubjectKey() { ) ctx := context.Background() - _, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().NoError(err, "no errors expected on grant creation") - _, _, err = s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err = s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().Error(err, "expected error, because grant with same issuer+subject+kid exists") - kid := uuid.New().String() - createRequestParams.Jwk.Kid = kid - _, _, err = s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + createRequestParams.Jwk.Kid = uuid.Must(uuid.NewV4()).String() + _, _, err = s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(ctx).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.NoError(err, "no errors expected on grant creation, because kid is now different") } @@ -150,10 +142,28 @@ func (s *HandlerTestSuite) TestGrantCanNotBeCreatedWithSubjectAndAnySubject() { time.Now().Add(time.Hour), ) - _, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().Error(err, "expected error, because a grant with a subject and allow_any_subject cannot be created") } +func (s 
*HandlerTestSuite) TestGrantCanNotBeCreatedWithUnknownJWK() { + createRequestParams := hydra.TrustOAuth2JwtGrantIssuer{ + AllowAnySubject: pointerx.Ptr(true), + ExpiresAt: time.Now().Add(1 * time.Hour), + Issuer: "ory", + Jwk: hydra.JsonWebKey{ + Alg: "unknown", + }, + Scope: []string{"openid", "offline", "profile"}, + } + + _, res, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + s.Require().Error(err, "expected error, because the key type was unknown") + s.Assert().Equal(http.StatusBadRequest, res.StatusCode) + body, _ := io.ReadAll(res.Body) + s.Contains(gjson.GetBytes(body, "error_description").String(), "unknown json web key type") +} + func (s *HandlerTestSuite) TestGrantCanNotBeCreatedWithMissingFields() { createRequestParams := s.newCreateJwtBearerGrantParams( "", @@ -163,7 +173,7 @@ func (s *HandlerTestSuite) TestGrantCanNotBeCreatedWithMissingFields() { time.Now().Add(time.Hour), ) - _, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().Error(err, "expected error, because grant missing issuer") createRequestParams = s.newCreateJwtBearerGrantParams( @@ -174,7 +184,7 @@ func (s *HandlerTestSuite) TestGrantCanNotBeCreatedWithMissingFields() { time.Now().Add(time.Hour), ) - _, _, err = s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err = s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().Error(err, "expected error, because grant missing subject") createRequestParams = s.newCreateJwtBearerGrantParams( @@ -185,7 +195,7 @@ func (s *HandlerTestSuite) TestGrantCanNotBeCreatedWithMissingFields() { time.Time{}, ) - _, _, err = s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err = s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Error(err, "expected error, because grant missing expiration date") } @@ -198,10 +208,10 @@ func (s *HandlerTestSuite) TestGrantPublicCanBeFetched() { time.Now().Add(time.Hour), ) - _, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().NoError(err, "no error expected on grant creation") - getResult, _, err := s.hydraClient.JwkApi.GetJsonWebKey(context.Background(), createRequestParams.Issuer, createRequestParams.Jwk.Kid).Execute() + getResult, _, err := s.hydraClient.JwkAPI.GetJsonWebKey(context.Background(), createRequestParams.Issuer, createRequestParams.Jwk.Kid).Execute() s.Require().NoError(err, "no error expected on fetching public key") s.Equal(createRequestParams.Jwk.Kid, getResult.Keys[0].Kid) @@ -216,7 +226,7 @@ func (s *HandlerTestSuite) TestGrantWithAnySubjectCanBeCreated() { time.Now().Add(time.Hour), ) - grant, _, err := 
s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + grant, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().NoError(err, "no error expected on grant creation") assert.Empty(s.T(), grant.Subject) @@ -239,20 +249,21 @@ func (s *HandlerTestSuite) TestGrantListCanBeFetched() { time.Now().Add(time.Hour), ) - _, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + _, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().NoError(err, "no errors expected on grant creation") - _, _, err = s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams2).Execute() + _, _, err = s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams2).Execute() s.Require().NoError(err, "no errors expected on grant creation") - getResult, _, err := s.hydraClient.OAuth2Api.ListTrustedOAuth2JwtGrantIssuers(context.Background()).Execute() + getResult, _, err := s.hydraClient.OAuth2API.ListTrustedOAuth2JwtGrantIssuers(context.Background()).Execute() s.Require().NoError(err, "no errors expected on grant list fetching") - s.Len(getResult, 2, "expected to get list of 2 grants") + s.Require().Len(getResult, 2, "expected to get list of 2 grants") + s.ElementsMatch([]string{createRequestParams.Issuer, createRequestParams2.Issuer}, []string{*getResult[0].Issuer, *getResult[1].Issuer}) - getResult, _, err = s.hydraClient.OAuth2Api.ListTrustedOAuth2JwtGrantIssuers(context.Background()).Issuer(createRequestParams2.Issuer).Execute() + getResult, _, err = s.hydraClient.OAuth2API.ListTrustedOAuth2JwtGrantIssuers(context.Background()).Issuer(createRequestParams2.Issuer).Execute() s.Require().NoError(err, "no errors expected on grant list fetching") - s.Len(getResult, 1, "expected to get list of 1 grant, when filtering by issuer") + s.Require().Len(getResult, 1, "expected to get list of 1 grant, when filtering by issuer") s.Equal(createRequestParams2.Issuer, *getResult[0].Issuer, "issuer must match") } @@ -265,13 +276,13 @@ func (s *HandlerTestSuite) TestGrantCanBeDeleted() { time.Now().Add(time.Hour), ) - createResult, _, err := s.hydraClient.OAuth2Api.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() + createResult, _, err := s.hydraClient.OAuth2API.TrustOAuth2JwtGrantIssuer(context.Background()).TrustOAuth2JwtGrantIssuer(createRequestParams).Execute() s.Require().NoError(err, "no errors expected on grant creation") - _, err = s.hydraClient.OAuth2Api.DeleteTrustedOAuth2JwtGrantIssuer(context.Background(), *createResult.Id).Execute() + _, err = s.hydraClient.OAuth2API.DeleteTrustedOAuth2JwtGrantIssuer(context.Background(), *createResult.Id).Execute() s.Require().NoError(err, "no errors expected on grant deletion") - _, err = s.hydraClient.OAuth2Api.DeleteTrustedOAuth2JwtGrantIssuer(context.Background(), *createResult.Id).Execute() + _, err = s.hydraClient.OAuth2API.DeleteTrustedOAuth2JwtGrantIssuer(context.Background(), *createResult.Id).Execute() s.Error(err, "expected error, because grant has been already deleted") } @@ -279,7 +290,7 @@ func (s *HandlerTestSuite) generateJWK(publicKey *rsa.PublicKey) 
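The handler tests above exercise the regenerated Go SDK, whose OAuth2 service moved from OAuth2Api to OAuth2API. As a minimal sketch of the same call outside the test suite (illustrative, not part of the patch): the admin URL, issuer, subject, and key ID are placeholders, and the hydra-client-go/v2 import path is assumed to be the module these tests use.

// Sketch: trust a JWT grant issuer via the Hydra admin API using the renamed OAuth2API service.
package main

import (
	"context"
	"crypto/rand"
	"crypto/rsa"
	"encoding/json"
	"fmt"
	"time"

	"github.com/go-jose/go-jose/v3"
	hydra "github.com/ory/hydra-client-go/v2"
	"github.com/ory/x/pointerx"
)

func main() {
	cfg := hydra.NewConfiguration()
	cfg.Servers = hydra.ServerConfigurations{{URL: "http://127.0.0.1:4445"}} // Hydra admin API (placeholder URL)
	client := hydra.NewAPIClient(cfg)

	// Convert a jose.JSONWebKey into the SDK model the same way the suite's generateJWK
	// helper does: marshal to JSON, then unmarshal into hydra.JsonWebKey.
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	raw, err := json.Marshal(jose.JSONWebKey{
		Key:       &key.PublicKey,
		KeyID:     "example-kid", // placeholder
		Algorithm: string(jose.RS256),
		Use:       "sig",
	})
	if err != nil {
		panic(err)
	}
	var jwk hydra.JsonWebKey
	if err := json.Unmarshal(raw, &jwk); err != nil {
		panic(err)
	}

	body := hydra.TrustOAuth2JwtGrantIssuer{
		AllowAnySubject: pointerx.Ptr(false),
		ExpiresAt:       time.Now().Add(24 * time.Hour),
		Issuer:          "https://jwt-issuer.example.com", // placeholder
		Jwk:             jwk,
		Scope:           []string{"openid", "offline"},
		Subject:         pointerx.Ptr("bob@example.com"), // placeholder
	}

	grant, _, err := client.OAuth2API.
		TrustOAuth2JwtGrantIssuer(context.Background()).
		TrustOAuth2JwtGrantIssuer(body).
		Execute()
	if err != nil {
		fmt.Println("trusting the issuer failed:", err)
		return
	}
	fmt.Println("created trust relationship", *grant.Id)
}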
hydra.JsonWebKe var b bytes.Buffer s.Require().NoError(json.NewEncoder(&b).Encode(&jose.JSONWebKey{ Key: publicKey, - KeyID: uuid.New().String(), + KeyID: uuid.Must(uuid.NewV4()).String(), Algorithm: string(jose.RS256), Use: "sig", })) @@ -297,8 +308,8 @@ func (s *HandlerTestSuite) newCreateJwtBearerGrantParams( Issuer: issuer, Jwk: s.generateJWK(s.publicKey), Scope: scope, - Subject: pointerx.String(subject), - AllowAnySubject: pointerx.Bool(allowAnySubject), + Subject: pointerx.Ptr(subject), + AllowAnySubject: pointerx.Ptr(allowAnySubject), } } diff --git a/oauth2/trust/manager.go b/oauth2/trust/manager.go index 3e822b60d3c..f48aba3ddc7 100644 --- a/oauth2/trust/manager.go +++ b/oauth2/trust/manager.go @@ -7,32 +7,16 @@ import ( "context" "time" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" - "gopkg.in/square/go-jose.v2" + + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" ) type GrantManager interface { CreateGrant(ctx context.Context, g Grant, publicKey jose.JSONWebKey) error - GetConcreteGrant(ctx context.Context, id string) (Grant, error) - DeleteGrant(ctx context.Context, id string) error - GetGrants(ctx context.Context, limit, offset int, optionalIssuer string) ([]Grant, error) - CountGrants(ctx context.Context) (int, error) + GetConcreteGrant(ctx context.Context, id uuid.UUID) (Grant, error) + DeleteGrant(ctx context.Context, id uuid.UUID) error + GetGrants(ctx context.Context, optionalIssuer string, pageOpts ...keysetpagination.Option) ([]Grant, *keysetpagination.Paginator, error) FlushInactiveGrants(ctx context.Context, notAfter time.Time, limit int, batchSize int) error } - -type SQLData struct { - ID string `db:"id"` - NID uuid.UUID `db:"nid"` - Issuer string `db:"issuer"` - Subject string `db:"subject"` - AllowAnySubject bool `db:"allow_any_subject"` - Scope string `db:"scope"` - KeySet string `db:"key_set"` - KeyID string `db:"key_id"` - CreatedAt time.Time `db:"created_at"` - ExpiresAt time.Time `db:"expires_at"` -} - -func (SQLData) TableName() string { - return "hydra_oauth2_trusted_jwt_bearer_issuer" -} diff --git a/oauth2/trust/manager_test_helpers.go b/oauth2/trust/manager_test_helpers.go index e51407b905b..4ba6169e4c3 100644 --- a/oauth2/trust/manager_test_helpers.go +++ b/oauth2/trust/manager_test_helpers.go @@ -4,58 +4,51 @@ package trust import ( - "context" "sort" "testing" "time" - "github.com/ory/x/josex" - - "github.com/google/uuid" + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "gopkg.in/square/go-jose.v2" - "github.com/ory/hydra/jwk" + "github.com/ory/x/josex" + "github.com/ory/x/sqlcon" + + "github.com/ory/hydra/v2/jwk" ) func TestHelperGrantManagerCreateGetDeleteGrant(t1 GrantManager, km jwk.Manager, parallel bool) func(t *testing.T) { - tokenServicePubKey1 := jose.JSONWebKey{} - tokenServicePubKey2 := jose.JSONWebKey{} - mikePubKey := jose.JSONWebKey{} - return func(t *testing.T) { if parallel { t.Parallel() } - kid1, kid2 := uuid.NewString(), uuid.NewString() - kid3 := uuid.NewString() - set := uuid.NewString() + kid1, kid2 := uuid.Must(uuid.NewV4()).String(), uuid.Must(uuid.NewV4()).String() + kid3 := uuid.Must(uuid.NewV4()).String() + set := uuid.Must(uuid.NewV4()).String() - keySet, err := km.GenerateAndPersistKeySet(context.Background(), set, kid1, 
string(jose.RS256), "sig") + key1, err := jwk.GenerateJWK(jose.RS256, kid1, "sig") require.NoError(t, err) - tokenServicePubKey1 = josex.ToPublicKey(&keySet.Keys[0]) + tokenServicePubKey1 := josex.ToPublicKey(&key1.Keys[0]) - keySet, err = km.GenerateAndPersistKeySet(context.Background(), set, kid2, string(jose.RS256), "sig") + key2, err := jwk.GenerateJWK(jose.RS256, kid2, "sig") require.NoError(t, err) - tokenServicePubKey2 = josex.ToPublicKey(&keySet.Keys[0]) + tokenServicePubKey2 := josex.ToPublicKey(&key2.Keys[0]) - keySet, err = km.GenerateAndPersistKeySet(context.Background(), "https://mike.example.com", kid3, string(jose.RS256), "sig") + key3, err := jwk.GenerateJWK(jose.RS256, kid3, "sig") require.NoError(t, err) - mikePubKey = josex.ToPublicKey(&keySet.Keys[0]) + mikePubKey := josex.ToPublicKey(&key3.Keys[0]) - storedGrants, err := t1.GetGrants(context.TODO(), 100, 0, "") + storedGrants, nextPage, err := t1.GetGrants(t.Context(), "") require.NoError(t, err) assert.Len(t, storedGrants, 0) - - count, err := t1.CountGrants(context.TODO()) - require.NoError(t, err) - assert.Equal(t, 0, count) + assert.True(t, nextPage.IsLast()) createdAt := time.Now().UTC().Round(time.Second) expiresAt := createdAt.AddDate(1, 0, 0) grant := Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: set, Subject: "bob@example.com", Scope: []string{"openid", "offline"}, @@ -67,10 +60,9 @@ func TestHelperGrantManagerCreateGetDeleteGrant(t1 GrantManager, km jwk.Manager, ExpiresAt: expiresAt, } - err = t1.CreateGrant(context.TODO(), grant, tokenServicePubKey1) - require.NoError(t, err) + require.NoError(t, t1.CreateGrant(t.Context(), grant, tokenServicePubKey1)) - storedGrant, err := t1.GetConcreteGrant(context.TODO(), grant.ID) + storedGrant, err := t1.GetConcreteGrant(t.Context(), grant.ID) require.NoError(t, err) assert.Equal(t, grant.ID, storedGrant.ID) assert.Equal(t, grant.Issuer, storedGrant.Issuer) @@ -81,7 +73,7 @@ func TestHelperGrantManagerCreateGetDeleteGrant(t1 GrantManager, km jwk.Manager, assert.Equal(t, grant.ExpiresAt.Format(time.RFC3339), storedGrant.ExpiresAt.Format(time.RFC3339)) grant2 := Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: set, Subject: "maria@example.com", Scope: []string{"openid"}, @@ -92,11 +84,10 @@ func TestHelperGrantManagerCreateGetDeleteGrant(t1 GrantManager, km jwk.Manager, CreatedAt: createdAt.Add(time.Minute * 5), ExpiresAt: createdAt.Add(-time.Minute * 5), } - err = t1.CreateGrant(context.TODO(), grant2, tokenServicePubKey2) - require.NoError(t, err) + require.NoError(t, t1.CreateGrant(t.Context(), grant2, tokenServicePubKey2)) grant3 := Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: "https://mike.example.com", Subject: "mike@example.com", Scope: []string{"permissions", "openid", "offline"}, @@ -108,14 +99,9 @@ func TestHelperGrantManagerCreateGetDeleteGrant(t1 GrantManager, km jwk.Manager, ExpiresAt: createdAt.Add(-time.Hour * 24), } - err = t1.CreateGrant(context.TODO(), grant3, mikePubKey) - require.NoError(t, err) - - count, err = t1.CountGrants(context.TODO()) - require.NoError(t, err) - assert.Equal(t, 3, count) + require.NoError(t, t1.CreateGrant(t.Context(), grant3, mikePubKey)) - storedGrants, err = t1.GetGrants(context.TODO(), 100, 0, "") + storedGrants, nextPage, err = t1.GetGrants(t.Context(), "") sort.Slice(storedGrants, func(i, j int) bool { return storedGrants[i].CreatedAt.Before(storedGrants[j].CreatedAt) }) @@ -124,8 +110,9 @@ func TestHelperGrantManagerCreateGetDeleteGrant(t1 
GrantManager, km jwk.Manager, assert.Equal(t, grant.ID, storedGrants[0].ID) assert.Equal(t, grant2.ID, storedGrants[1].ID) assert.Equal(t, grant3.ID, storedGrants[2].ID) + assert.True(t, nextPage.IsLast()) - storedGrants, err = t1.GetGrants(context.TODO(), 100, 0, set) + storedGrants, nextPage, err = t1.GetGrants(t.Context(), set) sort.Slice(storedGrants, func(i, j int) bool { return storedGrants[i].CreatedAt.Before(storedGrants[j].CreatedAt) }) @@ -133,50 +120,42 @@ func TestHelperGrantManagerCreateGetDeleteGrant(t1 GrantManager, km jwk.Manager, require.Len(t, storedGrants, 2) assert.Equal(t, grant.ID, storedGrants[0].ID) assert.Equal(t, grant2.ID, storedGrants[1].ID) + assert.True(t, nextPage.IsLast()) - err = t1.DeleteGrant(context.TODO(), grant.ID) - require.NoError(t, err) - - _, err = t1.GetConcreteGrant(context.TODO(), grant.ID) - require.Error(t, err) - - count, err = t1.CountGrants(context.TODO()) - require.NoError(t, err) - assert.Equal(t, 2, count) + require.NoError(t, t1.DeleteGrant(t.Context(), grant.ID)) - err = t1.FlushInactiveGrants(context.TODO(), grant2.ExpiresAt, 1000, 100) - require.NoError(t, err) + _, err = t1.GetConcreteGrant(t.Context(), grant.ID) + require.ErrorIs(t, err, sqlcon.ErrNoRows) - count, err = t1.CountGrants(context.TODO()) - require.NoError(t, err) - assert.Equal(t, 1, count) + require.NoError(t, t1.FlushInactiveGrants(t.Context(), grant2.ExpiresAt, 1000, 100)) - _, err = t1.GetConcreteGrant(context.TODO(), grant2.ID) + _, err = t1.GetConcreteGrant(t.Context(), grant2.ID) assert.NoError(t, err) + + _, err = t1.GetConcreteGrant(t.Context(), grant3.ID) + assert.ErrorIs(t, err, sqlcon.ErrNoRows) } } -func TestHelperGrantManagerErrors(m GrantManager, km jwk.Manager, parallel bool) func(t *testing.T) { - pubKey1 := jose.JSONWebKey{} - pubKey2 := jose.JSONWebKey{} - +func TestHelperGrantManagerErrors(m GrantManager, km jwk.Manager) func(t *testing.T) { return func(t *testing.T) { - set := uuid.NewString() - kid1, kid2 := uuid.NewString(), uuid.NewString() + set := uuid.Must(uuid.NewV4()).String() + kid1, kid2 := uuid.Must(uuid.NewV4()).String(), uuid.Must(uuid.NewV4()).String() t.Parallel() - keySet, err := km.GenerateAndPersistKeySet(context.Background(), set, kid1, string(jose.RS256), "sig") + + key1, err := jwk.GenerateJWK(jose.RS256, kid1, "sig") require.NoError(t, err) - pubKey1 = josex.ToPublicKey(&keySet.Keys[0]) + pubKey1 := josex.ToPublicKey(&key1.Keys[0]) - keySet, err = km.GenerateAndPersistKeySet(context.Background(), set, kid2, string(jose.RS256), "sig") + key2, err := jwk.GenerateJWK(jose.RS256, kid2, "sig") require.NoError(t, err) - pubKey2 = josex.ToPublicKey(&keySet.Keys[0]) + pubKey2 := josex.ToPublicKey(&key2.Keys[0]) createdAt := time.Now() expiresAt := createdAt.AddDate(1, 0, 0) grant := Grant{ - ID: uuid.New().String(), + ID: uuid.Must(uuid.NewV4()), Issuer: "issuer", Subject: "subject", Scope: []string{"openid", "offline"}, @@ -188,26 +167,24 @@ func TestHelperGrantManagerErrors(m GrantManager, km jwk.Manager, parallel bool) ExpiresAt: expiresAt, } - err = m.CreateGrant(context.TODO(), grant, pubKey1) - require.NoError(t, err) + require.NoError(t, m.CreateGrant(t.Context(), grant, pubKey1)) - grant.ID = uuid.New().String() - err = m.CreateGrant(context.TODO(), grant, pubKey1) - require.Error(t, err, "error expected, because combination of issuer + subject + key_id must be unique") + grant.ID = uuid.Must(uuid.NewV4()) + err = m.CreateGrant(t.Context(), grant, pubKey1) + require.ErrorIs(t, err, sqlcon.ErrUniqueViolation, "error expected, 
because combination of issuer + subject + key_id must be unique") grant2 := grant grant2.PublicKey = PublicKey{ Set: set, KeyID: kid2, } - err = m.CreateGrant(context.TODO(), grant2, pubKey2) - require.NoError(t, err) + require.NoError(t, m.CreateGrant(t.Context(), grant2, pubKey2)) - nonExistingGrantID := uuid.New().String() - err = m.DeleteGrant(context.TODO(), nonExistingGrantID) + nonExistingGrantID := uuid.Must(uuid.NewV4()) + err = m.DeleteGrant(t.Context(), nonExistingGrantID) require.Error(t, err, "expect error, when deleting non-existing grant") - _, err = m.GetConcreteGrant(context.TODO(), nonExistingGrantID) + _, err = m.GetConcreteGrant(t.Context(), nonExistingGrantID) require.Error(t, err, "expect error, when fetching non-existing grant") } } diff --git a/oauth2/trust/registry.go b/oauth2/trust/registry.go index 6184f8e9072..68af06e8340 100644 --- a/oauth2/trust/registry.go +++ b/oauth2/trust/registry.go @@ -4,16 +4,19 @@ package trust import ( - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/x" ) type InternalRegistry interface { x.RegistryWriter x.RegistryLogger Registry + config.Provider + jwk.ManagerProvider } type Registry interface { GrantManager() GrantManager - GrantValidator() *GrantValidator } diff --git a/oauth2/trust/request.go b/oauth2/trust/request.go index 3ba7be2655b..853b7e00d53 100644 --- a/oauth2/trust/request.go +++ b/oauth2/trust/request.go @@ -6,7 +6,7 @@ package trust import ( "time" - "gopkg.in/square/go-jose.v2" + "github.com/go-jose/go-jose/v3" ) type createGrantRequest struct { diff --git a/oauth2/trust/validator.go b/oauth2/trust/validator.go index ad113a4df84..177529a0ee2 100644 --- a/oauth2/trust/validator.go +++ b/oauth2/trust/validator.go @@ -3,36 +3,27 @@ package trust -import ( - "github.com/ory/x/errorsx" -) +import "github.com/pkg/errors" -type GrantValidator struct { -} - -func NewGrantValidator() *GrantValidator { - return &GrantValidator{} -} - -func (v *GrantValidator) Validate(request createGrantRequest) error { +func validateGrant(request createGrantRequest) error { if request.Issuer == "" { - return errorsx.WithStack(ErrMissingRequiredParameter.WithHint("Field 'issuer' is required.")) + return errors.WithStack(ErrMissingRequiredParameter.WithHint("Field 'issuer' is required.")) } if request.Subject == "" && !request.AllowAnySubject { - return errorsx.WithStack(ErrMissingRequiredParameter.WithHint("One of 'subject' or 'allow_any_subject' field must be set.")) + return errors.WithStack(ErrMissingRequiredParameter.WithHint("One of 'subject' or 'allow_any_subject' field must be set.")) } if request.Subject != "" && request.AllowAnySubject { - return errorsx.WithStack(ErrMissingRequiredParameter.WithHint("Both 'subject' and 'allow_any_subject' fields cannot be set at the same time.")) + return errors.WithStack(ErrMissingRequiredParameter.WithHint("Both 'subject' and 'allow_any_subject' fields cannot be set at the same time.")) } if request.ExpiresAt.IsZero() { - return errorsx.WithStack(ErrMissingRequiredParameter.WithHint("Field 'expires_at' is required.")) + return errors.WithStack(ErrMissingRequiredParameter.WithHint("Field 'expires_at' is required.")) } if request.PublicKeyJWK.KeyID == "" { - return errorsx.WithStack(ErrMissingRequiredParameter.WithHint("Field 'jwk' must contain JWK with kid header.")) + return 
errors.WithStack(ErrMissingRequiredParameter.WithHint("Field 'jwk' must contain JWK with kid header.")) } return nil diff --git a/oauth2/trust/validator_test.go b/oauth2/trust/validator_test.go index 1d07b5def05..96ae73de2d7 100644 --- a/oauth2/trust/validator_test.go +++ b/oauth2/trust/validator_test.go @@ -7,12 +7,14 @@ import ( "testing" "time" - "gopkg.in/square/go-jose.v2" + "github.com/go-jose/go-jose/v3" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" ) func TestEmptyIssuerIsInvalid(t *testing.T) { - v := GrantValidator{} - r := createGrantRequest{ Issuer: "", Subject: "valid-subject", @@ -23,14 +25,12 @@ func TestEmptyIssuerIsInvalid(t *testing.T) { }, } - if err := v.Validate(r); err == nil { - t.Error("an empty issuer should not be valid") - } + err := &fosite.RFC6749Error{} + require.ErrorAs(t, validateGrant(r), &err) + assert.Equal(t, "Field 'issuer' is required.", err.HintField) } func TestEmptySubjectAndNoAnySubjectFlagIsInvalid(t *testing.T) { - v := GrantValidator{} - r := createGrantRequest{ Issuer: "valid-issuer", Subject: "", @@ -41,14 +41,12 @@ func TestEmptySubjectAndNoAnySubjectFlagIsInvalid(t *testing.T) { }, } - if err := v.Validate(r); err == nil { - t.Error("an empty subject should not be valid") - } + err := &fosite.RFC6749Error{} + require.ErrorAs(t, validateGrant(r), &err) + assert.Equal(t, "One of 'subject' or 'allow_any_subject' field must be set.", err.HintField) } func TestEmptySubjectWithAnySubjectFlagIsValid(t *testing.T) { - v := GrantValidator{} - r := createGrantRequest{ Issuer: "valid-issuer", Subject: "", @@ -59,14 +57,10 @@ func TestEmptySubjectWithAnySubjectFlagIsValid(t *testing.T) { }, } - if err := v.Validate(r); err != nil { - t.Error("an empty subject with the allow_any_subject flag should be valid") - } + assert.NoError(t, validateGrant(r)) } func TestNonEmptySubjectWithAnySubjectFlagIsInvalid(t *testing.T) { - v := GrantValidator{} - r := createGrantRequest{ Issuer: "valid-issuer", Subject: "some-issuer", @@ -77,14 +71,12 @@ func TestNonEmptySubjectWithAnySubjectFlagIsInvalid(t *testing.T) { }, } - if err := v.Validate(r); err == nil { - t.Error("a non empty subject with the allow_any_subject flag should not be valid") - } + err := &fosite.RFC6749Error{} + require.ErrorAs(t, validateGrant(r), &err) + assert.Equal(t, "Both 'subject' and 'allow_any_subject' fields cannot be set at the same time.", err.HintField) } func TestEmptyExpiresAtIsInvalid(t *testing.T) { - v := GrantValidator{} - r := createGrantRequest{ Issuer: "valid-issuer", Subject: "valid-subject", @@ -95,14 +87,12 @@ func TestEmptyExpiresAtIsInvalid(t *testing.T) { }, } - if err := v.Validate(r); err == nil { - t.Error("an empty expiration should not be valid") - } + err := &fosite.RFC6749Error{} + require.ErrorAs(t, validateGrant(r), &err) + assert.Equal(t, "Field 'expires_at' is required.", err.HintField) } func TestEmptyPublicKeyIdIsInvalid(t *testing.T) { - v := GrantValidator{} - r := createGrantRequest{ Issuer: "valid-issuer", Subject: "valid-subject", @@ -113,14 +103,12 @@ func TestEmptyPublicKeyIdIsInvalid(t *testing.T) { }, } - if err := v.Validate(r); err == nil { - t.Error("an empty public key id should not be valid") - } + err := &fosite.RFC6749Error{} + require.ErrorAs(t, validateGrant(r), &err) + assert.Equal(t, "Field 'jwk' must contain JWK with kid header.", err.HintField) } func TestIsValid(t *testing.T) { - v := GrantValidator{} - r := 
createGrantRequest{ Issuer: "valid-issuer", Subject: "valid-subject", @@ -131,7 +119,5 @@ func TestIsValid(t *testing.T) { }, } - if err := v.Validate(r); err != nil { - t.Error("A request with an issuer, a subject, an expiration and a public key should be valid") - } + assert.NoError(t, validateGrant(r)) } diff --git a/oauth2/verifiable_credentials.go b/oauth2/verifiable_credentials.go new file mode 100644 index 00000000000..e8af528f7d5 --- /dev/null +++ b/oauth2/verifiable_credentials.go @@ -0,0 +1,87 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package oauth2 + +import ( + "encoding/json" + + "github.com/golang-jwt/jwt/v5" + + "github.com/ory/hydra/v2/fosite" +) + +// Request a Verifiable Credential +// +// swagger:parameters createVerifiableCredential +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions +type createVerifiableCredentialRequest struct { + // in: body + Body CreateVerifiableCredentialRequestBody +} + +// CreateVerifiableCredentialRequestBody contains the request body to request a verifiable credential. +// +// swagger:parameters createVerifiableCredentialRequestBody +type CreateVerifiableCredentialRequestBody struct { + Format string `json:"format"` + Types []string `json:"types"` + Proof *VerifiableCredentialProof `json:"proof"` +} + +// VerifiableCredentialProof contains the proof of a verifiable credential. +// +// swagger:parameters verifiableCredentialProof +type VerifiableCredentialProof struct { + ProofType string `json:"proof_type"` + JWT string `json:"jwt"` +} + +// VerifiableCredentialResponse contains the verifiable credential. +// +// swagger:model verifiableCredentialResponse +type VerifiableCredentialResponse struct { + Format string `json:"format"` + Credential string `json:"credential_draft_00"` +} + +// VerifiableCredentialPrimingResponse contains the nonce to include in the proof-of-possession JWT. 
+// +// swagger:model verifiableCredentialPrimingResponse +type VerifiableCredentialPrimingResponse struct { + Format string `json:"format"` + Nonce string `json:"c_nonce"` + NonceExpiresIn int64 `json:"c_nonce_expires_in"` + + fosite.RFC6749ErrorJson +} + +type VerifableCredentialClaims struct { + jwt.RegisteredClaims + VerifiableCredential VerifiableCredentialClaim `json:"vc"` +} +type VerifiableCredentialClaim struct { + Context []string `json:"@context"` + Subject map[string]any `json:"credentialSubject"` + Type []string `json:"type"` +} + +func (v *VerifableCredentialClaims) GetAudience() (jwt.ClaimStrings, error) { + return jwt.ClaimStrings{}, nil +} + +func (v *VerifableCredentialClaims) ToMapClaims() (res map[string]any, err error) { + res = map[string]any{} + + bs, err := json.Marshal(v) + if err != nil { + return nil, err + } + err = json.Unmarshal(bs, &res) + if err != nil { + return nil, err + } + + return res, nil +} diff --git a/openapitools.json b/openapitools.json index 54d00804a3d..81d278a3853 100644 --- a/openapitools.json +++ b/openapitools.json @@ -2,6 +2,6 @@ "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json", "spaces": 2, "generator-cli": { - "version": "6.0.1" + "version": "7.14.0" } } diff --git a/oryx/.gitignore b/oryx/.gitignore new file mode 100644 index 00000000000..7c9bd24da6c --- /dev/null +++ b/oryx/.gitignore @@ -0,0 +1,8 @@ +.bin +vendor +.idea +coverage.txt +node_modules/ +**/*.pprof +**/memstats.*.txt +.vscode/settings.json diff --git a/oryx/.goimportsignore b/oryx/.goimportsignore new file mode 100644 index 00000000000..a725465aee2 --- /dev/null +++ b/oryx/.goimportsignore @@ -0,0 +1 @@ +vendor/ \ No newline at end of file diff --git a/oryx/.golangci.yml b/oryx/.golangci.yml new file mode 100644 index 00000000000..7f5807ece1a --- /dev/null +++ b/oryx/.golangci.yml @@ -0,0 +1,9 @@ +version: "2" + +linters: + enable: + - gosec + - errcheck + - ineffassign + - staticcheck + - unused diff --git a/oryx/.nancy-ignore b/oryx/.nancy-ignore new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/.prettierignore b/oryx/.prettierignore new file mode 100644 index 00000000000..3d36f2d2244 --- /dev/null +++ b/oryx/.prettierignore @@ -0,0 +1,5 @@ +.github/pull_request_template.md +clidoc/testdata/ +healthx/openapi/patch.yaml +.snapshots +fixtures diff --git a/oryx/.reference-ignore b/oryx/.reference-ignore new file mode 100644 index 00000000000..eee2a89c2ed --- /dev/null +++ b/oryx/.reference-ignore @@ -0,0 +1,3 @@ +**/node_modules +docs +CHANGELOG.md diff --git a/oryx/.schemas/corsx/viper.schema.json b/oryx/.schemas/corsx/viper.schema.json new file mode 100644 index 00000000000..c7e61f65e1f --- /dev/null +++ b/oryx/.schemas/corsx/viper.schema.json @@ -0,0 +1,92 @@ +{ + "$id": "https://raw.githubusercontent.com/ory/x/master/.schemas/corsx/viper.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Cross Origin Resource Sharing (CORS)", + "description": "Configure [Cross Origin Resource Sharing (CORS)](http://www.w3.org/TR/cors/) using the following options.", + "type": "object", + "properties": { + "enabled": { + "type": "boolean", + "default": false, + "title": "Enable CORS", + "description": "If set to true, CORS will be enabled and preflight-requests (OPTION) will be answered." + }, + "allowed_origins": { + "title": "Allowed Origins", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. 
An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). Usage of wildcards implies a small performance penality. Only one wildcard can be used per origin.", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "default": ["*"], + "uniqueItems": true, + "examples": [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + }, + "allowed_methods": { + "type": "array", + "title": "Allowed HTTP Methods", + "description": "A list of methods the client is allowed to use with cross-domain requests.", + "items": { + "type": "string", + "enum": [ + "GET", + "HEAD", + "POST", + "PUT", + "DELETE", + "CONNECT", + "TRACE", + "PATCH" + ] + }, + "uniqueItems": true, + "default": ["GET", "POST", "PUT", "PATCH", "DELETE"] + }, + "allowed_headers": { + "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "title": "Allowed Request HTTP Headers", + "type": "array", + "items": { + "type": "string" + }, + "minLength": 1, + "uniqueItems": true, + "default": ["Authorization", "Content-Type"] + }, + "exposed_headers": { + "description": "Indicates which headers are safe to expose to the API of a CORS API specification", + "title": "Allowed Response HTTP Headers", + "type": "array", + "items": { + "type": "string" + }, + "minLength": 1, + "uniqueItems": true, + "default": ["Content-Type"] + }, + "allow_credentials": { + "type": "boolean", + "title": "Allow HTTP Credentials", + "default": false, + "description": "Indicates whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates." + }, + "max_age": { + "type": "number", + "default": 0, + "title": "Maximum Age", + "description": "Indicates how long (in seconds) the results of a preflight request can be cached. The default is 0 which stands for no max age." + }, + "debug": { + "type": "boolean", + "default": false, + "title": "Enable Debugging", + "description": "Set to true to debug server side CORS issues." + } + }, + "additionalProperties": false +} diff --git a/oryx/.schemas/logrusx/viper.schema.json b/oryx/.schemas/logrusx/viper.schema.json new file mode 100644 index 00000000000..8648e8d5e68 --- /dev/null +++ b/oryx/.schemas/logrusx/viper.schema.json @@ -0,0 +1,24 @@ +{ + "$id": "https://raw.githubusercontent.com/ory/x/master/.schemas/logrusx/viper.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Log", + "description": "Configure logging using the following options. Logging will always be sent to stdout and stderr.", + "type": "object", + "properties": { + "level": { + "type": "string", + "default": "info", + "enum": ["panic", "fatal", "error", "warn", "info", "debug"], + "title": "Level", + "description": "Debug enables stack traces on errors. Can also be set using environment variable LOG_LEVEL." + }, + "format": { + "type": "string", + "default": "text", + "enum": ["text", "json"], + "title": "Format", + "description": "The log format can either be text or JSON." 
+ } + }, + "additionalProperties": false +} diff --git a/oryx/.schemas/profilingx/viper.schema.json b/oryx/.schemas/profilingx/viper.schema.json new file mode 100644 index 00000000000..af66f303b87 --- /dev/null +++ b/oryx/.schemas/profilingx/viper.schema.json @@ -0,0 +1,8 @@ +{ + "$id": "https://raw.githubusercontent.com/ory/x/master/.schemas/profilingx/viper.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Profiling", + "description": "Enables CPU or memory profiling if set. For more details on profiling Go programs read [Profiling Go Programs](https://blog.golang.org/profiling-go-programs).", + "type": "string", + "enum": ["cpu", "mem"] +} diff --git a/oryx/.schemas/tlsx/viper.schema.json b/oryx/.schemas/tlsx/viper.schema.json new file mode 100644 index 00000000000..2ba259c910b --- /dev/null +++ b/oryx/.schemas/tlsx/viper.schema.json @@ -0,0 +1,47 @@ +{ + "$id": "https://raw.githubusercontent.com/ory/x/master/.schemas/tlsx/viper.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "HTTPS", + "description": "Configure HTTP over TLS (HTTPS). All options can also be set using environment variables by replacing dots (`.`) with underscores (`_`) and uppercasing the key. For example, `some.prefix.tls.key.path` becomes `export SOME_PREFIX_TLS_KEY_PATH`. If all keys are left undefined, TLS will be disabled.", + "type": "object", + "additionalProperties": false, + "definitions": { + "source": { + "type": "object", + "additionalProperties": false, + "properties": { + "path": { + "title": "Path to PEM-encoded Fle", + "type": "string", + "examples": ["path/to/file.pem"] + }, + "base64": { + "title": "Base64 Encoded Inline", + "description": "The base64 string of the PEM-encoded file content. Can be generated using for example `base64 -i path/to/file.pem`.", + "type": "string", + "examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." + ] + } + } + } + }, + "properties": { + "key": { + "title": "Private Key (PEM)", + "allOf": [ + { + "$ref": "#/definitions/source" + } + ] + }, + "cert": { + "title": "TLS Certificate (PEM)", + "allOf": [ + { + "$ref": "#/definitions/source" + } + ] + } + } +} diff --git a/oryx/LICENSE b/oryx/LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/oryx/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/oryx/Makefile b/oryx/Makefile new file mode 100644 index 00000000000..c0d4c998a5f --- /dev/null +++ b/oryx/Makefile @@ -0,0 +1,61 @@ +SHELL=/bin/bash -o pipefail + +export PATH := .bin:${PATH} + +.bin/ory: Makefile + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/meta/master/install.sh | bash -s -- -b .bin ory v0.2.2 + touch .bin/ory + +.PHONY: format +format: .bin/ory node_modules + .bin/ory dev headers copyright --type=open-source --exclude=clidoc/ --exclude=hasherx/mocks_pkdbf2_test.go --exclude=josex/ --exclude=hasherx/ --exclude=jsonnetsecure/jsonnet.go + go tool goimports -w -local github.com/ory . + npm exec -- prettier --write . + +.bin/golangci-lint: Makefile + curl --retry 7 --retry-connrefused -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b .bin v2.4.0 + +.bin/licenses: Makefile + curl --retry 7 --retry-connrefused https://raw.githubusercontent.com/ory/ci/master/licenses/install | sh + +licenses: .bin/licenses node_modules # checks open-source licenses + .bin/licenses + +.PHONY: test +test: + make resetdb + export TEST_DATABASE_POSTGRESQL=postgres://postgres:secret@127.0.0.1:3445/hydra?sslmode=disable; export TEST_DATABASE_COCKROACHDB=cockroach://root@127.0.0.1:3446/defaultdb?sslmode=disable; export TEST_DATABASE_MYSQL='mysql://root:secret@tcp(127.0.0.1:3444)/mysql?parseTime=true&multiStatements=true'; go test -count=1 -tags sqlite ./... + +.PHONY: resetdb +resetdb: + docker kill hydra_test_database_mysql || true + docker kill hydra_test_database_postgres || true + docker kill hydra_test_database_cockroach || true + docker rm -f hydra_test_database_mysql || true + docker rm -f hydra_test_database_postgres || true + docker rm -f hydra_test_database_cockroach || true + docker run --rm --name hydra_test_database_mysql -p 3444:3306 -e MYSQL_ROOT_PASSWORD=secret -d mysql:8.0 + docker run --rm --name hydra_test_database_postgres -p 3445:5432 -e POSTGRES_PASSWORD=secret -e POSTGRES_DB=hydra -d postgres:11.8 + docker run --rm --name hydra_test_database_cockroach -p 3446:26257 -d cockroachdb/cockroach:latest-v25.4 start-single-node --insecure + +.PHONY: lint +lint: .bin/golangci-lint + GO111MODULE=on .bin/golangci-lint run -v ./... 
+ +.PHONY: migrations-render +migrations-render: .bin/ory + ory dev pop migration render networkx/migrations/templates networkx/migrations/sql + +.PHONY: migrations-render-replace +migrations-render-replace: .bin/ory + ory dev pop migration render -r networkx/migrations/templates networkx/migrations/sql + +.PHONY: mocks +mocks: + go tool mockgen -package hasherx_test -destination hasherx/mocks_argon2_test.go github.com/ory/x/hasherx Argon2Configurator + go tool mockgen -package hasherx_test -destination hasherx/mocks_bcrypt_test.go github.com/ory/x/hasherx BCryptConfigurator + go tool mockgen -package hasherx_test -destination hasherx/mocks_pkdbf2_test.go github.com/ory/x/hasherx PBKDF2Configurator + +node_modules: package-lock.json + npm ci + touch node_modules diff --git a/oryx/assertx/assertx.go b/oryx/assertx/assertx.go new file mode 100644 index 00000000000..30a47e3f33e --- /dev/null +++ b/oryx/assertx/assertx.go @@ -0,0 +1,65 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package assertx + +import ( + "bytes" + "encoding/json" + "strings" + "testing" + "time" + + "github.com/tidwall/sjson" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func PrettifyJSONPayload(t testing.TB, payload interface{}) string { + t.Helper() + o, err := json.MarshalIndent(payload, "", " ") + require.NoError(t, err) + return string(o) +} + +func EqualAsJSON(t testing.TB, expected, actual interface{}, args ...interface{}) { + t.Helper() + var eb, ab bytes.Buffer + if len(args) == 0 { + args = []interface{}{PrettifyJSONPayload(t, actual)} + } + + require.NoError(t, json.NewEncoder(&eb).Encode(expected), args...) + require.NoError(t, json.NewEncoder(&ab).Encode(actual), args...) + assert.JSONEq(t, strings.TrimSpace(eb.String()), strings.TrimSpace(ab.String()), args...) +} + +func EqualAsJSONExcept(t testing.TB, expected, actual interface{}, except []string, args ...interface{}) { + t.Helper() + var eb, ab bytes.Buffer + if len(args) == 0 { + args = []interface{}{PrettifyJSONPayload(t, actual)} + } + + require.NoError(t, json.NewEncoder(&eb).Encode(expected), args...) + require.NoError(t, json.NewEncoder(&ab).Encode(actual), args...) + + var err error + ebs, abs := eb.String(), ab.String() + for _, k := range except { + ebs, err = sjson.Delete(ebs, k) + require.NoError(t, err) + + abs, err = sjson.Delete(abs, k) + require.NoError(t, err) + } + + assert.JSONEq(t, strings.TrimSpace(ebs), strings.TrimSpace(abs), args...) +} + +// Deprecated: use assert.WithinDuration instead +func TimeDifferenceLess(t testing.TB, t1, t2 time.Time, seconds int) { + t.Helper() + assert.WithinDuration(t, t1, t2, time.Duration(seconds)*time.Second) +} diff --git a/oryx/cachex/ristretto.go b/oryx/cachex/ristretto.go new file mode 100644 index 00000000000..89fa8739948 --- /dev/null +++ b/oryx/cachex/ristretto.go @@ -0,0 +1,67 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cachex + +import ( + "github.com/dgraph-io/ristretto/v2" + "github.com/prometheus/client_golang/prometheus" +) + +// RistrettoCollector collects Ristretto cache metrics. +type RistrettoCollector struct { + prefix string + metricsFunc func() *ristretto.Metrics +} + +// NewRistrettoCollector creates a new RistrettoCollector. 
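Earlier in this hunk, assertx compares values by their JSON encoding and lets callers drop individual JSON paths before comparing. A small sketch of EqualAsJSONExcept in a test follows (illustrative, not part of the patch); the apiUser type and field names are invented, and the import path is assumed to be the upstream github.com/ory/x/assertx package that oryx mirrors.

// Sketch: assert JSON equality while ignoring a volatile timestamp field.
package example_test

import (
	"testing"
	"time"

	"github.com/ory/x/assertx"
)

type apiUser struct {
	ID        string    `json:"id"`
	Email     string    `json:"email"`
	CreatedAt time.Time `json:"created_at"`
}

func TestUserSerialization(t *testing.T) {
	expected := apiUser{ID: "42", Email: "bob@example.com"}
	actual := apiUser{ID: "42", Email: "bob@example.com", CreatedAt: time.Now()}

	// created_at differs between the two values, so it is excluded from the comparison.
	assertx.EqualAsJSONExcept(t, expected, actual, []string{"created_at"})
}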
+// +// To use this collector, you need to register it with a Prometheus registry: +// +// func main() { +// cache, _ := ristretto.NewCache(&ristretto.Config{ +// NumCounters: 1e7, +// MaxCost: 1 << 30, +// BufferItems: 64, +// }) +// collector := NewRistrettoCollector("prefix_", func() *ristretto.Metrics { +// return cache.Metrics +// }) +// prometheus.MustRegister(collector) +// } +func NewRistrettoCollector(prefix string, metricsFunc func() *ristretto.Metrics) *RistrettoCollector { + return &RistrettoCollector{ + prefix: prefix, + metricsFunc: metricsFunc, + } +} + +// Describe sends the super-set of all possible descriptors of metrics +// collected by this Collector. +func (c *RistrettoCollector) Describe(ch chan<- *prometheus.Desc) { + ch <- prometheus.NewDesc(c.prefix+"ristretto_hits", "Total number of cache hits", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_misses", "Total number of cache misses", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_ratio", "Cache hit ratio", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_keys_added", "Total number of keys added to the cache", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_cost_added", "Total cost of keys added to the cache", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_keys_evicted", "Total number of keys evicted from the cache", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_cost_evicted", "Total cost of keys evicted from the cache", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_sets_dropped", "Total number of sets dropped", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_sets_rejected", "Total number of sets rejected", nil, nil) + ch <- prometheus.NewDesc(c.prefix+"ristretto_gets_kept", "Total number of gets kept", nil, nil) +} + +// Collect is called by the Prometheus registry when collecting metrics. 
+func (c *RistrettoCollector) Collect(ch chan<- prometheus.Metric) { + metrics := c.metricsFunc() + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_hits", "Total number of cache hits", nil, nil), prometheus.GaugeValue, float64(metrics.Hits())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_misses", "Total number of cache misses", nil, nil), prometheus.GaugeValue, float64(metrics.Misses())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_ratio", "Cache hit ratio", nil, nil), prometheus.GaugeValue, metrics.Ratio()) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_keys_added", "Total number of keys added to the cache", nil, nil), prometheus.GaugeValue, float64(metrics.KeysAdded())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_cost_added", "Total cost of keys added to the cache", nil, nil), prometheus.GaugeValue, float64(metrics.CostAdded())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_keys_evicted", "Total number of keys evicted from the cache", nil, nil), prometheus.GaugeValue, float64(metrics.KeysEvicted())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_cost_evicted", "Total cost of keys evicted from the cache", nil, nil), prometheus.GaugeValue, float64(metrics.CostEvicted())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_sets_dropped", "Total number of sets dropped", nil, nil), prometheus.GaugeValue, float64(metrics.SetsDropped())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_sets_rejected", "Total number of sets rejected", nil, nil), prometheus.GaugeValue, float64(metrics.SetsRejected())) + ch <- prometheus.MustNewConstMetric(prometheus.NewDesc(c.prefix+"ristretto_gets_kept", "Total number of gets kept", nil, nil), prometheus.GaugeValue, float64(metrics.GetsKept())) +} diff --git a/oryx/castx/castx.go b/oryx/castx/castx.go new file mode 100644 index 00000000000..5abc962f8d9 --- /dev/null +++ b/oryx/castx/castx.go @@ -0,0 +1,68 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package castx + +import ( + "encoding/csv" + "fmt" + "reflect" + "strings" + + "github.com/spf13/cast" +) + +// ToFloatSlice casts an interface to a []float64 type. +func ToFloatSlice(i interface{}) []float64 { + f, _ := ToFloatSliceE(i) + return f +} + +// ToFloatSliceE casts an interface to a []float64 type. +func ToFloatSliceE(i interface{}) ([]float64, error) { + if i == nil { + return []float64{}, fmt.Errorf("unable to cast %#v of type %T to []float64", i, i) + } + + switch v := i.(type) { + case []float64: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]float64, s.Len()) + for j := range a { + val, err := cast.ToFloat64E(s.Index(j).Interface()) + if err != nil { + return []float64{}, fmt.Errorf("unable to cast %#v of type %T to []float64", i, i) + } + a[j] = val + } + return a, nil + default: + return []float64{}, fmt.Errorf("unable to cast %#v of type %T to []float64", i, i) + } +} + +// ToStringSlice casts an interface to a []string type and respects comma-separated values. +func ToStringSlice(i interface{}) []string { + s, _ := ToStringSliceE(i) + return s +} + +// ToStringSliceE casts an interface to a []string type and respects comma-separated values. 
+func ToStringSliceE(i interface{}) ([]string, error) { + switch s := i.(type) { + case string: + return parseCSV(s) + } + + return cast.ToStringSliceE(i) +} + +func parseCSV(v string) ([]string, error) { + return csv.NewReader(strings.NewReader(v)).Read() +} diff --git a/oryx/clidoc/generate.go b/oryx/clidoc/generate.go new file mode 100644 index 00000000000..20daa66de0c --- /dev/null +++ b/oryx/clidoc/generate.go @@ -0,0 +1,78 @@ +package clidoc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/spf13/cobra" +) + +// Generate generates markdown documentation for a cobra command and its children. +func Generate(cmd *cobra.Command, args []string) error { + if len(args) != 1 { + return errors.New("command expects one argument which is the path to the output directory") + } + + return generate(cmd, args[0]) +} + +func trimExt(s string) string { + return strings.ReplaceAll(strings.TrimSuffix(s, filepath.Ext(s)), "_", "-") +} + +func generate(cmd *cobra.Command, dir string) error { + cmd.DisableAutoGenTag = true + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := generate(c, dir); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "-", -1) + if err := os.MkdirAll(filepath.Join(dir), 0750); err != nil { + return err + } + + filename := filepath.Join(dir, basename) + ".md" + f, err := os.Create(filename) //#nosec:G304 + if err != nil { + return err + } + defer (func() { _ = f.Close() })() + + if _, err := io.WriteString(f, fmt.Sprintf(`--- +id: %s +title: %s +description: %s +--- + + +`, + basename, + cmd.CommandPath(), + cmd.CommandPath(), + )); err != nil { + return err + } + + var b bytes.Buffer + if err := GenMarkdownCustom(cmd, &b, trimExt); err != nil { + return err + } + + _, err = f.WriteString(b.String()) + return err +} diff --git a/oryx/clidoc/md_docs.go b/oryx/clidoc/md_docs.go new file mode 100644 index 00000000000..9932475fad2 --- /dev/null +++ b/oryx/clidoc/md_docs.go @@ -0,0 +1,201 @@ +//Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package clidoc + +import ( + "bytes" + "fmt" + "html" + "io" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + + "github.com/ory/x/cmdx" + + "github.com/spf13/cobra" +) + +func printOptions(buf *bytes.Buffer, cmd *cobra.Command, name string) error { + flags := cmd.NonInheritedFlags() + flags.SetOutput(buf) + if flags.HasAvailableFlags() { + buf.WriteString("### Options\n\n```\n") + flags.PrintDefaults() + buf.WriteString("```\n\n") + } + + parentFlags := cmd.InheritedFlags() + parentFlags.SetOutput(buf) + if parentFlags.HasAvailableFlags() { + buf.WriteString("### Options inherited from parent commands\n\n```\n") + parentFlags.PrintDefaults() + buf.WriteString("```\n\n") + } + return nil +} + +// GenMarkdown creates markdown output. 
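clidoc.Generate, added above, expects exactly one argument (the output directory) and writes one markdown file per command in the tree. A sketch of exposing it from a cobra CLI follows (illustrative, not part of the patch); the CLI name, the gen-docs command, and the github.com/ory/x/clidoc import path are assumptions.

// Sketch: wire clidoc.Generate into a hidden subcommand so `mycli gen-docs <dir>`
// renders the markdown reference for the whole command tree.
package main

import (
	"log"

	"github.com/spf13/cobra"

	"github.com/ory/x/clidoc"
)

func main() {
	root := &cobra.Command{Use: "mycli", Short: "An example CLI"}
	root.AddCommand(&cobra.Command{
		Use:    "gen-docs <output-dir>",
		Short:  "Generate markdown documentation for all commands",
		Hidden: true,
		Args:   cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			// Document the root command and all of its children,
			// not the gen-docs command itself.
			return clidoc.Generate(cmd.Root(), args)
		},
	})
	if err := root.Execute(); err != nil {
		log.Fatal(err)
	}
}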
+func GenMarkdown(cmd *cobra.Command, w io.Writer) error { + return GenMarkdownCustom(cmd, w, func(s string) string { return s }) +} + +// GenMarkdownCustom creates custom markdown output. +func GenMarkdownCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + buf := new(bytes.Buffer) + name := cmd.CommandPath() + + buf.WriteString("## " + html.EscapeString(name) + "\n\n") + buf.WriteString(fenceIndentedBlocks(cmd.Short) + "\n\n") + if len(cmd.Long) > 0 { + buf.WriteString("### Synopsis\n\n") + long, err := cmdx.TemplateCommandField(cmd, cmd.Long) + if err != nil { + buf.WriteString(fmt.Sprintf("<!-- templating the long description failed: %s -->\n\n", err.Error())) + long = cmd.Long + } + buf.WriteString(fenceIndentedBlocks(long) + "\n\n") + } + + if cmd.Runnable() { + buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.UseLine())) + } + + if len(cmd.Example) > 0 { + buf.WriteString("### Examples\n\n") + example, err := cmdx.TemplateCommandField(cmd, cmd.Example) + if err != nil { + buf.WriteString(fmt.Sprintf("<!-- templating the examples failed: %s -->\n\n", err.Error())) + example = cmd.Example + } + buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", example)) + } + + if err := printOptions(buf, cmd, name); err != nil { + return err + } + if hasSeeAlso(cmd) { + buf.WriteString("### See also\n\n") + if cmd.HasParent() { + parent := cmd.Parent() + pname := parent.CommandPath() + link := pname + ".md" + link = strings.Replace(link, " ", "_", -1) + short := parent.Short + if short != "" { + short = fmt.Sprintf(" %s", short) + } + buf.WriteString(fmt.Sprintf("* [%s](%s)%s\n", pname, linkHandler(link), short)) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + + children := cmd.Commands() + sort.Sort(byName(children)) + + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + cname := name + " " + child.Name() + link := cname + ".md" + link = strings.Replace(link, " ", "_", -1) + short := child.Short + if short != "" { + short = fmt.Sprintf(" - %s", short) + } + buf.WriteString(fmt.Sprintf("* [%s](%s)\t%s\n", cname, linkHandler(link), short)) + } + buf.WriteString("\n") + } + + _, err := buf.WriteTo(w) + return err +} + +// GenMarkdownTree will generate a markdown page for this command and all +// descendants in the directory given. The header may be nil. +// This function may not work correctly if your command names have `-` in them. +// If you have `cmd` with two subcmds, `sub` and `sub-third`, +// and `sub` has a subcommand called `third`, it is undefined which +// help output will be in the file `cmd-sub-third.1`. +func GenMarkdownTree(cmd *cobra.Command, dir string) error { + identity := func(s string) string { return s } + emptyStr := func(s string) string { return "" } + return GenMarkdownTreeCustom(cmd, dir, emptyStr, identity) +} + +// GenMarkdownTreeCustom is the same as GenMarkdownTree, but +// with custom filePrepender and linkHandler.
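As a sketch of how clidoc.Generate is usually wired up, assuming a hidden documentation subcommand; the command and root names are illustrative, not part of this change:

package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"

	"github.com/ory/x/clidoc"
)

func main() {
	root := &cobra.Command{Use: "hydra"} // stand-in root command
	root.AddCommand(&cobra.Command{
		Use:    "generate-docs <output-dir>",
		Hidden: true,
		Args:   cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			// Writes one <command-path>.md file per command, front matter included.
			return clidoc.Generate(cmd.Root(), args)
		},
	})

	if err := root.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}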
+func GenMarkdownTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenMarkdownTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".md" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) //#nosec:G304) //#nosec:G304 + if err != nil { + return err + } + defer (func() { _ = f.Close() })() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenMarkdownCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} + +var indentedBlock = regexp.MustCompile(`(?m)^(?: {4}|\t).*(?:\n(?: {4}|\t).*)*`) + +func fenceIndentedBlocks(s string) string { + return indentedBlock.ReplaceAllStringFunc(s, func(block string) string { + // trim trailing newline to keep fence tidy + block = strings.TrimRight(block, "\n") + + // de-indent exactly one level for nicer fenced output + lines := strings.Split(block, "\n") + for i, ln := range lines { + switch { + case strings.HasPrefix(ln, " "): + lines[i] = ln[4:] + case strings.HasPrefix(ln, "\t"): + lines[i] = ln[1:] + } + } + b := strings.Join(lines, "\n") + + // guard against already fenced content + if strings.HasPrefix(strings.TrimSpace(b), "```") { + return block + } + return "```\n" + b + "\n```" + }) +} diff --git a/oryx/clidoc/testdata/hydra-client-admin.md b/oryx/clidoc/testdata/hydra-client-admin.md new file mode 100644 index 00000000000..935c4531099 --- /dev/null +++ b/oryx/clidoc/testdata/hydra-client-admin.md @@ -0,0 +1,48 @@ +--- +id: hydra-client-admin +title: hydra client admin +description: hydra client admin +--- + + +## hydra client admin + +Foo bar baz bar + +``` +short with multiple +``` + + + +### Synopsis + +Run the admin server + +``` +<[some argument]> + <[some argument]> + <[some argument]> + <[some argument]> +<[some argument]> +``` + + +``` +hydra client admin [flags] +``` + +### Options + +``` + -h, --help help for admin +``` + +### See also + +* [hydra client](hydra-client) Run client commands + diff --git a/oryx/clidoc/testdata/hydra-client-public.md b/oryx/clidoc/testdata/hydra-client-public.md new file mode 100644 index 00000000000..66f9c200113 --- /dev/null +++ b/oryx/clidoc/testdata/hydra-client-public.md @@ -0,0 +1,36 @@ +--- +id: hydra-client-public +title: hydra client public +description: hydra client public +--- + + +## hydra client public + + + +### Synopsis + +Run the public server + +<[some argument]> + + +``` +hydra client public [flags] +``` + +### Options + +``` + -h, --help help for public +``` + +### See also + +* [hydra client](hydra-client) Run client commands + diff --git a/oryx/clidoc/testdata/hydra-client.md b/oryx/clidoc/testdata/hydra-client.md new file mode 100644 index 00000000000..eb105cd12d3 --- /dev/null +++ b/oryx/clidoc/testdata/hydra-client.md @@ -0,0 +1,48 @@ +--- +id: hydra-client +title: hydra client +description: hydra client +--- + + +## hydra client + +Run client commands + +### Synopsis + +Manage OAuth2 clients + +<[some argument]> + + +``` +hydra client [flags] +``` + +### Examples + +``` +hydra client --whatever +``` + +### Options + +``` + -h, --help help for client +``` + +### See also + +* [hydra](hydra) +* [hydra client admin](hydra-client-admin) - Foo bar baz bar + + short with multiple + + +* [hydra client 
public](hydra-client-public) + diff --git a/oryx/clidoc/testdata/hydra-serve.md b/oryx/clidoc/testdata/hydra-serve.md new file mode 100644 index 00000000000..d7a6fc44c8d --- /dev/null +++ b/oryx/clidoc/testdata/hydra-serve.md @@ -0,0 +1,36 @@ +--- +id: hydra-serve +title: hydra serve +description: hydra serve +--- + + +## hydra serve + + + +### Synopsis + +Manage the server + +<[some argument]> + + +``` +hydra serve [flags] +``` + +### Options + +``` + -h, --help help for serve +``` + +### See also + +* [hydra](hydra) + diff --git a/oryx/clidoc/testdata/hydra.md b/oryx/clidoc/testdata/hydra.md new file mode 100644 index 00000000000..863017eec4d --- /dev/null +++ b/oryx/clidoc/testdata/hydra.md @@ -0,0 +1,38 @@ +--- +id: hydra +title: hydra +description: hydra +--- + + +## hydra + + + +### Synopsis + +A sample text +root + +<[some argument]> + + +``` +hydra [flags] +``` + +### Options + +``` + -h, --help help for hydra +``` + +### See also + +* [hydra client](hydra-client) - Run client commands +* [hydra serve](hydra-serve) + diff --git a/oryx/clidoc/util.go b/oryx/clidoc/util.go new file mode 100644 index 00000000000..e8e74a3a28c --- /dev/null +++ b/oryx/clidoc/util.go @@ -0,0 +1,40 @@ +// Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package clidoc + +import ( + "github.com/spf13/cobra" +) + +// Test to see if we have a reason to print See Also information in docs +// Basically this is a test for a parent command or a subcommand which is +// both not deprecated and not the autogenerated help command. +func hasSeeAlso(cmd *cobra.Command) bool { + if cmd.HasParent() { + return true + } + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + return true + } + return false +} + +type byName []*cobra.Command + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } diff --git a/oryx/cmdx/args.go b/oryx/cmdx/args.go new file mode 100644 index 00000000000..7a153206586 --- /dev/null +++ b/oryx/cmdx/args.go @@ -0,0 +1,52 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +// MinArgs fatals if args does not satisfy min. +// Deprecated: set cobra.MinimumNArgs on the cmd.Args field instead +func MinArgs(cmd *cobra.Command, args []string, min int) { + if len(args) < min { + Fatalf(`%s + +Expected at least %d command line arguments but only got %d.`, cmd.UsageString(), min, len(args)) + } +} + +// ExactArgs fatals if args does not equal l. 
+// Deprecated: set cobra.ExactArgs on the cmd.Args field instead +func ExactArgs(cmd *cobra.Command, args []string, l int) { + if len(args) != l { + Fatalf(`%s + +Expected exactly %d command line arguments but got %d.`, cmd.UsageString(), l, len(args)) + } +} + +// RangeArgs fatals if args does not satisfy any of the lengths set in r. +// Deprecated: set cobra.RangeArgs on the cmd.Args field instead +func RangeArgs(cmd *cobra.Command, args []string, r []int) { + for _, a := range r { + if len(args) == a { + return + } + } + Fatalf(`%s + +Expected the number of command line arguments to be one of %v but got %d.`, cmd.UsageString(), r, len(args)) +} + +// ZeroOrTwoArgs requires either no or 2 arguments. +func ZeroOrTwoArgs(cmd *cobra.Command, args []string) error { + // zero or exactly two args + if len(args) != 0 && len(args) != 2 { + return fmt.Errorf("expected zero or two args, got %d: %+v", len(args), args) + } + return nil +} diff --git a/oryx/cmdx/env.go b/oryx/cmdx/env.go new file mode 100644 index 00000000000..f7cb9c33f51 --- /dev/null +++ b/oryx/cmdx/env.go @@ -0,0 +1,31 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +// EnvVarExamplesHelpMessage returns a string containing documentation on how to use environment variables. +func EnvVarExamplesHelpMessage(name string) string { + return `This command exposes a variety of controls via environment variables. Here are some examples of how to +configure environment variables: + +Linux / macOS: + $ export FOO=bar + $ export BAZ=bar + $ ` + name + ` ... + + $ FOO=bar BAZ=bar ` + name + ` ... + +Docker: + $ docker run -e FOO=bar -e BAZ=bar ... + +Windows (cmd): + > set FOO=bar + > set BAZ=bar + > ` + name + ` ... + +Windows (powershell): + > $env:FOO = "bar" + > $env:BAZ = "bar" + > ` + name + ` +` +} diff --git a/oryx/cmdx/helper.go b/oryx/cmdx/helper.go new file mode 100644 index 00000000000..345b31c126f --- /dev/null +++ b/oryx/cmdx/helper.go @@ -0,0 +1,273 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "testing" + + "golang.org/x/sync/errgroup" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "github.com/stretchr/testify/require" + + "github.com/pkg/errors" + + "github.com/ory/x/logrusx" +) + +var ( + // ErrNilDependency is returned if a dependency is missing. + ErrNilDependency = fmt.Errorf("a dependency was expected to be defined but is nil. Please open an issue with the stack trace") + // ErrNoPrintButFail is returned to detect a failure state that was already reported to the user in some way + ErrNoPrintButFail = fmt.Errorf("this error should never be printed") + + debugStdout, debugStderr = io.Discard, io.Discard +) + +func init() { + if os.Getenv("DEBUG") != "" { + debugStdout = os.Stdout + debugStderr = os.Stderr + } +} + +// FailSilently is supposed to be used within a command's RunE function. +// It silences cobra's error handling and returns the ErrNoPrintButFail error. +func FailSilently(cmd *cobra.Command) error { + cmd.SilenceErrors = true + cmd.SilenceUsage = true + return errors.WithStack(ErrNoPrintButFail) +} + +// Must fatals with the optional message if err is not nil. +// Deprecated: do not use this function in commands, as it makes it impossible to test them. Instead, return the error.
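The deprecation notes above point to cobra's built-in validators; a minimal sketch of the recommended replacement (command names are illustrative):

package main

import "github.com/spf13/cobra"

func main() {
	root := &cobra.Command{Use: "example"}
	root.AddCommand(
		&cobra.Command{
			Use:  "import <file> [<file>...]",
			Args: cobra.MinimumNArgs(1), // instead of cmdx.MinArgs(cmd, args, 1)
			RunE: func(cmd *cobra.Command, args []string) error { return nil },
		},
		&cobra.Command{
			Use:  "copy <src> <dst>",
			Args: cobra.ExactArgs(2), // instead of cmdx.ExactArgs(cmd, args, 2)
			RunE: func(cmd *cobra.Command, args []string) error { return nil },
		},
	)
	// cobra returns a usage error on a mismatched argument count instead of
	// calling os.Exit the way the deprecated helpers do.
	_ = root.Execute()
}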
+func Must(err error, message string, args ...interface{}) { + if err == nil { + return + } + + _, _ = fmt.Fprintf(os.Stderr, message+"\n", args...) + os.Exit(1) +} + +// CheckResponse fatals if err is nil or the response.StatusCode does not match the expectedStatusCode +// Deprecated: do not use this function in commands, as it makes it impossible to test them. Instead, return the error. +func CheckResponse(err error, expectedStatusCode int, response *http.Response) { + Must(err, "Command failed because error occurred: %s", err) + + if response.StatusCode != expectedStatusCode { + out, err := io.ReadAll(response.Body) + if err != nil { + out = []byte{} + } + pretty, err := json.MarshalIndent(json.RawMessage(out), "", "\t") + if err == nil { + out = pretty + } + + Fatalf( + `Command failed because status code %d was expected but code %d was received. + +Response payload: + +%s`, + expectedStatusCode, + response.StatusCode, + out, + ) + } +} + +// FormatResponse takes an object and prints a json.MarshalIdent version of it or fatals. +// Deprecated: do not use this function in commands, as it makes it impossible to test them. Instead, return the error. +func FormatResponse(o interface{}) string { + out, err := json.MarshalIndent(o, "", "\t") + Must(err, `Command failed because an error occurred while prettifying output: %s`, err) + return string(out) +} + +// Fatalf prints to os.Stderr and exists with code 1. +// Deprecated: do not use this function in commands, as it makes it impossible to test them. Instead, return the error. +func Fatalf(message string, args ...interface{}) { + if len(args) > 0 { + _, _ = fmt.Fprintf(os.Stderr, message+"\n", args...) + } else { + _, _ = fmt.Fprintln(os.Stderr, message) + } + os.Exit(1) +} + +// ExpectDependency expects every dependency to be not nil or it fatals. +// Deprecated: do not use this function in commands, as it makes it impossible to test them. Instead, return the error. +func ExpectDependency(logger *logrusx.Logger, dependencies ...interface{}) { + if logger == nil { + panic("missing logger for dependency check") + } + for _, d := range dependencies { + if d == nil { + logger.WithError(errors.WithStack(ErrNilDependency)).Fatalf("A fatal issue occurred.") + } + } +} + +// CallbackWriter will execute each callback once the message is received. +// The full matched message is passed to the callback. An error returned from the callback is returned by Write. +type CallbackWriter struct { + Callbacks map[string]func([]byte) error + buf bytes.Buffer +} + +func (c *CallbackWriter) Write(msg []byte) (int, error) { + for p, cb := range c.Callbacks { + if bytes.Contains(msg, []byte(p)) { + if err := cb(msg); err != nil { + return 0, err + } + } + } + return c.buf.Write(msg) +} + +func (c *CallbackWriter) String() string { + return c.buf.String() +} + +var _ io.Writer = (*CallbackWriter)(nil) + +func prepareCmd(cmd *cobra.Command, stdIn io.Reader, stdOut, stdErr io.Writer, args []string) { + cmd.SetIn(stdIn) + outs := []io.Writer{debugStdout} + if stdOut != nil { + outs = append(outs, stdOut) + } + cmd.SetOut(io.MultiWriter(outs...)) + errs := []io.Writer{debugStderr} + if stdErr != nil { + errs = append(errs, stdErr) + } + cmd.SetErr(io.MultiWriter(errs...)) + + if args == nil { + args = []string{} + } + cmd.SetArgs(args) +} + +// ExecBackgroundCtx runs the cobra command in the background. 
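A rough sketch of how CallbackWriter combines with ExecBackgroundCtx in tests; the serve command below is a stand-in stub and the "server started" marker is an assumption, not something this package prints:

package cli_test

import (
	"context"
	"sync"
	"testing"

	"github.com/spf13/cobra"

	"github.com/ory/x/cmdx"
)

// newServeCmd is a stand-in for the real command under test.
func newServeCmd() *cobra.Command {
	return &cobra.Command{
		Use: "serve",
		RunE: func(cmd *cobra.Command, _ []string) error {
			cmd.Println("server started")
			<-cmd.Context().Done()
			return cmd.Context().Err()
		},
	}
}

func TestServeStarts(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	ready := make(chan struct{})
	var once sync.Once
	w := &cmdx.CallbackWriter{Callbacks: map[string]func([]byte) error{
		// Fires as soon as the command prints a line containing the marker.
		"server started": func([]byte) error { once.Do(func() { close(ready) }); return nil },
	}}

	eg := cmdx.ExecBackgroundCtx(ctx, newServeCmd(), nil, w, w)

	<-ready
	// ... exercise the running command here ...

	cancel()
	_ = eg.Wait() // the cancellation error is expected
}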
+func ExecBackgroundCtx(ctx context.Context, cmd *cobra.Command, stdIn io.Reader, stdOut, stdErr io.Writer, args ...string) *errgroup.Group { + prepareCmd(cmd, stdIn, stdOut, stdErr, args) + + eg := &errgroup.Group{} + eg.Go(func() error { + defer cmd.SetIn(nil) + return cmd.ExecuteContext(ctx) + }) + + return eg +} + +// Exec runs the provided cobra command with the given reader as STD_IN and the given args. +// Returns STD_OUT, STD_ERR and the error from the execution. +func Exec(t testing.TB, cmd *cobra.Command, stdIn io.Reader, args ...string) (string, string, error) { + ctx, cancel := context.WithCancel(context.Background()) + t.Cleanup(cancel) + + return ExecCtx(ctx, cmd, stdIn, args...) +} + +func ExecCtx(ctx context.Context, cmd *cobra.Command, stdIn io.Reader, args ...string) (string, string, error) { + stdOut, stdErr := &bytes.Buffer{}, &bytes.Buffer{} + + prepareCmd(cmd, stdIn, stdOut, stdErr, args) + + // needs to be on a separate line to ensure that the output buffers are read AFTER the command ran + err := cmd.ExecuteContext(ctx) + + return stdOut.String(), stdErr.String(), err +} + +// ExecNoErr is a helper that assumes a successful run from Exec. +// Returns STD_OUT. +func ExecNoErr(t testing.TB, cmd *cobra.Command, args ...string) string { + ctx, cancel := context.WithCancel(context.Background()) + t.Cleanup(cancel) + + return ExecNoErrCtx(ctx, t, cmd, args...) +} + +func ExecNoErrCtx(ctx context.Context, t require.TestingT, cmd *cobra.Command, args ...string) string { + stdOut, stdErr, err := ExecCtx(ctx, cmd, nil, args...) + if err == nil { + require.Len(t, stdErr, 0, "std_out: %s\nstd_err: %s", stdOut, stdErr) + } else { + require.ErrorIsf(t, err, context.Canceled, "std_out: %s\nstd_err: %s", stdOut, stdErr) + } + return stdOut +} + +// ExecExpectedErr is a helper that assumes a failing run from Exec returning ErrNoPrintButFail +// Returns STD_ERR. +func ExecExpectedErr(t testing.TB, cmd *cobra.Command, args ...string) string { + ctx, cancel := context.WithCancel(context.Background()) + t.Cleanup(cancel) + + return ExecExpectedErrCtx(ctx, t, cmd, args...) +} + +func ExecExpectedErrCtx(ctx context.Context, t require.TestingT, cmd *cobra.Command, args ...string) string { + stdOut, stdErr, err := ExecCtx(ctx, cmd, nil, args...) + require.True(t, errors.Is(err, ErrNoPrintButFail), "std_out: %s\nstd_err: %s", stdOut, stdErr) + require.Len(t, stdOut, 0, stdErr) + return stdErr +} + +type CommandExecuter struct { + New func() *cobra.Command + Ctx context.Context + PersistentArgs []string +} + +func (c *CommandExecuter) Exec(stdin io.Reader, args ...string) (string, string, error) { + return ExecCtx(c.Ctx, c.New(), stdin, append(c.PersistentArgs, args...)...) +} + +func (c *CommandExecuter) ExecBackground(stdin io.Reader, stdOut, stdErr io.Writer, args ...string) *errgroup.Group { + return ExecBackgroundCtx(c.Ctx, c.New(), stdin, stdOut, stdErr, append(c.PersistentArgs, args...)...) +} + +func (c *CommandExecuter) ExecNoErr(t require.TestingT, args ...string) string { + return ExecNoErrCtx(c.Ctx, t, c.New(), append(c.PersistentArgs, args...)...) +} + +func (c *CommandExecuter) ExecExpectedErr(t require.TestingT, args ...string) string { + return ExecExpectedErrCtx(c.Ctx, t, c.New(), append(c.PersistentArgs, args...)...) 
+} + +type URL struct { + url.URL +} + +var _ pflag.Value = (*URL)(nil) + +func (u *URL) Set(s string) error { + uu, err := url.Parse(s) + if err != nil { + return err + } + u.URL = *uu + return nil +} + +func (*URL) Type() string { + return "url" +} diff --git a/oryx/cmdx/http.go b/oryx/cmdx/http.go new file mode 100644 index 00000000000..890884c5309 --- /dev/null +++ b/oryx/cmdx/http.go @@ -0,0 +1,125 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "crypto/tls" + "fmt" + "net/http" + "net/url" + "os" + "strings" + "time" + + "github.com/hashicorp/go-retryablehttp" + "github.com/pkg/errors" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + "github.com/ory/x/httpx" +) + +const ( + envKeyEndpoint = "ORY_SDK_URL" + FlagEndpoint = "endpoint" + FlagSkipTLSVerify = "skip-tls-verify" + FlagHeaders = "http-header" +) + +// Remote returns the remote endpoint for the given command. +func Remote(cmd *cobra.Command) (string, error) { + endpoint, err := cmd.Flags().GetString(FlagEndpoint) + if err != nil { + return "", errors.WithStack(err) + } + + if endpoint != "" { + return strings.TrimRight(endpoint, "/"), nil + } else if endpoint := os.Getenv("ORY_SDK_URL"); endpoint != "" { + return strings.TrimRight(endpoint, "/"), nil + } + + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "To execute this command, the endpoint URL must point to the URL where Ory is located. To set the endpoint URL, use flag `--endpoint` or environment variable `ORY_SDK_URL`.") + return "", FailSilently(cmd) +} + +// RemoteURI returns the remote URI for the given command. +func RemoteURI(cmd *cobra.Command) (*url.URL, error) { + remote, err := Remote(cmd) + if err != nil { + return nil, err + } + + endpoint, err := url.ParseRequestURI(remote) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not parse endpoint URL: %s", err) + return nil, err + } + + return endpoint, nil +} + +// NewClient creates a new HTTP client. +func NewClient(cmd *cobra.Command) (*http.Client, *url.URL, error) { + endpoint, err := cmd.Flags().GetString(FlagEndpoint) + if err != nil { + return nil, nil, errors.WithStack(err) + } + + if endpoint == "" { + endpoint = os.Getenv(envKeyEndpoint) + } + + if endpoint == "" { + return nil, nil, errors.Errorf("you have to set the remote endpoint, try --help for details") + } + + u, err := url.Parse(strings.TrimRight(endpoint, "/")) + if err != nil { + return nil, nil, errors.Wrapf(err, `could not parse the endpoint URL "%s"`, endpoint) + } + + hc := retryablehttp.NewClient().StandardClient() + hc.Timeout = time.Second * 10 + + rawHeaders, err := cmd.Flags().GetStringSlice(FlagHeaders) + if err != nil { + return nil, nil, errors.WithStack(err) + } + header := http.Header{} + for _, h := range rawHeaders { + parts := strings.Split(h, ":") + if len(parts) != 2 { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "Unable to parse `--http-header` flag. Format of flag value is a `: ` delimited string like `--http-header 'Some-Header: some-values; other values`. 
Received: %v", rawHeaders) + return nil, nil, FailSilently(cmd) + } + + for k := range parts { + parts[k] = strings.TrimSpace(parts[k]) + } + + header.Add(parts[0], parts[1]) + } + + skipVerify, err := cmd.Flags().GetBool(FlagSkipTLSVerify) + if err != nil { + return nil, nil, errors.WithStack(err) + } + + rt := httpx.NewTransportWithHeader(header) + rt.RoundTripper = &http.Transport{ + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: skipVerify, //nolint:gosec // This is a false positive + }, + } + hc.Transport = rt + return hc, u, nil +} + +// RegisterHTTPClientFlags registers HTTP client configuration flags. +func RegisterHTTPClientFlags(flags *pflag.FlagSet) { + flags.StringP(FlagEndpoint, FlagEndpoint[:1], "", fmt.Sprintf("The API URL this command should target. Alternatively set using the %s environmental variable.", envKeyEndpoint)) + flags.Bool(FlagSkipTLSVerify, false, "Do not verify TLS certificates. Useful when dealing with self-signed certificates. Do not use in production!") + flags.StringSliceP(FlagHeaders, "H", []string{}, "A list of additional HTTP headers to set. HTTP headers is separated by a `: `, for example: `-H 'Authorization: bearer some-token'`.") +} diff --git a/oryx/cmdx/noise_printer.go b/oryx/cmdx/noise_printer.go new file mode 100644 index 00000000000..fbd46c9d027 --- /dev/null +++ b/oryx/cmdx/noise_printer.go @@ -0,0 +1,137 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "fmt" + "io" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +type ConditionalPrinter struct { + w io.Writer + print bool +} + +const ( + FlagQuiet = "quiet" +) + +func RegisterNoiseFlags(flags *pflag.FlagSet) { + flags.BoolP(FlagQuiet, FlagQuiet[:1], false, "Be quiet with output printing.") +} + +// NewLoudOutPrinter returns a ConditionalPrinter that +// only prints to cmd.OutOrStdout when --quiet is not set +func NewLoudOutPrinter(cmd *cobra.Command) *ConditionalPrinter { + quiet, err := cmd.Flags().GetBool(FlagQuiet) + if err != nil { + Fatalf(err.Error()) + } + + return &ConditionalPrinter{ + w: cmd.OutOrStdout(), + print: !quiet, + } +} + +// NewQuietOutPrinter returns a ConditionalPrinter that +// only prints to cmd.OutOrStdout when --quiet is set +func NewQuietOutPrinter(cmd *cobra.Command) *ConditionalPrinter { + quiet, err := cmd.Flags().GetBool(FlagQuiet) + if err != nil { + Fatalf(err.Error()) + } + + return &ConditionalPrinter{ + w: cmd.OutOrStdout(), + print: quiet, + } +} + +// NewLoudErrPrinter returns a ConditionalPrinter that +// only prints to cmd.ErrOrStderr when --quiet is not set +func NewLoudErrPrinter(cmd *cobra.Command) *ConditionalPrinter { + quiet, err := cmd.Flags().GetBool(FlagQuiet) + if err != nil { + Fatalf(err.Error()) + } + + return &ConditionalPrinter{ + w: cmd.ErrOrStderr(), + print: !quiet, + } +} + +// NewQuietErrPrinter returns a ConditionalPrinter that +// only prints to cmd.ErrOrStderr when --quiet is set +func NewQuietErrPrinter(cmd *cobra.Command) *ConditionalPrinter { + quiet, err := cmd.Flags().GetBool(FlagQuiet) + if err != nil { + Fatalf(err.Error()) + } + + return &ConditionalPrinter{ + w: cmd.ErrOrStderr(), + print: quiet, + } +} + +// NewLoudPrinter returns a ConditionalPrinter that +// only prints to w when --quiet is not set +func NewLoudPrinter(cmd *cobra.Command, w io.Writer) *ConditionalPrinter { + quiet, err := cmd.Flags().GetBool(FlagQuiet) + if err != nil { + Fatalf(err.Error()) + } + + return &ConditionalPrinter{ + w: w, + print: !quiet, + 
} +} + +// NewQuietPrinter returns a ConditionalPrinter that +// only prints to w when --quiet is set +func NewQuietPrinter(cmd *cobra.Command, w io.Writer) *ConditionalPrinter { + quiet, err := cmd.Flags().GetBool(FlagQuiet) + if err != nil { + Fatalf(err.Error()) + } + + return &ConditionalPrinter{ + w: w, + print: quiet, + } +} + +func NewConditionalPrinter(w io.Writer, print bool) *ConditionalPrinter { + return &ConditionalPrinter{ + w: w, + print: print, + } +} + +func (p *ConditionalPrinter) Println(a ...interface{}) (n int, err error) { + if p.print { + return fmt.Fprintln(p.w, a...) + } + return +} + +func (p *ConditionalPrinter) Print(a ...interface{}) (n int, err error) { + if p.print { + return fmt.Fprint(p.w, a...) + } + return +} + +func (p *ConditionalPrinter) Printf(format string, a ...interface{}) (n int, err error) { + if p.print { + return fmt.Fprintf(p.w, format, a...) + } + return +} diff --git a/oryx/cmdx/output.go b/oryx/cmdx/output.go new file mode 100644 index 00000000000..b17d46b7849 --- /dev/null +++ b/oryx/cmdx/output.go @@ -0,0 +1,84 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import "strconv" + +type ( + // OutputIder outputs an ID + OutputIder string + // OutputIderCollection outputs a list of IDs + OutputIderCollection struct { + Items []OutputIder + } +) + +func (OutputIder) Header() []string { + return []string{"ID"} +} + +func (i OutputIder) Columns() []string { + return []string{string(i)} +} + +func (i OutputIder) Interface() interface{} { + return i +} + +func (OutputIderCollection) Header() []string { + return []string{"ID"} +} + +func (c OutputIderCollection) Table() [][]string { + rows := make([][]string, len(c.Items)) + for i, ident := range c.Items { + rows[i] = []string{string(ident)} + } + return rows +} + +func (c OutputIderCollection) Interface() interface{} { + return c.Items +} + +func (c OutputIderCollection) Len() int { + return len(c.Items) +} + +type PaginatedList struct { + Collection interface { + Table + IDs() []string + } `json:"-"` + Items []interface{} `json:"items"` + NextPageToken string `json:"next_page_token"` + IsLastPage bool `json:"is_last_page"` +} + +func (r *PaginatedList) Header() []string { + return r.Collection.Header() +} + +func (r *PaginatedList) Table() [][]string { + return append( + r.Collection.Table(), + []string{}, + []string{"NEXT PAGE TOKEN", r.NextPageToken}, + []string{"IS LAST PAGE", strconv.FormatBool(r.IsLastPage)}, + ) +} + +func (r *PaginatedList) Interface() interface{} { + return r +} + +func (r *PaginatedList) Len() int { + return r.Collection.Len() + 3 +} + +func (r *PaginatedList) IDs() []string { + return r.Collection.IDs() +} + +var _ Table = (*PaginatedList)(nil) diff --git a/oryx/cmdx/pagination.go b/oryx/cmdx/pagination.go new file mode 100644 index 00000000000..b5c721ac727 --- /dev/null +++ b/oryx/cmdx/pagination.go @@ -0,0 +1,57 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "fmt" + "strconv" + + "github.com/spf13/cobra" +) + +const ( + FlagPageSize = "page-size" + FlagPageToken = "page-token" +) + +func RegisterTokenPaginationFlags(cmd *cobra.Command) (pageSize int, pageToken string) { + cmd.Flags().StringVar(&pageToken, FlagPageToken, "", "page token acquired from a previous response") + cmd.Flags().IntVar(&pageSize, FlagPageSize, 100, "maximum number of items to return") + return +} + +// ParsePaginationArgs parses pagination arguments from the command line. 
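A short sketch of how the quiet flag and the conditional printers are meant to be used together; the cleanup command is illustrative:

package main

import (
	"github.com/spf13/cobra"

	"github.com/ory/x/cmdx"
)

func newCleanupCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use: "cleanup",
		RunE: func(cmd *cobra.Command, args []string) error {
			// Prints progress to stderr unless --quiet / -q was passed.
			loud := cmdx.NewLoudErrPrinter(cmd)
			_, _ = loud.Println("removing stale records ...")

			// ... do the actual work ...

			return nil
		},
	}
	// Registers --quiet / -q; without it the printer constructors fatal.
	cmdx.RegisterNoiseFlags(cmd.Flags())
	return cmd
}

func main() {
	_ = newCleanupCmd().Execute()
}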
+func ParsePaginationArgs(cmd *cobra.Command, pageArg, perPageArg string) (page, perPage int64, err error) { + if len(pageArg+perPageArg) > 0 { + page, err = strconv.ParseInt(pageArg, 0, 64) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not parse page argument\"%s\": %s", pageArg, err) + return 0, 0, FailSilently(cmd) + } + + perPage, err = strconv.ParseInt(perPageArg, 0, 64) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not parse per-page argument\"%s\": %s", perPageArg, err) + return 0, 0, FailSilently(cmd) + } + } + return +} + +// ParseTokenPaginationArgs parses token-based pagination arguments from the command line. +func ParseTokenPaginationArgs(cmd *cobra.Command) (page string, perPage int, err error) { + pageArg, err := cmd.Flags().GetString(FlagPageToken) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not parse %s argument \"%s\": %s", FlagPageToken, pageArg, err) + return "", 0, FailSilently(cmd) + } + + perPageArg, err := cmd.Flags().GetInt(FlagPageSize) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not parse %s argument \"%d\": %s", FlagPageSize, perPageArg, err) + return "", 0, FailSilently(cmd) + } + + return pageArg, perPageArg, nil +} diff --git a/oryx/cmdx/printing.go b/oryx/cmdx/printing.go new file mode 100644 index 00000000000..59b2965f66b --- /dev/null +++ b/oryx/cmdx/printing.go @@ -0,0 +1,321 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "os" + "strings" + "text/tabwriter" + + "github.com/go-openapi/jsonpointer" + "github.com/goccy/go-yaml" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "github.com/tidwall/gjson" +) + +type ( + TableHeader interface { + Header() []string + } + TableRow interface { + TableHeader + Columns() []string + Interface() interface{} + } + Table interface { + TableHeader + Table() [][]string + Interface() interface{} + Len() int + } + Nil struct{} + + format string +) + +const ( + FormatQuiet format = "quiet" + FormatTable format = "table" + FormatJSON format = "json" + FormatJSONPath format = "jsonpath" + FormatJSONPointer format = "jsonpointer" + FormatJSONPretty format = "json-pretty" + FormatYAML format = "yaml" + FormatDefault format = "default" + + FlagFormat = "format" + + None = "" +) + +func (Nil) String() string { + return "null" +} + +func (Nil) Interface() interface{} { + return nil +} + +func PrintErrors(cmd *cobra.Command, errs map[string]error) { + for src, err := range errs { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "%s: %s\n", src, err.Error()) + } +} + +func PrintRow(cmd *cobra.Command, row TableRow) { + f := getFormat(cmd) + + switch f { + case FormatQuiet: + if idAble, ok := row.(interface{ ID() string }); ok { + _, _ = fmt.Fprintln(cmd.OutOrStdout(), idAble.ID()) + break + } + _, _ = fmt.Fprintln(cmd.OutOrStdout(), row.Columns()[0]) + case FormatJSON: + printJSON(cmd.OutOrStdout(), row.Interface(), false, "") + case FormatYAML: + printYAML(cmd.OutOrStdout(), row.Interface()) + case FormatJSONPretty: + printJSON(cmd.OutOrStdout(), row.Interface(), true, "") + case FormatJSONPath: + printJSON(cmd.OutOrStdout(), row.Interface(), true, getPath(cmd)) + case FormatJSONPointer: + printJSON(cmd.OutOrStdout(), filterJSONPointer(cmd, row.Interface()), true, "") + case FormatTable, FormatDefault: + w := tabwriter.NewWriter(cmd.OutOrStdout(), 0, 8, 1, '\t', 0) + + fields := 
row.Columns() + for i, h := range row.Header() { + _, _ = fmt.Fprintf(w, "%s\t%s\t\n", h, fields[i]) + } + + _ = w.Flush() + } +} + +func filterJSONPointer(cmd *cobra.Command, data any) any { + f, err := cmd.Flags().GetString(FlagFormat) + // unexpected error + Must(err, "flag access error: %s", err) + _, jsonptr, found := strings.Cut(f, "=") + if !found { + _, _ = fmt.Fprintf(os.Stderr, + "Format %s is missing a JSON pointer, e.g., --%s=%s=. The path syntax is described at https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-json-pointer-07.", + f, FlagFormat, f) + os.Exit(1) + } + ptr, err := jsonpointer.New(jsonptr) + Must(err, "invalid JSON pointer: %s", err) + + result, _, err := ptr.Get(data) + Must(err, "failed to apply JSON pointer: %s", err) + + return result +} + +func PrintTable(cmd *cobra.Command, table Table) { + f := getFormat(cmd) + + switch f { + case FormatQuiet: + if table.Len() == 0 { + fmt.Fprintln(cmd.OutOrStdout()) + } + + if idAble, ok := table.(interface{ IDs() []string }); ok { + for _, row := range idAble.IDs() { + fmt.Fprintln(cmd.OutOrStdout(), row) + } + break + } + + for _, row := range table.Table() { + fmt.Fprintln(cmd.OutOrStdout(), row[0]) + } + case FormatJSON: + printJSON(cmd.OutOrStdout(), table.Interface(), false, "") + case FormatJSONPretty: + printJSON(cmd.OutOrStdout(), table.Interface(), true, "") + case FormatJSONPath: + printJSON(cmd.OutOrStdout(), table.Interface(), true, getPath(cmd)) + case FormatJSONPointer: + printJSON(cmd.OutOrStdout(), filterJSONPointer(cmd, table.Interface()), true, "") + case FormatYAML: + printYAML(cmd.OutOrStdout(), table.Interface()) + default: + w := tabwriter.NewWriter(cmd.OutOrStdout(), 0, 8, 1, '\t', 0) + + for _, h := range table.Header() { + fmt.Fprintf(w, "%s\t", h) + } + fmt.Fprintln(w) + + for _, row := range table.Table() { + fmt.Fprintln(w, strings.Join(row, "\t")+"\t") + } + + _ = w.Flush() + } +} + +type interfacer interface{ Interface() interface{} } + +func PrintJSONAble(cmd *cobra.Command, d interface{ String() string }) { + var path string + if d == nil { + d = Nil{} + } + switch getFormat(cmd) { + default: + _, _ = fmt.Fprint(cmd.OutOrStdout(), d.String()) + case FormatJSON: + var v interface{} = d + if i, ok := d.(interfacer); ok { + v = i + } + printJSON(cmd.OutOrStdout(), v, false, "") + case FormatJSONPath: + path = getPath(cmd) + fallthrough + case FormatJSONPretty: + var v interface{} = d + if i, ok := d.(interfacer); ok { + v = i + } + printJSON(cmd.OutOrStdout(), v, true, path) + case FormatJSONPointer: + var v interface{} = d + if i, ok := d.(interfacer); ok { + v = i + } + printJSON(cmd.OutOrStdout(), filterJSONPointer(cmd, v), true, "") + case FormatYAML: + var v interface{} = d + if i, ok := d.(interfacer); ok { + v = i + } + printYAML(cmd.OutOrStdout(), v) + } +} + +func getQuiet(cmd *cobra.Command) bool { + // ignore the error here as we use this function also when the flag might not be registered + q, _ := cmd.Flags().GetBool(FlagQuiet) + return q +} + +func getFormat(cmd *cobra.Command) format { + if getQuiet(cmd) { + return FormatQuiet + } + + // ignore the error here as we use this function also when the flag might not be registered + f, _ := cmd.Flags().GetString(FlagFormat) + + switch { + case f == string(FormatTable): + return FormatTable + case f == string(FormatJSON): + return FormatJSON + case strings.HasPrefix(f, string(FormatJSONPath)): + return FormatJSONPath + case strings.HasPrefix(f, string(FormatJSONPointer)): + return FormatJSONPointer + case f == 
string(FormatJSONPretty): + return FormatJSONPretty + case f == string(FormatYAML): + return FormatYAML + default: + return FormatDefault + } +} + +func getPath(cmd *cobra.Command) string { + f, err := cmd.Flags().GetString(FlagFormat) + // unexpected error + Must(err, "flag access error: %s", err) + _, path, found := strings.Cut(f, "=") + if !found { + _, _ = fmt.Fprintf(os.Stderr, + "Format %s is missing a path, e.g., --%s=%s=. The path syntax is described at https://github.com/tidwall/gjson/blob/master/SYNTAX.md", + f, FlagFormat, f) + os.Exit(1) + } + + return path +} + +func printJSON(w io.Writer, v interface{}, pretty bool, path string) { + if path != "" { + temp, err := json.Marshal(v) + Must(err, "Error encoding JSON: %s", err) + v = gjson.GetBytes(temp, path).Value() + } + + e := json.NewEncoder(w) + if pretty { + e.SetIndent("", " ") + } + err := e.Encode(v) + // unexpected error + Must(err, "Error encoding JSON: %s", err) +} + +func printYAML(w io.Writer, v interface{}) { + j, err := json.Marshal(v) + Must(err, "Error encoding JSON: %s", err) + e, err := yaml.JSONToYAML(j) + Must(err, "Error encoding YAML: %s", err) + _, _ = w.Write(e) +} + +func RegisterJSONFormatFlags(flags *pflag.FlagSet) { + flags.String(FlagFormat, string(FormatDefault), fmt.Sprintf("Set the output format. One of %s, %s, %s, %s, %s and %s.", FormatDefault, FormatJSON, FormatYAML, FormatJSONPretty, FormatJSONPath, FormatJSONPointer)) +} + +func RegisterFormatFlags(flags *pflag.FlagSet) { + RegisterNoiseFlags(flags) + flags.String(FlagFormat, string(FormatDefault), fmt.Sprintf("Set the output format. One of %s, %s, %s, %s, %s and %s.", FormatTable, FormatJSON, FormatYAML, FormatJSONPretty, FormatJSONPath, FormatJSONPointer)) +} + +func PrintOpenAPIError(cmd *cobra.Command, err error) error { + if err == nil { + return nil + } + + var be interface { + Body() []byte + } + if !errors.As(err, &be) { + return err + } + + body := be.Body() + didPrettyPrint := false + if message := gjson.GetBytes(body, "error.message"); message.Exists() { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "%s\n", message.String()) + didPrettyPrint = true + } + if reason := gjson.GetBytes(body, "error.reason"); reason.Exists() { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "%s\n", reason.String()) + didPrettyPrint = true + } + + if didPrettyPrint { + return FailSilently(cmd) + } + + if body, err := json.MarshalIndent(json.RawMessage(body), "", " "); err == nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "%s\nFailed to execute API request, see error above.\n", body) + return FailSilently(cmd) + } + + return err +} diff --git a/oryx/cmdx/usage.go b/oryx/cmdx/usage.go new file mode 100644 index 00000000000..ccbc243e62e --- /dev/null +++ b/oryx/cmdx/usage.go @@ -0,0 +1,110 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "bytes" + "text/template" + + "github.com/Masterminds/sprig/v3" + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var usageTemplateFuncs = sprig.TxtFuncMap() + +// AddUsageTemplateFunc adds a template function to the usage template. +func AddUsageTemplateFunc(name string, f interface{}) { + usageTemplateFuncs[name] = f +} + +const ( + helpTemplate = `{{insertTemplate . 
(or .Long .Short) | trimTrailingWhitespaces}} + +{{if or .Runnable .HasSubCommands}}{{.UsageString}}{{end}}` + usageTemplate = `Usage:{{if .Runnable}} + {{.UseLine}}{{end}}{{if .HasAvailableSubCommands}} + {{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}} + +Aliases: + {{.NameAndAliases}}{{end}}{{if .HasExample}} + +Examples: +{{insertTemplate . .Example}}{{end}}{{if .HasAvailableSubCommands}} + +Available Commands:{{range .Commands}}{{if (or .IsAvailableCommand (eq .Name "help"))}} + {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}} + +Flags: +{{.LocalFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasAvailableInheritedFlags}} + +Global Flags: +{{.InheritedFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasHelpSubCommands}} + +Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}} + {{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableSubCommands}} + +Use "{{.CommandPath}} [command] --help" for more information about a command.{{end}} +` +) + +// EnableUsageTemplating enables gotemplates for usage strings, i.e. cmd.Short, cmd.Long, and cmd.Example. +// The data for the template is the command itself. Especially useful are `.Root.Name` and `.CommandPath`. +// This will be inherited by all subcommands, so enabling it on the root command is sufficient. +func EnableUsageTemplating(cmds ...*cobra.Command) { + cobra.AddTemplateFunc("insertTemplate", TemplateCommandField) + for _, cmd := range cmds { + cmd.SetHelpTemplate(helpTemplate) + cmd.SetUsageTemplate(usageTemplate) + } +} + +func TemplateCommandField(cmd *cobra.Command, field string) (string, error) { + t := template.New("") + t.Funcs(usageTemplateFuncs) + t, err := t.Parse(field) + if err != nil { + return "", err + } + var out bytes.Buffer + if err := t.Execute(&out, cmd); err != nil { + return "", err + } + return out.String(), nil +} + +// DisableUsageTemplating resets the commands usage template to the default. +// This can be used to undo the effects of EnableUsageTemplating, specifically for a subcommand. +func DisableUsageTemplating(cmds ...*cobra.Command) { + defaultCmd := new(cobra.Command) + for _, cmd := range cmds { + cmd.SetHelpTemplate(defaultCmd.HelpTemplate()) + cmd.SetUsageTemplate(defaultCmd.UsageTemplate()) + } +} + +// AssertUsageTemplates asserts that the usage string of the commands are properly templated. 
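A compact sketch of the usage templating this enables; the commands and the Long text are illustrative:

package main

import (
	"github.com/spf13/cobra"

	"github.com/ory/x/cmdx"
)

func main() {
	root := &cobra.Command{Use: "hydra"}
	clients := &cobra.Command{
		Use: "clients",
		// {{ .Root.Name }} renders as "hydra", {{ .CommandPath }} as "hydra clients".
		Long: "Manage OAuth 2.0 clients of {{ .Root.Name }}. Run `{{ .CommandPath }} --help` for details.",
	}
	root.AddCommand(clients)

	// Enabling templating on the root is sufficient; the templates are inherited.
	cmdx.EnableUsageTemplating(root)

	_ = root.Execute()
}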
+func AssertUsageTemplates(t require.TestingT, cmd *cobra.Command) { + var usage, help string + require.NotPanics(t, func() { + usage = cmd.UsageString() + + out, err := cmd.OutOrStdout(), cmd.ErrOrStderr() + bb := new(bytes.Buffer) + + cmd.SetOut(bb) + cmd.SetErr(bb) + require.NoError(t, cmd.Help()) + help = bb.String() + + cmd.SetOut(out) + cmd.SetErr(err) + }) + assert.NotContains(t, usage, "{{") + assert.NotContains(t, help, "{{") + for _, child := range cmd.Commands() { + AssertUsageTemplates(t, child) + } +} diff --git a/oryx/cmdx/user_input.go b/oryx/cmdx/user_input.go new file mode 100644 index 00000000000..1659d298431 --- /dev/null +++ b/oryx/cmdx/user_input.go @@ -0,0 +1,57 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "bufio" + "fmt" + "io" + "os" + "strings" + + "github.com/pkg/errors" +) + +// asks for confirmation with the question string s and reads the answer +// pass nil to use os.Stdin and os.Stdout +func AskForConfirmation(s string, stdin io.Reader, stdout io.Writer) bool { + if stdin == nil { + stdin = os.Stdin + } + if stdout == nil { + stdout = os.Stdout + } + + ok, err := AskScannerForConfirmation(s, bufio.NewReader(stdin), stdout) + if err != nil { + Must(err, "Unable to confirm: %s", err) + } + + return ok +} + +func AskScannerForConfirmation(s string, reader *bufio.Reader, stdout io.Writer) (bool, error) { + if stdout == nil { + stdout = os.Stdout + } + + for { + _, err := fmt.Fprintf(stdout, "%s [y/n]: ", s) + if err != nil { + return false, errors.Wrap(err, "unable to print to stdout") + } + + response, err := reader.ReadString('\n') + if err != nil { + return false, errors.Wrap(err, "unable to read from stdin") + } + + response = strings.ToLower(strings.TrimSpace(response)) + if response == "y" || response == "yes" { + return true, nil + } else if response == "n" || response == "no" { + return false, nil + } + } +} diff --git a/oryx/cmdx/version.go b/oryx/cmdx/version.go new file mode 100644 index 00000000000..88641618367 --- /dev/null +++ b/oryx/cmdx/version.go @@ -0,0 +1,38 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package cmdx + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" +) + +// Version returns a *cobra.Command that handles the `version` command. 
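How the version command is commonly wired up, as a sketch; the variable names and ldflags values are placeholders for whatever the build injects:

package main

import (
	"github.com/spf13/cobra"

	"github.com/ory/x/cmdx"
)

// Overridden at build time, for example:
//   go build -ldflags "-X main.version=v2.3.0 -X main.commit=abc1234 -X main.date=2024-01-01T00:00:00Z"
var (
	version = ""
	commit  = ""
	date    = ""
)

func main() {
	root := &cobra.Command{Use: "example"}
	root.AddCommand(cmdx.Version(&version, &commit, &date))
	_ = root.Execute()
}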
+func Version(gitTag, gitHash, buildTime *string) *cobra.Command { + return &cobra.Command{ + Use: "version", + Short: "Show the build version, build time, and git hash", + Run: func(cmd *cobra.Command, args []string) { + if len(*gitTag) == 0 { + fmt.Fprintln(os.Stderr, "Unable to determine version because the build process did not properly configure it.") + } else { + fmt.Printf("Version: %s\n", *gitTag) + } + + if len(*gitHash) == 0 { + fmt.Fprintln(os.Stderr, "Unable to determine build commit because the build process did not properly configure it.") + } else { + fmt.Printf("Build Commit: %s\n", *gitHash) + } + + if len(*buildTime) == 0 { + fmt.Fprintln(os.Stderr, "Unable to determine build timestamp because the build process did not properly configure it.") + } else { + fmt.Printf("Build Timestamp: %s\n", *buildTime) + } + }, + } +} diff --git a/oryx/configx/.snapshots/TestKoanfSchemaDefaults.json b/oryx/configx/.snapshots/TestKoanfSchemaDefaults.json new file mode 100644 index 00000000000..0967ef424bc --- /dev/null +++ b/oryx/configx/.snapshots/TestKoanfSchemaDefaults.json @@ -0,0 +1 @@ +{} diff --git a/oryx/configx/context.go b/oryx/configx/context.go new file mode 100644 index 00000000000..a465d363a47 --- /dev/null +++ b/oryx/configx/context.go @@ -0,0 +1,22 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import "context" + +type contextKey int + +const configContextKey contextKey = iota + 1 + +func ContextWithConfigOptions(ctx context.Context, opts ...OptionModifier) context.Context { + return context.WithValue(ctx, configContextKey, opts) +} + +func ConfigOptionsFromContext(ctx context.Context) []OptionModifier { + opts, ok := ctx.Value(configContextKey).([]OptionModifier) + if !ok { + return []OptionModifier{} + } + return opts +} diff --git a/oryx/configx/cors.go b/oryx/configx/cors.go new file mode 100644 index 00000000000..97fc85cf8d4 --- /dev/null +++ b/oryx/configx/cors.go @@ -0,0 +1,30 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + _ "embed" + + "github.com/rs/cors" +) + +const CORSConfigSchemaID = "ory://cors-config" + +//go:embed cors.schema.json +var CORSConfigSchema []byte + +func (p *Provider) CORS(prefix string, defaults cors.Options) (cors.Options, bool) { + prefix = cleanPrefix(prefix) + + return cors.Options{ + AllowedOrigins: p.StringsF(prefix+"cors.allowed_origins", defaults.AllowedOrigins), + AllowedMethods: p.StringsF(prefix+"cors.allowed_methods", defaults.AllowedMethods), + AllowedHeaders: p.StringsF(prefix+"cors.allowed_headers", defaults.AllowedHeaders), + ExposedHeaders: p.StringsF(prefix+"cors.exposed_headers", defaults.ExposedHeaders), + AllowCredentials: p.BoolF(prefix+"cors.allow_credentials", defaults.AllowCredentials), + OptionsPassthrough: p.BoolF(prefix+"cors.options_passthrough", defaults.OptionsPassthrough), + MaxAge: p.IntF(prefix+"cors.max_age", defaults.MaxAge), + Debug: p.BoolF(prefix+"cors.debug", defaults.Debug), + }, p.Bool(prefix + "cors.enabled") +} diff --git a/oryx/configx/cors.schema.json b/oryx/configx/cors.schema.json new file mode 100644 index 00000000000..e65559ac965 --- /dev/null +++ b/oryx/configx/cors.schema.json @@ -0,0 +1,106 @@ +{ + "$id": "https://github.com/ory/x/configx/cors.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "title": "CORS", + "description": "Configures Cross Origin Resource Sharing for this endpoint.", + "properties": { + "enabled": 
{ + "type": "boolean", + "default": false + }, + "allowed_origins": { + "type": "array", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: https://*.example.com). Only one wildcard can be used per origin.", + "items": { + "type": "string", + "minLength": 1, + "not": { + "type": "string", + "description": "matches all strings that contain two or more (*)", + "pattern": ".*\\*.*\\*.*" + }, + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "const": "*" + } + ] + }, + "uniqueItems": true, + "examples": [ + [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + ] + }, + "allowed_methods": { + "type": "array", + "description": "A list of HTTP methods the user agent is allowed to use with cross-domain requests.", + "items": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + } + }, + "allowed_headers": { + "type": "array", + "description": "A list of non-simple headers the client is allowed to use with cross-domain requests.", + "examples": [ + [ + "Authorization", + "Content-Type", + "Max-Age", + "X-Session-Token", + "X-XSRF-TOKEN", + "X-CSRF-TOKEN" + ] + ], + "items": { + "type": "string" + } + }, + "exposed_headers": { + "type": "array", + "description": "Sets which headers are safe to expose to the API of a CORS API specification.", + "items": { + "type": "string" + } + }, + "allow_credentials": { + "type": "boolean", + "description": "Sets whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "default": true + }, + "options_passthrough": { + "type": "boolean", + "description": "TODO", + "default": false + }, + "max_age": { + "type": "integer", + "description": "Sets how long (in seconds) the results of a preflight request can be cached. If set to 0, every request is preceded by a preflight request.", + "default": 0, + "minimum": 0 + }, + "debug": { + "type": "boolean", + "description": "Adds additional log output to debug CORS issues.", + "default": false + } + } +} diff --git a/oryx/configx/error.go b/oryx/configx/error.go new file mode 100644 index 00000000000..d705092c5e7 --- /dev/null +++ b/oryx/configx/error.go @@ -0,0 +1,30 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "fmt" + + "github.com/pkg/errors" +) + +type ImmutableError struct { + From interface{} + To interface{} + Key string + error +} + +func NewImmutableError(key string, from, to interface{}) error { + return &ImmutableError{ + From: from, + To: to, + Key: key, + error: errors.Errorf("immutable configuration key \"%s\" was changed from \"%v\" to \"%v\"", key, from, to), + } +} + +func (e *ImmutableError) Error() string { + return fmt.Sprintf("immutable configuration key \"%s\" was changed from \"%v\" to \"%v\"", e.Key, e.From, e.To) +} diff --git a/oryx/configx/helpers.go b/oryx/configx/helpers.go new file mode 100644 index 00000000000..8b9fc80fd4f --- /dev/null +++ b/oryx/configx/helpers.go @@ -0,0 +1,51 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "bytes" + "fmt" + "io" + "strings" + + "github.com/spf13/pflag" +) + +// RegisterFlags registers the config file flag. 
+func RegisterFlags(flags *pflag.FlagSet) { + flags.StringSliceP("config", "c", []string{}, "Path to one or more .json, .yaml, .yml, .toml config files. Values are loaded in the order provided, meaning that the last config file overwrites values from the previous config file.") +} + +// host = unix:/path/to/socket => port is discarded, otherwise format as host:port +func GetAddress(host string, port int) string { + if strings.HasPrefix(host, "unix:") { + return host + } + return fmt.Sprintf("%s:%d", host, port) +} + +func (s *Serve) GetAddress() string { + return GetAddress(s.Host, s.Port) +} + +// AddSchemaResources adds the config schema partials to the compiler. +// The interface is specified instead of `jsonschema.Compiler` to allow the use of any jsonschema library fork or version. +func AddSchemaResources(c interface { + AddResource(url string, r io.Reader) error +}) error { + if err := c.AddResource(TLSConfigSchemaID, bytes.NewReader(TLSConfigSchema)); err != nil { + return err + } + if err := c.AddResource(ServeConfigSchemaID, bytes.NewReader(ServeConfigSchema)); err != nil { + return err + } + return c.AddResource(CORSConfigSchemaID, bytes.NewReader(CORSConfigSchema)) +} + +func cleanPrefix(prefix string) string { + if len(prefix) > 0 { + prefix = strings.TrimRight(prefix, ".") + "." + } + return prefix +} diff --git a/oryx/configx/koanf_confmap.go b/oryx/configx/koanf_confmap.go new file mode 100644 index 00000000000..48245988fe2 --- /dev/null +++ b/oryx/configx/koanf_confmap.go @@ -0,0 +1,69 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "bytes" + "encoding/json" + "errors" + + "github.com/knadh/koanf/maps" + "github.com/tidwall/gjson" +) + +// KoanfConfmap implements a raw map[string]interface{} provider. +type KoanfConfmap struct { + tuples []tuple +} + +// Provider returns a confmap Provider that takes a flat or nested +// map[string]interface{}. If a delim is provided, it indicates that the +// keys are flat and the map needs to be unflatted by delim. +func NewKoanfConfmap(tuples []tuple) *KoanfConfmap { + return &KoanfConfmap{tuples: jsonify(tuples)} +} + +func jsonify(tuples []tuple) []tuple { + for k, t := range tuples { + var parsed interface{} + switch vt := t.Value.(type) { + case string: + if gjson.Valid(vt) && json.NewDecoder(bytes.NewBufferString(vt)).Decode(&parsed) == nil { + tuples[k].Value = parsed + } + continue + case []byte: + if gjson.ValidBytes(vt) && json.NewDecoder(bytes.NewBuffer(vt)).Decode(&parsed) == nil { + tuples[k].Value = parsed + } + continue + case json.RawMessage: + if gjson.ValidBytes(vt) && json.NewDecoder(bytes.NewBuffer(vt)).Decode(&parsed) == nil { + tuples[k].Value = parsed + } + continue + } + } + return tuples +} + +// ReadBytes is not supported by the env provider. +func (e *KoanfConfmap) ReadBytes() ([]byte, error) { + return nil, errors.New("confmap provider does not support this method") +} + +// Read returns the loaded map[string]interface{}. 
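A sketch of registering the schema partials with a JSON schema compiler so that a service schema can $ref them (for example "ory://cors-config"); the compiler choice here is an assumption, any library exposing AddResource(url, io.Reader) works:

package config

import (
	"github.com/ory/jsonschema/v3"

	"github.com/ory/x/configx"
)

func newCompiler() (*jsonschema.Compiler, error) {
	c := jsonschema.NewCompiler()
	// Registers the serve, TLS, and CORS partials under their ory:// schema IDs.
	if err := configx.AddSchemaResources(c); err != nil {
		return nil, err
	}
	return c, nil
}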
+func (e *KoanfConfmap) Read() (map[string]interface{}, error) { + values := map[string]interface{}{} + for _, t := range e.tuples { + values[t.Key] = t.Value + } + + // Ensure any nested values are properly converted as well + cp := maps.Copy(values) + maps.IntfaceKeysToStrings(cp) + cp = maps.Unflatten(cp, Delimiter) + + return cp, nil +} diff --git a/oryx/configx/koanf_env.go b/oryx/configx/koanf_env.go new file mode 100644 index 00000000000..0e97725a71a --- /dev/null +++ b/oryx/configx/koanf_env.go @@ -0,0 +1,185 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "encoding/json" + "os" + "regexp" + "strings" + + "github.com/pkg/errors" + "github.com/tidwall/sjson" + + "github.com/ory/jsonschema/v3" + + "github.com/spf13/cast" + "github.com/tidwall/gjson" + + "github.com/ory/x/castx" + "github.com/ory/x/jsonschemax" +) + +var isNumRegex = regexp.MustCompile("^[0-9]+$") + +func NewKoanfEnv(prefix string, rawSchema []byte, schema *jsonschema.Schema) (*Env, error) { + paths, err := getSchemaPaths(rawSchema, schema) + if err != nil { + return nil, err + } + + return &Env{ + paths: paths, + prefix: prefix, + }, nil +} + +// Env implements an environment variables provider. +type Env struct { + prefix string + paths []jsonschemax.Path +} + +// ReadBytes is not supported by the env provider. +func (e *Env) ReadBytes() ([]byte, error) { + return nil, errors.New("env provider does not support this method") +} + +// Read reads all available environment variables into a key:value map +// and returns it. +func (e *Env) Read() (map[string]interface{}, error) { + // Collect the environment variable keys. + var keys []string + for _, k := range os.Environ() { + if e.prefix != "" { + if strings.HasPrefix(k, e.prefix) { + keys = append(keys, k) + } + } else { + keys = append(keys, k) + } + } + + raw := "{}" + var err error + for _, k := range keys { + parts := strings.SplitN(k, "=", 2) + + key, value := e.extract(parts[0], parts[1]) + // If the callback blanked the key, it should be omitted + if key == "" { + continue + } + + raw, err = sjson.Set(raw, key, value) + if err != nil { + return nil, errors.WithStack(err) + } + } + + var m map[string]interface{} + if err := json.Unmarshal([]byte(raw), &m); err != nil { + return nil, errors.WithStack(err) + } + + return m, nil +} + +// Watch is not supported. +func (e *Env) Watch(cb func(event interface{}, err error)) error { + return errors.New("env provider does not support this method") +} + +func (e *Env) extract(key string, value string) (string, interface{}) { + key = strings.Replace(strings.ToLower(strings.TrimPrefix(key, e.prefix)), "_", ".", -1) + + for _, path := range e.paths { + normalized := strings.Replace(path.Name, "_", ".", -1) + name := path.Name + + // Crazy hack to get arrays working. 
+ var indices []string + searchParts := strings.Split(normalized, ".") + keyParts := strings.Split(key, ".") + if len(searchParts) == len(keyParts) { + for k, search := range searchParts { + if search != keyParts[k] { + indices = nil + } + + if search != "#" { + continue + } + + if !isNumRegex.MatchString(keyParts[k]) { + continue + } + + searchParts[k] = keyParts[k] + indices = append(indices, keyParts[k]) + } + } + + if len(indices) > 0 { + normalized = strings.Join(searchParts, ".") + for _, index := range indices { + name = strings.Replace(name, "#", index, 1) + } + } + + if normalized == key { + switch path.TypeHint { + case jsonschemax.String: + return name, cast.ToString(value) + case jsonschemax.Float: + return name, cast.ToFloat64(value) + case jsonschemax.Int: + return name, cast.ToInt64(value) + case jsonschemax.Bool: + return name, cast.ToBool(value) + case jsonschemax.Nil: + return name, nil + case jsonschemax.BoolSlice: + if !gjson.Valid(value) { + return name, cast.ToBoolSlice(value) + } + fallthrough + case jsonschemax.StringSlice: + if !gjson.Valid(value) { + return name, castx.ToStringSlice(value) + } + fallthrough + case jsonschemax.IntSlice: + if !gjson.Valid(value) { + return name, cast.ToIntSlice(value) + } + fallthrough + case jsonschemax.FloatSlice: + if !gjson.Valid(value) { + return name, castx.ToFloatSlice(value) + } + fallthrough + case jsonschemax.JSON: + return name, decode(value) + default: + return name, value + } + } + } + + return "", nil +} + +func decode(value string) (v interface{}) { + b := []byte(value) + var arr []interface{} + if err := json.Unmarshal(b, &arr); err == nil { + return &arr + } + h := map[string]interface{}{} + if err := json.Unmarshal(b, &h); err == nil { + return &h + } + return nil +} diff --git a/oryx/configx/koanf_file.go b/oryx/configx/koanf_file.go new file mode 100644 index 00000000000..a862df8fb1b --- /dev/null +++ b/oryx/configx/koanf_file.go @@ -0,0 +1,90 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "context" + "os" + "path/filepath" + "strings" + + "github.com/knadh/koanf/parsers/json" + "github.com/knadh/koanf/parsers/toml" + "github.com/knadh/koanf/parsers/yaml" + "github.com/knadh/koanf/v2" + + "github.com/pkg/errors" + + "github.com/ory/x/watcherx" +) + +// KoanfFile implements a KoanfFile provider. +type KoanfFile struct { + subKey string + path string + parser koanf.Parser +} + +// NewKoanfFile returns a file provider. +func NewKoanfFile(path string) (*KoanfFile, error) { + return NewKoanfFileSubKey(path, "") +} + +func NewKoanfFileSubKey(path, subKey string) (*KoanfFile, error) { + kf := &KoanfFile{ + path: filepath.Clean(path), + subKey: subKey, + } + + switch e := filepath.Ext(path); e { + case ".toml": + kf.parser = toml.Parser() + case ".json": + kf.parser = json.Parser() + case ".yaml", ".yml": + kf.parser = yaml.Parser() + default: + return nil, errors.Errorf("unknown config file extension: %s", e) + } + + return kf, nil +} + +// ReadBytes is not supported by KoanfFile. +func (f *KoanfFile) ReadBytes() ([]byte, error) { + return nil, errors.New("file provider does not support this method") +} + +// Read reads the file and returns the parsed configuration. 
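
When a subKey is supplied, the parsed document is nested under that key path before koanf sees it. A short sketch, assuming ./hydra.yml exists and contains only "port: 4444" (file name and contents are illustrative):

fp, err := NewKoanfFileSubKey("./hydra.yml", "serve.admin")
if err != nil {
	// for example, an unknown file extension
}
v, _ := fp.Read()
// v == map[string]interface{}{
// 	"serve": map[string]interface{}{"admin": map[string]interface{}{"port": 4444}},
// }
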
+func (f *KoanfFile) Read() (map[string]interface{}, error) { + //#nosec G304 -- false positive + fc, err := os.ReadFile(f.path) + if err != nil { + return nil, errors.WithStack(err) + } + + v, err := f.parser.Unmarshal(fc) + if err != nil { + return nil, errors.WithStack(err) + } + + if f.subKey == "" { + return v, nil + } + + path := strings.Split(f.subKey, Delimiter) + for i := range path { + v = map[string]interface{}{ + path[len(path)-1-i]: v, + } + } + + return v, nil +} + +// WatchChannel watches the file and triggers a callback when it changes. It is a +// blocking function that internally spawns a goroutine to watch for changes. +func (f *KoanfFile) WatchChannel(ctx context.Context, c watcherx.EventChannel) (watcherx.Watcher, error) { + return watcherx.WatchFile(ctx, f.path, c) +} diff --git a/oryx/configx/koanf_full_merge.go b/oryx/configx/koanf_full_merge.go new file mode 100644 index 00000000000..dc25868d37c --- /dev/null +++ b/oryx/configx/koanf_full_merge.go @@ -0,0 +1,35 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "encoding/json" + + "github.com/pkg/errors" + "github.com/tidwall/sjson" + + "github.com/ory/x/jsonx" +) + +func MergeAllTypes(src, dst map[string]interface{}) error { + rawSrc, err := json.Marshal(src) + if err != nil { + return errors.WithStack(err) + } + + dstSrc, err := json.Marshal(dst) + if err != nil { + return errors.WithStack(err) + } + + keys := jsonx.Flatten(rawSrc) + for key, value := range keys { + dstSrc, err = sjson.SetBytes(dstSrc, key, value) + if err != nil { + return errors.WithStack(err) + } + } + + return errors.WithStack(json.Unmarshal(dstSrc, &dst)) +} diff --git a/oryx/configx/koanf_memory.go b/oryx/configx/koanf_memory.go new file mode 100644 index 00000000000..32893e6a042 --- /dev/null +++ b/oryx/configx/koanf_memory.go @@ -0,0 +1,51 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "context" + + "github.com/knadh/koanf/parsers/json" + "github.com/knadh/koanf/v2" + + "github.com/pkg/errors" + + stdjson "encoding/json" +) + +// KoanfMemory implements a KoanfMemory provider. +type KoanfMemory struct { + doc stdjson.RawMessage + + ctx context.Context + parser koanf.Parser +} + +// NewKoanfMemory returns a file provider. +func NewKoanfMemory(ctx context.Context, doc stdjson.RawMessage) *KoanfMemory { + return &KoanfMemory{ + ctx: ctx, + doc: doc, + parser: json.Parser(), + } +} + +func (f *KoanfMemory) SetDoc(doc stdjson.RawMessage) { + f.doc = doc +} + +// ReadBytes reads the contents of a file on disk and returns the bytes. +func (f *KoanfMemory) ReadBytes() ([]byte, error) { + return nil, errors.New("file provider does not support this method") +} + +// Read is not supported by the file provider. 
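
For the in-memory provider, Read unmarshals whatever JSON document was handed to NewKoanfMemory (or set later via SetDoc). A minimal sketch with an illustrative document:

km := NewKoanfMemory(context.Background(), []byte(`{"log":{"level":"debug"}}`))
v, _ := km.Read()
// v == map[string]interface{}{"log": map[string]interface{}{"level": "debug"}}
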
+func (f *KoanfMemory) Read() (map[string]interface{}, error) { + v, err := f.parser.Unmarshal(f.doc) + if err != nil { + return nil, errors.WithStack(err) + } + + return v, nil +} diff --git a/oryx/configx/koanf_schema_defaults.go b/oryx/configx/koanf_schema_defaults.go new file mode 100644 index 00000000000..9659606c2a7 --- /dev/null +++ b/oryx/configx/koanf_schema_defaults.go @@ -0,0 +1,47 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "strings" + + "github.com/knadh/koanf/maps" + "github.com/pkg/errors" + + "github.com/ory/jsonschema/v3" + "github.com/ory/x/jsonschemax" +) + +type KoanfSchemaDefaults struct { + keys []jsonschemax.Path +} + +func NewKoanfSchemaDefaults(rawSchema []byte, schema *jsonschema.Schema) (*KoanfSchemaDefaults, error) { + keys, err := getSchemaPaths(rawSchema, schema) + if err != nil { + return nil, err + } + + return &KoanfSchemaDefaults{keys: keys}, nil +} + +func (k *KoanfSchemaDefaults) ReadBytes() ([]byte, error) { + return nil, errors.New("schema defaults provider does not support this method") +} + +func (k *KoanfSchemaDefaults) Read() (map[string]interface{}, error) { + values := map[string]interface{}{} + for _, key := range k.keys { + // It's an array! + if strings.Contains(key.Name, "#") { + continue + } + + if key.Default != nil { + values[key.Name] = key.Default + } + } + + return maps.Unflatten(values, "."), nil +} diff --git a/oryx/configx/options.go b/oryx/configx/options.go new file mode 100644 index 00000000000..6a51797f0de --- /dev/null +++ b/oryx/configx/options.go @@ -0,0 +1,159 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "context" + "errors" + "fmt" + "io" + "os" + + "github.com/spf13/pflag" + + "github.com/ory/jsonschema/v3" + "github.com/ory/x/logrusx" + + "github.com/knadh/koanf/v2" + + "github.com/ory/x/watcherx" +) + +type ( + OptionModifier func(p *Provider) +) + +func WithContext(ctx context.Context) OptionModifier { + return func(p *Provider) { + for _, o := range ConfigOptionsFromContext(ctx) { + o(p) + } + } +} + +func WithConfigFiles(files ...string) OptionModifier { + return func(p *Provider) { + p.files = append(p.files, files...) + } +} + +func WithImmutables(immutables ...string) OptionModifier { + return func(p *Provider) { + p.immutables = append(p.immutables, immutables...) + } +} + +func WithExceptImmutables(exceptImmutables ...string) OptionModifier { + return func(p *Provider) { + p.exceptImmutables = append(p.exceptImmutables, exceptImmutables...) 
+ } +} + +func WithFlags(flags *pflag.FlagSet) OptionModifier { + return func(p *Provider) { + p.flags = flags + } +} + +func WithLogger(l *logrusx.Logger) OptionModifier { + return func(p *Provider) { + p.logger = l + } +} + +func SkipValidation() OptionModifier { + return func(p *Provider) { + p.skipValidation = true + } +} + +func DisableEnvLoading() OptionModifier { + return func(p *Provider) { + p.disableEnvLoading = true + } +} + +func WithValue(key string, value interface{}) OptionModifier { + return func(p *Provider) { + p.forcedValues = append(p.forcedValues, tuple{Key: key, Value: value}) + } +} + +func WithValues(values map[string]interface{}) OptionModifier { + return func(p *Provider) { + for key, value := range values { + p.forcedValues = append(p.forcedValues, tuple{Key: key, Value: value}) + } + } +} + +func WithBaseValues(values map[string]interface{}) OptionModifier { + return func(p *Provider) { + for key, value := range values { + p.baseValues = append(p.baseValues, tuple{Key: key, Value: value}) + } + } +} + +func WithUserProviders(providers ...koanf.Provider) OptionModifier { + return func(p *Provider) { + p.userProviders = providers + } +} + +// DEPRECATED without replacement. This option is a no-op. +func OmitKeysFromTracing(keys ...string) OptionModifier { + return func(*Provider) {} +} + +func AttachWatcher(watcher func(event watcherx.Event, err error)) OptionModifier { + return func(p *Provider) { + p.onChanges = append(p.onChanges, watcher) + } +} + +func WithLogrusWatcher(l *logrusx.Logger) OptionModifier { + return AttachWatcher(LogrusWatcher(l)) +} + +func LogrusWatcher(l *logrusx.Logger) func(e watcherx.Event, err error) { + return func(e watcherx.Event, err error) { + l.WithField("file", e.Source()). + WithField("event_type", fmt.Sprintf("%T", e)). + Info("A change to a configuration file was detected.") + + if et := new(jsonschema.ValidationError); errors.As(err, &et) { + l.WithField("event", fmt.Sprintf("%#v", et)). + Errorf("The changed configuration is invalid and could not be loaded. Rolling back to the last working configuration revision. Please address the validation errors before restarting the process.") + } else if et := new(ImmutableError); errors.As(err, &et) { + l.WithError(err). + WithField("key", et.Key). + WithField("old_value", fmt.Sprintf("%v", et.From)). + WithField("new_value", fmt.Sprintf("%v", et.To)). + Errorf("A configuration value marked as immutable has changed. Rolling back to the last working configuration revision. To reload the values please restart the process.") + } else if err != nil { + l.WithError(err).Errorf("An error occurred while watching config file %s", e.Source()) + } else { + l.WithField("file", e.Source()). + WithField("event_type", fmt.Sprintf("%T", e)). 
+ Info("Configuration change processed successfully.") + } + } +} + +func WithStderrValidationReporter() OptionModifier { + return func(p *Provider) { + p.onValidationError = func(k *koanf.Koanf, err error) { + p.printHumanReadableValidationErrors(k, os.Stderr, err) + } + } +} + +func WithStandardValidationReporter(w io.Writer) OptionModifier { + return func(p *Provider) { + p.onValidationError = func(k *koanf.Koanf, err error) { + p.printHumanReadableValidationErrors(k, w, err) + } + } +} diff --git a/oryx/configx/permission.go b/oryx/configx/permission.go new file mode 100644 index 00000000000..51be0a5998e --- /dev/null +++ b/oryx/configx/permission.go @@ -0,0 +1,56 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "os" + "os/user" + "strconv" +) + +type UnixPermission struct { + Owner string + Group string + Mode os.FileMode +} + +func (p *UnixPermission) SetPermission(file string) error { + var e error + e = os.Chmod(file, p.Mode) + if e != nil { + return e + } + + gid := -1 + uid := -1 + + if p.Owner != "" { + var userObj *user.User + userObj, e = user.Lookup(p.Owner) + if e != nil { + return e + } + uid, e = strconv.Atoi(userObj.Uid) + if e != nil { + return e + } + } + if p.Group != "" { + var group *user.Group + group, e := user.LookupGroup(p.Group) + if e != nil { + return e + } + gid, e = strconv.Atoi(group.Gid) + if e != nil { + return e + } + } + + e = os.Chown(file, uid, gid) + if e != nil { + return e + } + return nil +} diff --git a/oryx/configx/pflag.go b/oryx/configx/pflag.go new file mode 100644 index 00000000000..9362a54dcc6 --- /dev/null +++ b/oryx/configx/pflag.go @@ -0,0 +1,57 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "strings" + + "github.com/knadh/koanf/providers/posflag" + "github.com/knadh/koanf/v2" + "github.com/pkg/errors" + "github.com/spf13/pflag" + + "github.com/ory/jsonschema/v3" + "github.com/ory/x/jsonschemax" +) + +type PFlagProvider struct { + p *posflag.Posflag + paths []jsonschemax.Path +} + +func NewPFlagProvider(rawSchema []byte, schema *jsonschema.Schema, f *pflag.FlagSet, k *koanf.Koanf) (*PFlagProvider, error) { + paths, err := getSchemaPaths(rawSchema, schema) + if err != nil { + return nil, err + } + return &PFlagProvider{ + p: posflag.Provider(f, ".", k), + paths: paths, + }, nil +} + +func (p *PFlagProvider) ReadBytes() ([]byte, error) { + return nil, errors.New("pflag provider does not support this method") +} + +func (p *PFlagProvider) Read() (map[string]interface{}, error) { + all, err := p.p.Read() + if err != nil { + return nil, errors.WithStack(err) + } + knownFlags := make(map[string]interface{}, len(all)) + for k, v := range all { + k = strings.ReplaceAll(k, ".", "-") + for _, path := range p.paths { + normalized := strings.ReplaceAll(path.Name, ".", "-") + if k == normalized { + knownFlags[k] = v + break + } + } + } + return knownFlags, nil +} + +var _ koanf.Provider = (*PFlagProvider)(nil) diff --git a/oryx/configx/provider.go b/oryx/configx/provider.go new file mode 100644 index 00000000000..278ecd0fbc0 --- /dev/null +++ b/oryx/configx/provider.go @@ -0,0 +1,549 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "bytes" + "context" + "fmt" + "io" + "net/url" + "os" + "reflect" + "sync" + "time" + + "github.com/inhies/go-bytesize" + 
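	// go-bytesize backs Provider.ByteSizeF further down, which parses
	// human-readable sizes such as "64MB".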
"github.com/knadh/koanf/parsers/json" + "github.com/knadh/koanf/providers/posflag" + "github.com/knadh/koanf/v2" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/spf13/pflag" + + "github.com/ory/jsonschema/v3" + "github.com/ory/x/jsonschemax" + "github.com/ory/x/logrusx" + "github.com/ory/x/otelx" + "github.com/ory/x/watcherx" +) + +type tuple struct { + Key string + Value interface{} +} + +type Provider struct { + l sync.RWMutex + *koanf.Koanf + immutables, exceptImmutables []string + + schema []byte + flags *pflag.FlagSet + validator *jsonschema.Schema + onChanges []func(watcherx.Event, error) + onValidationError func(k *koanf.Koanf, err error) + + forcedValues []tuple + baseValues []tuple + files []string + + skipValidation bool + disableEnvLoading bool + + logger *logrusx.Logger + + providers []koanf.Provider + userProviders []koanf.Provider +} + +const ( + FlagConfig = "config" + Delimiter = "." +) + +// RegisterConfigFlag registers the "--config" flag on pflag.FlagSet. +func RegisterConfigFlag(flags *pflag.FlagSet, fallback []string) { + flags.StringSliceP(FlagConfig, "c", fallback, "Config files to load, overwriting in the order specified.") +} + +// New creates a new provider instance or errors. +// Configuration values are loaded in the following order: +// +// 1. Defaults from the JSON Schema +// 2. Config files (yaml, yml, toml, json) +// 3. Command line flags +// 4. Environment variables +// +// There will also be file-watchers started for all config files. To cancel the +// watchers, cancel the context. +func New(ctx context.Context, schema []byte, modifiers ...OptionModifier) (*Provider, error) { + validator, err := getSchema(ctx, schema) + if err != nil { + return nil, err + } + + l := logrus.New() + l.Out = io.Discard + + p := &Provider{ + schema: schema, + validator: validator, + onValidationError: func(k *koanf.Koanf, err error) {}, + logger: logrusx.New("discarding config logger", "", logrusx.UseLogger(l)), + Koanf: koanf.NewWithConf(koanf.Conf{Delim: Delimiter, StrictMerge: true}), + } + + for _, m := range modifiers { + m(p) + } + + providers, err := p.createProviders(ctx) + if err != nil { + return nil, err + } + + p.providers = providers + + k, err := p.newKoanf() + if err != nil { + return nil, err + } + + p.replaceKoanf(k) + return p, nil +} + +func (p *Provider) SkipValidation() bool { + return p.skipValidation +} + +func (p *Provider) createProviders(ctx context.Context) (providers []koanf.Provider, err error) { + defaultsProvider, err := NewKoanfSchemaDefaults(p.schema, p.validator) + if err != nil { + return nil, err + } + providers = append(providers, defaultsProvider) + + // Workaround for https://github.com/knadh/koanf/pull/47 + for _, t := range p.baseValues { + providers = append(providers, NewKoanfConfmap([]tuple{t})) + } + + paths := p.files + if p.flags != nil { + p, _ := p.flags.GetStringSlice(FlagConfig) + paths = append(paths, p...) 
+ } + + p.logger.WithField("files", paths).Debug("Adding config files.") + + c := make(watcherx.EventChannel) + + defer func() { + if err == nil && len(paths) > 0 { + go p.watchForFileChanges(ctx, c) + } + }() + for _, path := range paths { + fp, err := NewKoanfFile(path) + if err != nil { + return nil, err + } + + if _, err := fp.WatchChannel(ctx, c); err != nil { + return nil, err + } + + providers = append(providers, fp) + } + + providers = append(providers, p.userProviders...) + + if p.flags != nil { + pp, err := NewPFlagProvider(p.schema, p.validator, p.flags, p.Koanf) + if err != nil { + return nil, err + } + providers = append(providers, pp) + } + + if !p.disableEnvLoading { + envProvider, err := NewKoanfEnv("", p.schema, p.validator) + if err != nil { + return nil, err + } + providers = append(providers, envProvider) + } + + // Workaround for https://github.com/knadh/koanf/pull/47 + for _, t := range p.forcedValues { + providers = append(providers, NewKoanfConfmap([]tuple{t})) + } + + return providers, nil +} + +func (p *Provider) replaceKoanf(k *koanf.Koanf) { + p.Koanf = k +} + +func (p *Provider) validate(k *koanf.Koanf) error { + if p.skipValidation { + return nil + } + + out, err := k.Marshal(json.Parser()) + if err != nil { + return errors.WithStack(err) + } + if err := p.validator.Validate(bytes.NewReader(out)); err != nil { + p.onValidationError(k, err) + return err + } + + return nil +} + +// newKoanf creates a new koanf instance with all the updated config +// +// This is unfortunately required due to several limitations / bugs in koanf: +// +// - https://github.com/knadh/koanf/issues/77 +// - https://github.com/knadh/koanf/pull/47 +func (p *Provider) newKoanf() (_ *koanf.Koanf, err error) { + k := koanf.New(Delimiter) + + for _, provider := range p.providers { + // posflag.Posflag requires access to Koanf instance so we recreate the provider here which is a workaround + // for posflag.Provider's API. + if _, ok := provider.(*posflag.Posflag); ok { + provider = posflag.Provider(p.flags, ".", k) + } + + var opts []koanf.Option + if _, ok := provider.(*Env); ok { + opts = append(opts, koanf.WithMergeFunc(MergeAllTypes)) + } + + if err := k.Load(provider, nil, opts...); err != nil { + return nil, err + } + } + + if err := p.validate(k); err != nil { + return nil, err + } + + return k, nil +} + +// SetTracer does nothing. DEPRECATED without replacement. 
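
Putting the pieces together, constructing a Provider and reading typed values could look roughly like the sketch below. The schema, keys, and values are illustrative; ./hydra.yml is assumed to exist (otherwise New returns an error when the file watcher is set up), and context/fmt are assumed to be imported:

ctx := context.Background()
schema := []byte(`{
	"$id": "https://example.org/example.schema.json",
	"type": "object",
	"properties": {
		"serve": {
			"type": "object",
			"properties": {"port": {"type": "integer", "default": 4444}}
		}
	}
}`)
p, err := New(ctx, schema,
	WithConfigFiles("./hydra.yml"),
	WithValue("serve.port", 4445), // forced values are loaded last
)
if err != nil {
	// schema, file, or validation error
}
fmt.Println(p.IntF("serve.port", 4444)) // 4445: forced values win over files, env, and schema defaults
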
+func (p *Provider) SetTracer(_ context.Context, _ *otelx.Tracer) { +} + +func (p *Provider) runOnChanges(e watcherx.Event, err error) { + for k := range p.onChanges { + p.onChanges[k](e, err) + } +} + +func deleteOtherKeys(k *koanf.Koanf, keys []string) { +outer: + for _, key := range k.Keys() { + for _, ik := range keys { + if key == ik { + continue outer + } + } + k.Delete(key) + } +} + +func (p *Provider) reload(e watcherx.Event) { + p.l.Lock() + + var err error + defer func() { + // we first want to unlock and then runOnChanges, so that the callbacks can actually use the Provider + p.l.Unlock() + p.runOnChanges(e, err) + }() + + nk, err := p.newKoanf() + if err != nil { + return // unlocks & runs changes in defer + } + + oldImmutables, newImmutables := p.Koanf.Copy(), nk.Copy() + deleteOtherKeys(oldImmutables, p.immutables) + deleteOtherKeys(newImmutables, p.immutables) + + for _, key := range p.exceptImmutables { + oldImmutables.Delete(key) + newImmutables.Delete(key) + } + if !reflect.DeepEqual(oldImmutables.Raw(), newImmutables.Raw()) { + for _, key := range p.immutables { + if !reflect.DeepEqual(oldImmutables.Get(key), newImmutables.Get(key)) { + err = NewImmutableError(key, fmt.Sprintf("%v", p.Koanf.Get(key)), fmt.Sprintf("%v", nk.Get(key))) + return // unlocks & runs changes in defer + } + } + } + + p.replaceKoanf(nk) + + // unlocks & runs changes in defer +} + +func (p *Provider) watchForFileChanges(ctx context.Context, c watcherx.EventChannel) { + for { + select { + case <-ctx.Done(): + return + case e := <-c: + switch et := e.(type) { + case *watcherx.ErrorEvent: + p.runOnChanges(e, et) + default: + p.reload(e) + } + } + } +} + +// DirtyPatch patches individual config keys without reloading the full config +// +// WARNING! This method is only useful to override existing keys in string or number +// format. DO NOT use this method to override arrays, maps, or other complex types. +// +// This method DOES NOT validate the config against the config JSON schema. If you +// need to validate the config, use the Set method instead. +// +// This method can not be used to remove keys from the config as that is not +// possible without reloading the full config. 
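//
// A rough comparison (the key is illustrative):
//
//	_ = p.DirtyPatch("log.level", "debug") // overlays a single value, no validation, no reload
//	_ = p.Set("log.level", "debug")        // rebuilds the koanf tree and validates against the schema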
+func (p *Provider) DirtyPatch(key string, value any) error { + p.l.Lock() + defer p.l.Unlock() + + t := tuple{Key: key, Value: value} + kc := NewKoanfConfmap([]tuple{t}) + + p.forcedValues = append(p.forcedValues, t) + p.providers = append(p.providers, kc) + + if err := p.Koanf.Load(kc, nil, []koanf.Option{}...); err != nil { + return err + } + + return nil +} + +func (p *Provider) Set(key string, value interface{}) error { + p.l.Lock() + defer p.l.Unlock() + + p.forcedValues = append(p.forcedValues, tuple{Key: key, Value: value}) + p.providers = append(p.providers, NewKoanfConfmap([]tuple{{Key: key, Value: value}})) + + k, err := p.newKoanf() + if err != nil { + return err + } + + p.replaceKoanf(k) + return nil +} + +func (p *Provider) BoolF(key string, fallback bool) bool { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + return p.Bool(key) +} + +func (p *Provider) StringF(key string, fallback string) string { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + return p.String(key) +} + +func (p *Provider) StringsF(key string, fallback []string) (val []string) { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + return p.Strings(key) +} + +func (p *Provider) IntF(key string, fallback int) (val int) { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + return p.Int(key) +} + +func (p *Provider) Float64F(key string, fallback float64) (val float64) { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + return p.Float64(key) +} + +func (p *Provider) DurationF(key string, fallback time.Duration) (val time.Duration) { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + return p.Duration(key) +} + +func (p *Provider) ByteSizeF(key string, fallback bytesize.ByteSize) bytesize.ByteSize { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Koanf.Exists(key) { + return fallback + } + + switch v := p.Koanf.Get(key).(type) { + case string: + // this type usually comes from user input + dec, err := bytesize.Parse(v) + if err != nil { + p.logger.WithField("key", key).WithField("raw_value", v).WithError(err).Warnf("error parsing byte size value, using fallback of %s", fallback) + return fallback + } + return dec + case float64: + // this type comes from json.Unmarshal + return bytesize.ByteSize(v) + case bytesize.ByteSize: + return v + default: + p.logger.WithField("key", key).WithField("raw_type", fmt.Sprintf("%T", v)).WithField("raw_value", fmt.Sprintf("%+v", v)).Errorf("error converting byte size value because of unknown type, using fallback of %s", fallback) + return fallback + } +} + +func (p *Provider) GetF(key string, fallback interface{}) (val interface{}) { + p.l.RLock() + defer p.l.RUnlock() + + if !p.Exists(key) { + return fallback + } + + return p.Get(key) +} + +func (p *Provider) TracingConfig(serviceName string) *otelx.Config { + return &otelx.Config{ + ServiceName: p.StringF("tracing.service_name", serviceName), + DeploymentEnvironment: p.StringF("tracing.deployment_environment", ""), + Provider: p.String("tracing.provider"), + Providers: otelx.ProvidersConfig{ + Jaeger: otelx.JaegerConfig{ + Sampling: otelx.JaegerSampling{ + ServerURL: p.String("tracing.providers.jaeger.sampling.server_url"), + TraceIdRatio: p.Float64F("tracing.providers.jaeger.sampling.trace_id_ratio", 1), + }, + LocalAgentAddress: 
p.String("tracing.providers.jaeger.local_agent_address"), + }, + Zipkin: otelx.ZipkinConfig{ + ServerURL: p.String("tracing.providers.zipkin.server_url"), + Sampling: otelx.ZipkinSampling{ + SamplingRatio: p.Float64("tracing.providers.zipkin.sampling.sampling_ratio"), + }, + }, + OTLP: otelx.OTLPConfig{ + ServerURL: p.String("tracing.providers.otlp.server_url"), + Insecure: p.Bool("tracing.providers.otlp.insecure"), + Sampling: otelx.OTLPSampling{ + SamplingRatio: p.Float64F("tracing.providers.otlp.sampling.sampling_ratio", 1), + }, + AuthorizationHeader: p.String("tracing.providers.otlp.authorization_header"), + }, + }, + } +} + +func (p *Provider) RequestURIF(path string, fallback *url.URL) *url.URL { + p.l.RLock() + defer p.l.RUnlock() + + switch t := p.Get(path).(type) { + case *url.URL: + return t + case url.URL: + return &t + case string: + if parsed, err := url.ParseRequestURI(t); err == nil { + return parsed + } + } + + return fallback +} + +func (p *Provider) URIF(path string, fallback *url.URL) *url.URL { + p.l.RLock() + defer p.l.RUnlock() + + switch t := p.Get(path).(type) { + case *url.URL: + return t + case url.URL: + return &t + case string: + if parsed, err := url.Parse(t); err == nil { + return parsed + } + } + + return fallback +} + +// PrintHumanReadableValidationErrors prints human readable validation errors. Duh. +func (p *Provider) PrintHumanReadableValidationErrors(w io.Writer, err error) { + p.printHumanReadableValidationErrors(p.Koanf, w, err) +} + +func (p *Provider) printHumanReadableValidationErrors(k *koanf.Koanf, w io.Writer, err error) { + if err == nil { + return + } + + _, _ = fmt.Fprintln(os.Stderr, "") + conf, innerErr := k.Marshal(json.Parser()) + if innerErr != nil { + _, _ = fmt.Fprintf(w, "Unable to unmarshal configuration: %+v", innerErr) + } + + jsonschemax.FormatValidationErrorForCLI(w, conf, err) +} diff --git a/oryx/configx/schema.go b/oryx/configx/schema.go new file mode 100644 index 00000000000..9ee138133b3 --- /dev/null +++ b/oryx/configx/schema.go @@ -0,0 +1,45 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "bytes" + "fmt" + + "github.com/ory/x/logrusx" + "github.com/ory/x/otelx" + + "github.com/gofrs/uuid" + "github.com/pkg/errors" + "github.com/tidwall/gjson" + + "github.com/ory/jsonschema/v3" +) + +func newCompiler(schema []byte) (string, *jsonschema.Compiler, error) { + id := gjson.GetBytes(schema, "$id").String() + if id == "" { + id = fmt.Sprintf("%s.json", uuid.Must(uuid.NewV4()).String()) + } + + compiler := jsonschema.NewCompiler() + if err := compiler.AddResource(id, bytes.NewReader(schema)); err != nil { + return "", nil, errors.WithStack(err) + } + + // DO NOT REMOVE THIS + compiler.ExtractAnnotations = true + + if err := otelx.AddConfigSchema(compiler); err != nil { + return "", nil, err + } + if err := logrusx.AddConfigSchema(compiler); err != nil { + return "", nil, err + } + if err := AddSchemaResources(compiler); err != nil { + return "", nil, err + } + + return id, compiler, nil +} diff --git a/oryx/configx/schema_cache.go b/oryx/configx/schema_cache.go new file mode 100644 index 00000000000..023fb180119 --- /dev/null +++ b/oryx/configx/schema_cache.go @@ -0,0 +1,48 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "context" + "crypto/sha256" + + "github.com/dgraph-io/ristretto/v2" + + 
"github.com/ory/jsonschema/v3" +) + +var schemaCacheConfig = &ristretto.Config[[]byte, *jsonschema.Schema]{ + // Hold up to 25 schemas in cache. Usually we only need one. + MaxCost: 25, + NumCounters: 250, + BufferItems: 64, + Metrics: false, + IgnoreInternalCost: true, + Cost: func(*jsonschema.Schema) int64 { + return 1 + }, +} + +var schemaCache, _ = ristretto.NewCache(schemaCacheConfig) + +func getSchema(ctx context.Context, schema []byte) (*jsonschema.Schema, error) { + key := sha256.Sum256(schema) + if val, found := schemaCache.Get(key[:]); found { + return val, nil + } + + schemaID, comp, err := newCompiler(schema) + if err != nil { + return nil, err + } + + validator, err := comp.Compile(ctx, schemaID) + if err != nil { + return nil, err + } + + schemaCache.Set(key[:], validator, 1) + schemaCache.Wait() + return validator, nil +} diff --git a/oryx/configx/schema_path_cache.go b/oryx/configx/schema_path_cache.go new file mode 100644 index 00000000000..685fc1c2dd3 --- /dev/null +++ b/oryx/configx/schema_path_cache.go @@ -0,0 +1,41 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "crypto/sha256" + + "github.com/ory/x/jsonschemax" + + "github.com/dgraph-io/ristretto/v2" + + "github.com/ory/jsonschema/v3" +) + +var schemaPathCacheConfig = &ristretto.Config[[]byte, []jsonschemax.Path]{ + // Hold up to 25 schemas in cache. Usually we only need one. + MaxCost: 250, + NumCounters: 2500, + BufferItems: 64, + Metrics: false, + IgnoreInternalCost: true, +} + +var schemaPathCache, _ = ristretto.NewCache[[]byte, []jsonschemax.Path](schemaPathCacheConfig) + +func getSchemaPaths(rawSchema []byte, schema *jsonschema.Schema) ([]jsonschemax.Path, error) { + key := sha256.Sum256(rawSchema) + if val, found := schemaPathCache.Get(key[:]); found { + return val, nil + } + + keys, err := jsonschemax.ListPathsWithInitializedSchemaAndArraysIncluded(schema) + if err != nil { + return nil, err + } + + schemaPathCache.Set(key[:], keys, 1) + schemaPathCache.Wait() + return keys, nil +} diff --git a/oryx/configx/serve.go b/oryx/configx/serve.go new file mode 100644 index 00000000000..6fb29932e0b --- /dev/null +++ b/oryx/configx/serve.go @@ -0,0 +1,137 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +import ( + "cmp" + "context" + "crypto/tls" + _ "embed" + "fmt" + "net/url" + "os" + + "github.com/ory/x/logrusx" + "github.com/ory/x/tlsx" +) + +const ( + ServeConfigSchemaID = "ory://serve-config" + TLSConfigSchemaID = "ory://tls-config" +) + +//go:embed serve.schema.json +var ServeConfigSchema []byte + +//go:embed tls.schema.json +var TLSConfigSchema []byte + +type ( + Serve struct { + Host, WriteListenFile string + Port int + BaseURL *url.URL + Socket UnixPermission + TLS TLS + RequestLog ServeRequestLog + } + TLS struct { + Enabled bool + AllowTerminationFrom []string + CertBase64, KeyBase64, CertPath, KeyPath string + } + ServeRequestLog struct { + DisableHealth bool + } +) + +func (p *Provider) Serve(prefix string, isDev bool, defaults Serve) *Serve { + prefix = cleanPrefix(prefix) + + defaults.Socket.Mode = cmp.Or(defaults.Socket.Mode, 0o755) + + serve := Serve{ + Host: p.StringF(prefix+"host", defaults.Host), + Port: p.IntF(prefix+"port", defaults.Port), + WriteListenFile: p.StringF(prefix+"write_listen_file", defaults.WriteListenFile), + BaseURL: p.URIF(prefix+"base_url", defaults.BaseURL), + Socket: UnixPermission{ + Owner: 
p.StringF(prefix+"socket.owner", defaults.Socket.Owner), + Group: p.StringF(prefix+"socket.group", defaults.Socket.Group), + Mode: os.FileMode(p.IntF(prefix+"socket.mode", int(defaults.Socket.Mode))), + }, + TLS: p.TLS(prefix+"tls", defaults.TLS), + RequestLog: ServeRequestLog{ + DisableHealth: p.BoolF(prefix+"requestlog.disable_health", defaults.RequestLog.DisableHealth), + }, + } + + if serve.BaseURL == nil { + serve.BaseURL = &url.URL{ + Scheme: "http", + Path: "/", + } + if !isDev || serve.TLS.Enabled { + serve.BaseURL.Scheme = "https" + } + host := serve.Host + if host == "0.0.0.0" || host == "" { + var err error + host, err = os.Hostname() + if err != nil { + p.logger.WithError(err).Warn("Unable to get hostname from system, falling back to 127.0.0.1.") + host = "127.0.0.1" + } + } + serve.BaseURL.Host = fmt.Sprintf("%s:%d", host, serve.Port) + } + + return &serve +} + +func (p *Provider) TLS(prefix string, defaults TLS) TLS { + prefix = cleanPrefix(prefix) + + return TLS{ + Enabled: p.BoolF(prefix+"enabled", defaults.Enabled), + AllowTerminationFrom: p.StringsF(prefix+"allow_termination_from", defaults.AllowTerminationFrom), + CertBase64: p.StringF(prefix+"cert.base64", defaults.CertBase64), + KeyBase64: p.StringF(prefix+"key.base64", defaults.KeyBase64), + CertPath: p.StringF(prefix+"cert.path", defaults.CertPath), + KeyPath: p.StringF(prefix+"key.path", defaults.KeyPath), + } +} + +func (t *TLS) GetCertFunc(ctx context.Context, l *logrusx.Logger, ifaceName string) (tlsx.CertFunc, error) { + switch { + case t.CertBase64 != "" && t.KeyBase64 != "": + cert, err := tlsx.CertificateFromBase64(t.CertBase64, t.KeyBase64) + if err != nil { + return nil, fmt.Errorf("unable to load TLS certificate for interface %s: %w", ifaceName, err) + } + l.Infof("Setting up HTTPS for %s", ifaceName) + return func(*tls.ClientHelloInfo) (*tls.Certificate, error) { return &cert, nil }, nil + case t.CertPath != "" && t.KeyPath != "": + errs := make(chan error, 1) + getCert, err := tlsx.GetCertificate(ctx, t.CertPath, t.KeyPath, errs) + if err != nil { + return nil, fmt.Errorf("unable to load TLS certificate for interface %s: %w", ifaceName, err) + } + go func() { + for { + select { + case <-ctx.Done(): + return + case err := <-errs: + l.WithError(err).Error("Failed to reload TLS certificates, using previous certificates") + } + } + }() + l.Infof("Setting up HTTPS for %s (automatic certificate reloading active)", ifaceName) + return getCert, nil + default: + l.Infof("TLS has not been configured for %s, skipping", ifaceName) + } + return nil, nil +} diff --git a/oryx/configx/serve.schema.json b/oryx/configx/serve.schema.json new file mode 100644 index 00000000000..1f1df38db02 --- /dev/null +++ b/oryx/configx/serve.schema.json @@ -0,0 +1,70 @@ +{ + "$id": "https://github.com/ory/x/configx/serve.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "request_log": { + "type": "object", + "properties": { + "disable_for_health": { + "title": "Disable health endpoints request logging", + "description": "Disable request logging for /health/alive and /health/ready endpoints", + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + "base_url": { + "title": "Base URL", + "description": "The URL where the endpoint is exposed at. 
This domain is used to generate redirects, form URLs, and more.", + "type": "string", + "format": "uri-reference", + "examples": [ + "https://my-app.com/", + "https://my-app.com/.ory/kratos/public", + "https://auth.my-app.com/hydra" + ] + }, + "host": { + "title": "Host", + "description": "The host (interface) that the endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Port", + "description": "The port that the endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535 + }, + "socket": { + "type": "object", + "additionalProperties": false, + "description": "Sets the permissions of the unix socket", + "properties": { + "owner": { + "type": "string", + "description": "Owner of unix socket. If empty, the owner will be the user running the service.", + "default": "" + }, + "group": { + "type": "string", + "description": "Group of unix socket. If empty, the group will be the primary group of the user running the service.", + "default": "" + }, + "mode": { + "type": "integer", + "description": "Mode of unix socket in numeric form", + "default": 493, + "minimum": 0, + "maximum": 511 + } + } + }, + "tls": { + "$ref": "ory://tls-config" + } + } +} diff --git a/oryx/configx/span.go b/oryx/configx/span.go new file mode 100644 index 00000000000..2f2471e2b8e --- /dev/null +++ b/oryx/configx/span.go @@ -0,0 +1,10 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package configx + +const ( + LoadSpanOpName = "config-load" + UpdatedSpanOpName = "config-update" + SnapshotSpanOpName = "config-snapshot" +) diff --git a/oryx/configx/stub/benchmark/benchmark.yaml b/oryx/configx/stub/benchmark/benchmark.yaml new file mode 100644 index 00000000000..1f4b7a1c218 --- /dev/null +++ b/oryx/configx/stub/benchmark/benchmark.yaml @@ -0,0 +1,312 @@ +# Please find the documentation for this file at +# https://www.ory.sh/oathkeeper/docs/configuration + +log: + level: debug + format: json + +profiling: cpu + +serve: + proxy: + port: 1234 + host: 127.0.0.1 + + timeout: + read: 1s + write: 2s + idle: 3s + + cors: + enabled: true + allowed_origins: + - https://example.com + - https://*.example.com + allowed_methods: + - POST + - GET + - PUT + - PATCH + - DELETE + allowed_headers: + - Authorization + - Content-Type + exposed_headers: + - Content-Type + allow_credentials: true + max_age: 10 + debug: true + tls: + key: + path: /path/to/key.pem + base64: LS0tLS1CRUdJTiBFTkNSWVBURUQgUFJJVkFURSBLRVktLS0tLVxuTUlJRkRqQkFCZ2txaGtpRzl3MEJCUTB3... + cert: + path: /path/to/cert.pem + base64: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr... + + api: + port: 1235 + host: 127.0.0.2 + + timeout: + read: 1s + write: 2s + idle: 3s + + cors: + enabled: true + allowed_origins: + - https://example.org + - https://*.example.org + allowed_methods: + - GET + - PUT + - PATCH + - DELETE + allowed_headers: + - Authorization + - Content-Type + exposed_headers: + - Content-Type + allow_credentials: true + max_age: 10 + debug: true + tls: + key: + path: /path/to/key.pem + base64: LS0tLS1CRUdJTiBFTkNSWVBURUQgUFJJVkFURSBLRVktLS0tLVxuTUlJRkRqQkFCZ2txaGtpRzl3MEJCUTB3... + cert: + path: /path/to/cert.pem + base64: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr... + + prometheus: + port: 9000 + host: localhost + metrics_path: /metrics + collapse_request_paths: true + +# Configures Access Rules +access_rules: + # Locations (list of URLs) where access rules should be fetched from on boot. 
+ # It is expected that the documents at those locations return a JSON or YAML Array containing ORY Oathkeeper Access Rules. + repositories: + # If the URL Scheme is `file://`, the access rules (an array of access rules is expected) will be + # fetched from the local file system. + - file://path/to/rules.json + # If the URL Scheme is `inline://`, the access rules (an array of access rules is expected) + # are expected to be a base64 encoded (with padding!) JSON/YAML string (base64_encode(`[{"id":"foo-rule","authenticators":[....]}]`)): + - inline://W3siaWQiOiJmb28tcnVsZSIsImF1dGhlbnRpY2F0b3JzIjpbXX1d + # If the URL Scheme is `http://` or `https://`, the access rules (an array of access rules is expected) will be + # fetched from the provided HTTP(s) location. + - https://path-to-my-rules/rules.json + # Optional fields describing matching strategy, defaults to "regexp". + matching_strategy: glob + +errors: + fallback: + - json + handlers: + redirect: + enabled: true + config: + to: http://path-to/redirect + json: + enabled: true + config: + verbose: true + when: + - error: + - unauthorized + - forbidden + - internal_server_error + request: + header: + content_type: + - application/json + accept: + - application/json + cidr: + - 127.0.0.0/24 + +# All authenticators can be configured under this configuration key +authenticators: + # Configures the anonymous authenticator + anonymous: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. + enabled: true + + config: + # Sets the anonymous username. Defaults to "anonymous". Common names include "guest", "anon", "anonymous", "unknown". + subject: guest + + # Configures the cookie session authenticator + cookie_session: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. + enabled: true + + config: + # Sets the origin to proxy requests to. If the response is a 200 with body `{ "subject": "...", "extra": {} }` + # The request will pass the subject through successfully, otherwise it will be marked as unauthorized + check_session_url: https://session-store-host + + # Sets a list of possible cookies to look for on incoming requests, and will fallthrough to the next authenticator if + # none of the passed cookies are set on the request + only: + - sessionid + + # Configures the jwt authenticator + jwt: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. + enabled: true + + config: + # REQUIRED IF ENABLED - The URL where ORY Oathkeeper can retrieve JSON Web Keys from for validating the JSON Web + # Token. Usually something like "https://my-keys.com/.well-known/jwks.json". The response of that endpoint must + # return a JSON Web Key Set (JWKS). + jwks_urls: + - https://my-website.com/.well-known/jwks.json + - https://my-other-website.com/.well-known/jwks.json + - file://path/to/local/jwks.json + + # Sets the strategy to be used to validate/match the scope. Supports "hierarchic", "exact", "wildcard", "none". Defaults + # to "none". + scope_strategy: wildcard + + # Configures the noop authenticator + noop: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. + enabled: true + + # Configures the oauth2_client_credentials authenticator + oauth2_client_credentials: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. 
+ enabled: true + + config: + # REQUIRED IF ENABLED - The OAuth 2.0 Token Endpoint that will be used to validate the client credentials. + token_url: https://my-website.com/oauth2/token + + # Configures the oauth2_introspection authenticator + oauth2_introspection: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. + enabled: true + + config: + # REQUIRED IF ENABLED - The OAuth 2.0 Token Introspection endpoint. + introspection_url: https://my-website.com/oauth2/introspection + + # Sets the strategy to be used to validate/match the token scope. Supports "hierarchic", "exact", "wildcard", "none". Defaults + # to "none". + scope_strategy: exact + + # Enable pre-authorization in cases where the OAuth 2.0 Token Introspection endpoint is protected by OAuth 2.0 Bearer + # Tokens that can be retrieved using the OAuth 2.0 Client Credentials grant. + pre_authorization: + # Enable pre-authorization. Defaults to false. + enabled: true + + # REQUIRED IF ENABLED - The OAuth 2.0 Client ID to be used for the OAuth 2.0 Client Credentials Grant. + client_id: some_id + + # REQUIRED IF ENABLED - The OAuth 2.0 Client Secret to be used for the OAuth 2.0 Client Credentials Grant. + client_secret: some_secret + + # The OAuth 2.0 Scope to be requested during the OAuth 2.0 Client Credentials Grant. + scope: + - foo + - bar + + # REQUIRED IF ENABLED - The OAuth 2.0 Token Endpoint where the OAuth 2.0 Client Credentials Grant will be performed. + token_url: https://my-website.com/oauth2/token + + # Configures the unauthorized authenticator + unauthorized: + # Set enabled to true if the authenticator should be enabled and false to disable the authenticator. Defaults to false. + enabled: true + +# All authorizers can be configured under this configuration key +authorizers: + # Configures the allow authorizer + allow: + # Set enabled to true if the authorizer should be enabled and false to disable the authorizer. Defaults to false. + enabled: true + + # Configures the deny authorizer + deny: + # Set enabled to true if the authorizer should be enabled and false to disable the authorizer. Defaults to false. + enabled: true + + # Configures the keto_engine_acp_ory authorizer + keto_engine_acp_ory: + # Set enabled to true if the authorizer should be enabled and false to disable the authorizer. Defaults to false. + enabled: true + + config: + # REQUIRED IF ENABLED - The base URL of ORY Keto, typically something like http(s)://[:]/ + base_url: http://my-keto/ + required_action: unknown + required_resource: unknown + + # Configures the remote authorizer + remote: + # Set enabled to true if the authorizer should be enabled and false to disable the authorizer. Defaults to false. + enabled: true + + config: + remote: https://host/path + headers: {} + + # Configures the remote_json authorizer + remote_json: + # Set enabled to true if the authorizer should be enabled and false to disable the authorizer. Defaults to false. + enabled: true + + config: + remote: https://host/path + payload: "{}" + +# All mutators can be configured under this configuration key +mutators: + header: + enabled: true + config: + headers: + foo: bar + + # Configures the cookie mutator + cookie: + # Set enabled to true if the mutator should be enabled and false to disable the mutator. Defaults to false. + enabled: true + config: + cookies: + foo: bar + + # Configures the hydrator mutator + hydrator: + # Set enabled to true if the mutator should be enabled and false to disable the mutator. 
Defaults to false. + enabled: true + + config: + api: + url: https://some-url/ + + # Configures the id_token mutator + id_token: + # Set enabled to true if the mutator should be enabled and false to disable the mutator. Defaults to false. + enabled: true + config: + # REQUIRED IF ENABLED - Sets the "iss" value of the ID Token. + issuer_url: https://my-oathkeeper/ + # REQUIRED IF ENABLED - Sets the URL where keys should be fetched from. Supports remote locations (http, https) as + # well as local filesystem paths. + jwks_url: https://fetch-keys/from/this/location.json + # jwks_url: file:///from/this/absolute/location.json + # jwks_url: file://../from/this/relative/location.json + + # Sets the time-to-live of the ID token. Defaults to one minute. Valid time units are: s (second), m (minute), h (hour). + ttl: 1h + + # Configures the noop mutator + noop: + # Set enabled to true if the mutator should be enabled and false to disable the mutator. Defaults to false. + enabled: true diff --git a/oryx/configx/stub/benchmark/schema.config.json b/oryx/configx/stub/benchmark/schema.config.json new file mode 100644 index 00000000000..08d9faf6a98 --- /dev/null +++ b/oryx/configx/stub/benchmark/schema.config.json @@ -0,0 +1,204 @@ +{ + "log": { + "level": "debug", + "format": "json" + }, + "profiling": "cpu", + "serve": { + "proxy": { + "port": 1234, + "host": "127.0.0.1", + "timeout": { + "read": "1s", + "write": "2s", + "idle": "3s" + }, + "cors": { + "enabled": true, + "allowed_origins": ["https://example.com", "https://*.example.com"], + "allowed_methods": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "allowed_headers": ["Authorization", "Content-Type"], + "exposed_headers": ["Content-Type"], + "allow_credentials": true, + "max_age": 10, + "debug": true + }, + "tls": { + "key": { + "path": "/path/to/key.pem", + "base64": "LS0tLS1CRUdJTiBFTkNSWVBURUQgUFJJVkFURSBLRVktLS0tLVxuTUlJRkRqQkFCZ2txaGtpRzl3MEJCUTB3..." + }, + "cert": { + "path": "/path/to/cert.pem", + "base64": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." + } + } + }, + "api": { + "port": 1235, + "host": "127.0.0.2", + "cors": { + "enabled": true, + "allowed_origins": ["https://example.org", "https://*.example.org"], + "allowed_methods": ["GET", "PUT", "PATCH", "DELETE"], + "allowed_headers": ["Authorization", "Content-Type"], + "exposed_headers": ["Content-Type"], + "allow_credentials": true, + "max_age": 10, + "debug": true + }, + "tls": { + "key": { + "path": "/path/to/key.pem", + "base64": "LS0tLS1CRUdJTiBFTkNSWVBURUQgUFJJVkFURSBLRVktLS0tLVxuTUlJRkRqQkFCZ2txaGtpRzl3MEJCUTB3..." + }, + "cert": { + "path": "/path/to/cert.pem", + "base64": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." 
+ } + } + } + }, + "access_rules": { + "repositories": [ + "file://path/to/rules.json", + "inline://W3siaWQiOiJmb28tcnVsZSIsImF1dGhlbnRpY2F0b3JzIjpbXX1d", + "https://path-to-my-rules/rules.json" + ], + "matching_strategy": "glob" + }, + "errors": { + "fallback": ["json"], + "handlers": { + "redirect": { + "enabled": true, + "config": { + "to": "http://path-to/redirect" + } + }, + "json": { + "enabled": true, + "config": { + "verbose": true, + "when": [ + { + "error": ["unauthorized", "forbidden", "internal_server_error"], + "request": { + "header": { + "content_type": ["application/json"], + "accept": ["application/json"] + }, + "cidr": ["127.0.0.0/24"] + } + } + ] + } + } + } + }, + "authenticators": { + "anonymous": { + "enabled": true, + "config": { + "subject": "guest" + } + }, + "cookie_session": { + "enabled": true, + "config": { + "check_session_url": "https://session-store-host", + "only": ["sessionid"] + } + }, + "jwt": { + "enabled": true, + "config": { + "jwks_urls": [ + "https://my-website.com/.well-known/jwks.json", + "https://my-other-website.com/.well-known/jwks.json", + "file://path/to/local/jwks.json" + ], + "scope_strategy": "wildcard" + } + }, + "noop": { + "enabled": true + }, + "oauth2_client_credentials": { + "enabled": true, + "config": { + "token_url": "https://my-website.com/oauth2/token" + } + }, + "oauth2_introspection": { + "enabled": true, + "config": { + "introspection_url": "https://my-website.com/oauth2/introspection", + "scope_strategy": "exact", + "pre_authorization": { + "enabled": true, + "client_id": "some_id", + "client_secret": "some_secret", + "scope": ["foo", "bar"], + "token_url": "https://my-website.com/oauth2/token" + } + } + }, + "unauthorized": { + "enabled": true + } + }, + "authorizers": { + "allow": { + "enabled": true + }, + "deny": { + "enabled": true + }, + "keto_engine_acp_ory": { + "enabled": true, + "config": { + "base_url": "http://my-keto/", + "required_action": "unknown", + "required_resource": "unknown" + } + } + }, + "mutators": { + "header": { + "enabled": false, + "config": { + "headers": { + "foo": "bar" + } + } + }, + "cookie": { + "enabled": true, + "config": { + "cookies": { + "foo": "bar" + } + } + }, + "hydrator": { + "enabled": true, + "config": { + "api": { + "url": "https://some-url/" + } + } + }, + "id_token": { + "enabled": true, + "config": { + "issuer_url": "https://my-oathkeeper/", + "jwks_url": "https://fetch-keys/from/this/location.json", + "ttl": "1h" + } + }, + "noop": { + "enabled": true + } + } +} diff --git a/oryx/configx/stub/domain-aliases/config.schema.json b/oryx/configx/stub/domain-aliases/config.schema.json new file mode 100644 index 00000000000..79007910b1e --- /dev/null +++ b/oryx/configx/stub/domain-aliases/config.schema.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "domain_aliases": { + "title": "Domain Aliases", + "description": "Adds an alias domain. If a request with the hostname (FQDN) matching the hostname in the alias is found, that URL is used as the base URL.", + "type": "array", + "items": [ + { + "additionalProperties": false, + "type": "object", + "required": ["match_domain", "base_path", "scheme"], + "properties": { + "match_domain": { + "minLength": 1, + "title": "Matching Domain", + "description": "Sets the matching domain. 
If the domain matches with this entry, the accompanying base_url will be used.", + "type": "string", + "examples": ["localhost", "my-domain.com"] + }, + "scheme": { + "title": "Scheme", + "description": "Sets the scheme, for example https or http.", + "type": "string", + "enum": ["http", "https"] + }, + "base_path": { + "minLength": 1, + "title": "Base Path", + "description": "Sets the base path for the matched domain.", + "type": "string", + "default": "/", + "pattern": "^/.*$", + "examples": ["/", "/.ory/kratos"] + } + } + } + ] + } + } +} diff --git a/oryx/configx/stub/from-files/a.yaml b/oryx/configx/stub/from-files/a.yaml new file mode 100644 index 00000000000..f3e18085dc4 --- /dev/null +++ b/oryx/configx/stub/from-files/a.yaml @@ -0,0 +1,27 @@ +version: v0.5.3-alpha.1 + +dsn: memory + +serve: + public: + base_url: http://127.0.0.1:4433/ + cors: + enabled: true + admin: + base_url: http://kratos:4434/ + +selfservice: + default_browser_return_url: http://127.0.0.1:4455/ + whitelisted_return_urls: + - http://127.0.0.1:4455 + + methods: + password: + enabled: true + + flows: + error: + ui_url: http://127.0.0.1:4455/error + + settings: + ui_url: http://127.0.0.1:4455/settings diff --git a/oryx/configx/stub/from-files/b.yaml b/oryx/configx/stub/from-files/b.yaml new file mode 100644 index 00000000000..1d489893a2e --- /dev/null +++ b/oryx/configx/stub/from-files/b.yaml @@ -0,0 +1,54 @@ +selfservice: + flows: + settings: + privileged_session_max_age: 15m + + recovery: + enabled: true + ui_url: http://127.0.0.1:4455/recovery + + verification: + enabled: true + ui_url: http://127.0.0.1:4455/verify + after: + default_browser_return_url: http://127.0.0.1:4455/ + + logout: + after: + default_browser_return_url: http://127.0.0.1:4455/auth/login + + login: + ui_url: http://127.0.0.1:4455/auth/login + lifespan: 10m + + registration: + lifespan: 10m + ui_url: http://127.0.0.1:4455/auth/registration + after: + password: + hooks: + - hook: session + +log: + level: debug + format: text + leak_sensitive_values: true + +secrets: + cookie: + - PLEASE-CHANGE-ME-I-AM-VERY-INSECURE + +hashers: + argon2: + parallelism: 1 + memory: 131072 + iterations: 2 + salt_length: 16 + key_length: 16 + +identity: + default_schema_url: file:///etc/config/kratos/identity.schema.json + +courier: + smtp: + connection_uri: smtps://test:test@mailslurper:1025/?skip_ssl_verify=true diff --git a/oryx/configx/stub/from-files/config.schema.json b/oryx/configx/stub/from-files/config.schema.json new file mode 100644 index 00000000000..75847b2f043 --- /dev/null +++ b/oryx/configx/stub/from-files/config.schema.json @@ -0,0 +1,1085 @@ +{ + "$id": "https://github.com/ory/kratos/.schema/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ORY Kratos Configuration", + "type": "object", + "definitions": { + "defaultReturnTo": { + "title": "Redirect browsers to set URL per default", + "description": "ORY Kratos redirects to this URL per default on completion of self-service flows and other browser interaction. 
Read this [article for more information on browser redirects](https://www.ory.sh/kratos/docs/concepts/browser-redirect-flow-completion).", + "type": "string", + "format": "uri-reference", + "minLength": 1, + "examples": ["https://my-app.com/dashboard", "/dashboard"] + }, + "selfServiceSessionRevokerHook": { + "type": "object", + "properties": { + "hook": { + "const": "revoke_active_sessions" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "selfServiceVerifyHook": { + "type": "object", + "properties": { + "hook": { + "const": "verify" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "selfServiceSessionIssuerHook": { + "type": "object", + "properties": { + "hook": { + "const": "session" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "OIDCClaims": { + "title": "OpenID Connect claims", + "description": "The OpenID Connect claims and optionally their properties which should be included in the id_token or returned from the UserInfo Endpoint.", + "type": "object", + "examples": [ + { + "id_token": { + "email": null, + "email_verified": null + } + }, + { + "userinfo": { + "given_name": { + "essential": true + }, + "nickname": null, + "email": { + "essential": true + }, + "email_verified": { + "essential": true + }, + "picture": null, + "http://example.info/claims/groups": null + }, + "id_token": { + "auth_time": { + "essential": true + }, + "acr": { + "values": ["urn:mace:incommon:iap:silver"] + } + } + } + ], + "patternProperties": { + "^userinfo$|^id_token$": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + ".*": { + "oneOf": [ + { + "const": null, + "description": "Indicates that this Claim is being requested in the default manner." + }, + { + "type": "object", + "additionalProperties": false, + "properties": { + "essential": { + "description": "Indicates whether the Claim being requested is an Essential Claim.", + "type": "boolean" + }, + "value": { + "description": "Requests that the Claim be returned with a particular value.", + "$comment": "There seem to be no constrains on value" + }, + "values": { + "description": "Requests that the Claim be returned with one of a set of values, with the values appearing in order of preference.", + "type": "array", + "items": { + "$comment": "There seem to be no constrains on individual items" + } + } + } + } + ] + } + } + } + } + }, + "selfServiceOIDCProvider": { + "type": "object", + "properties": { + "id": { + "type": "string", + "examples": ["google"] + }, + "provider": { + "title": "Provider", + "description": "Can be one of github, gitlab, generic, google, microsoft, discord.", + "type": "string", + "enum": [ + "github", + "gitlab", + "generic", + "google", + "microsoft", + "discord" + ], + "examples": ["google"] + }, + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + }, + "issuer_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com"] + }, + "auth_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com/o/oauth2/v2/auth"] + }, + "token_url": { + "type": "string", + "format": "uri", + "examples": ["https://www.googleapis.com/oauth2/v4/token"] + }, + "mapper_url": { + "title": "Jsonnet Mapper URL", + "description": "The URL where the jsonnet source is located for mapping the provider's data to ORY Kratos data.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/oidc.jsonnet", + "https://foo.bar.com/path/to/oidc.jsonnet", + 
"base64://bG9jYWwgc3ViamVjdCA9I..." + ] + }, + "scope": { + "type": "array", + "items": { + "type": "string", + "examples": ["offline_access", "profile"] + } + }, + "tenant": { + "title": "Azure AD Tenant", + "description": "The Azure AD Tenant to use for authentication.", + "type": "string", + "examples": [ + "common", + "organizations", + "consumers", + "8eaef023-2b34-4da1-9baa-8bc8c9d6a490", + "contoso.onmicrosoft.com" + ] + }, + "requested_claims": { + "$ref": "#/definitions/OIDCClaims" + } + }, + "additionalProperties": false, + "required": [ + "id", + "provider", + "client_id", + "client_secret", + "mapper_url" + ], + "if": { + "properties": { + "provider": { + "const": "microsoft" + } + }, + "required": ["provider"] + }, + "then": { + "required": ["tenant"] + }, + "else": { + "not": { + "properties": { + "tenant": {} + }, + "required": ["tenant"] + } + } + }, + "selfServiceAfterSettingsMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceVerifyHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterLoginMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceSessionRevokerHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterRegistrationMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceSessionIssuerHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterSettings": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterSettingsMethod" + }, + "profile": { + "$ref": "#/definitions/selfServiceAfterSettingsMethod" + } + } + }, + "selfServiceAfterLogin": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterLoginMethod" + }, + "oidc": { + "$ref": "#/definitions/selfServiceAfterLoginMethod" + } + } + }, + "selfServiceAfterRegistration": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterRegistrationMethod" + }, + "oidc": { + "$ref": "#/definitions/selfServiceAfterRegistrationMethod" + } + } + } + }, + "properties": { + "selfservice": { + "type": "object", + "additionalProperties": false, + "required": ["default_browser_return_url"], + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "whitelisted_return_urls": { + "title": "Whitelisted Return To URLs", + "description": "List of URLs that are allowed to be redirected to. 
A redirection request is made by appending `?return_to=...` to Login, Registration, and other self-service flows.", + "type": "array", + "items": { + "type": "string", + "format": "uri-reference" + }, + "examples": [ + [ + "https://app.my-app.com/dashboard", + "/dashboard", + "https://www.my-app.com/" + ] + ], + "uniqueItems": true + }, + "flows": { + "type": "object", + "additionalProperties": false, + "properties": { + "settings": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "URL of the Settings page.", + "description": "URL where the Settings UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/user/settings"], + "default": "https://www.ory.sh/kratos/docs/fallback/settings" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "privileged_session_max_age": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterSettings" + } + } + }, + "logout": { + "type": "object", + "additionalProperties": false, + "properties": { + "after": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + } + } + } + }, + "registration": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "Registration UI URL", + "description": "URL where the Registration UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/signup"], + "default": "https://www.ory.sh/kratos/docs/fallback/registration" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterRegistration" + } + } + }, + "login": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "Login UI URL", + "description": "URL where the Login UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/login"], + "default": "https://www.ory.sh/kratos/docs/fallback/login" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterLogin" + } + } + }, + "verification": { + "title": "Email and Phone Verification and Account Activation Configuration", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enable Email/Phone Verification", + "description": "If set to true will enable [Email and Phone Verification and Account Activation](https://www.ory.sh/kratos/docs/self-service/flows/verify-email-account-activation/).", + "default": false + }, + "ui_url": { + "title": "Verify UI URL", + "description": "URL where the ORY Verify UI is hosted. This is the page where users activate and / or verify their email or telephone number. 
Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/verify"], + "default": "https://www.ory.sh/kratos/docs/fallback/verification" + }, + "after": { + "type": "object", + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + }, + "additionalProperties": false + }, + "lifespan": { + "title": "Self-Service Verification Request Lifespan", + "description": "Sets how long the verification request (for the UI interaction) is valid.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + } + } + }, + "recovery": { + "title": "Account Recovery Configuration", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enable Account Recovery", + "description": "If set to true will enable [Account Recovery](https://www.ory.sh/kratos/docs/self-service/flows/password-reset-account-recovery/).", + "default": false + }, + "ui_url": { + "title": "Recovery UI URL", + "description": "URL where the ORY Recovery UI is hosted. This is the page where users request and complete account recovery. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/verify"], + "default": "https://www.ory.sh/kratos/docs/fallback/recovery" + }, + "after": { + "type": "object", + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + }, + "additionalProperties": false + }, + "lifespan": { + "title": "Self-Service Recovery Request Lifespan", + "description": "Sets how long the recovery request is valid. If expired, the user has to redo the flow.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + } + } + }, + "error": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "ORY Kratos Error UI URL", + "description": "URL where the ORY Kratos Error UI is hosted. 
Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/kratos-error"], + "default": "https://www.ory.sh/kratos/docs/fallback/error" + } + } + } + } + }, + "methods": { + "type": "object", + "additionalProperties": false, + "properties": { + "profile": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Profile Management Method", + "default": true + } + } + }, + "link": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Link Method", + "default": true + } + } + }, + "password": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Username/Email and Password Method", + "default": true + } + } + }, + "oidc": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables OpenID Connect Method", + "default": false + }, + "config": { + "type": "object", + "additionalProperties": false, + "properties": { + "providers": { + "title": "OpenID Connect and OAuth2 Providers", + "description": "A list and configuration of OAuth2 and OpenID Connect providers ORY Kratos should integrate with.", + "type": "array", + "items": { + "$ref": "#/definitions/selfServiceOIDCProvider" + } + } + } + } + } + } + } + } + } + }, + "dsn": { + "type": "string", + "title": "Data Source Name", + "description": "DSN is used to specify the database credentials as a connection URI.", + "examples": [ + "postgres://user: password@postgresd:5432/database?sslmode=disable&max_conns=20&max_idle_conns=4", + "mysql://user:secret@tcp(mysqld:3306)/database?max_conns=20&max_idle_conns=4", + "cockroach://user@cockroachdb:26257/database?sslmode=disable&max_conns=20&max_idle_conns=4", + "sqlite:///var/lib/sqlite/db.sqlite?_fk=true&mode=rwc" + ] + }, + "courier": { + "type": "object", + "title": "Courier configuration", + "description": "The courier is responsible for sending and delivering messages over email, sms, and other means.", + "properties": { + "template_override_path": { + "type": "string", + "title": "Override message templates", + "description": "You can override certain or all message templates by pointing this key to the path where the templates are located.", + "examples": ["/conf/courier-templates"] + }, + "smtp": { + "title": "SMTP Configuration", + "description": "Configures outgoing emails using the SMTP protocol.", + "type": "object", + "properties": { + "connection_uri": { + "title": "SMTP connection string", + "description": "This URI will be used to connect to the SMTP server. Use the query parameter to allow (`?skip_ssl_verify=true`) or disallow (`?skip_ssl_verify=false`) self-signed TLS certificates. 
Please keep in mind that any host other than localhost / 127.0.0.1 must use smtp over TLS (smtps) or the connection will not be possible.", + "examples": [ + "smtps://foo:bar@my-mailserver:1234/?skip_ssl_verify=false" + ], + "type": "string", + "format": "uri" + }, + "from_address": { + "title": "SMTP Sender Address", + "description": "The recipient of an email will see this as the sender address.", + "type": "string", + "format": "email", + "default": "no-reply@ory.kratos.sh" + } + }, + "required": ["connection_uri"], + "additionalProperties": false + } + }, + "required": ["smtp"], + "additionalProperties": false + }, + "serve": { + "type": "object", + "properties": { + "admin": { + "type": "object", + "properties": { + "base_url": { + "title": "Admin Base URL", + "description": "The URL where the admin endpoint is exposed at.", + "type": "string", + "format": "uri", + "examples": ["https://kratos.private-network:4434/"] + }, + "host": { + "title": "Admin Host", + "description": "The host (interface) kratos' admin endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Admin Port", + "description": "The port kratos' admin endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535, + "examples": [4434], + "default": 4434 + } + }, + "additionalProperties": false + }, + "public": { + "type": "object", + "properties": { + "cors": { + "type": "object", + "additionalProperties": false, + "description": "Configures Cross Origin Resource Sharing for public endpoints.", + "properties": { + "enabled": { + "type": "boolean", + "description": "Sets whether CORS is enabled.", + "default": false + }, + "allowed_origins": { + "type": "array", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). 
Only one wildcard can be used per origin.", + "items": { + "type": "string", + "minLength": 1, + "not": { + "type": "string", + "description": "does match all strings that contain two or more (*)", + "pattern": ".*\\*.*\\*.*" + }, + "anyOf": [ + { + "format": "uri" + }, + { + "const": "*" + } + ] + }, + "uniqueItems": true, + "default": ["*"], + "examples": [ + [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + ] + }, + "allowed_methods": { + "type": "array", + "description": "A list of HTTP methods the user agent is allowed to use with cross-domain requests.", + "default": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "items": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + } + }, + "allowed_headers": { + "type": "array", + "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "default": [ + "Authorization", + "Content-Type", + "X-Session-Token" + ], + "items": { + "type": "string" + } + }, + "exposed_headers": { + "type": "array", + "description": "Sets which headers are safe to expose to the API of a CORS API specification.", + "default": ["Content-Type"], + "items": { + "type": "string" + } + }, + "allow_credentials": { + "type": "boolean", + "description": "Sets whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "default": true + }, + "options_passthrough": { + "type": "boolean", + "description": "TODO", + "default": false + }, + "max_age": { + "type": "integer", + "description": "Sets how long (in seconds) the results of a preflight request can be cached. If set to 0, every request is preceded by a preflight request.", + "default": 0, + "minimum": 0 + }, + "debug": { + "type": "boolean", + "description": "Adds additional log output to debug server side CORS issues.", + "default": false + } + } + }, + "base_url": { + "title": "Public Base URL", + "description": "The URL where the public endpoint is exposed at.", + "type": "string", + "format": "uri-reference", + "examples": [ + "https://my-app.com/.ory/kratos/public", + "/.ory/kratos/public/" + ] + }, + "host": { + "title": "Public Host", + "description": "The host (interface) kratos' public endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Public Port", + "description": "The port kratos' public endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535, + "examples": [4433], + "default": 4433 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "log": { + "type": "object", + "properties": { + "level": { + "type": "string", + "enum": [ + "trace", + "debug", + "info", + "warning", + "error", + "fatal", + "panic" + ] + }, + "leak_sensitive_values": { + "type": "boolean", + "title": "Leak Sensitive Log Values", + "description": "If set will leak sensitive values (e.g. emails) in the logs." + }, + "redaction_text": { + "type": "string", + "title": "Sensitive log value redaction text", + "description": "Text to use, when redacting sensitive log value." 
+ }, + "format": { + "type": "string", + "enum": ["json", "text"] + } + }, + "additionalProperties": false + }, + "identity": { + "type": "object", + "properties": { + "default_schema_url": { + "title": "JSON Schema URL for default identity traits", + "description": "Path to the JSON Schema which describes a default identity's traits.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/identity.traits.schema.json", + "https://foo.bar.com/path/to/identity.traits.schema.json" + ] + }, + "schemas": { + "type": "array", + "title": "Additional JSON Schemas for Identity Traits", + "examples": [ + [ + { + "id": "customer", + "url": "https://foo.bar.com/path/to/customer.traits.schema.json" + }, + { + "id": "employee", + "url": "https://foo.bar.com/path/to/employee.traits.schema.json" + }, + { + "id": "employee-v2", + "url": "https://foo.bar.com/path/to/employee.v2.traits.schema.json" + } + ] + ], + "items": { + "type": "object", + "properties": { + "id": { + "title": "The schema's ID.", + "type": "string", + "examples": ["employee"] + }, + "url": { + "type": "string", + "title": "Path to the JSON Schema", + "format": "uri", + "examples": [ + "file://path/to/identity.traits.schema.json", + "https://foo.bar.com/path/to/identity.traits.schema.json" + ] + } + }, + "required": ["id", "url"], + "not": { + "type": "object", + "properties": { + "id": { + "const": "default" + } + }, + "additionalProperties": true + } + } + } + }, + "required": ["default_schema_url"], + "additionalProperties": false + }, + "secrets": { + "type": "object", + "properties": { + "default": { + "type": "array", + "title": "Default Encryption Signing Secrets", + "description": "The first secret in the array is used for singing and encrypting things while all other keys are used to verify and decrypt older things that were signed with that old secret.", + "items": { + "type": "string", + "minLength": 16 + }, + "uniqueItems": true + }, + "cookie": { + "type": "array", + "title": "Singing Keys for Cookies", + "description": "The first secret in the array is used for encrypting cookies while all other keys are used to decrypt older cookies that were signed with that old secret.", + "items": { + "type": "string", + "minLength": 16 + }, + "uniqueItems": true + } + }, + "additionalProperties": false + }, + "hashers": { + "title": "Hashing Algorithm Configuration", + "type": "object", + "properties": { + "argon2": { + "title": "Configuration for the Argon2id hasher.", + "type": "object", + "properties": { + "memory": { + "type": "integer", + "minimum": 16384 + }, + "iterations": { + "type": "integer", + "minimum": 1 + }, + "parallelism": { + "type": "integer", + "minimum": 1 + }, + "salt_length": { + "type": "integer", + "minimum": 16 + }, + "key_length": { + "type": "integer", + "minimum": 16 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "session": { + "type": "object", + "additionalProperties": false, + "properties": { + "lifespan": { + "title": "Session Lifespan", + "description": "Defines how long a session is active. Once that lifespan has been reached, the user needs to sign in again.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "24h", + "examples": ["1h", "1m", "1s"] + }, + "cookie": { + "type": "object", + "properties": { + "domain": { + "title": "Session Cookie Domain", + "description": "Sets the session cookie domain. Useful when dealing with subdomains. 
Use with care!", + "type": "string" + }, + "persistent": { + "title": "Make Session Cookie Persistent", + "description": "If set to true will persist the cookie in the end-user's browser using the `max-age` parameter which is set to the `session.lifespan` value. Persistent cookies are not deleted when the browser is closed (e.g. on reboot or alt+f4).", + "type": "boolean", + "default": true + }, + "path": { + "title": "Session Cookie Path", + "description": "Sets the session cookie path. Use with care!", + "type": "string", + "default": "/" + }, + "same_site": { + "title": "Cookie Same Site Configuration", + "type": "string", + "enum": ["Strict", "Lax", "None"], + "default": "Lax" + } + }, + "additionalProperties": false + } + } + }, + "version": { + "title": "The kratos version this config is written for.", + "description": "SemVer according to https://semver.org/ prefixed with `v` as in our releases.", + "type": "string", + "pattern": "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "examples": ["v0.5.0-alpha.1"] + } + }, + "allOf": [ + { + "if": { + "properties": { + "selfservice": { + "properties": { + "flows": { + "oneOf": [ + { + "properties": { + "verification": { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["enabled"] + } + }, + "required": ["verification"] + }, + { + "properties": { + "recovery": { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["enabled"] + } + }, + "required": ["recovery"] + } + ] + } + }, + "required": ["flows"] + } + }, + "required": ["selfservice"] + }, + "then": { + "required": ["courier"] + } + } + ], + "required": ["identity", "dsn", "selfservice"] +} diff --git a/oryx/configx/stub/from-files/expected.json b/oryx/configx/stub/from-files/expected.json new file mode 100644 index 00000000000..2fe5dd16343 --- /dev/null +++ b/oryx/configx/stub/from-files/expected.json @@ -0,0 +1,124 @@ +{ + "courier": { + "smtp": { + "connection_uri": "smtps://test:test@mailslurper:1025/?skip_ssl_verify=true", + "from_address": "no-reply@ory.kratos.sh" + } + }, + "dsn": "sqlite:///var/lib/sqlite/db.sqlite?_fk=true", + "hashers": { + "argon2": { + "iterations": 2, + "key_length": 16, + "memory": 131072, + "parallelism": 1, + "salt_length": 16 + } + }, + "identity": { + "default_schema_url": "file:///etc/config/kratos/identity.schema.json" + }, + "log": { + "format": "text", + "leak_sensitive_values": true, + "level": "debug" + }, + "secrets": { + "cookie": ["PLEASE-CHANGE-ME-I-AM-VERY-INSECURE"] + }, + "selfservice": { + "default_browser_return_url": "http://127.0.0.1:4455/", + "flows": { + "error": { + "ui_url": "http://127.0.0.1:4455/error" + }, + "login": { + "lifespan": "10m", + "ui_url": "http://127.0.0.1:4455/auth/login" + }, + "logout": { + "after": { + "default_browser_return_url": "http://127.0.0.1:4455/auth/login" + } + }, + "recovery": { + "enabled": true, + "lifespan": "1h", + "ui_url": "http://127.0.0.1:4455/recovery" + }, + "registration": { + "after": { + "password": { + "hooks": [ + { + "hook": "session" + } + ] + } + }, + "lifespan": "10m", + "ui_url": "http://127.0.0.1:4455/auth/registration" + }, + "settings": { + "lifespan": "1h", + "privileged_session_max_age": "15m", + "ui_url": "http://127.0.0.1:4455/settings" + }, + "verification": { + "after": { + "default_browser_return_url": "http://127.0.0.1:4455/" + }, + "enabled": true, + "lifespan": "1h", + "ui_url": 
"http://127.0.0.1:4455/verify" + } + }, + "methods": { + "link": { + "enabled": true + }, + "oidc": { + "enabled": false + }, + "password": { + "enabled": true + }, + "profile": { + "enabled": true + } + }, + "whitelisted_return_urls": ["http://127.0.0.1:4455"] + }, + "serve": { + "admin": { + "base_url": "http://kratos:4434/", + "host": "0.0.0.0", + "port": 4434 + }, + "public": { + "base_url": "http://127.0.0.1:4433/", + "cors": { + "allow_credentials": true, + "allowed_headers": ["Authorization", "Content-Type", "X-Session-Token"], + "allowed_methods": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "allowed_origins": ["*"], + "debug": false, + "enabled": true, + "exposed_headers": ["Content-Type"], + "max_age": 0, + "options_passthrough": false + }, + "host": "0.0.0.0", + "port": 4433 + } + }, + "session": { + "cookie": { + "path": "/", + "persistent": true, + "same_site": "Lax" + }, + "lifespan": "24h" + }, + "version": "v0.5.3-alpha.1" +} diff --git a/oryx/configx/stub/hydra/config.schema.json b/oryx/configx/stub/hydra/config.schema.json new file mode 100644 index 00000000000..e2ce4aff7c1 --- /dev/null +++ b/oryx/configx/stub/hydra/config.schema.json @@ -0,0 +1,792 @@ +{ + "$id": "https://github.com/ory/hydra/docs/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ORY Hydra Configuration", + "type": "object", + "definitions": { + "http_method": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + }, + "port_number": { + "type": "integer", + "description": "The port to listen on.", + "minimum": 1, + "maximum": 65535 + }, + "socket": { + "type": "object", + "additionalProperties": false, + "description": "Sets the permissions of the unix socket", + "properties": { + "owner": { + "type": "string", + "description": "Owner of unix socket. If empty, the owner will be the user running hydra.", + "default": "" + }, + "group": { + "type": "string", + "description": "Group of unix socket. If empty, the group will be the primary group of the user running hydra.", + "default": "" + }, + "mode": { + "type": "integer", + "description": "Mode of unix socket in numeric form", + "default": 493, + "minimum": 0, + "maximum": 511 + } + } + }, + "cors": { + "type": "object", + "additionalProperties": false, + "description": "Configures Cross Origin Resource Sharing for public endpoints.", + "properties": { + "enabled": { + "type": "boolean", + "description": "Sets whether CORS is enabled.", + "default": false + }, + "allowed_origins": { + "type": "array", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). 
Only one wildcard can be used per origin.", + "items": { + "type": "string", + "minLength": 1, + "not": { + "type": "string", + "description": "does match all strings that contain two or more (*)", + "pattern": ".*\\*.*\\*.*" + }, + "anyOf": [ + { + "format": "uri" + }, + { + "const": "*" + } + ] + }, + "uniqueItems": true, + "default": ["*"], + "examples": [ + [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + ] + }, + "allowed_methods": { + "type": "array", + "description": "A list of HTTP methods the user agent is allowed to use with cross-domain requests.", + "default": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "items": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + } + }, + "allowed_headers": { + "type": "array", + "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "default": ["Authorization", "Content-Type"], + "items": { + "type": "string" + } + }, + "exposed_headers": { + "type": "array", + "description": "Sets which headers are safe to expose to the API of a CORS API specification.", + "default": ["Content-Type"], + "items": { + "type": "string" + } + }, + "allow_credentials": { + "type": "boolean", + "description": "Sets whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "default": true + }, + "options_passthrough": { + "type": "boolean", + "description": "TODO", + "default": false + }, + "max_age": { + "type": "integer", + "description": "Sets how long (in seconds) the results of a preflight request can be cached. If set to 0, every request is preceded by a preflight request.", + "default": 0, + "minimum": 0 + }, + "debug": { + "type": "boolean", + "description": "Adds additional log output to debug server side CORS issues.", + "default": false + } + } + }, + "pem_file": { + "type": "object", + "oneOf": [ + { + "properties": { + "path": { + "type": "string", + "description": "The path to the pem file.", + "examples": ["/path/to/file.pem"] + } + }, + "additionalProperties": false, + "required": ["path"] + }, + { + "properties": { + "base64": { + "type": "string", + "description": "The base64 encoded string (without padding).", + "contentEncoding": "base64", + "contentMediaType": "application/x-pem-file", + "examples": ["b3J5IGh5ZHJhIGlzIGF3ZXNvbWUK"] + } + }, + "additionalProperties": false, + "required": ["base64"] + } + ] + }, + "duration": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "examples": ["1h"] + } + }, + "properties": { + "log": { + "type": "object", + "additionalProperties": false, + "description": "Configures the logger", + "properties": { + "level": { + "type": "string", + "description": "Sets the log level.", + "enum": ["panic", "fatal", "error", "warn", "info", "debug", "trace"], + "default": "info" + }, + "leak_sensitive_values": { + "type": "boolean", + "description": "Logs sensitive values such as cookie and URL parameter.", + "default": false + }, + "redaction_text": { + "type": "string", + "title": "Sensitive log value redaction text", + "description": "Text to use, when redacting sensitive log value." 
+ }, + "format": { + "type": "string", + "description": "Sets the log format.", + "enum": ["json", "json_pretty", "text"], + "default": "text" + } + } + }, + "serve": { + "type": "object", + "additionalProperties": false, + "description": "Controls the configuration for the http(s) daemon(s).", + "properties": { + "public": { + "type": "object", + "additionalProperties": false, + "description": "Controls the public daemon serving public API endpoints like /oauth2/auth, /oauth2/token, /.well-known/jwks.json", + "properties": { + "port": { + "default": 4444, + "allOf": [ + { + "$ref": "#/definitions/port_number" + } + ] + }, + "host": { + "type": "string", + "description": "The interface or unix socket ORY Hydra should listen and handle public API requests on. Use the prefix \"unix:\" to specify a path to a unix socket. Leave empty to listen on all interfaces.", + "default": "", + "examples": ["localhost"] + }, + "cors": { + "$ref": "#/definitions/cors" + }, + "socket": { + "$ref": "#/definitions/socket" + }, + "access_log": { + "type": "object", + "additionalProperties": false, + "description": "Access Log configuration for public server.", + "properties": { + "disable_for_health": { + "type": "boolean", + "description": "Disable access log for health endpoints.", + "default": false + } + } + } + } + }, + "admin": { + "type": "object", + "additionalProperties": false, + "properties": { + "port": { + "default": 4445, + "allOf": [ + { + "$ref": "#/definitions/port_number" + } + ] + }, + "host": { + "type": "string", + "description": "The interface or unix socket ORY Hydra should listen and handle administrative API requests on. Use the prefix \"unix:\" to specify a path to a unix socket. Leave empty to listen on all interfaces.", + "default": "", + "examples": ["localhost"] + }, + "cors": { + "$ref": "#/definitions/cors" + }, + "socket": { + "$ref": "#/definitions/socket" + }, + "access_log": { + "type": "object", + "additionalProperties": false, + "description": "Access Log configuration for admin server.", + "properties": { + "disable_for_health": { + "type": "boolean", + "description": "Disable access log for health endpoints.", + "default": false + } + } + } + } + }, + "tls": { + "type": "object", + "additionalProperties": false, + "description": "Configures HTTPS (HTTP over TLS). If configured, the server automatically supports HTTP/2.", + "properties": { + "key": { + "description": "Configures the private key (pem encoded).", + "allOf": [ + { + "$ref": "#/definitions/pem_file" + } + ] + }, + "cert": { + "description": "Configures the private key (pem encoded).", + "allOf": [ + { + "$ref": "#/definitions/pem_file" + } + ] + }, + "allow_termination_from": { + "type": "array", + "description": "Whitelist one or multiple CIDR address ranges and allow them to terminate TLS connections. Be aware that the X-Forwarded-Proto header must be set and must never be modifiable by anyone but your proxy / gateway / load balancer. Supports ipv4 and ipv6. 
Hydra serves http instead of https when this option is set.", + "items": { + "type": "string", + "oneOf": [ + { + "pattern": "^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))/([0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8])$" + }, + { + "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$" + } + ], + "examples": ["127.0.0.1/32"] + } + } + } + }, + "cookies": { + "type": "object", + "additionalProperties": false, + "properties": { + "same_site_mode": { + "type": "string", + "description": "Specify the SameSite mode that cookies should be sent with.", + "enum": ["Strict", "Lax", "None"], + "default": "None" + }, + "same_site_legacy_workaround": { + "type": "boolean", + "description": "Some older browser versions don’t work with SameSite=None. This option enables the workaround defined in https://web.dev/samesite-cookie-recipes/ which essentially stores a second cookie without SameSite as a fallback.", + "default": false, + "examples": [true] + } + } + } + } + }, + "dsn": { + "type": "string", + "description": "Sets the data source name. This configures the backend where ORY Hydra persists data. If dsn is \"memory\", data will be written to memory and is lost when you restart this instance. ORY Hydra supports popular SQL databases. For more detailed configuration information go to: https://www.ory.sh/docs/hydra/dependencies-environment#sql" + }, + "webfinger": { + "type": "object", + "additionalProperties": false, + "description": "Configures ./well-known/ settings.", + "properties": { + "jwks": { + "type": "object", + "additionalProperties": false, + "description": "Configures the /.well-known/jwks.json endpoint.", + "properties": { + "broadcast_keys": { + "type": "array", + "description": "A list of JSON Web Keys that should be exposed at that endpoint. This is usually the public key for verifying OpenID Connect ID Tokens. 
However, you might want to add additional keys here as well.", + "items": { + "type": "string" + }, + "default": ["hydra.openid.id-token"], + "examples": ["hydra.jwt.access-token"] + } + } + }, + "oidc_discovery": { + "type": "object", + "additionalProperties": false, + "description": "Configures OpenID Connect Discovery (/.well-known/openid-configuration).", + "properties": { + "jwks_url": { + "type": "string", + "description": "Overwrites the JWKS URL", + "format": "uri", + "examples": ["https://my-service.com/.well-known/jwks.json"] + }, + "token_url": { + "type": "string", + "description": "Overwrites the OAuth2 Token URL", + "format": "uri", + "examples": ["https://my-service.com/oauth2/token"] + }, + "auth_url": { + "type": "string", + "description": "Overwrites the OAuth2 Auth URL", + "format": "uri", + "examples": ["https://my-service.com/oauth2/auth"] + }, + "client_registration_url": { + "description": "Sets the OpenID Connect Dynamic Client Registration Endpoint", + "type": "string", + "format": "uri", + "examples": ["https://my-service.com/clients"] + }, + "supported_claims": { + "type": "array", + "description": "A list of supported claims to be broadcasted. Claim \"sub\" is always included.", + "items": { + "type": "string" + }, + "examples": [["email", "username"]] + }, + "supported_scope": { + "type": "array", + "description": "The scope OAuth 2.0 Clients may request. Scope `offline`, `offline_access`, and `openid` are always included.", + "items": { + "type": "string" + }, + "examples": [["email", "whatever", "read.photos"]] + }, + "userinfo_url": { + "type": "string", + "description": "A URL of the userinfo endpoint to be advertised at the OpenID Connect Discovery endpoint /.well-known/openid-configuration. Defaults to ORY Hydra's userinfo endpoint at /userinfo. Set this value if you want to handle this endpoint yourself.", + "format": "uri", + "examples": ["https://example.org/my-custom-userinfo-endpoint"] + } + } + } + } + }, + "oidc": { + "type": "object", + "additionalProperties": false, + "description": "Configures OpenID Connect features.", + "properties": { + "subject_identifiers": { + "type": "object", + "additionalProperties": false, + "description": "Configures the Subject Identifier algorithm. 
For more information please head over to the documentation: https://www.ory.sh/docs/hydra/advanced#subject-identifier-algorithms", + "properties": { + "enabled": { + "type": "array", + "description": "A list of algorithms to enable.", + "items": { + "type": "string", + "enum": ["public", "pairwise"] + } + }, + "pairwise": { + "type": "object", + "additionalProperties": false, + "description": "Configures the pairwise algorithm.", + "properties": { + "salt": { + "type": "string" + } + }, + "required": ["salt"] + } + }, + "if": { + "properties": { + "enabled": { + "contains": { + "const": "pairwise" + } + } + } + }, + "then": { + "required": ["pairwise"] + }, + "else": { + "properties": { + "pairwise": { + "$comment": "This enforces pairwise to not be set if 'enabled' does not contain 'pairwise'", + "not": {} + } + } + }, + "examples": [ + { + "enabled": ["public", "pairwise"], + "pairwise": { + "salt": "some-random-salt" + } + } + ] + }, + "dynamic_client_registration": { + "type": "object", + "additionalProperties": false, + "description": "Configures OpenID Connect Dynamic Client Registration (exposed as admin endpoints /clients/...).", + "properties": { + "default_scope": { + "type": "array", + "description": "The OpenID Connect Dynamic Client Registration specification has no concept of whitelisting OAuth 2.0 Scope. If you want to expose Dynamic Client Registration, you should set the default scope enabled for newly registered clients. Keep in mind that users can overwrite this default by setting the \"scope\" key in the registration payload, effectively disabling the concept of whitelisted scopes.", + "items": { + "type": "string" + }, + "examples": [["openid", "offline", "offline_access"]] + } + } + } + } + }, + "urls": { + "type": "object", + "additionalProperties": false, + "properties": { + "self": { + "type": "object", + "additionalProperties": false, + "properties": { + "issuer": { + "type": "string", + "description": "This value will be used as the \"issuer\" in access and ID tokens. It must be specified and using HTTPS protocol, unless --dangerous-force-http is set. This should typically be equal to the public value.", + "format": "uri", + "examples": ["https://localhost:4444/"] + }, + "public": { + "type": "string", + "description": "This is the base location of the public endpoints of your ORY Hydra installation. This should typically be equal to the issuer value. If left unspecified, it falls back to the issuer value.", + "format": "uri", + "examples": ["https://localhost:4444/"] + } + } + }, + "login": { + "type": "string", + "description": "Sets the login endpoint of the User Login & Consent flow. Defaults to an internal fallback URL showing an error.", + "format": "uri", + "examples": ["https://my-login.app/login"] + }, + "consent": { + "type": "string", + "description": "Sets the consent endpoint of the User Login & Consent flow. Defaults to an internal fallback URL showing an error.", + "format": "uri", + "examples": ["https://my-consent.app/consent"] + }, + "logout": { + "type": "string", + "description": "Sets the logout endpoint. Defaults to an internal fallback URL showing an error.", + "format": "uri", + "examples": ["https://my-logout.app/logout"] + }, + "error": { + "type": "string", + "description": "Sets the error endpoint. The error ui will be shown when an OAuth2 error occurs that which can not be sent back to the client. 
Defaults to an internal fallback URL showing an error.", + "format": "uri", + "examples": ["https://my-error.app/error"] + }, + "post_logout_redirect": { + "type": "string", + "description": "When a user agent requests to logout, it will be redirected to this url afterwards per default.", + "format": "uri", + "examples": ["https://my-example.app/logout-successful"] + } + } + }, + "strategies": { + "type": "object", + "additionalProperties": false, + "properties": { + "scope": { + "type": "string", + "description": "Defines how scopes are matched. For more details have a look at https://github.com/ory/fosite#scopes", + "enum": [ + "exact", + "wildcard", + "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY" + ], + "default": "wildcard" + }, + "access_token": { + "type": "string", + "description": "Defines access token type. jwt is a bad idea, see https://www.ory.sh/docs/hydra/advanced#json-web-tokens", + "enum": ["opaque", "jwt"] + } + } + }, + "ttl": { + "type": "object", + "additionalProperties": false, + "description": "Configures time to live.", + "properties": { + "login_consent_request": { + "description": "Configures how long a user login and consent flow may take.", + "default": "1h", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "access_token": { + "description": "Configures how long access tokens are valid.", + "default": "1h", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "refresh_token": { + "description": "Configures how long refresh tokens are valid. Set to -1 for refresh tokens to never expire.", + "default": "720h", + "oneOf": [ + { + "$ref": "#/definitions/duration" + }, + { + "enum": ["-1", -1] + } + ] + }, + "id_token": { + "description": "Configures how long id tokens are valid.", + "default": "1h", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "auth_code": { + "description": "Configures how long auth codes are valid.", + "default": "10m", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + } + } + }, + "oauth2": { + "type": "object", + "additionalProperties": false, + "properties": { + "expose_internal_errors": { + "type": "boolean", + "description": "Set this to true if you want to share error debugging information with your OAuth 2.0 clients. Keep in mind that debug information is very valuable when dealing with errors, but might also expose database error codes and similar errors.", + "default": false, + "examples": [true] + }, + "session": { + "type": "object", + "properties": { + "encrypt_at_rest": { + "type": "boolean", + "default": true, + "title": "Encrypt OAuth2 Session", + "description": "If set to true (default) ORY Hydra encrypt OAuth2 and OpenID Connect session data using AES-GCM and the system secret before persisting it in the database." + } + } + }, + "include_legacy_error_fields": { + "type": "boolean", + "description": "Set this to true if you want to include the `error_hint` and `error_debug` legacy fields in error responses. We recommend to set this to `false` unless you have clients using these fields.", + "default": false, + "examples": [true] + }, + "hashers": { + "type": "object", + "additionalProperties": false, + "description": "Configures hashing algorithms. Supports only BCrypt at the moment.", + "properties": { + "bcrypt": { + "type": "object", + "additionalProperties": false, + "description": "Configures the BCrypt hashing algorithm used for hashing Client Secrets.", + "properties": { + "cost": { + "type": "integer", + "description": "Sets the BCrypt cost. 
The higher the value, the more CPU time is being used to generate hashes.", + "default": 10, + "minimum": 4, + "maximum": 31 + } + } + } + } + }, + "pkce": { + "type": "object", + "additionalProperties": false, + "properties": { + "enforced": { + "type": "boolean", + "description": "Sets whether PKCE should be enforced for all clients.", + "examples": [true] + }, + "enforced_for_public_clients": { + "type": "boolean", + "description": "Sets whether PKCE should be enforced for public clients.", + "examples": [true] + } + } + }, + "client_credentials": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_grant_allowed_scope": { + "type": "boolean", + "description": "Defines how scopes are added if the request doesn't contain any scope", + "examples": [false] + } + } + } + } + }, + "secrets": { + "type": "object", + "additionalProperties": false, + "description": "The secrets section configures secrets used for encryption and signing of several systems. All secrets can be rotated, for more information on this topic go to: https://www.ory.sh/docs/hydra/advanced#rotation-of-hmac-token-signing-and-database-and-cookie-encryption-keys", + "properties": { + "system": { + "description": "The system secret must be at least 16 characters long. If none is provided, one will be generated. The key is used to encrypt sensitive data using AES-GCM (256 bit) and validate HMAC signatures. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "type": "array", + "items": { + "type": "string", + "minLength": 16 + }, + "examples": [ + [ + "this-is-the-primary-secret", + "this-is-an-old-secret", + "this-is-another-old-secret" + ] + ] + }, + "cookie": { + "type": "array", + "description": "A secret that is used to encrypt cookie sessions. Defaults to secrets.system. It is recommended to use a separate secret in production. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "items": { + "type": "string", + "minLength": 16 + }, + "examples": [ + [ + "this-is-the-primary-secret", + "this-is-an-old-secret", + "this-is-another-old-secret" + ] + ] + } + } + }, + "profiling": { + "type": "string", + "description": "Enables profiling if set. 
For more details on profiling, head over to: https://blog.golang.org/profiling-go-programs", + "enum": ["cpu", "mem"], + "examples": ["cpu"] + }, + "tracing": { + "$ref": "ory://tracing-config" + }, + "version": { + "type": "string", + "title": "The Hydra version this config is written for.", + "description": "SemVer according to https://semver.org/ prefixed with `v` as in our releases.", + "pattern": "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" + }, + "cgroups": { + "type": "object", + "additionalProperties": false, + "description": "ORY Hydra can respect Linux container CPU quota", + "properties": { + "v1": { + "type": "object", + "additionalProperties": false, + "description": "Configures parameters using cgroups v1 hierarchy", + "properties": { + "auto_max_procs_enabled": { + "type": "boolean", + "description": "Set GOMAXPROCS automatically according to cgroups limits", + "default": false, + "examples": [true] + } + } + } + } + } + }, + "required": ["dsn"] +} diff --git a/oryx/configx/stub/hydra/expected.json b/oryx/configx/stub/hydra/expected.json new file mode 100644 index 00000000000..d35faf47f9c --- /dev/null +++ b/oryx/configx/stub/hydra/expected.json @@ -0,0 +1,122 @@ +{ + "cgroups": { + "v1": { + "auto_max_procs_enabled": false + } + }, + "dsn": "sqlite:///var/lib/sqlite/db.sqlite?_fk=true", + "log": { + "format": "text", + "leak_sensitive_values": false, + "level": "info" + }, + "oauth2": { + "expose_internal_errors": false, + "hashers": { + "bcrypt": { + "cost": 10 + } + }, + "include_legacy_error_fields": false, + "session": { + "encrypt_at_rest": true + } + }, + "oidc": { + "subject_identifiers": { + "enabled": ["pairwise", "public"], + "pairwise": { + "salt": "youReallyNeedToChangeThis" + } + } + }, + "secrets": { + "system": ["youReallyNeedToChangeThis"] + }, + "serve": { + "admin": { + "access_log": { + "disable_for_health": false + }, + "cors": { + "allow_credentials": true, + "allowed_headers": ["Authorization", "Content-Type"], + "allowed_methods": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "allowed_origins": ["*"], + "debug": false, + "enabled": false, + "exposed_headers": ["Content-Type"], + "max_age": 0, + "options_passthrough": false + }, + "host": "", + "port": 4445, + "socket": { + "group": "", + "mode": 493, + "owner": "" + } + }, + "cookies": { + "same_site_legacy_workaround": false, + "same_site_mode": "Lax" + }, + "public": { + "access_log": { + "disable_for_health": false + }, + "cors": { + "allow_credentials": true, + "allowed_headers": ["Authorization", "Content-Type"], + "allowed_methods": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "allowed_origins": ["*"], + "debug": false, + "enabled": false, + "exposed_headers": ["Content-Type"], + "max_age": 0, + "options_passthrough": false + }, + "host": "", + "port": 4444, + "socket": { + "group": "", + "mode": 493, + "owner": "" + } + } + }, + "strategies": { + "scope": "wildcard" + }, + "tracing": { + "provider": "jaeger", + "providers": { + "jaeger": { + "local_agent_address": "jaeger:6831", + "sampling": { + "server_url": "http://jaeger:5778/sampling" + } + } + } + }, + "ttl": { + "access_token": "1h", + "auth_code": "10m", + "id_token": "1h", + "login_consent_request": "1h", + "refresh_token": "720h" + }, + "urls": { + "consent": "http://127.0.0.1:3000/consent", + "login": "http://127.0.0.1:3000/login", + "logout": "http://127.0.0.1:3000/logout", + "self": { 
+ "issuer": "http://127.0.0.1:4444" + } + }, + "webfinger": { + "jwks": { + "broadcast_keys": ["hydra.openid.id-token"] + } + } +} diff --git a/oryx/configx/stub/hydra/hydra.yaml b/oryx/configx/stub/hydra/hydra.yaml new file mode 100644 index 00000000000..441dfc9d2be --- /dev/null +++ b/oryx/configx/stub/hydra/hydra.yaml @@ -0,0 +1,22 @@ +serve: + cookies: + same_site_mode: Lax + +urls: + self: + issuer: http://127.0.0.1:4444 + consent: http://127.0.0.1:3000/consent + login: http://127.0.0.1:3000/login + logout: http://127.0.0.1:3000/logout + +secrets: + system: + - youReallyNeedToChangeThis + +oidc: + subject_identifiers: + enabled: + - pairwise + - public + pairwise: + salt: youReallyNeedToChangeThis diff --git a/oryx/configx/stub/kratos/config.schema.json b/oryx/configx/stub/kratos/config.schema.json new file mode 100644 index 00000000000..75847b2f043 --- /dev/null +++ b/oryx/configx/stub/kratos/config.schema.json @@ -0,0 +1,1085 @@ +{ + "$id": "https://github.com/ory/kratos/.schema/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ORY Kratos Configuration", + "type": "object", + "definitions": { + "defaultReturnTo": { + "title": "Redirect browsers to set URL per default", + "description": "ORY Kratos redirects to this URL per default on completion of self-service flows and other browser interaction. Read this [article for more information on browser redirects](https://www.ory.sh/kratos/docs/concepts/browser-redirect-flow-completion).", + "type": "string", + "format": "uri-reference", + "minLength": 1, + "examples": ["https://my-app.com/dashboard", "/dashboard"] + }, + "selfServiceSessionRevokerHook": { + "type": "object", + "properties": { + "hook": { + "const": "revoke_active_sessions" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "selfServiceVerifyHook": { + "type": "object", + "properties": { + "hook": { + "const": "verify" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "selfServiceSessionIssuerHook": { + "type": "object", + "properties": { + "hook": { + "const": "session" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "OIDCClaims": { + "title": "OpenID Connect claims", + "description": "The OpenID Connect claims and optionally their properties which should be included in the id_token or returned from the UserInfo Endpoint.", + "type": "object", + "examples": [ + { + "id_token": { + "email": null, + "email_verified": null + } + }, + { + "userinfo": { + "given_name": { + "essential": true + }, + "nickname": null, + "email": { + "essential": true + }, + "email_verified": { + "essential": true + }, + "picture": null, + "http://example.info/claims/groups": null + }, + "id_token": { + "auth_time": { + "essential": true + }, + "acr": { + "values": ["urn:mace:incommon:iap:silver"] + } + } + } + ], + "patternProperties": { + "^userinfo$|^id_token$": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + ".*": { + "oneOf": [ + { + "const": null, + "description": "Indicates that this Claim is being requested in the default manner." 
+ }, + { + "type": "object", + "additionalProperties": false, + "properties": { + "essential": { + "description": "Indicates whether the Claim being requested is an Essential Claim.", + "type": "boolean" + }, + "value": { + "description": "Requests that the Claim be returned with a particular value.", + "$comment": "There seem to be no constrains on value" + }, + "values": { + "description": "Requests that the Claim be returned with one of a set of values, with the values appearing in order of preference.", + "type": "array", + "items": { + "$comment": "There seem to be no constrains on individual items" + } + } + } + } + ] + } + } + } + } + }, + "selfServiceOIDCProvider": { + "type": "object", + "properties": { + "id": { + "type": "string", + "examples": ["google"] + }, + "provider": { + "title": "Provider", + "description": "Can be one of github, gitlab, generic, google, microsoft, discord.", + "type": "string", + "enum": [ + "github", + "gitlab", + "generic", + "google", + "microsoft", + "discord" + ], + "examples": ["google"] + }, + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + }, + "issuer_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com"] + }, + "auth_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com/o/oauth2/v2/auth"] + }, + "token_url": { + "type": "string", + "format": "uri", + "examples": ["https://www.googleapis.com/oauth2/v4/token"] + }, + "mapper_url": { + "title": "Jsonnet Mapper URL", + "description": "The URL where the jsonnet source is located for mapping the provider's data to ORY Kratos data.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/oidc.jsonnet", + "https://foo.bar.com/path/to/oidc.jsonnet", + "base64://bG9jYWwgc3ViamVjdCA9I..." 
+ ] + }, + "scope": { + "type": "array", + "items": { + "type": "string", + "examples": ["offline_access", "profile"] + } + }, + "tenant": { + "title": "Azure AD Tenant", + "description": "The Azure AD Tenant to use for authentication.", + "type": "string", + "examples": [ + "common", + "organizations", + "consumers", + "8eaef023-2b34-4da1-9baa-8bc8c9d6a490", + "contoso.onmicrosoft.com" + ] + }, + "requested_claims": { + "$ref": "#/definitions/OIDCClaims" + } + }, + "additionalProperties": false, + "required": [ + "id", + "provider", + "client_id", + "client_secret", + "mapper_url" + ], + "if": { + "properties": { + "provider": { + "const": "microsoft" + } + }, + "required": ["provider"] + }, + "then": { + "required": ["tenant"] + }, + "else": { + "not": { + "properties": { + "tenant": {} + }, + "required": ["tenant"] + } + } + }, + "selfServiceAfterSettingsMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceVerifyHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterLoginMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceSessionRevokerHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterRegistrationMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceSessionIssuerHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterSettings": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterSettingsMethod" + }, + "profile": { + "$ref": "#/definitions/selfServiceAfterSettingsMethod" + } + } + }, + "selfServiceAfterLogin": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterLoginMethod" + }, + "oidc": { + "$ref": "#/definitions/selfServiceAfterLoginMethod" + } + } + }, + "selfServiceAfterRegistration": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterRegistrationMethod" + }, + "oidc": { + "$ref": "#/definitions/selfServiceAfterRegistrationMethod" + } + } + } + }, + "properties": { + "selfservice": { + "type": "object", + "additionalProperties": false, + "required": ["default_browser_return_url"], + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "whitelisted_return_urls": { + "title": "Whitelisted Return To URLs", + "description": "List of URLs that are allowed to be redirected to. 
A redirection request is made by appending `?return_to=...` to Login, Registration, and other self-service flows.", + "type": "array", + "items": { + "type": "string", + "format": "uri-reference" + }, + "examples": [ + [ + "https://app.my-app.com/dashboard", + "/dashboard", + "https://www.my-app.com/" + ] + ], + "uniqueItems": true + }, + "flows": { + "type": "object", + "additionalProperties": false, + "properties": { + "settings": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "URL of the Settings page.", + "description": "URL where the Settings UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/user/settings"], + "default": "https://www.ory.sh/kratos/docs/fallback/settings" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "privileged_session_max_age": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterSettings" + } + } + }, + "logout": { + "type": "object", + "additionalProperties": false, + "properties": { + "after": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + } + } + } + }, + "registration": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "Registration UI URL", + "description": "URL where the Registration UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/signup"], + "default": "https://www.ory.sh/kratos/docs/fallback/registration" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterRegistration" + } + } + }, + "login": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "Login UI URL", + "description": "URL where the Login UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/login"], + "default": "https://www.ory.sh/kratos/docs/fallback/login" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterLogin" + } + } + }, + "verification": { + "title": "Email and Phone Verification and Account Activation Configuration", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enable Email/Phone Verification", + "description": "If set to true will enable [Email and Phone Verification and Account Activation](https://www.ory.sh/kratos/docs/self-service/flows/verify-email-account-activation/).", + "default": false + }, + "ui_url": { + "title": "Verify UI URL", + "description": "URL where the ORY Verify UI is hosted. This is the page where users activate and / or verify their email or telephone number. 
Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/verify"], + "default": "https://www.ory.sh/kratos/docs/fallback/verification" + }, + "after": { + "type": "object", + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + }, + "additionalProperties": false + }, + "lifespan": { + "title": "Self-Service Verification Request Lifespan", + "description": "Sets how long the verification request (for the UI interaction) is valid.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + } + } + }, + "recovery": { + "title": "Account Recovery Configuration", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enable Account Recovery", + "description": "If set to true will enable [Account Recovery](https://www.ory.sh/kratos/docs/self-service/flows/password-reset-account-recovery/).", + "default": false + }, + "ui_url": { + "title": "Recovery UI URL", + "description": "URL where the ORY Recovery UI is hosted. This is the page where users request and complete account recovery. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/verify"], + "default": "https://www.ory.sh/kratos/docs/fallback/recovery" + }, + "after": { + "type": "object", + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + }, + "additionalProperties": false + }, + "lifespan": { + "title": "Self-Service Recovery Request Lifespan", + "description": "Sets how long the recovery request is valid. If expired, the user has to redo the flow.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + } + } + }, + "error": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "ORY Kratos Error UI URL", + "description": "URL where the ORY Kratos Error UI is hosted. 
Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/kratos-error"], + "default": "https://www.ory.sh/kratos/docs/fallback/error" + } + } + } + } + }, + "methods": { + "type": "object", + "additionalProperties": false, + "properties": { + "profile": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Profile Management Method", + "default": true + } + } + }, + "link": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Link Method", + "default": true + } + } + }, + "password": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Username/Email and Password Method", + "default": true + } + } + }, + "oidc": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables OpenID Connect Method", + "default": false + }, + "config": { + "type": "object", + "additionalProperties": false, + "properties": { + "providers": { + "title": "OpenID Connect and OAuth2 Providers", + "description": "A list and configuration of OAuth2 and OpenID Connect providers ORY Kratos should integrate with.", + "type": "array", + "items": { + "$ref": "#/definitions/selfServiceOIDCProvider" + } + } + } + } + } + } + } + } + } + }, + "dsn": { + "type": "string", + "title": "Data Source Name", + "description": "DSN is used to specify the database credentials as a connection URI.", + "examples": [ + "postgres://user: password@postgresd:5432/database?sslmode=disable&max_conns=20&max_idle_conns=4", + "mysql://user:secret@tcp(mysqld:3306)/database?max_conns=20&max_idle_conns=4", + "cockroach://user@cockroachdb:26257/database?sslmode=disable&max_conns=20&max_idle_conns=4", + "sqlite:///var/lib/sqlite/db.sqlite?_fk=true&mode=rwc" + ] + }, + "courier": { + "type": "object", + "title": "Courier configuration", + "description": "The courier is responsible for sending and delivering messages over email, sms, and other means.", + "properties": { + "template_override_path": { + "type": "string", + "title": "Override message templates", + "description": "You can override certain or all message templates by pointing this key to the path where the templates are located.", + "examples": ["/conf/courier-templates"] + }, + "smtp": { + "title": "SMTP Configuration", + "description": "Configures outgoing emails using the SMTP protocol.", + "type": "object", + "properties": { + "connection_uri": { + "title": "SMTP connection string", + "description": "This URI will be used to connect to the SMTP server. Use the query parameter to allow (`?skip_ssl_verify=true`) or disallow (`?skip_ssl_verify=false`) self-signed TLS certificates. 
Please keep in mind that any host other than localhost / 127.0.0.1 must use smtp over TLS (smtps) or the connection will not be possible.", + "examples": [ + "smtps://foo:bar@my-mailserver:1234/?skip_ssl_verify=false" + ], + "type": "string", + "format": "uri" + }, + "from_address": { + "title": "SMTP Sender Address", + "description": "The recipient of an email will see this as the sender address.", + "type": "string", + "format": "email", + "default": "no-reply@ory.kratos.sh" + } + }, + "required": ["connection_uri"], + "additionalProperties": false + } + }, + "required": ["smtp"], + "additionalProperties": false + }, + "serve": { + "type": "object", + "properties": { + "admin": { + "type": "object", + "properties": { + "base_url": { + "title": "Admin Base URL", + "description": "The URL where the admin endpoint is exposed at.", + "type": "string", + "format": "uri", + "examples": ["https://kratos.private-network:4434/"] + }, + "host": { + "title": "Admin Host", + "description": "The host (interface) kratos' admin endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Admin Port", + "description": "The port kratos' admin endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535, + "examples": [4434], + "default": 4434 + } + }, + "additionalProperties": false + }, + "public": { + "type": "object", + "properties": { + "cors": { + "type": "object", + "additionalProperties": false, + "description": "Configures Cross Origin Resource Sharing for public endpoints.", + "properties": { + "enabled": { + "type": "boolean", + "description": "Sets whether CORS is enabled.", + "default": false + }, + "allowed_origins": { + "type": "array", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). 
Only one wildcard can be used per origin.", + "items": { + "type": "string", + "minLength": 1, + "not": { + "type": "string", + "description": "does match all strings that contain two or more (*)", + "pattern": ".*\\*.*\\*.*" + }, + "anyOf": [ + { + "format": "uri" + }, + { + "const": "*" + } + ] + }, + "uniqueItems": true, + "default": ["*"], + "examples": [ + [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + ] + }, + "allowed_methods": { + "type": "array", + "description": "A list of HTTP methods the user agent is allowed to use with cross-domain requests.", + "default": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "items": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + } + }, + "allowed_headers": { + "type": "array", + "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "default": [ + "Authorization", + "Content-Type", + "X-Session-Token" + ], + "items": { + "type": "string" + } + }, + "exposed_headers": { + "type": "array", + "description": "Sets which headers are safe to expose to the API of a CORS API specification.", + "default": ["Content-Type"], + "items": { + "type": "string" + } + }, + "allow_credentials": { + "type": "boolean", + "description": "Sets whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "default": true + }, + "options_passthrough": { + "type": "boolean", + "description": "TODO", + "default": false + }, + "max_age": { + "type": "integer", + "description": "Sets how long (in seconds) the results of a preflight request can be cached. If set to 0, every request is preceded by a preflight request.", + "default": 0, + "minimum": 0 + }, + "debug": { + "type": "boolean", + "description": "Adds additional log output to debug server side CORS issues.", + "default": false + } + } + }, + "base_url": { + "title": "Public Base URL", + "description": "The URL where the public endpoint is exposed at.", + "type": "string", + "format": "uri-reference", + "examples": [ + "https://my-app.com/.ory/kratos/public", + "/.ory/kratos/public/" + ] + }, + "host": { + "title": "Public Host", + "description": "The host (interface) kratos' public endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Public Port", + "description": "The port kratos' public endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535, + "examples": [4433], + "default": 4433 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "log": { + "type": "object", + "properties": { + "level": { + "type": "string", + "enum": [ + "trace", + "debug", + "info", + "warning", + "error", + "fatal", + "panic" + ] + }, + "leak_sensitive_values": { + "type": "boolean", + "title": "Leak Sensitive Log Values", + "description": "If set will leak sensitive values (e.g. emails) in the logs." + }, + "redaction_text": { + "type": "string", + "title": "Sensitive log value redaction text", + "description": "Text to use, when redacting sensitive log value." 
+ }, + "format": { + "type": "string", + "enum": ["json", "text"] + } + }, + "additionalProperties": false + }, + "identity": { + "type": "object", + "properties": { + "default_schema_url": { + "title": "JSON Schema URL for default identity traits", + "description": "Path to the JSON Schema which describes a default identity's traits.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/identity.traits.schema.json", + "https://foo.bar.com/path/to/identity.traits.schema.json" + ] + }, + "schemas": { + "type": "array", + "title": "Additional JSON Schemas for Identity Traits", + "examples": [ + [ + { + "id": "customer", + "url": "https://foo.bar.com/path/to/customer.traits.schema.json" + }, + { + "id": "employee", + "url": "https://foo.bar.com/path/to/employee.traits.schema.json" + }, + { + "id": "employee-v2", + "url": "https://foo.bar.com/path/to/employee.v2.traits.schema.json" + } + ] + ], + "items": { + "type": "object", + "properties": { + "id": { + "title": "The schema's ID.", + "type": "string", + "examples": ["employee"] + }, + "url": { + "type": "string", + "title": "Path to the JSON Schema", + "format": "uri", + "examples": [ + "file://path/to/identity.traits.schema.json", + "https://foo.bar.com/path/to/identity.traits.schema.json" + ] + } + }, + "required": ["id", "url"], + "not": { + "type": "object", + "properties": { + "id": { + "const": "default" + } + }, + "additionalProperties": true + } + } + } + }, + "required": ["default_schema_url"], + "additionalProperties": false + }, + "secrets": { + "type": "object", + "properties": { + "default": { + "type": "array", + "title": "Default Encryption Signing Secrets", + "description": "The first secret in the array is used for signing and encrypting things while all other keys are used to verify and decrypt older things that were signed with that old secret.", + "items": { + "type": "string", + "minLength": 16 + }, + "uniqueItems": true + }, + "cookie": { + "type": "array", + "title": "Signing Keys for Cookies", + "description": "The first secret in the array is used for encrypting cookies while all other keys are used to decrypt older cookies that were signed with that old secret.", + "items": { + "type": "string", + "minLength": 16 + }, + "uniqueItems": true + } + }, + "additionalProperties": false + }, + "hashers": { + "title": "Hashing Algorithm Configuration", + "type": "object", + "properties": { + "argon2": { + "title": "Configuration for the Argon2id hasher.", + "type": "object", + "properties": { + "memory": { + "type": "integer", + "minimum": 16384 + }, + "iterations": { + "type": "integer", + "minimum": 1 + }, + "parallelism": { + "type": "integer", + "minimum": 1 + }, + "salt_length": { + "type": "integer", + "minimum": 16 + }, + "key_length": { + "type": "integer", + "minimum": 16 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "session": { + "type": "object", + "additionalProperties": false, + "properties": { + "lifespan": { + "title": "Session Lifespan", + "description": "Defines how long a session is active. Once that lifespan has been reached, the user needs to sign in again.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "24h", + "examples": ["1h", "1m", "1s"] + }, + "cookie": { + "type": "object", + "properties": { + "domain": { + "title": "Session Cookie Domain", + "description": "Sets the session cookie domain. Useful when dealing with subdomains.
Use with care!", + "type": "string" + }, + "persistent": { + "title": "Make Session Cookie Persistent", + "description": "If set to true will persist the cookie in the end-user's browser using the `max-age` parameter which is set to the `session.lifespan` value. Persistent cookies are not deleted when the browser is closed (e.g. on reboot or alt+f4).", + "type": "boolean", + "default": true + }, + "path": { + "title": "Session Cookie Path", + "description": "Sets the session cookie path. Use with care!", + "type": "string", + "default": "/" + }, + "same_site": { + "title": "Cookie Same Site Configuration", + "type": "string", + "enum": ["Strict", "Lax", "None"], + "default": "Lax" + } + }, + "additionalProperties": false + } + } + }, + "version": { + "title": "The kratos version this config is written for.", + "description": "SemVer according to https://semver.org/ prefixed with `v` as in our releases.", + "type": "string", + "pattern": "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "examples": ["v0.5.0-alpha.1"] + } + }, + "allOf": [ + { + "if": { + "properties": { + "selfservice": { + "properties": { + "flows": { + "oneOf": [ + { + "properties": { + "verification": { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["enabled"] + } + }, + "required": ["verification"] + }, + { + "properties": { + "recovery": { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["enabled"] + } + }, + "required": ["recovery"] + } + ] + } + }, + "required": ["flows"] + } + }, + "required": ["selfservice"] + }, + "then": { + "required": ["courier"] + } + } + ], + "required": ["identity", "dsn", "selfservice"] +} diff --git a/oryx/configx/stub/kratos/expected.json b/oryx/configx/stub/kratos/expected.json new file mode 100644 index 00000000000..e9b5d45e60c --- /dev/null +++ b/oryx/configx/stub/kratos/expected.json @@ -0,0 +1,135 @@ +{ + "courier": { + "smtp": { + "connection_uri": "smtps://test:test@mailslurper:1025/?skip_ssl_verify=true", + "from_address": "no-reply@ory.kratos.sh" + } + }, + "dsn": "sqlite:///var/lib/sqlite/db.sqlite?_fk=true", + "hashers": { + "argon2": { + "iterations": 2, + "key_length": 16, + "memory": 131072, + "parallelism": 1, + "salt_length": 16 + } + }, + "identity": { + "default_schema_url": "file:///etc/config/kratos/identity.schema.json" + }, + "log": { + "format": "text", + "leak_sensitive_values": true, + "level": "debug" + }, + "secrets": { + "cookie": ["PLEASE-CHANGE-ME-I-AM-VERY-INSECURE"] + }, + "selfservice": { + "default_browser_return_url": "http://127.0.0.1:4455/", + "flows": { + "error": { + "ui_url": "http://127.0.0.1:4455/error" + }, + "login": { + "lifespan": "10m", + "ui_url": "http://127.0.0.1:4455/auth/login" + }, + "logout": { + "after": { + "default_browser_return_url": "http://127.0.0.1:4455/auth/login" + } + }, + "recovery": { + "enabled": true, + "lifespan": "1h", + "ui_url": "http://127.0.0.1:4455/recovery" + }, + "registration": { + "after": { + "password": { + "hooks": [ + { + "hook": "session" + } + ] + } + }, + "lifespan": "10m", + "ui_url": "http://127.0.0.1:4455/auth/registration" + }, + "settings": { + "lifespan": "1h", + "privileged_session_max_age": "15m", + "ui_url": "http://127.0.0.1:4455/settings" + }, + "verification": { + "after": { + "default_browser_return_url": "http://127.0.0.1:4455/" + }, + "enabled": true, + "lifespan": "1h", + "ui_url": 
"http://127.0.0.1:4455/verify" + } + }, + "methods": { + "link": { + "enabled": true + }, + "oidc": { + "enabled": true, + "config": { + "providers": [ + { + "id": "google", + "provider": "google", + "mapper_url": "file:///etc/config/kratos/oidc.google.jsonnet", + "client_id": "client@example.com", + "client_secret": "secret" + } + ] + } + }, + "password": { + "enabled": true + }, + "profile": { + "enabled": true + } + }, + "whitelisted_return_urls": ["http://127.0.0.1:4455"] + }, + "serve": { + "admin": { + "base_url": "http://kratos:4434/", + "host": "0.0.0.0", + "port": 4434 + }, + "public": { + "base_url": "http://127.0.0.1:4433/", + "cors": { + "allow_credentials": true, + "allowed_headers": ["Authorization", "Content-Type", "X-Session-Token"], + "allowed_methods": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "allowed_origins": ["*"], + "debug": false, + "enabled": true, + "exposed_headers": ["Content-Type"], + "max_age": 0, + "options_passthrough": false + }, + "host": "0.0.0.0", + "port": 4433 + } + }, + "session": { + "cookie": { + "path": "/", + "persistent": true, + "same_site": "Lax" + }, + "lifespan": "24h" + }, + "version": "v0.5.3-alpha.1" +} diff --git a/oryx/configx/stub/kratos/kratos.yaml b/oryx/configx/stub/kratos/kratos.yaml new file mode 100644 index 00000000000..0d74f1966dc --- /dev/null +++ b/oryx/configx/stub/kratos/kratos.yaml @@ -0,0 +1,76 @@ +version: v0.5.3-alpha.1 + +dsn: memory + +serve: + public: + base_url: http://127.0.0.1:4433/ + cors: + enabled: true + admin: + base_url: http://kratos:4434/ + +selfservice: + default_browser_return_url: http://127.0.0.1:4455/ + whitelisted_return_urls: + - http://127.0.0.1:4455 + + methods: + password: + enabled: true + oidc: + enabled: true + + flows: + error: + ui_url: http://127.0.0.1:4455/error + + settings: + ui_url: http://127.0.0.1:4455/settings + privileged_session_max_age: 15m + + recovery: + enabled: true + ui_url: http://127.0.0.1:4455/recovery + + verification: + enabled: true + ui_url: http://127.0.0.1:4455/verify + after: + default_browser_return_url: http://127.0.0.1:4455/ + + logout: + after: + default_browser_return_url: http://127.0.0.1:4455/auth/login + + login: + ui_url: http://127.0.0.1:4455/auth/login + lifespan: 10m + + registration: + lifespan: 10m + ui_url: http://127.0.0.1:4455/auth/registration + +log: + level: debug + format: text + leak_sensitive_values: true + +secrets: + cookie: + - PLEASE-CHANGE-ME-I-AM-VERY-INSECURE + +hashers: + argon2: + parallelism: 1 + memory: 131072 + iterations: 2 + salt_length: 16 + key_length: 16 + +identity: + default_schema_url: file:///etc/config/kratos/identity.schema.json + +courier: + smtp: + connection_uri: smtps://test:test@mailslurper:1025/?skip_ssl_verify=true diff --git a/oryx/configx/stub/multi/a.yaml b/oryx/configx/stub/multi/a.yaml new file mode 100644 index 00000000000..f3e18085dc4 --- /dev/null +++ b/oryx/configx/stub/multi/a.yaml @@ -0,0 +1,27 @@ +version: v0.5.3-alpha.1 + +dsn: memory + +serve: + public: + base_url: http://127.0.0.1:4433/ + cors: + enabled: true + admin: + base_url: http://kratos:4434/ + +selfservice: + default_browser_return_url: http://127.0.0.1:4455/ + whitelisted_return_urls: + - http://127.0.0.1:4455 + + methods: + password: + enabled: true + + flows: + error: + ui_url: http://127.0.0.1:4455/error + + settings: + ui_url: http://127.0.0.1:4455/settings diff --git a/oryx/configx/stub/multi/b.yaml b/oryx/configx/stub/multi/b.yaml new file mode 100644 index 00000000000..1d489893a2e --- /dev/null +++ b/oryx/configx/stub/multi/b.yaml 
@@ -0,0 +1,54 @@ +selfservice: + flows: + settings: + privileged_session_max_age: 15m + + recovery: + enabled: true + ui_url: http://127.0.0.1:4455/recovery + + verification: + enabled: true + ui_url: http://127.0.0.1:4455/verify + after: + default_browser_return_url: http://127.0.0.1:4455/ + + logout: + after: + default_browser_return_url: http://127.0.0.1:4455/auth/login + + login: + ui_url: http://127.0.0.1:4455/auth/login + lifespan: 10m + + registration: + lifespan: 10m + ui_url: http://127.0.0.1:4455/auth/registration + after: + password: + hooks: + - hook: session + +log: + level: debug + format: text + leak_sensitive_values: true + +secrets: + cookie: + - PLEASE-CHANGE-ME-I-AM-VERY-INSECURE + +hashers: + argon2: + parallelism: 1 + memory: 131072 + iterations: 2 + salt_length: 16 + key_length: 16 + +identity: + default_schema_url: file:///etc/config/kratos/identity.schema.json + +courier: + smtp: + connection_uri: smtps://test:test@mailslurper:1025/?skip_ssl_verify=true diff --git a/oryx/configx/stub/multi/config.schema.json b/oryx/configx/stub/multi/config.schema.json new file mode 100644 index 00000000000..75847b2f043 --- /dev/null +++ b/oryx/configx/stub/multi/config.schema.json @@ -0,0 +1,1085 @@ +{ + "$id": "https://github.com/ory/kratos/.schema/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ORY Kratos Configuration", + "type": "object", + "definitions": { + "defaultReturnTo": { + "title": "Redirect browsers to set URL per default", + "description": "ORY Kratos redirects to this URL per default on completion of self-service flows and other browser interaction. Read this [article for more information on browser redirects](https://www.ory.sh/kratos/docs/concepts/browser-redirect-flow-completion).", + "type": "string", + "format": "uri-reference", + "minLength": 1, + "examples": ["https://my-app.com/dashboard", "/dashboard"] + }, + "selfServiceSessionRevokerHook": { + "type": "object", + "properties": { + "hook": { + "const": "revoke_active_sessions" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "selfServiceVerifyHook": { + "type": "object", + "properties": { + "hook": { + "const": "verify" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "selfServiceSessionIssuerHook": { + "type": "object", + "properties": { + "hook": { + "const": "session" + } + }, + "additionalProperties": false, + "required": ["hook"] + }, + "OIDCClaims": { + "title": "OpenID Connect claims", + "description": "The OpenID Connect claims and optionally their properties which should be included in the id_token or returned from the UserInfo Endpoint.", + "type": "object", + "examples": [ + { + "id_token": { + "email": null, + "email_verified": null + } + }, + { + "userinfo": { + "given_name": { + "essential": true + }, + "nickname": null, + "email": { + "essential": true + }, + "email_verified": { + "essential": true + }, + "picture": null, + "http://example.info/claims/groups": null + }, + "id_token": { + "auth_time": { + "essential": true + }, + "acr": { + "values": ["urn:mace:incommon:iap:silver"] + } + } + } + ], + "patternProperties": { + "^userinfo$|^id_token$": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + ".*": { + "oneOf": [ + { + "const": null, + "description": "Indicates that this Claim is being requested in the default manner." 
+ }, + { + "type": "object", + "additionalProperties": false, + "properties": { + "essential": { + "description": "Indicates whether the Claim being requested is an Essential Claim.", + "type": "boolean" + }, + "value": { + "description": "Requests that the Claim be returned with a particular value.", + "$comment": "There seem to be no constraints on value" + }, + "values": { + "description": "Requests that the Claim be returned with one of a set of values, with the values appearing in order of preference.", + "type": "array", + "items": { + "$comment": "There seem to be no constraints on individual items" + } + } + } + } + ] + } + } + } + } + }, + "selfServiceOIDCProvider": { + "type": "object", + "properties": { + "id": { + "type": "string", + "examples": ["google"] + }, + "provider": { + "title": "Provider", + "description": "Can be one of github, gitlab, generic, google, microsoft, discord.", + "type": "string", + "enum": [ + "github", + "gitlab", + "generic", + "google", + "microsoft", + "discord" + ], + "examples": ["google"] + }, + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + }, + "issuer_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com"] + }, + "auth_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com/o/oauth2/v2/auth"] + }, + "token_url": { + "type": "string", + "format": "uri", + "examples": ["https://www.googleapis.com/oauth2/v4/token"] + }, + "mapper_url": { + "title": "Jsonnet Mapper URL", + "description": "The URL where the jsonnet source is located for mapping the provider's data to ORY Kratos data.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/oidc.jsonnet", + "https://foo.bar.com/path/to/oidc.jsonnet", + "base64://bG9jYWwgc3ViamVjdCA9I..."
+ ] + }, + "scope": { + "type": "array", + "items": { + "type": "string", + "examples": ["offline_access", "profile"] + } + }, + "tenant": { + "title": "Azure AD Tenant", + "description": "The Azure AD Tenant to use for authentication.", + "type": "string", + "examples": [ + "common", + "organizations", + "consumers", + "8eaef023-2b34-4da1-9baa-8bc8c9d6a490", + "contoso.onmicrosoft.com" + ] + }, + "requested_claims": { + "$ref": "#/definitions/OIDCClaims" + } + }, + "additionalProperties": false, + "required": [ + "id", + "provider", + "client_id", + "client_secret", + "mapper_url" + ], + "if": { + "properties": { + "provider": { + "const": "microsoft" + } + }, + "required": ["provider"] + }, + "then": { + "required": ["tenant"] + }, + "else": { + "not": { + "properties": { + "tenant": {} + }, + "required": ["tenant"] + } + } + }, + "selfServiceAfterSettingsMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceVerifyHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterLoginMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceSessionRevokerHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterRegistrationMethod": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "hooks": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/selfServiceSessionIssuerHook" + } + ] + }, + "uniqueItems": true, + "additionalItems": false + } + } + }, + "selfServiceAfterSettings": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterSettingsMethod" + }, + "profile": { + "$ref": "#/definitions/selfServiceAfterSettingsMethod" + } + } + }, + "selfServiceAfterLogin": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterLoginMethod" + }, + "oidc": { + "$ref": "#/definitions/selfServiceAfterLoginMethod" + } + } + }, + "selfServiceAfterRegistration": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "password": { + "$ref": "#/definitions/selfServiceAfterRegistrationMethod" + }, + "oidc": { + "$ref": "#/definitions/selfServiceAfterRegistrationMethod" + } + } + } + }, + "properties": { + "selfservice": { + "type": "object", + "additionalProperties": false, + "required": ["default_browser_return_url"], + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + }, + "whitelisted_return_urls": { + "title": "Whitelisted Return To URLs", + "description": "List of URLs that are allowed to be redirected to. 
A redirection request is made by appending `?return_to=...` to Login, Registration, and other self-service flows.", + "type": "array", + "items": { + "type": "string", + "format": "uri-reference" + }, + "examples": [ + [ + "https://app.my-app.com/dashboard", + "/dashboard", + "https://www.my-app.com/" + ] + ], + "uniqueItems": true + }, + "flows": { + "type": "object", + "additionalProperties": false, + "properties": { + "settings": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "URL of the Settings page.", + "description": "URL where the Settings UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/user/settings"], + "default": "https://www.ory.sh/kratos/docs/fallback/settings" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "privileged_session_max_age": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterSettings" + } + } + }, + "logout": { + "type": "object", + "additionalProperties": false, + "properties": { + "after": { + "type": "object", + "additionalProperties": false, + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + } + } + } + }, + "registration": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "Registration UI URL", + "description": "URL where the Registration UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/signup"], + "default": "https://www.ory.sh/kratos/docs/fallback/registration" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterRegistration" + } + } + }, + "login": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "Login UI URL", + "description": "URL where the Login UI is hosted. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/login"], + "default": "https://www.ory.sh/kratos/docs/fallback/login" + }, + "lifespan": { + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + }, + "after": { + "$ref": "#/definitions/selfServiceAfterLogin" + } + } + }, + "verification": { + "title": "Email and Phone Verification and Account Activation Configuration", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enable Email/Phone Verification", + "description": "If set to true will enable [Email and Phone Verification and Account Activation](https://www.ory.sh/kratos/docs/self-service/flows/verify-email-account-activation/).", + "default": false + }, + "ui_url": { + "title": "Verify UI URL", + "description": "URL where the ORY Verify UI is hosted. This is the page where users activate and / or verify their email or telephone number. 
Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/verify"], + "default": "https://www.ory.sh/kratos/docs/fallback/verification" + }, + "after": { + "type": "object", + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + }, + "additionalProperties": false + }, + "lifespan": { + "title": "Self-Service Verification Request Lifespan", + "description": "Sets how long the verification request (for the UI interaction) is valid.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + } + } + }, + "recovery": { + "title": "Account Recovery Configuration", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enable Account Recovery", + "description": "If set to true will enable [Account Recovery](https://www.ory.sh/kratos/docs/self-service/flows/password-reset-account-recovery/).", + "default": false + }, + "ui_url": { + "title": "Recovery UI URL", + "description": "URL where the ORY Recovery UI is hosted. This is the page where users request and complete account recovery. Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/verify"], + "default": "https://www.ory.sh/kratos/docs/fallback/recovery" + }, + "after": { + "type": "object", + "properties": { + "default_browser_return_url": { + "$ref": "#/definitions/defaultReturnTo" + } + }, + "additionalProperties": false + }, + "lifespan": { + "title": "Self-Service Recovery Request Lifespan", + "description": "Sets how long the recovery request is valid. If expired, the user has to redo the flow.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1h", + "examples": ["1h", "1m", "1s"] + } + } + }, + "error": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui_url": { + "title": "ORY Kratos Error UI URL", + "description": "URL where the ORY Kratos Error UI is hosted. 
Check the [reference implementation](https://github.com/ory/kratos-selfservice-ui-node).", + "type": "string", + "format": "uri-reference", + "examples": ["https://my-app.com/kratos-error"], + "default": "https://www.ory.sh/kratos/docs/fallback/error" + } + } + } + } + }, + "methods": { + "type": "object", + "additionalProperties": false, + "properties": { + "profile": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Profile Management Method", + "default": true + } + } + }, + "link": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Link Method", + "default": true + } + } + }, + "password": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables Username/Email and Password Method", + "default": true + } + } + }, + "oidc": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enables OpenID Connect Method", + "default": false + }, + "config": { + "type": "object", + "additionalProperties": false, + "properties": { + "providers": { + "title": "OpenID Connect and OAuth2 Providers", + "description": "A list and configuration of OAuth2 and OpenID Connect providers ORY Kratos should integrate with.", + "type": "array", + "items": { + "$ref": "#/definitions/selfServiceOIDCProvider" + } + } + } + } + } + } + } + } + } + }, + "dsn": { + "type": "string", + "title": "Data Source Name", + "description": "DSN is used to specify the database credentials as a connection URI.", + "examples": [ + "postgres://user: password@postgresd:5432/database?sslmode=disable&max_conns=20&max_idle_conns=4", + "mysql://user:secret@tcp(mysqld:3306)/database?max_conns=20&max_idle_conns=4", + "cockroach://user@cockroachdb:26257/database?sslmode=disable&max_conns=20&max_idle_conns=4", + "sqlite:///var/lib/sqlite/db.sqlite?_fk=true&mode=rwc" + ] + }, + "courier": { + "type": "object", + "title": "Courier configuration", + "description": "The courier is responsible for sending and delivering messages over email, sms, and other means.", + "properties": { + "template_override_path": { + "type": "string", + "title": "Override message templates", + "description": "You can override certain or all message templates by pointing this key to the path where the templates are located.", + "examples": ["/conf/courier-templates"] + }, + "smtp": { + "title": "SMTP Configuration", + "description": "Configures outgoing emails using the SMTP protocol.", + "type": "object", + "properties": { + "connection_uri": { + "title": "SMTP connection string", + "description": "This URI will be used to connect to the SMTP server. Use the query parameter to allow (`?skip_ssl_verify=true`) or disallow (`?skip_ssl_verify=false`) self-signed TLS certificates. 
Please keep in mind that any host other than localhost / 127.0.0.1 must use smtp over TLS (smtps) or the connection will not be possible.", + "examples": [ + "smtps://foo:bar@my-mailserver:1234/?skip_ssl_verify=false" + ], + "type": "string", + "format": "uri" + }, + "from_address": { + "title": "SMTP Sender Address", + "description": "The recipient of an email will see this as the sender address.", + "type": "string", + "format": "email", + "default": "no-reply@ory.kratos.sh" + } + }, + "required": ["connection_uri"], + "additionalProperties": false + } + }, + "required": ["smtp"], + "additionalProperties": false + }, + "serve": { + "type": "object", + "properties": { + "admin": { + "type": "object", + "properties": { + "base_url": { + "title": "Admin Base URL", + "description": "The URL where the admin endpoint is exposed at.", + "type": "string", + "format": "uri", + "examples": ["https://kratos.private-network:4434/"] + }, + "host": { + "title": "Admin Host", + "description": "The host (interface) kratos' admin endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Admin Port", + "description": "The port kratos' admin endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535, + "examples": [4434], + "default": 4434 + } + }, + "additionalProperties": false + }, + "public": { + "type": "object", + "properties": { + "cors": { + "type": "object", + "additionalProperties": false, + "description": "Configures Cross Origin Resource Sharing for public endpoints.", + "properties": { + "enabled": { + "type": "boolean", + "description": "Sets whether CORS is enabled.", + "default": false + }, + "allowed_origins": { + "type": "array", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). 
Only one wildcard can be used per origin.", + "items": { + "type": "string", + "minLength": 1, + "not": { + "type": "string", + "description": "does match all strings that contain two or more (*)", + "pattern": ".*\\*.*\\*.*" + }, + "anyOf": [ + { + "format": "uri" + }, + { + "const": "*" + } + ] + }, + "uniqueItems": true, + "default": ["*"], + "examples": [ + [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + ] + }, + "allowed_methods": { + "type": "array", + "description": "A list of HTTP methods the user agent is allowed to use with cross-domain requests.", + "default": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "items": { + "type": "string", + "enum": [ + "POST", + "GET", + "PUT", + "PATCH", + "DELETE", + "CONNECT", + "HEAD", + "OPTIONS", + "TRACE" + ] + } + }, + "allowed_headers": { + "type": "array", + "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "default": [ + "Authorization", + "Content-Type", + "X-Session-Token" + ], + "items": { + "type": "string" + } + }, + "exposed_headers": { + "type": "array", + "description": "Sets which headers are safe to expose to the API of a CORS API specification.", + "default": ["Content-Type"], + "items": { + "type": "string" + } + }, + "allow_credentials": { + "type": "boolean", + "description": "Sets whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "default": true + }, + "options_passthrough": { + "type": "boolean", + "description": "TODO", + "default": false + }, + "max_age": { + "type": "integer", + "description": "Sets how long (in seconds) the results of a preflight request can be cached. If set to 0, every request is preceded by a preflight request.", + "default": 0, + "minimum": 0 + }, + "debug": { + "type": "boolean", + "description": "Adds additional log output to debug server side CORS issues.", + "default": false + } + } + }, + "base_url": { + "title": "Public Base URL", + "description": "The URL where the public endpoint is exposed at.", + "type": "string", + "format": "uri-reference", + "examples": [ + "https://my-app.com/.ory/kratos/public", + "/.ory/kratos/public/" + ] + }, + "host": { + "title": "Public Host", + "description": "The host (interface) kratos' public endpoint listens on.", + "type": "string", + "default": "0.0.0.0" + }, + "port": { + "title": "Public Port", + "description": "The port kratos' public endpoint listens on.", + "type": "integer", + "minimum": 1, + "maximum": 65535, + "examples": [4433], + "default": 4433 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "log": { + "type": "object", + "properties": { + "level": { + "type": "string", + "enum": [ + "trace", + "debug", + "info", + "warning", + "error", + "fatal", + "panic" + ] + }, + "leak_sensitive_values": { + "type": "boolean", + "title": "Leak Sensitive Log Values", + "description": "If set will leak sensitive values (e.g. emails) in the logs." + }, + "redaction_text": { + "type": "string", + "title": "Sensitive log value redaction text", + "description": "Text to use, when redacting sensitive log value." 
+ }, + "format": { + "type": "string", + "enum": ["json", "text"] + } + }, + "additionalProperties": false + }, + "identity": { + "type": "object", + "properties": { + "default_schema_url": { + "title": "JSON Schema URL for default identity traits", + "description": "Path to the JSON Schema which describes a default identity's traits.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/identity.traits.schema.json", + "https://foo.bar.com/path/to/identity.traits.schema.json" + ] + }, + "schemas": { + "type": "array", + "title": "Additional JSON Schemas for Identity Traits", + "examples": [ + [ + { + "id": "customer", + "url": "https://foo.bar.com/path/to/customer.traits.schema.json" + }, + { + "id": "employee", + "url": "https://foo.bar.com/path/to/employee.traits.schema.json" + }, + { + "id": "employee-v2", + "url": "https://foo.bar.com/path/to/employee.v2.traits.schema.json" + } + ] + ], + "items": { + "type": "object", + "properties": { + "id": { + "title": "The schema's ID.", + "type": "string", + "examples": ["employee"] + }, + "url": { + "type": "string", + "title": "Path to the JSON Schema", + "format": "uri", + "examples": [ + "file://path/to/identity.traits.schema.json", + "https://foo.bar.com/path/to/identity.traits.schema.json" + ] + } + }, + "required": ["id", "url"], + "not": { + "type": "object", + "properties": { + "id": { + "const": "default" + } + }, + "additionalProperties": true + } + } + } + }, + "required": ["default_schema_url"], + "additionalProperties": false + }, + "secrets": { + "type": "object", + "properties": { + "default": { + "type": "array", + "title": "Default Encryption Signing Secrets", + "description": "The first secret in the array is used for signing and encrypting things while all other keys are used to verify and decrypt older things that were signed with that old secret.", + "items": { + "type": "string", + "minLength": 16 + }, + "uniqueItems": true + }, + "cookie": { + "type": "array", + "title": "Signing Keys for Cookies", + "description": "The first secret in the array is used for encrypting cookies while all other keys are used to decrypt older cookies that were signed with that old secret.", + "items": { + "type": "string", + "minLength": 16 + }, + "uniqueItems": true + } + }, + "additionalProperties": false + }, + "hashers": { + "title": "Hashing Algorithm Configuration", + "type": "object", + "properties": { + "argon2": { + "title": "Configuration for the Argon2id hasher.", + "type": "object", + "properties": { + "memory": { + "type": "integer", + "minimum": 16384 + }, + "iterations": { + "type": "integer", + "minimum": 1 + }, + "parallelism": { + "type": "integer", + "minimum": 1 + }, + "salt_length": { + "type": "integer", + "minimum": 16 + }, + "key_length": { + "type": "integer", + "minimum": 16 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "session": { + "type": "object", + "additionalProperties": false, + "properties": { + "lifespan": { + "title": "Session Lifespan", + "description": "Defines how long a session is active. Once that lifespan has been reached, the user needs to sign in again.", + "type": "string", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "24h", + "examples": ["1h", "1m", "1s"] + }, + "cookie": { + "type": "object", + "properties": { + "domain": { + "title": "Session Cookie Domain", + "description": "Sets the session cookie domain. Useful when dealing with subdomains.
Use with care!", + "type": "string" + }, + "persistent": { + "title": "Make Session Cookie Persistent", + "description": "If set to true will persist the cookie in the end-user's browser using the `max-age` parameter which is set to the `session.lifespan` value. Persistent cookies are not deleted when the browser is closed (e.g. on reboot or alt+f4).", + "type": "boolean", + "default": true + }, + "path": { + "title": "Session Cookie Path", + "description": "Sets the session cookie path. Use with care!", + "type": "string", + "default": "/" + }, + "same_site": { + "title": "Cookie Same Site Configuration", + "type": "string", + "enum": ["Strict", "Lax", "None"], + "default": "Lax" + } + }, + "additionalProperties": false + } + } + }, + "version": { + "title": "The kratos version this config is written for.", + "description": "SemVer according to https://semver.org/ prefixed with `v` as in our releases.", + "type": "string", + "pattern": "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "examples": ["v0.5.0-alpha.1"] + } + }, + "allOf": [ + { + "if": { + "properties": { + "selfservice": { + "properties": { + "flows": { + "oneOf": [ + { + "properties": { + "verification": { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["enabled"] + } + }, + "required": ["verification"] + }, + { + "properties": { + "recovery": { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["enabled"] + } + }, + "required": ["recovery"] + } + ] + } + }, + "required": ["flows"] + } + }, + "required": ["selfservice"] + }, + "then": { + "required": ["courier"] + } + } + ], + "required": ["identity", "dsn", "selfservice"] +} diff --git a/oryx/configx/stub/multi/expected.json b/oryx/configx/stub/multi/expected.json new file mode 100644 index 00000000000..2fe5dd16343 --- /dev/null +++ b/oryx/configx/stub/multi/expected.json @@ -0,0 +1,124 @@ +{ + "courier": { + "smtp": { + "connection_uri": "smtps://test:test@mailslurper:1025/?skip_ssl_verify=true", + "from_address": "no-reply@ory.kratos.sh" + } + }, + "dsn": "sqlite:///var/lib/sqlite/db.sqlite?_fk=true", + "hashers": { + "argon2": { + "iterations": 2, + "key_length": 16, + "memory": 131072, + "parallelism": 1, + "salt_length": 16 + } + }, + "identity": { + "default_schema_url": "file:///etc/config/kratos/identity.schema.json" + }, + "log": { + "format": "text", + "leak_sensitive_values": true, + "level": "debug" + }, + "secrets": { + "cookie": ["PLEASE-CHANGE-ME-I-AM-VERY-INSECURE"] + }, + "selfservice": { + "default_browser_return_url": "http://127.0.0.1:4455/", + "flows": { + "error": { + "ui_url": "http://127.0.0.1:4455/error" + }, + "login": { + "lifespan": "10m", + "ui_url": "http://127.0.0.1:4455/auth/login" + }, + "logout": { + "after": { + "default_browser_return_url": "http://127.0.0.1:4455/auth/login" + } + }, + "recovery": { + "enabled": true, + "lifespan": "1h", + "ui_url": "http://127.0.0.1:4455/recovery" + }, + "registration": { + "after": { + "password": { + "hooks": [ + { + "hook": "session" + } + ] + } + }, + "lifespan": "10m", + "ui_url": "http://127.0.0.1:4455/auth/registration" + }, + "settings": { + "lifespan": "1h", + "privileged_session_max_age": "15m", + "ui_url": "http://127.0.0.1:4455/settings" + }, + "verification": { + "after": { + "default_browser_return_url": "http://127.0.0.1:4455/" + }, + "enabled": true, + "lifespan": "1h", + "ui_url": 
"http://127.0.0.1:4455/verify" + } + }, + "methods": { + "link": { + "enabled": true + }, + "oidc": { + "enabled": false + }, + "password": { + "enabled": true + }, + "profile": { + "enabled": true + } + }, + "whitelisted_return_urls": ["http://127.0.0.1:4455"] + }, + "serve": { + "admin": { + "base_url": "http://kratos:4434/", + "host": "0.0.0.0", + "port": 4434 + }, + "public": { + "base_url": "http://127.0.0.1:4433/", + "cors": { + "allow_credentials": true, + "allowed_headers": ["Authorization", "Content-Type", "X-Session-Token"], + "allowed_methods": ["POST", "GET", "PUT", "PATCH", "DELETE"], + "allowed_origins": ["*"], + "debug": false, + "enabled": true, + "exposed_headers": ["Content-Type"], + "max_age": 0, + "options_passthrough": false + }, + "host": "0.0.0.0", + "port": 4433 + } + }, + "session": { + "cookie": { + "path": "/", + "persistent": true, + "same_site": "Lax" + }, + "lifespan": "24h" + }, + "version": "v0.5.3-alpha.1" +} diff --git a/oryx/configx/stub/nested-array/config.schema.json b/oryx/configx/stub/nested-array/config.schema.json new file mode 100644 index 00000000000..b70c935517f --- /dev/null +++ b/oryx/configx/stub/nested-array/config.schema.json @@ -0,0 +1,105 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "providers": { + "title": "OpenID Connect and OAuth2 Providers", + "description": "A list and configuration of OAuth2 and OpenID Connect providers ORY Kratos should integrate with.", + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "examples": ["google"] + }, + "provider": { + "title": "Provider", + "description": "Can be one of github, gitlab, generic, google, microsoft, discord.", + "type": "string", + "enum": [ + "github", + "gitlab", + "generic", + "google", + "microsoft", + "discord" + ], + "examples": ["google"] + }, + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + }, + "issuer_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com"] + }, + "auth_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com/o/oauth2/v2/auth"] + }, + "token_url": { + "type": "string", + "format": "uri", + "examples": ["https://www.googleapis.com/oauth2/v4/token"] + }, + "mapper_url": { + "title": "Jsonnet Mapper URL", + "description": "The URL where the jsonnet source is located for mapping the provider's data to ORY Kratos data.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/oidc.jsonnet", + "https://foo.bar.com/path/to/oidc.jsonnet", + "base64://bG9jYWwgc3ViamVjdCA9I..." 
+ ] + }, + "scope": { + "type": "array", + "items": { + "type": "string", + "examples": ["offline_access", "profile"] + } + }, + "tenant": { + "title": "Azure AD Tenant", + "description": "The Azure AD Tenant to use for authentication.", + "type": "string", + "examples": [ + "common", + "organizations", + "consumers", + "8eaef023-2b34-4da1-9baa-8bc8c9d6a490", + "contoso.onmicrosoft.com" + ] + } + }, + "additionalProperties": false, + "required": [], + "if": { + "properties": { + "provider": { + "const": "microsoft" + } + }, + "required": ["provider"] + }, + "then": { + "required": ["tenant"] + }, + "else": { + "not": { + "properties": { + "tenant": {} + }, + "required": ["tenant"] + } + } + } + } + } +} diff --git a/oryx/configx/stub/nested-array/expected.json b/oryx/configx/stub/nested-array/expected.json new file mode 100644 index 00000000000..e8609d26e58 --- /dev/null +++ b/oryx/configx/stub/nested-array/expected.json @@ -0,0 +1,11 @@ +{ + "providers": [ + { + "id": "google", + "client_id": "client@example.com" + }, + { + "client_id": "some@example.com" + } + ] +} diff --git a/oryx/configx/stub/nested-array/kratos.yaml b/oryx/configx/stub/nested-array/kratos.yaml new file mode 100644 index 00000000000..ac667ffd560 --- /dev/null +++ b/oryx/configx/stub/nested-array/kratos.yaml @@ -0,0 +1,2 @@ +providers: + - id: google diff --git a/oryx/configx/stub/watch/config.schema.json b/oryx/configx/stub/watch/config.schema.json new file mode 100644 index 00000000000..80382f42f73 --- /dev/null +++ b/oryx/configx/stub/watch/config.schema.json @@ -0,0 +1,19 @@ +{ + "$id": "https://example.com/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "config", + "type": "object", + "properties": { + "dsn": { + "type": "string" + }, + "foo": { + "const": "bar" + }, + "bar": { + "type": "string", + "enum": ["foo", "bar", "baz"] + } + }, + "required": ["dsn"] +} diff --git a/oryx/configx/tls.schema.json b/oryx/configx/tls.schema.json new file mode 100644 index 00000000000..832f679d27d --- /dev/null +++ b/oryx/configx/tls.schema.json @@ -0,0 +1,68 @@ +{ + "$id": "https://github.com/ory/x/tlsx/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "HTTPS", + "description": "Configure HTTP over TLS (HTTPS).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean" + }, + "key": { + "title": "Private Key (PEM)", + "$ref": "#/definitions/source" + }, + "cert": { + "title": "TLS Certificate (PEM)", + "$ref": "#/definitions/source" + }, + "allow_termination_from": { + "type": "array", + "description": "Allow-list one or multiple CIDR address ranges and allow them to terminate TLS connections. Be aware that the X-Forwarded-Proto header must be set and must never be modifiable by anyone but your proxy / gateway / load balancer. Supports ipv4 and ipv6. 
The service serves http instead of https when this option is set.", + "items": { + "description": "CIDR address range.", + "type": "string", + "oneOf": [ + { + "pattern": "^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))/([0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8])$" + }, + { + "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$" + } + ], + "examples": ["127.0.0.1/32"] + } + } + }, + "definitions": { + "source": { + "type": "object", + "oneOf": [ + { + "properties": { + "path": { + "title": "Path to PEM-encoded File", + "type": "string", + "examples": ["path/to/file.pem"] + } + }, + "additionalProperties": false + }, + { + "properties": { + "base64": { + "title": "Base64 Encoded Inline", + "description": "The base64 string of the PEM-encoded file content. Can be generated using for example `base64 -i path/to/file.pem`.", + "type": "string", + "examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." + ] + } + }, + "additionalProperties": false + } + ] + } + } +} diff --git a/oryx/contextx/contextual.go b/oryx/contextx/contextual.go new file mode 100644 index 00000000000..e7d744630c2 --- /dev/null +++ b/oryx/contextx/contextual.go @@ -0,0 +1,43 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package contextx + +import ( + "context" + + "github.com/ory/x/configx" + + "github.com/gofrs/uuid" +) + +type ( + Contextualizer interface { + // Network returns the network id for the given context. + Network(ctx context.Context, network uuid.UUID) uuid.UUID + + // Config returns the config for the given context. + Config(ctx context.Context, config *configx.Provider) *configx.Provider + } + Provider interface { + Contextualizer() Contextualizer + } + Static struct { + NID uuid.UUID + C *configx.Provider + } +) + +func (d *Static) Network(_ context.Context, nid uuid.UUID) uuid.UUID { + if d.NID == uuid.Nil { + return nid + } + return d.NID +} + +func (d *Static) Config(_ context.Context, c *configx.Provider) *configx.Provider { + if d.C == nil { + return c + } + return d.C +} diff --git a/oryx/contextx/contextual_mock.go b/oryx/contextx/contextual_mock.go new file mode 100644 index 00000000000..ef75861748c --- /dev/null +++ b/oryx/contextx/contextual_mock.go @@ -0,0 +1,39 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package contextx + +import ( + "context" + + "github.com/ory/x/configx" + + "github.com/gofrs/uuid" +) + +// TestContextualizer is a mock implementation of the Contextualizer interface. +type TestContextualizer struct{} + +type contextKeyFake int + +// fakeNIDContext is a test key for NID. +const fakeNIDContext contextKeyFake = 1 + +// SetNIDContext sets the nid for the given context. 
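[Editor's note — not part of the diff above: a minimal sketch of how a service might consume the Contextualizer interface and the Static implementation introduced in oryx/contextx/contextual.go. The Registry type, field names, and the import paths (the github.com/ory/x layout used elsewhere in this diff) are assumptions for illustration only.]

package example

import (
	"context"

	"github.com/gofrs/uuid"

	"github.com/ory/x/configx"
	"github.com/ory/x/contextx"
)

// Registry is a hypothetical service registry that resolves per-request
// tenancy through a Contextualizer.
type Registry struct {
	ctxer contextx.Contextualizer
	cfg   *configx.Provider
	nid   uuid.UUID // fallback network ID
}

// Config resolves the effective configuration for the request context. With a
// default Contextualizer this is simply r.cfg; a multi-tenant implementation
// may return a per-tenant provider instead.
func (r *Registry) Config(ctx context.Context) *configx.Provider {
	return r.ctxer.Config(ctx, r.cfg)
}

// Network resolves the effective network ID for the request context.
func (r *Registry) Network(ctx context.Context) uuid.UUID {
	return r.ctxer.Network(ctx, r.nid)
}

// NewSingleTenantRegistry pins every request to one network and one config by
// using the Static Contextualizer defined in this package.
func NewSingleTenantRegistry(nid uuid.UUID, cfg *configx.Provider) *Registry {
	return &Registry{ctxer: &contextx.Static{NID: nid, C: cfg}, cfg: cfg, nid: nid}
}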
+func SetNIDContext(ctx context.Context, nid uuid.UUID) context.Context { + return context.WithValue(ctx, fakeNIDContext, nid) //nolint:staticcheck +} + +// Network returns the network id for the given context. +func (d *TestContextualizer) Network(ctx context.Context, network uuid.UUID) uuid.UUID { + nid, ok := ctx.Value(fakeNIDContext).(uuid.UUID) + if !ok { + return network + } + return nid +} + +// Config returns the config for the given context. +func (d *TestContextualizer) Config(ctx context.Context, config *configx.Provider) *configx.Provider { + return config +} diff --git a/oryx/contextx/default.go b/oryx/contextx/default.go new file mode 100644 index 00000000000..573380747a3 --- /dev/null +++ b/oryx/contextx/default.go @@ -0,0 +1,27 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package contextx + +import ( + "context" + + "github.com/gofrs/uuid" + + "github.com/ory/x/configx" +) + +type Default struct{} + +var _ Contextualizer = (*Default)(nil) + +func (d *Default) Network(_ context.Context, network uuid.UUID) uuid.UUID { + if network == uuid.Nil { + panic("nid must be not nil") + } + return network +} + +func (d *Default) Config(_ context.Context, config *configx.Provider) *configx.Provider { + return config +} diff --git a/oryx/contextx/testhelpers.go b/oryx/contextx/testhelpers.go new file mode 100644 index 00000000000..98e835b7d8e --- /dev/null +++ b/oryx/contextx/testhelpers.go @@ -0,0 +1,158 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package contextx + +import ( + "context" + "net/http" + "net/http/httptest" + + "github.com/gofrs/uuid" + + "github.com/ory/x/configx" +) + +type ( + TestConfigProvider struct { + ConfigSchema []byte + Options []configx.OptionModifier + } + contextKey int +) + +func NewTestConfigProvider(schema []byte, opts ...configx.OptionModifier) *TestConfigProvider { + return &TestConfigProvider{ + ConfigSchema: schema, + Options: opts, + } +} + +func (t *TestConfigProvider) Network(ctx context.Context, network uuid.UUID) uuid.UUID { + return (&Default{}).Network(ctx, network) +} + +func (t *TestConfigProvider) Config(ctx context.Context, config *configx.Provider) *configx.Provider { + values, ok := ctx.Value(contextConfigKey).([]map[string]any) + if !ok { + return config + } + + opts := make([]configx.OptionModifier, 1, 1+len(values)) + opts[0] = configx.WithValues(config.All()) + for _, v := range values { + opts = append(opts, configx.WithValues(v)) + } + config, err := configx.New(ctx, t.ConfigSchema, append(t.Options, opts...)...) + if err != nil { + // This is not production code. The provider is only used in tests. + panic(err) + } + return config +} + +const contextConfigKey contextKey = 1 + +var ( + _ Contextualizer = (*TestConfigProvider)(nil) +) + +func WithConfigValue(ctx context.Context, key string, value any) context.Context { + return WithConfigValues(ctx, map[string]any{key: value}) +} + +func WithConfigValues(ctx context.Context, setValues ...map[string]any) context.Context { + values, ok := ctx.Value(contextConfigKey).([]map[string]any) + if !ok { + values = make([]map[string]any, 0) + } + newValues := make([]map[string]any, len(values), len(values)+len(setValues)) + copy(newValues, values) + newValues = append(newValues, setValues...) 
+ + return context.WithValue(ctx, contextConfigKey, newValues) +} + +type ConfigurableTestHandler struct { + configs map[uuid.UUID][]map[string]any + handler http.Handler +} + +func NewConfigurableTestHandler(h http.Handler) *ConfigurableTestHandler { + return &ConfigurableTestHandler{ + configs: make(map[uuid.UUID][]map[string]any), + handler: h, + } +} + +func (t *ConfigurableTestHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + cID := r.Header.Get("Test-Config-Id") + if config, ok := t.configs[uuid.FromStringOrNil(cID)]; ok { + r = r.WithContext(WithConfigValues(r.Context(), config...)) + } + t.handler.ServeHTTP(w, r) +} + +func (t *ConfigurableTestHandler) RegisterConfig(config ...map[string]any) uuid.UUID { + id := uuid.Must(uuid.NewV4()) + t.configs[id] = config + return id +} + +func (t *ConfigurableTestHandler) UseConfig(r *http.Request, id uuid.UUID) *http.Request { + r.Header.Set("Test-Config-Id", id.String()) + return r +} + +func (t *ConfigurableTestHandler) UseConfigValues(r *http.Request, values ...map[string]any) *http.Request { + return t.UseConfig(r, t.RegisterConfig(values...)) +} + +type ConfigurableTestServer struct { + *httptest.Server + handler *ConfigurableTestHandler + transport http.RoundTripper +} + +func NewConfigurableTestServer(h http.Handler) *ConfigurableTestServer { + handler := NewConfigurableTestHandler(h) + server := httptest.NewServer(handler) + + t := server.Client().Transport + cts := &ConfigurableTestServer{ + handler: handler, + Server: server, + transport: t, + } + server.Client().Transport = cts + return cts +} + +func (t *ConfigurableTestServer) RoundTrip(r *http.Request) (*http.Response, error) { + config, ok := r.Context().Value(contextConfigKey).([]map[string]any) + if ok && config != nil { + r = t.handler.UseConfigValues(r, config...) + } + return t.transport.RoundTrip(r) +} + +type AutoContextClient struct { + *http.Client + transport http.RoundTripper + ctx context.Context +} + +func (t *ConfigurableTestServer) Client(ctx context.Context) *AutoContextClient { + baseClient := *t.Server.Client() + autoClient := &AutoContextClient{ + Client: &baseClient, + transport: t, + ctx: ctx, + } + baseClient.Transport = autoClient + return autoClient +} + +func (c *AutoContextClient) RoundTrip(r *http.Request) (*http.Response, error) { + return c.transport.RoundTrip(r.WithContext(c.ctx)) +} diff --git a/oryx/contextx/tree.go b/oryx/contextx/tree.go new file mode 100644 index 00000000000..84760ed76e7 --- /dev/null +++ b/oryx/contextx/tree.go @@ -0,0 +1,26 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package contextx + +import ( + "context" + "testing" +) + +type ContextKey int + +const ( + ValidContextKey ContextKey = iota + 1 +) + +var RootContext = context.WithValue(context.Background(), ValidContextKey, true) + +func TestRootContext(t *testing.T) context.Context { + return context.WithValue(t.Context(), ValidContextKey, true) +} + +func IsRootContext(ctx context.Context) bool { + is, ok := ctx.Value(ValidContextKey).(bool) + return is && ok +} diff --git a/oryx/corsx/check_origin.go b/oryx/corsx/check_origin.go new file mode 100644 index 00000000000..f5bf037f4d6 --- /dev/null +++ b/oryx/corsx/check_origin.go @@ -0,0 +1,54 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package corsx + +import "strings" + +// CheckOrigin is a function that can be used well with cors.Options.AllowOriginRequestFunc. 
+// It checks whether the origin is allowed following the same behavior as github.com/rs/cors. +// +// Recommended usage for hot-reloadable origins: +// +// func (p *Config) cors(ctx context.Context, prefix string) (cors.Options, bool) { +// opts, enabled := p.GetProvider(ctx).CORS(prefix, cors.Options{ +// AllowedMethods: []string{"GET", "POST", "PUT", "PATCH", "DELETE"}, +// AllowedHeaders: []string{"Authorization", "Content-Type", "Cookie"}, +// ExposedHeaders: []string{"Content-Type", "Set-Cookie"}, +// AllowCredentials: true, +// }) +// opts.AllowOriginRequestFunc = func(r *http.Request, origin string) bool { +// // load the origins from the config on every request to allow hot-reloading +// allowedOrigins := p.GetProvider(r.Context()).Strings(prefix + ".cors.allowed_origins") +// return corsx.CheckOrigin(allowedOrigins, origin) +// } +// return opts, enabled +// } +func CheckOrigin(allowedOrigins []string, origin string) bool { + if len(allowedOrigins) == 0 { + return true + } + for _, o := range allowedOrigins { + if o == "*" { + // allow all origins + return true + } + // Note: for origins and methods matching, the spec requires a case-sensitive matching. + // As it may be error-prone, we chose to ignore the spec here. + // https://github.com/rs/cors/blob/066574eebbd0f5f1b6cd1154a160cc292ac1835e/cors.go#L132-L133 + o = strings.ToLower(o) + prefix, suffix, found := strings.Cut(o, "*") + if !found { + // not a pattern, check for equality + if o == origin { + return true + } + continue + } + // inspired by https://github.com/rs/cors/blob/066574eebbd0f5f1b6cd1154a160cc292ac1835e/utils.go#L15 + if len(origin) >= len(prefix)+len(suffix) && strings.HasPrefix(origin, prefix) && strings.HasSuffix(origin, suffix) { + return true + } + } + return false +} diff --git a/oryx/corsx/cmd.go b/oryx/corsx/cmd.go new file mode 100644 index 00000000000..b475201a0a7 --- /dev/null +++ b/oryx/corsx/cmd.go @@ -0,0 +1,46 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package corsx + +// HelpMessage returns a string containing information on setting up this CORS middleware. +func HelpMessage() string { + return `- CORS_ENABLED: Switch CORS support on (true) or off (false). Default is off (false). + + Example: CORS_ENABLED=true + +- CORS_ALLOWED_ORIGINS: A list of origins (comma separated values) a cross-domain request can be executed from. + If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) + to replace 0 or more characters (i.e.: http://*.domain.com). Usage of wildcards implies a small performance penality. + Only one wildcard can be used per origin. The default value is *. + + Example: CORS_ALLOWED_ORIGINS=http://*.domain.com,http://*.domain2.com + +- CORS_ALLOWED_METHODS: A list of methods (comma separated values) the client is allowed to use with cross-domain + requests. Default value is simple methods (GET and POST). + + Example: CORS_ALLOWED_METHODS=POST,GET,PUT + +- CORS_ALLOWED_CREDENTIALS: Indicates whether the request can include user credentials like cookies, HTTP authentication + or client side SSL certificates. + + Default: CORS_ALLOWED_CREDENTIALS=false + Example: CORS_ALLOWED_CREDENTIALS=true + +- CORS_DEBUG: Debugging flag adds additional output to debug server side CORS issues. + + Default: CORS_DEBUG=false + Example: CORS_DEBUG=true + +- CORS_MAX_AGE: Indicates how long (in seconds) the results of a preflight request can be cached. 
The default is 0 + which stands for no max age. + + Default: CORS_MAX_AGE=0 + Example: CORS_MAX_AGE=10 + +- CORS_ALLOWED_HEADERS: A list of non simple headers (comma separated values) the client is allowed to use with + cross-domain requests. + +- CORS_EXPOSED_HEADERS: Indicates which headers (comma separated values) are safe to expose to the API of a + CORS API specification.` +} diff --git a/oryx/corsx/defaults.go b/oryx/corsx/defaults.go new file mode 100644 index 00000000000..136bdec4f48 --- /dev/null +++ b/oryx/corsx/defaults.go @@ -0,0 +1,33 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package corsx + +// CORSRequestHeadersSafelist We add the safe list cors accept headers +// https://developer.mozilla.org/en-US/docs/Glossary/CORS-safelisted_request_header +var CORSRequestHeadersSafelist = []string{"Accept", "Content-Type", "Content-Length", "Accept-Language", "Content-Language"} + +// CORSResponseHeadersSafelist We add the safe list cors expose headers +// https://developer.mozilla.org/en-US/docs/Glossary/CORS-safelisted_response_header +var CORSResponseHeadersSafelist = []string{"Set-Cookie", "Cache-Control", "Expires", "Last-Modified", "Pragma", "Content-Length", "Content-Language", "Content-Type"} + +// CORSDefaultAllowedMethods Default allowed methods +var CORSDefaultAllowedMethods = []string{"GET", "POST", "PUT", "PATCH", "DELETE"} + +// CORSRequestHeadersExtended Extended list of request headers +// these will be concatenated with the safelist +var CORSRequestHeadersExtended = []string{"Authorization", "X-CSRF-TOKEN"} + +// CORSResponseHeadersExtended Extended list of response headers +// these will be concatenated with the safelist +var CORSResponseHeadersExtended = []string{} + +// CORSDefaultMaxAge max age for cache of preflight request result +// default is 5 seconds +// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age +var CORSDefaultMaxAge = 5 + +// CORSAllowCredentials default value for allow credentials +// this is required for cookies to be sent by the browser +// we always want this since we are using cookies for authentication most of the time +var CORSAllowCredentials = true diff --git a/oryx/corsx/middleware.go b/oryx/corsx/middleware.go new file mode 100644 index 00000000000..a6ab0b82468 --- /dev/null +++ b/oryx/corsx/middleware.go @@ -0,0 +1,34 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package corsx + +import ( + "context" + "net/http" + + "github.com/rs/cors" + "github.com/urfave/negroni" +) + +// ContextualizedMiddleware is a context-aware CORS middleware. It allows hot-reloading CORS configuration using +// the HTTP request context. +// +// n := negroni.New() +// n.UseFunc(ContextualizedMiddleware(func(context.Context) (opts cors.Options, enabled bool) { +// panic("implement me") +// }) +// // ... +// +// Deprecated: because this is not really practical to use, you should use CheckOrigin as the cors.Options.AllowOriginRequestFunc instead. 
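[Editor's note — not part of the diff: the deprecation note above points to CheckOrigin wired into cors.Options.AllowOriginRequestFunc. This is a minimal, self-contained sketch of that setup using the corsx defaults introduced in this diff; the origin list, port, and handler are hypothetical, and a real service would re-read the origin list from configuration on each request to get hot reloading.]

package main

import (
	"net/http"

	"github.com/rs/cors"

	"github.com/ory/x/corsx"
)

func main() {
	// Hypothetical allow-list; CheckOrigin supports one wildcard per entry,
	// e.g. https://*.example.com.
	allowedOrigins := []string{"https://*.example.com", "https://app.example.org"}

	mw := cors.New(cors.Options{
		AllowedMethods:   corsx.CORSDefaultAllowedMethods,
		AllowedHeaders:   append(corsx.CORSRequestHeadersSafelist, corsx.CORSRequestHeadersExtended...),
		AllowCredentials: corsx.CORSAllowCredentials,
		// Delegating the origin check keeps the middleware hot-reloadable:
		// swap allowedOrigins for a per-request config lookup to pick up
		// changes without recreating the middleware.
		AllowOriginRequestFunc: func(r *http.Request, origin string) bool {
			return corsx.CheckOrigin(allowedOrigins, origin)
		},
	})

	handler := mw.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("ok"))
	}))
	_ = http.ListenAndServe(":8080", handler)
}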
+func ContextualizedMiddleware(provider func(context.Context) (opts cors.Options, enabled bool)) negroni.HandlerFunc { + return func(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + options, enabled := provider(r.Context()) + if !enabled { + next(rw, r) + return + } + + cors.New(options).Handler(next).ServeHTTP(rw, r) + } +} diff --git a/oryx/corsx/normalize.go b/oryx/corsx/normalize.go new file mode 100644 index 00000000000..609b12439a1 --- /dev/null +++ b/oryx/corsx/normalize.go @@ -0,0 +1,28 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package corsx + +import "net/url" + +// NormalizeOrigins normalizes the CORS origins. +func NormalizeOrigins(origins []url.URL) []string { + results := make([]string, len(origins)) + for k, o := range origins { + results[k] = o.Scheme + "://" + o.Host + } + return results +} + +// NormalizeOriginStrings normalizes the CORS origins from string representation +func NormalizeOriginStrings(origins []string) ([]string, error) { + results := make([]string, len(origins)) + for k, o := range origins { + u, err := url.ParseRequestURI(o) + if err != nil { + return nil, err + } + results[k] = u.Scheme + "://" + u.Host + } + return results, nil +} diff --git a/oryx/crdbx/readonly.go b/oryx/crdbx/readonly.go new file mode 100644 index 00000000000..f473c387500 --- /dev/null +++ b/oryx/crdbx/readonly.go @@ -0,0 +1,21 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package crdbx + +import ( + "github.com/ory/pop/v6" + + "github.com/ory/x/dbal" + "github.com/ory/x/sqlcon" +) + +// SetTransactionReadOnly sets the transaction to read only for CockroachDB. +func SetTransactionReadOnly(c *pop.Connection) error { + if c.Dialect.Name() != dbal.DriverCockroachDB { + // Only CockroachDB supports this. + return nil + } + + return sqlcon.HandleError(c.RawQuery("SET TRANSACTION READ ONLY").Exec()) +} diff --git a/oryx/crdbx/staleness.go b/oryx/crdbx/staleness.go new file mode 100644 index 00000000000..f9158840435 --- /dev/null +++ b/oryx/crdbx/staleness.go @@ -0,0 +1,110 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package crdbx + +import ( + "net/http" + + "github.com/ory/x/dbal" + + "github.com/ory/pop/v6" + + "github.com/ory/x/sqlcon" +) + +// Control API consistency guarantees +// +// swagger:model consistencyRequestParameters +type ConsistencyRequestParameters struct { + // Read Consistency Level (preview) + // + // The read consistency level determines the consistency guarantee for reads: + // + // - strong (slow): The read is guaranteed to return the most recent data committed at the start of the read. + // - eventual (very fast): The result will return data that is about 4.8 seconds old. + // + // The default consistency guarantee can be changed in the Ory Network Console or using the Ory CLI with + // `ory patch project --replace '/previews/default_read_consistency_level="strong"'`. + // + // Setting the default consistency level to `eventual` may cause regressions in the future as we add consistency + // controls to more APIs. Currently, the following APIs will be affected by this setting: + // + // - `GET /admin/identities` + // + // This feature is in preview and only available in Ory Network. + // + // required: false + // in: query + Consistency ConsistencyLevel `json:"consistency"` +} + +// ConsistencyLevel is the consistency level. 
+// swagger:enum ConsistencyLevel +type ConsistencyLevel string + +const ( + // ConsistencyLevelUnset is the unset / default consistency level. + ConsistencyLevelUnset ConsistencyLevel = "" + // ConsistencyLevelStrong is the strong consistency level. + ConsistencyLevelStrong ConsistencyLevel = "strong" + // ConsistencyLevelEventual is the eventual consistency level using follower read timestamps. + ConsistencyLevelEventual ConsistencyLevel = "eventual" +) + +// ConsistencyLevelFromRequest extracts the consistency level from a request. +func ConsistencyLevelFromRequest(r *http.Request) ConsistencyLevel { + return ConsistencyLevelFromString(r.URL.Query().Get("consistency")) +} + +// ConsistencyLevelFromString converts a string to a ConsistencyLevel. +// If the string is not recognized or unset, ConsistencyLevelStrong is returned. +func ConsistencyLevelFromString(in string) ConsistencyLevel { + switch in { + case string(ConsistencyLevelStrong): + return ConsistencyLevelStrong + case string(ConsistencyLevelEventual): + return ConsistencyLevelEventual + case string(ConsistencyLevelUnset): + return ConsistencyLevelUnset + } + return ConsistencyLevelStrong +} + +// SetTransactionConsistency sets the transaction consistency level for CockroachDB. +func SetTransactionConsistency(c *pop.Connection, level ConsistencyLevel, fallback ConsistencyLevel) error { + q := getTransactionConsistencyQuery(c.Dialect.Name(), level, fallback) + if len(q) == 0 { + return nil + } + + return sqlcon.HandleError(c.RawQuery(q).Exec()) +} + +const transactionFollowerReadTimestamp = "SET TRANSACTION AS OF SYSTEM TIME follower_read_timestamp()" + +func getTransactionConsistencyQuery(dialect string, level ConsistencyLevel, fallback ConsistencyLevel) string { + if dialect != dbal.DriverCockroachDB { + // Only CockroachDB supports this. + return "" + } + + switch level { + case ConsistencyLevelStrong: + // Nothing to do + return "" + case ConsistencyLevelEventual: + // Jumps to end of function + case ConsistencyLevelUnset: + fallthrough + default: + if fallback != ConsistencyLevelEventual { + // Nothing to do + return "" + } + + // Jumps to end of function + } + + return transactionFollowerReadTimestamp +} diff --git a/oryx/dbal/canonicalize.go b/oryx/dbal/canonicalize.go new file mode 100644 index 00000000000..b07ce3747a8 --- /dev/null +++ b/oryx/dbal/canonicalize.go @@ -0,0 +1,9 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package dbal + +const ( + DriverMySQL = "mysql" + DriverCockroachDB = "cockroach" +) diff --git a/oryx/dbal/driver.go b/oryx/dbal/driver.go new file mode 100644 index 00000000000..ae6ed3341de --- /dev/null +++ b/oryx/dbal/driver.go @@ -0,0 +1,14 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package dbal + +import ( + "strings" +) + +// IsSQLite returns true if the connection is a SQLite string. 
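[Editor's note — not part of the diff: a short sketch of how the crdbx consistency helpers above are meant to be combined — read the requested level from the query string and apply it inside a transaction. The handler name and the query it runs are hypothetical; only the crdbx calls come from this change.]

package example

import (
	"net/http"

	"github.com/ory/pop/v6"

	"github.com/ory/x/crdbx"
)

// listItems is a hypothetical read handler backed by a *pop.Connection.
func listItems(conn *pop.Connection, r *http.Request, out interface{}) error {
	level := crdbx.ConsistencyLevelFromRequest(r) // "", "strong", or "eventual"

	return conn.Transaction(func(tx *pop.Connection) error {
		// With level "eventual" on CockroachDB this runs
		//   SET TRANSACTION AS OF SYSTEM TIME follower_read_timestamp()
		// With the "strong" fallback passed here, the unset level and every
		// non-CockroachDB dialect are a no-op.
		if err := crdbx.SetTransactionConsistency(tx, level, crdbx.ConsistencyLevelStrong); err != nil {
			return err
		}
		return tx.All(out)
	})
}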
+func IsSQLite(dsn string) bool { + scheme := strings.Split(dsn, "://")[0] + return scheme == "sqlite" || scheme == "sqlite3" +} diff --git a/oryx/dbal/dsn.go b/oryx/dbal/dsn.go new file mode 100644 index 00000000000..fa1b2f3e12a --- /dev/null +++ b/oryx/dbal/dsn.go @@ -0,0 +1,51 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package dbal + +import ( + "fmt" + "os" + "regexp" + "testing" + + "github.com/stretchr/testify/require" +) + +var sqliteMemoryRegexp = regexp.MustCompile(`^sqlite://file:.+\?.*&?mode=memory($|&.*)|sqlite://(file:)?:memory:\?.*$|^(:memory:|memory)$`) + +// IsMemorySQLite returns true if a given DSN string is pointing to a SQLite database. +// +// SQLite can be written in different styles depending on the use case +// - just in memory +// - shared connection +// - shared but unique in the same process +// see: https://sqlite.org/inmemorydb.html +func IsMemorySQLite(dsn string) bool { return sqliteMemoryRegexp.MatchString(dsn) } + +// NewSQLiteTestDatabase creates a new, unique SQLite database +// which is shared amongst all callers and identified by an individual file name. +// The database file is created in the system's temporary directory, and not actively +// removed to allow debugging in case of test failures. +func NewSQLiteTestDatabase(t testing.TB) string { + fn, err := os.MkdirTemp("", "sqlite-test-db-*") + require.NoError(t, err) + return NewSQLiteDatabase(fn) +} + +// NewSQLiteInMemoryDatabase creates a new in-memory, unique SQLite database +// which is shared amongst all callers and identified by an individual file name. +func NewSQLiteInMemoryDatabase(name string) string { + return fmt.Sprintf("sqlite://file:%s?_fk=true&mode=memory&cache=shared&_busy_timeout=1000", name) +} + +// NewSQLiteDatabase creates a new on-disk, unique SQLite database +// which is shared amongst all callers and identified by an individual file name. +// This is sometimes necessary over a in-memory database, for example when multiple tests/goroutines run in parallel +// and access the same table. +// This would result in a locking error from SQLite when running in-memory. +// Additionally, shared cache mode is deprecated and discouraged, and the problem is better solved with the WAL, +// according to official docs. 
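[Editor's note — not part of the diff: the dbal helpers above only build and classify DSN strings. A small sketch of how they fit together; the name and path are hypothetical, and the commented outputs follow directly from the format strings in this file.]

package main

import (
	"fmt"

	"github.com/ory/x/dbal"
)

func main() {
	// A process-unique in-memory database, shared by everyone using the same name.
	mem := dbal.NewSQLiteInMemoryDatabase("example")
	fmt.Println(mem)
	// sqlite://file:example?_fk=true&mode=memory&cache=shared&_busy_timeout=1000

	fmt.Println(dbal.IsSQLite(mem))       // true
	fmt.Println(dbal.IsMemorySQLite(mem)) // true

	// An on-disk database inside an existing directory (hypothetical path),
	// using WAL mode instead of the deprecated shared cache.
	disk := dbal.NewSQLiteDatabase("/tmp/sqlite-example")
	fmt.Println(dbal.IsMemorySQLite(disk)) // false
}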
+func NewSQLiteDatabase(name string) string { + return fmt.Sprintf("sqlite://file:%s/db.sqlite?_fk=true&_journal_mode=WAL&_busy_timeout=1000", name) +} diff --git a/oryx/dbal/testhelpers.go b/oryx/dbal/testhelpers.go new file mode 100644 index 00000000000..b13d0109614 --- /dev/null +++ b/oryx/dbal/testhelpers.go @@ -0,0 +1,59 @@ +package dbal + +import ( + "bytes" + "encoding/hex" + "fmt" + "io/fs" + "os" + "path/filepath" + "regexp" + "testing" + + "github.com/ory/x/sqlcon/dockertest" + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + + "github.com/ory/pop/v6" + "github.com/ory/x/fsx" +) + +var hashDumpRegex = regexp.MustCompile(`-- migrations hash: ([^\n]+)\n`) + +func RestoreFromSchemaDump(t testing.TB, c *pop.Connection, migrations fs.FS, writeTo string) func(testing.TB) { + newHash, err := fsx.DirHash(migrations) + require.NoError(t, err) + + dumpFilename := filepath.Join(writeTo, c.Dialect.Name()+"_dump.sql") + + updateDump := func(t testing.TB) { + dump := dockertest.DumpSchema(t, c) + f, err := os.OpenFile(dumpFilename, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) + require.NoError(t, err) + defer f.Close() + + _, _ = fmt.Fprintf(f, "-- migrations hash: %x\n\n%s", newHash, dump) + t.Fatal("database schema restored from migrations and dump updated, please re-run the test") + } + + dump, err := os.ReadFile(dumpFilename) + if errors.Is(err, fs.ErrNotExist) { + return updateDump + } + require.NoError(t, err) + + matches := hashDumpRegex.FindSubmatch(dump) + if len(matches) != 2 { + return updateDump + } + + currentHash, err := hex.DecodeString(string(matches[1])) + require.NoError(t, err) + + if !bytes.Equal(newHash, currentHash) { + return updateDump + } + + require.NoError(t, c.RawQuery(string(dump)).Exec()) + return nil +} diff --git a/oryx/decoderx/http.go b/oryx/decoderx/http.go new file mode 100644 index 00000000000..3fef1e42458 --- /dev/null +++ b/oryx/decoderx/http.go @@ -0,0 +1,569 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package decoderx + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "slices" + "strconv" + "strings" + + "github.com/pkg/errors" + "github.com/tidwall/gjson" + "github.com/tidwall/sjson" + + "github.com/ory/jsonschema/v3" + + "github.com/ory/herodot" + + "github.com/ory/x/httpx" + "github.com/ory/x/jsonschemax" +) + +type ( + // HTTP decodes json and form-data from HTTP Request Bodies. + HTTP struct{} + + httpDecoderOptions struct { + keepRequestBody bool + allowedContentTypes []string + allowedHTTPMethods []string + jsonSchemaRef string + jsonSchemaCompiler *jsonschema.Compiler + jsonSchemaValidate bool + maxCircularReferenceDepth uint8 + handleParseErrors parseErrorStrategy + expectJSONFlattened bool + queryAndBody bool + } + + // HTTPDecoderOption configures the HTTP decoder. + HTTPDecoderOption func(*httpDecoderOptions) + + parseErrorStrategy uint8 +) + +const ( + httpContentTypeMultipartForm = "multipart/form-data" + httpContentTypeURLEncodedForm = "application/x-www-form-urlencoded" + httpContentTypeJSON = "application/json" +) + +const ( + // ParseErrorIgnoreConversionErrors will ignore any errors caused by strconv.Parse* and use the + // raw form field value, which is a string, when such a parse error occurs. 
+ // + // If the JSON Schema defines `{"ratio": {"type": "number"}}` but `ratio=foobar` then field + // `ratio` will be handled as a string. If the destination struct is a `json.RawMessage`, then + // the output will be `{"ratio": "foobar"}`. + ParseErrorIgnoreConversionErrors parseErrorStrategy = iota + 1 + + // ParseErrorUseEmptyValueOnConversionErrors will ignore any parse errors caused by strconv.Parse* and use the + // default value of the type to be casted, e.g. float64(0), string(""). + // + // If the JSON Schema defines `{"ratio": {"type": "number"}}` but `ratio=foobar` then field + // `ratio` will receive the default value for the primitive type (here `0.0` for `number`). + // If the destination struct is a `json.RawMessage`, then the output will be `{"ratio": 0.0}`. + ParseErrorUseEmptyValueOnConversionErrors + + // ParseErrorReturnOnConversionErrors will abort and return with an error if strconv.Parse* returns + // an error. + // + // If the JSON Schema defines `{"ratio": {"type": "number"}}` but `ratio=foobar` the parser aborts + // and returns an error, here: `strconv.ParseFloat: parsing "foobar"`. + ParseErrorReturnOnConversionErrors +) + +var errKeyNotFound = errors.New("key not found") + +// HTTPFormDecoder configures the HTTP decoder to only accept form-data +// (application/x-www-form-urlencoded, multipart/form-data) +func HTTPFormDecoder() HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.allowedContentTypes = []string{httpContentTypeMultipartForm, httpContentTypeURLEncodedForm} + } +} + +// HTTPJSONDecoder configures the HTTP decoder to only accept JSON data +// (application/json). +func HTTPJSONDecoder() HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.allowedContentTypes = []string{httpContentTypeJSON} + } +} + +// HTTPKeepRequestBody configures the HTTP decoder to allow other +// HTTP request body readers to read the body as well by keeping +// the data in memory. +func HTTPKeepRequestBody(keep bool) HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.keepRequestBody = keep + } +} + +// HTTPDecoderSetValidatePayloads sets if payloads should be validated or not. +func HTTPDecoderSetValidatePayloads(validate bool) HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.jsonSchemaValidate = validate + o.keepRequestBody = true + } +} + +// HTTPDecoderJSONFollowsFormFormat if set tells the decoder that JSON follows the same conventions +// as the form decoder, meaning `{"foo.bar": "..."}` is translated to `{"foo": {"bar": "..."}}`. +func HTTPDecoderJSONFollowsFormFormat() HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.expectJSONFlattened = true + o.keepRequestBody = true + } +} + +// HTTPDecoderAllowedMethods sets the allowed HTTP methods. Defaults are POST, PUT, PATCH. +func HTTPDecoderAllowedMethods(method ...string) HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.allowedHTTPMethods = method + } +} + +// HTTPDecoderUseQueryAndBody will check both the HTTP body and the HTTP query params when decoding. +// Only relevant for non-GET operations. +func HTTPDecoderUseQueryAndBody() HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.queryAndBody = true + } +} + +// HTTPDecoderSetIgnoreParseErrorsStrategy sets a strategy for dealing with strconv.Parse* errors: +// +// - decoderx.ParseErrorIgnoreConversionErrors will ignore any parse errors caused by strconv.Parse* and use the +// raw form field value, which is a string, when such a parse error occurs. 
(default) +// - decoderx.ParseErrorUseEmptyValueOnConversionErrors will ignore any parse errors caused by strconv.Parse* and use the +// default value of the type to be casted, e.g. float64(0), string(""). +// - decoderx.ParseErrorReturnOnConversionErrors will abort and return with an error if strconv.Parse* returns +// an error. +func HTTPDecoderSetIgnoreParseErrorsStrategy(strategy parseErrorStrategy) HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.handleParseErrors = strategy + } +} + +// HTTPDecoderSetMaxCircularReferenceDepth sets the maximum recursive reference resolution depth. +func HTTPDecoderSetMaxCircularReferenceDepth(depth uint8) HTTPDecoderOption { + return func(o *httpDecoderOptions) { + o.maxCircularReferenceDepth = depth + } +} + +// HTTPJSONSchemaCompiler sets a JSON schema to be used for validation and type assertion of +// incoming requests. +func HTTPJSONSchemaCompiler(ref string, compiler *jsonschema.Compiler) HTTPDecoderOption { + return func(o *httpDecoderOptions) { + if compiler == nil { + compiler = jsonschema.NewCompiler() + } + compiler.ExtractAnnotations = true + o.jsonSchemaCompiler = compiler + o.jsonSchemaRef = ref + o.jsonSchemaValidate = true + } +} + +// HTTPRawJSONSchemaCompiler uses a JSON Schema Compiler with the provided JSON Schema in raw byte form. +func HTTPRawJSONSchemaCompiler(raw []byte) (HTTPDecoderOption, error) { + compiler := jsonschema.NewCompiler() + id := fmt.Sprintf("%x.json", sha256.Sum256(raw)) + if err := compiler.AddResource(id, bytes.NewReader(raw)); err != nil { + return nil, err + } + compiler.ExtractAnnotations = true + + return func(o *httpDecoderOptions) { + o.jsonSchemaCompiler = compiler + o.jsonSchemaRef = id + o.jsonSchemaValidate = true + }, nil +} + +// MustHTTPRawJSONSchemaCompiler uses HTTPRawJSONSchemaCompiler and panics on error. +func MustHTTPRawJSONSchemaCompiler(raw []byte) HTTPDecoderOption { + f, err := HTTPRawJSONSchemaCompiler(raw) + if err != nil { + panic(err) + } + return f +} + +func newHTTPDecoderOptions(fs []HTTPDecoderOption) *httpDecoderOptions { + o := &httpDecoderOptions{ + allowedContentTypes: []string{ + httpContentTypeMultipartForm, httpContentTypeURLEncodedForm, httpContentTypeJSON, + }, + allowedHTTPMethods: []string{"POST", "PUT", "PATCH"}, + maxCircularReferenceDepth: 5, + handleParseErrors: ParseErrorIgnoreConversionErrors, + } + + for _, f := range fs { + f(o) + } + + return o +} + +// NewHTTP creates a new HTTP decoder. +func NewHTTP() *HTTP { + return new(HTTP) +} + +func (t *HTTP) validateRequest(r *http.Request, c *httpDecoderOptions) error { + method := strings.ToUpper(r.Method) + + if !slices.Contains(c.allowedHTTPMethods, method) { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf(`Unable to decode body because HTTP Request Method was "%s" but only %v are supported.`, method, c.allowedHTTPMethods)) + } + + if method != "GET" { + if r.ContentLength == 0 { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf(`Unable to decode HTTP Request Body because its HTTP Header "Content-Length" is zero.`)) + } + + if !httpx.HasContentType(r, c.allowedContentTypes...) 
{ + return errors.WithStack(herodot.ErrBadRequest.WithReasonf(`HTTP %s Request used unknown HTTP Header "Content-Type: %s", only %v are supported.`, method, r.Header.Get("Content-Type"), c.allowedContentTypes)) + } + } + + return nil +} + +func (t *HTTP) validatePayload(ctx context.Context, raw json.RawMessage, c *httpDecoderOptions) error { + if !c.jsonSchemaValidate { + return nil + } + + if c.jsonSchemaCompiler == nil { + return errors.WithStack(herodot.ErrInternalServerError.WithReasonf("JSON Schema Validation is required but no compiler was provided.")) + } + + schema, err := c.jsonSchemaCompiler.Compile(ctx, c.jsonSchemaRef) + if err != nil { + return errors.WithStack(herodot.ErrInternalServerError.WithReasonf("Unable to load JSON Schema from location: %s", c.jsonSchemaRef).WithDebug(err.Error())) + } + + if err := schema.Validate(bytes.NewBuffer(raw)); err != nil { + if _, ok := err.(*jsonschema.ValidationError); ok { + return errors.WithStack(err) + } + return errors.WithStack(herodot.ErrInternalServerError.WithReasonf("Unable to process JSON Schema and input: %s", err).WithDebug(err.Error())) + } + + return nil +} + +// Decode takes a HTTP Request Body and decodes it into destination. +func (t *HTTP) Decode(r *http.Request, destination interface{}, opts ...HTTPDecoderOption) error { + c := newHTTPDecoderOptions(opts) + if err := t.validateRequest(r, c); err != nil { + return err + } + + if r.Method == "GET" { + return t.decodeForm(r, destination, c) + } else if httpx.HasContentType(r, httpContentTypeJSON) { + if c.expectJSONFlattened { + return t.decodeJSONForm(r, destination, c) + } + return t.decodeJSON(r, destination, c) + } else if httpx.HasContentType(r, httpContentTypeMultipartForm, httpContentTypeURLEncodedForm) { + return t.decodeForm(r, destination, c) + } + + return errors.WithStack(herodot.ErrInternalServerError.WithReasonf("Unable to determine decoder for content type: %s", r.Header.Get("Content-Type"))) +} + +func (t *HTTP) requestBody(r *http.Request, o *httpDecoderOptions) (reader io.ReadCloser, err error) { + if strings.ToUpper(r.Method) == "GET" { + return io.NopCloser(bytes.NewBufferString(r.URL.Query().Encode())), nil + } + + if !o.keepRequestBody { + return r.Body, nil + } + + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + return nil, errors.Wrapf(err, "unable to read body") + } + + _ = r.Body.Close() // must close + r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) + + return io.NopCloser(bytes.NewBuffer(bodyBytes)), nil +} + +func (t *HTTP) decodeJSONForm(r *http.Request, destination interface{}, o *httpDecoderOptions) error { + if o.jsonSchemaCompiler == nil { + return errors.WithStack(herodot.ErrInternalServerError.WithReasonf("Unable to decode HTTP Form Body because no validation schema was provided. 
This is a code bug.")) + } + + paths, err := jsonschemax.ListPathsWithRecursion(r.Context(), o.jsonSchemaRef, o.jsonSchemaCompiler, o.maxCircularReferenceDepth) + if err != nil { + return errors.WithStack(herodot.ErrInternalServerError.WithTrace(err).WithReasonf("Unable to prepare JSON Schema for HTTP Post Body Form parsing: %s", err).WithDebugf("%+v", err)) + } + + reader, err := t.requestBody(r, o) + if err != nil { + return err + } + + var interim json.RawMessage + if err := json.NewDecoder(reader).Decode(&interim); err != nil { + return errors.WithStack(herodot.ErrBadRequest.WithError(err.Error()).WithReason("Unable to decode form as JSON.")) + } + + parsed := gjson.ParseBytes(interim) + if !parsed.IsObject() { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf("Expected JSON sent in request body to be an object but got: %s", parsed.Type.String())) + } + + values := url.Values{} + parsed.ForEach(func(k, v gjson.Result) bool { + values.Set(k.String(), v.String()) + return true + }) + + if o.queryAndBody { + _ = r.ParseForm() + for k := range r.Form { + values.Set(k, r.Form.Get(k)) + } + } + + raw, err := t.decodeURLValues(values, paths, o) + if err != nil { + return err + } + + if err := json.Unmarshal(raw, destination); err != nil { + return errors.WithStack(err) + } + + return t.validatePayload(r.Context(), raw, o) +} + +func (t *HTTP) decodeForm(r *http.Request, destination interface{}, o *httpDecoderOptions) error { + if o.jsonSchemaCompiler == nil { + return errors.WithStack(herodot.ErrInternalServerError.WithReasonf("Unable to decode HTTP Form Body because no validation schema was provided. This is a code bug.")) + } + + reader, err := t.requestBody(r, o) + if err != nil { + return err + } + + defer func() { + r.Body = reader + }() + + if err := r.ParseForm(); err != nil { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode HTTP %s form body: %s", strings.ToUpper(r.Method), err).WithDebug(err.Error())) + } + + paths, err := jsonschemax.ListPathsWithRecursion(r.Context(), o.jsonSchemaRef, o.jsonSchemaCompiler, o.maxCircularReferenceDepth) + if err != nil { + return errors.WithStack(herodot.ErrInternalServerError.WithTrace(err).WithReasonf("Unable to prepare JSON Schema for HTTP Post Body Form parsing: %s", err).WithDebugf("%+v", err)) + } + + values := r.PostForm + if r.Method == "GET" || o.queryAndBody { + values = r.Form + } + + raw, err := t.decodeURLValues(values, paths, o) + if err != nil && !errors.Is(err, errKeyNotFound) { + return err + } + + if err := json.NewDecoder(bytes.NewReader(raw)).Decode(destination); err != nil { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode JSON payload: %s", err)) + } + + return t.validatePayload(r.Context(), raw, o) +} + +func (t *HTTP) decodeURLValues(values url.Values, paths []jsonschemax.Path, o *httpDecoderOptions) (json.RawMessage, error) { + raw := json.RawMessage(`{}`) + for key := range values { + for _, path := range paths { + if key == path.Name { + var err error + switch path.Type.(type) { + case []string: + raw, err = sjson.SetBytes(raw, path.Name, values[key]) + case []float64: + for k, v := range values[key] { + var f float64 + if f, err = strconv.ParseFloat(v, 64); err != nil { + switch o.handleParseErrors { + case ParseErrorIgnoreConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name+"."+strconv.Itoa(k), v) + case ParseErrorUseEmptyValueOnConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name+"."+strconv.Itoa(k), f) + case 
ParseErrorReturnOnConversionErrors: + return nil, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Expected value to be a number."). + WithDetail("parse_error", err.Error()). + WithDetail("name", key). + WithDetailf("index", "%d", k). + WithDetail("value", v)) + } + } else { + raw, err = sjson.SetBytes(raw, path.Name+"."+strconv.Itoa(k), f) + } + } + case []bool: + for k, v := range values[key] { + var b bool + if b, err = strconv.ParseBool(v); err != nil { + switch o.handleParseErrors { + case ParseErrorIgnoreConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name+"."+strconv.Itoa(k), v) + case ParseErrorUseEmptyValueOnConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name+"."+strconv.Itoa(k), b) + case ParseErrorReturnOnConversionErrors: + return nil, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Expected value to be a boolean."). + WithDetail("parse_error", err.Error()). + WithDetail("name", key). + WithDetailf("index", "%d", k). + WithDetail("value", v)) + } + } else { + raw, err = sjson.SetBytes(raw, path.Name+"."+strconv.Itoa(k), b) + } + } + case []interface{}: + raw, err = sjson.SetBytes(raw, path.Name, values[key]) + case bool: + v := values[key][len(values[key])-1] + if len(v) == 0 { + if !path.Required { + continue + } + v = "false" + } + + var b bool + if b, err = strconv.ParseBool(v); err != nil { + switch o.handleParseErrors { + case ParseErrorIgnoreConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name, v) + case ParseErrorUseEmptyValueOnConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name, b) + case ParseErrorReturnOnConversionErrors: + return nil, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Expected value to be a boolean."). + WithDetail("parse_error", err.Error()). + WithDetail("name", key). + WithDetail("value", values.Get(key))) + } + } else { + raw, err = sjson.SetBytes(raw, path.Name, b) + } + case float64: + v := values.Get(key) + if len(v) == 0 { + if !path.Required { + continue + } + v = "0.0" + } + + var f float64 + if f, err = strconv.ParseFloat(v, 64); err != nil { + switch o.handleParseErrors { + case ParseErrorIgnoreConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name, v) + case ParseErrorUseEmptyValueOnConversionErrors: + raw, err = sjson.SetBytes(raw, path.Name, f) + case ParseErrorReturnOnConversionErrors: + return nil, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Expected value to be a number."). + WithDetail("parse_error", err.Error()). + WithDetail("name", key). 
+ WithDetail("value", values.Get(key))) + } + } else { + raw, err = sjson.SetBytes(raw, path.Name, f) + } + case string: + v := values.Get(key) + if len(v) == 0 { + continue + } + + raw, err = sjson.SetBytes(raw, path.Name, v) + case map[string]interface{}: + v := values.Get(key) + if len(v) == 0 && !path.Required { + continue + } + + raw, err = sjson.SetRawBytes(raw, path.Name, []byte(v)) + case []map[string]interface{}: + raw, err = sjson.SetBytes(raw, path.Name, values[key]) + } + + if err != nil { + return nil, errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to type assert values from HTTP Post Body: %s", err)) + } + break + } + } + } + + for _, path := range paths { + if path.TypeHint != jsonschemax.JSON { + continue + } + + if !gjson.GetBytes(raw, path.Name).Exists() { + var err error + raw, err = sjson.SetRawBytes(raw, path.Name, []byte(`{}`)) + if err != nil { + return nil, errors.WithStack(err) + } + } + } + + return raw, nil +} + +func (t *HTTP) decodeJSON(r *http.Request, destination interface{}, o *httpDecoderOptions) error { + reader, err := t.requestBody(r, o) + if err != nil { + return err + } + + raw, err := io.ReadAll(reader) + if err != nil { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to read HTTP POST body: %s", err)) + } + + dc := json.NewDecoder(bytes.NewReader(raw)) + if err := dc.Decode(destination); err != nil { + return errors.WithStack(herodot.ErrBadRequest.WithReasonf("Unable to decode JSON payload: %s", err).WithDebugf("Received request body: %s", string(raw))) + } + + if err := t.validatePayload(r.Context(), raw, o); err != nil { + if o.expectJSONFlattened && strings.Contains(err.Error(), "json: unknown field") { + return t.decodeJSONForm(r, destination, o) + } + return err + } + + return nil +} diff --git a/oryx/decoderx/stub/consent.json b/oryx/decoderx/stub/consent.json new file mode 100644 index 00000000000..6539260706e --- /dev/null +++ b/oryx/decoderx/stub/consent.json @@ -0,0 +1,53 @@ +{ + "$id": "https://example.com/ory.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "additionalProperties": false, + "properties": { + "traits": { + "additionalProperties": false, + "properties": { + "consent": { + "additionalProperties": false, + "properties": { + "tos": { + "description": "yyyymmdd of when this was accepted", + "title": "I accept the Terms of Service https://www.ory.sh/ptos", + "const": true, + "maxLength": 30 + }, + "inner": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + }, + "required": ["foo"] + } + }, + "required": ["tos"], + "title": "Consent", + "type": "object" + }, + "notrequired": { + "additionalProperties": false, + "properties": { + "tos": { + "description": "yyyymmdd of when this was accepted", + "title": "I accept the Terms of Service https://www.ory.sh/ptos", + "const": true, + "maxLength": 30 + } + }, + "required": ["tos"], + "title": "Consent", + "type": "object" + } + }, + "required": ["consent"], + "type": "object" + } + }, + "title": "Person", + "type": "object" +} diff --git a/oryx/decoderx/stub/dynamic-object.json b/oryx/decoderx/stub/dynamic-object.json new file mode 100644 index 00000000000..beed1274bc2 --- /dev/null +++ b/oryx/decoderx/stub/dynamic-object.json @@ -0,0 +1,22 @@ +{ + "$id": "https://example.com/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "name": { + "type": "object", + "properties": { + "first": { + "type": "string" + }, + "last": { + "type": "string" + } + 
} + }, + "dynamic_object": { + "type": "object", + "additionalProperties": true + } + } +} diff --git a/oryx/decoderx/stub/nested.json b/oryx/decoderx/stub/nested.json new file mode 100644 index 00000000000..4efa3af38fd --- /dev/null +++ b/oryx/decoderx/stub/nested.json @@ -0,0 +1,36 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Person", + "type": "object", + "required": ["node"], + "properties": { + "node": { + "type": "object", + "required": ["node"], + "properties": { + "node": { + "type": "object", + "properties": { + "node": { + "type": "object", + "properties": { + "leaf": { + "type": "string" + } + }, + "required": ["leaf"] + }, + "leaf": { + "type": "string" + } + }, + "required": ["leaf"] + }, + "leaf": { + "type": "string" + } + } + } + } +} diff --git a/oryx/decoderx/stub/person.json b/oryx/decoderx/stub/person.json new file mode 100644 index 00000000000..7779aac08a4 --- /dev/null +++ b/oryx/decoderx/stub/person.json @@ -0,0 +1,31 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Person", + "type": "object", + "properties": { + "name": { + "type": "object", + "properties": { + "first": { + "type": "string" + }, + "last": { + "type": "string" + } + } + }, + "age": { + "type": "integer" + }, + "ratio": { + "type": "number" + }, + "consent": { + "type": "boolean" + }, + "newsletter": { + "type": "boolean" + } + } +} diff --git a/oryx/decoderx/stub/required-defaults.json b/oryx/decoderx/stub/required-defaults.json new file mode 100644 index 00000000000..62edd80a517 --- /dev/null +++ b/oryx/decoderx/stub/required-defaults.json @@ -0,0 +1,57 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Person", + "type": "object", + "properties": { + "name": { + "type": "object", + "properties": { + "first": { + "type": "string" + }, + "last": { + "type": "string" + } + }, + "required": ["first"] + }, + "name2": { + "type": "object", + "properties": { + "first": { + "type": "string" + }, + "last": { + "type": "string" + } + }, + "required": ["first"] + }, + "age": { + "type": "integer" + }, + "age2": { + "type": "integer" + }, + "ratio": { + "type": "number" + }, + "ratio2": { + "type": "number" + }, + "consent": { + "type": "boolean" + }, + "consent2": { + "type": "boolean" + }, + "newsletter": { + "type": "boolean" + }, + "newsletter2": { + "type": "boolean" + } + }, + "required": ["age2", "ratio2", "consent2", "newsletter2", "name2"] +} diff --git a/oryx/decoderx/stub/schema.json b/oryx/decoderx/stub/schema.json new file mode 100644 index 00000000000..c748fbd473a --- /dev/null +++ b/oryx/decoderx/stub/schema.json @@ -0,0 +1,11 @@ +{ + "$id": "https://example.com/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["foo"], + "properties": { + "foo": { + "type": "string" + } + } +} diff --git a/oryx/docs/alpha_num.png b/oryx/docs/alpha_num.png new file mode 100644 index 00000000000..2bbc6563166 Binary files /dev/null and b/oryx/docs/alpha_num.png differ diff --git a/oryx/docs/num.png b/oryx/docs/num.png new file mode 100644 index 00000000000..ecaadf6c0a6 Binary files /dev/null and b/oryx/docs/num.png differ diff --git a/oryx/docs/result_num.png b/oryx/docs/result_num.png new file mode 100644 index 00000000000..98bb043cfd4 Binary files /dev/null and b/oryx/docs/result_num.png differ diff --git 
a/oryx/errorsx/errors.go b/oryx/errorsx/errors.go new file mode 100644 index 00000000000..801141cd4c8 --- /dev/null +++ b/oryx/errorsx/errors.go @@ -0,0 +1,101 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package errorsx + +import ( + "github.com/pkg/errors" + + "github.com/ory/herodot" +) + +// Cause returns the underlying cause of the error, if possible. +// An error value has a cause if it implements the following +// interface: +// +// type causer interface { +// Cause() error +// } +// +// If the error does not implement Cause, the original error will +// be returned. If the error is nil, nil will be returned without further +// investigation. +// Deprecated: you should probably use errors.As instead. +func Cause(err error) error { + type causer interface { + Cause() error + } + + for err != nil { + cause, ok := err.(causer) + if !ok || cause.Cause() == nil { + break + } + err = cause.Cause() + } + return err +} + +// WithStack mirror pkg/errors.WithStack but does not wrap existing stack +// traces. +// Deprecated: you should probably use errors.WithStack instead and only annotate stacks when it makes sense. +func WithStack(err error) error { + if e, ok := err.(StackTracer); ok && len(e.StackTrace()) > 0 { + return err + } + + return errors.WithStack(err) +} + +// StatusCodeCarrier can be implemented by an error to support setting status codes in the error itself. +type StatusCodeCarrier interface { + // StatusCode returns the status code of this error. + StatusCode() int +} + +// RequestIDCarrier can be implemented by an error to support error contexts. +type RequestIDCarrier interface { + // RequestID returns the ID of the request that caused the error, if applicable. + RequestID() string +} + +// ReasonCarrier can be implemented by an error to support error contexts. +type ReasonCarrier interface { + // Reason returns the reason for the error, if applicable. + Reason() string +} + +// DebugCarrier can be implemented by an error to support error contexts. +type DebugCarrier interface { + // Debug returns debugging information for the error, if applicable. + Debug() string +} + +// StatusCarrier can be implemented by an error to support error contexts. +type StatusCarrier interface { + // ID returns the error id, if applicable. + Status() string +} + +// DetailsCarrier can be implemented by an error to support error contexts. +type DetailsCarrier interface { + // Details returns details on the error, if applicable. + Details() map[string]interface{} +} + +// IDCarrier can be implemented by an error to support error contexts. +type IDCarrier interface { + // ID returns application error ID on the error, if applicable. 
+ ID() string +} + +type StackTracer interface { + StackTrace() errors.StackTrace +} + +func GetCodeFromHerodotError(err error) (code int, ok bool) { + herodotErr := &herodot.DefaultError{} + isHerodot := errors.As(err, &herodotErr) + + return herodotErr.CodeField, isHerodot +} diff --git a/oryx/fetcher/fetcher.go b/oryx/fetcher/fetcher.go new file mode 100644 index 00000000000..f1fa4f1f5b1 --- /dev/null +++ b/oryx/fetcher/fetcher.go @@ -0,0 +1,178 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fetcher + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/base64" + stderrors "errors" + "io" + "net/http" + "os" + "strings" + "time" + + "github.com/dgraph-io/ristretto/v2" + "github.com/hashicorp/go-retryablehttp" + "github.com/pkg/errors" + + "github.com/ory/x/httpx" + "github.com/ory/x/stringsx" +) + +// Fetcher is able to load file contents from http, https, file, and base64 locations. +type Fetcher struct { + hc *retryablehttp.Client + limit int64 + cache *ristretto.Cache[[]byte, []byte] + ttl time.Duration +} + +type opts struct { + hc *retryablehttp.Client + limit int64 + cache *ristretto.Cache[[]byte, []byte] + ttl time.Duration +} + +var ErrUnknownScheme = stderrors.New("unknown scheme") + +// WithClient sets the http.Client the fetcher uses. +func WithClient(hc *retryablehttp.Client) Modifier { + return func(o *opts) { + o.hc = hc + } +} + +// WithMaxHTTPMaxBytes reads at most limit bytes from the HTTP response body, +// returning bytes.ErrToLarge if the limit would be exceeded. +func WithMaxHTTPMaxBytes(limit int64) Modifier { + return func(o *opts) { + o.limit = limit + } +} + +func WithCache(cache *ristretto.Cache[[]byte, []byte], ttl time.Duration) Modifier { + return func(o *opts) { + if ttl < 0 { + return + } + o.cache = cache + o.ttl = ttl + } +} + +func newOpts() *opts { + return &opts{ + hc: httpx.NewResilientClient(), + } +} + +type Modifier func(*opts) + +// NewFetcher creates a new fetcher instance. +func NewFetcher(opts ...Modifier) *Fetcher { + o := newOpts() + for _, f := range opts { + f(o) + } + return &Fetcher{hc: o.hc, limit: o.limit, cache: o.cache, ttl: o.ttl} +} + +// Fetch fetches the file contents from the source. +func (f *Fetcher) Fetch(source string) (*bytes.Buffer, error) { + return f.FetchContext(context.Background(), source) +} + +// FetchContext fetches the file contents from the source and allows to pass a +// context that is used for HTTP requests. +func (f *Fetcher) FetchContext(ctx context.Context, source string) (*bytes.Buffer, error) { + b, err := f.FetchBytes(ctx, source) + if err != nil { + return nil, err + } + return bytes.NewBuffer(b), nil +} + +// FetchBytes fetches the file contents from the source and allows to pass a +// context that is used for HTTP requests. 
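[Editor's note — not part of the diff: a small usage sketch for the Fetcher above. The base64 payload is just an inline example; the size limit is optional and only applies to remote sources.]

package main

import (
	"fmt"
	"log"

	"github.com/ory/x/fetcher"
)

func main() {
	// Limit remote responses to 1 MiB; file:// and base64:// sources are unaffected.
	f := fetcher.NewFetcher(fetcher.WithMaxHTTPMaxBytes(1 << 20))

	// base64://... is decoded in-process without any network or disk access.
	buf, err := f.Fetch("base64://aGVsbG8gd29ybGQ=")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(buf.String()) // hello world

	// Unknown schemes are reported via an error wrapping ErrUnknownScheme.
	if _, err := f.Fetch("ftp://example.com/file"); err != nil {
		fmt.Println(err)
	}
}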
+func (f *Fetcher) FetchBytes(ctx context.Context, source string) ([]byte, error) { + switch s := stringsx.SwitchPrefix(source); { + case s.HasPrefix("http://", "https://"): + return f.fetchRemote(ctx, source) + case s.HasPrefix("file://"): + return f.fetchFile(strings.TrimPrefix(source, "file://")) + case s.HasPrefix("base64://"): + src, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(source, "base64://")) + if err != nil { + return nil, errors.Wrapf(err, "base64decode: %s", source) + } + return src, nil + default: + return nil, errors.Wrap(ErrUnknownScheme, s.ToUnknownPrefixErr().Error()) + } +} + +func (f *Fetcher) fetchRemote(ctx context.Context, source string) (b []byte, err error) { + if f.cache != nil { + cacheKey := sha256.Sum256([]byte(source)) + if v, ok := f.cache.Get(cacheKey[:]); ok { + b = make([]byte, len(v)) + copy(b, v) + return b, nil + } + defer func() { + if err == nil && len(b) > 0 { + toCache := make([]byte, len(b)) + copy(toCache, b) + f.cache.SetWithTTL(cacheKey[:], toCache, int64(len(toCache)), f.ttl) + } + }() + } + + req, err := retryablehttp.NewRequestWithContext(ctx, http.MethodGet, source, nil) + if err != nil { + return nil, errors.Wrapf(err, "new request: %s", source) + } + res, err := f.hc.Do(req) + if err != nil { + return nil, errors.Wrap(err, source) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return nil, errors.Errorf("expected http response status code 200 but got %d when fetching: %s", res.StatusCode, source) + } + + if f.limit > 0 { + var buf bytes.Buffer + n, err := io.Copy(&buf, io.LimitReader(res.Body, f.limit+1)) + if n > f.limit { + return nil, bytes.ErrTooLarge + } + if err != nil { + return nil, err + } + return buf.Bytes(), nil + } + return io.ReadAll(res.Body) +} + +func (f *Fetcher) fetchFile(source string) ([]byte, error) { + fp, err := os.Open(source) // #nosec:G304 + if err != nil { + return nil, errors.Wrapf(err, "unable to open file: %s", source) + } + defer fp.Close() + b, err := io.ReadAll(fp) + if err != nil { + return nil, errors.Wrapf(err, "unable to read file: %s", source) + } + if err := fp.Close(); err != nil { + return nil, errors.Wrapf(err, "unable to close file: %s", source) + } + return b, nil +} diff --git a/oryx/flagx/flagx.go b/oryx/flagx/flagx.go new file mode 100644 index 00000000000..67fb85927da --- /dev/null +++ b/oryx/flagx/flagx.go @@ -0,0 +1,108 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package flagx + +import ( + "time" + + "github.com/spf13/pflag" + + "github.com/spf13/cobra" + + "github.com/ory/x/cmdx" +) + +func NewFlagSet(name string) *pflag.FlagSet { + return pflag.NewFlagSet(name, pflag.ContinueOnError) +} + +// MustGetBool returns a bool flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetBool(cmd *cobra.Command, name string) bool { + ok, err := cmd.Flags().GetBool(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return ok +} + +// MustGetString returns a string flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetString(cmd *cobra.Command, name string) string { + s, err := cmd.Flags().GetString(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return s +} + +// MustGetDuration returns a time.Duration flag or fatals if an error occurs. 
+// Deprecated: just handle the error properly, this breaks command testing +func MustGetDuration(cmd *cobra.Command, name string) time.Duration { + d, err := cmd.Flags().GetDuration(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return d +} + +// MustGetStringSlice returns a []string flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetStringSlice(cmd *cobra.Command, name string) []string { + ss, err := cmd.Flags().GetStringSlice(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return ss +} + +// MustGetStringArray returns a []string flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetStringArray(cmd *cobra.Command, name string) []string { + ss, err := cmd.Flags().GetStringArray(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return ss +} + +// MustGetStringToStringMap returns a map[string]string flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetStringToStringMap(cmd *cobra.Command, name string) map[string]string { + ss, err := cmd.Flags().GetStringToString(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return ss +} + +// MustGetInt returns a int flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetInt(cmd *cobra.Command, name string) int { + ss, err := cmd.Flags().GetInt(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return ss +} + +// MustGetUint8 returns a uint8 flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetUint8(cmd *cobra.Command, name string) uint8 { + v, err := cmd.Flags().GetUint8(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return v +} + +// MustGetUint32 returns a uint32 flag or fatals if an error occurs. +// Deprecated: just handle the error properly, this breaks command testing +func MustGetUint32(cmd *cobra.Command, name string) uint32 { + v, err := cmd.Flags().GetUint32(name) + if err != nil { + cmdx.Fatalf(err.Error()) + } + return v +} diff --git a/oryx/fsx/dirhash.go b/oryx/fsx/dirhash.go new file mode 100644 index 00000000000..51682c45563 --- /dev/null +++ b/oryx/fsx/dirhash.go @@ -0,0 +1,31 @@ +package fsx + +import ( + "crypto/sha512" + "io" + "io/fs" +) + +// DirHash computes a directory hash from all files contained in any subdirectories. 
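+//
+// Illustrative sketch (the directory path is a hypothetical example):
+//
+//	sum, err := fsx.DirHash(os.DirFS("./migrations"))
+//	// sum is a SHA-512 digest computed over each regular file's relative path and contents.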
+func DirHash(dir fs.FS) ([]byte, error) { + hash := sha512.New() + err := fs.WalkDir(dir, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + _, _ = io.WriteString(hash, path) // hash write never errors + f, err := dir.Open(path) + if err != nil { + return err + } + _, _ = io.Copy(hash, f) // hash write never errors + if err = f.Close(); err != nil { + return err + } + return nil + }) + return hash.Sum(nil), err +} diff --git a/oryx/fsx/merge.go b/oryx/fsx/merge.go new file mode 100644 index 00000000000..5b758e8cd25 --- /dev/null +++ b/oryx/fsx/merge.go @@ -0,0 +1,229 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package fsx + +import ( + "io" + "io/fs" + "sort" + "time" + + "github.com/pkg/errors" +) + +type ( + mergedFS []fs.FS + mergedFile struct { + files []fs.File + unprocessedDirEntries dirEntries + } + mergedFileInfo []fs.FileInfo + dirEntries []fs.DirEntry +) + +var ( + _ fs.StatFS = (mergedFS)(nil) + _ fs.ReadDirFS = (mergedFS)(nil) + _ fs.ReadDirFile = (*mergedFile)(nil) + _ fs.FileInfo = (mergedFileInfo)(nil) + _ sort.Interface = (dirEntries)(nil) +) + +// Merge multiple filesystems. Later file systems are shadowed by previous ones. +func Merge(fss ...fs.FS) fs.FS { + return mergedFS(fss) +} + +func (m mergedFS) Open(name string) (fs.File, error) { + var file mergedFile + for _, fsys := range m { + f, err := fsys.Open(name) + if errors.Is(err, fs.ErrNotExist) { + continue + } + if err != nil { + return nil, errors.WithStack(err) + } + + file.files = append(file.files, f) + } + if len(file.files) == 0 { + return nil, errors.WithStack(fs.ErrNotExist) + } + + return &file, nil +} + +func (m mergedFS) Stat(name string) (fs.FileInfo, error) { + for i, fsys := range m { + info, err := fs.Stat(fsys, name) + if errors.Is(err, fs.ErrNotExist) { + continue + } + + switch { + case err != nil: + return nil, errors.WithStack(err) + case info.IsDir(): + dirs := mergedFileInfo{info} + for j := i + 1; j < len(m); j++ { + info, err := fs.Stat(m[j], name) + if errors.Is(err, fs.ErrNotExist) { + continue + } + if err != nil { + return nil, err + } + dirs = append(dirs, info) + } + return dirs, nil + default: + return info, nil + } + } + return nil, errors.WithStack(fs.ErrNotExist) +} + +func (m mergedFS) ReadDir(name string) ([]fs.DirEntry, error) { + var entries dirEntries + + for _, fsys := range m { + e, err := fs.ReadDir(fsys, name) + if errors.Is(err, fs.ErrNotExist) { + continue + } + if err != nil { + return nil, err + } + entries = append(entries, e...) 
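+		// Later file systems may contribute entries with the same names;
+		// clean() below sorts the combined result and drops such duplicates.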
+ } + if len(entries) == 0 { + return nil, errors.WithStack(fs.ErrNotExist) + } + + entries.clean() + return entries, nil +} + +func (m mergedFileInfo) Name() string { + return m[0].Name() +} + +func (m mergedFileInfo) Size() int64 { + return m[0].Size() +} + +func (m mergedFileInfo) Mode() fs.FileMode { + return m[0].Mode() +} + +func (m mergedFileInfo) ModTime() time.Time { + return m[0].ModTime() +} + +func (m mergedFileInfo) IsDir() bool { + return m[0].IsDir() +} + +func (m mergedFileInfo) Sys() interface{} { + return m +} + +func (d dirEntries) Len() int { + return len(d) +} + +func (d dirEntries) Less(i, j int) bool { + return d[i].Name() < d[j].Name() +} + +func (d dirEntries) Swap(i, j int) { + d[i], d[j] = d[j], d[i] +} + +func (d *dirEntries) clean() { + sort.Sort(d) + + for i := 1; i < len(*d); i++ { + if (*d)[i-1].Name() == (*d)[i].Name() { + if len(*d)-i == 1 { + // remove the last entry; we're done + *d = (*d)[:i] + return + } + // remove the duplicate entry at index i + *d = append((*d)[:i], (*d)[i+1:]...) + + // need to check the same index again + i-- + } + } +} + +func (m *mergedFile) Stat() (fs.FileInfo, error) { + return m.files[0].Stat() +} + +func (m *mergedFile) Read(bytes []byte) (int, error) { + return m.files[0].Read(bytes) +} + +func (m *mergedFile) Close() error { + var firstErr error + for _, f := range m.files { + if err := f.Close(); err != nil { + if firstErr == nil { + firstErr = errors.WithStack(err) + } + } + } + return firstErr +} + +func (m *mergedFile) ReadDir(n int) ([]fs.DirEntry, error) { + if m.unprocessedDirEntries != nil { + if n <= 0 { + entries := m.unprocessedDirEntries + m.unprocessedDirEntries = nil + return entries, nil + } + if n >= len(m.unprocessedDirEntries) { + entries := m.unprocessedDirEntries + m.unprocessedDirEntries = nil + return entries, io.EOF + } + + var entries dirEntries + entries, m.unprocessedDirEntries = m.unprocessedDirEntries[:n], m.unprocessedDirEntries[n:] + return entries, nil + } + + var entries dirEntries + for _, f := range m.files { + if f, ok := f.(fs.ReadDirFile); ok { + e, err := f.ReadDir(-1) + if err != nil && !errors.Is(err, fs.ErrNotExist) { + return nil, err + } + entries = append(entries, e...) 
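+			// Files that do not implement fs.ReadDirFile contribute no entries;
+			// duplicate names across the merged layers are removed by clean() below.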
+ } + } + if entries == nil { + if n > 0 { + return nil, io.EOF + } + return nil, nil + } + + entries.clean() + if n <= 0 { + return entries, nil + } + if n >= len(entries) { + return entries, io.EOF + } + + entries, m.unprocessedDirEntries = entries[:n], entries[n:] + return entries, nil +} diff --git a/oryx/go.mod b/oryx/go.mod new file mode 100644 index 00000000000..f877c144e56 --- /dev/null +++ b/oryx/go.mod @@ -0,0 +1,227 @@ +module github.com/ory/x + +go 1.25 + +require ( + code.dny.dev/ssrf v0.2.0 + github.com/Masterminds/sprig/v3 v3.3.0 + github.com/auth0/go-jwt-middleware/v2 v2.3.0 + github.com/avast/retry-go/v4 v4.6.1 + github.com/bmatcuk/doublestar/v2 v2.0.4 + github.com/bradleyjkemp/cupaloy/v2 v2.8.0 + github.com/cockroachdb/cockroach-go/v2 v2.4.1 + github.com/dgraph-io/ristretto/v2 v2.2.0 + github.com/docker/docker v28.3.3+incompatible + github.com/evanphx/json-patch/v5 v5.9.11 + github.com/fsnotify/fsnotify v1.9.0 + github.com/ghodss/yaml v1.0.0 + github.com/go-jose/go-jose/v3 v3.0.4 + github.com/go-openapi/jsonpointer v0.21.2 + github.com/go-openapi/runtime v0.28.0 + github.com/go-sql-driver/mysql v1.9.3 + github.com/gobuffalo/httptest v1.5.2 + github.com/gobwas/glob v0.2.3 + github.com/goccy/go-yaml v1.18.0 + github.com/gofrs/uuid v4.4.0+incompatible + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/google/go-jsonnet v0.21.0 + github.com/gorilla/websocket v1.5.3 + github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 + github.com/hashicorp/go-retryablehttp v0.7.8 + github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf + github.com/jackc/pgconn v1.14.3 + github.com/jackc/pgx/v4 v4.18.3 + github.com/jackc/puddle/v2 v2.2.2 + github.com/jmoiron/sqlx v1.4.0 + github.com/julienschmidt/httprouter v1.3.1-0.20240130105656-484018016424 + github.com/knadh/koanf/maps v0.1.2 + github.com/knadh/koanf/parsers/json v0.1.0 + github.com/knadh/koanf/parsers/toml v0.1.0 + github.com/knadh/koanf/parsers/yaml v0.1.0 + github.com/knadh/koanf/providers/posflag v0.1.0 + github.com/knadh/koanf/providers/rawbytes v0.1.0 + github.com/knadh/koanf/v2 v2.2.2 + github.com/laher/mergefs v0.1.1 + github.com/lestrrat-go/jwx v1.2.31 + github.com/lib/pq v1.10.9 + github.com/mattn/go-sqlite3 v1.14.32 + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 + github.com/ory/analytics-go/v5 v5.0.1 + github.com/ory/dockertest/v3 v3.12.0 + github.com/ory/herodot v0.10.7 + github.com/ory/jsonschema/v3 v3.0.9-0.20250317235931-280c5fc7bf0e + github.com/ory/pop/v6 v6.3.2-0.20251203152233-a32233875f7e + github.com/pelletier/go-toml v1.9.5 + github.com/peterhellberg/link v1.2.0 + github.com/pkg/errors v0.9.1 + github.com/pkg/profile v1.7.0 + github.com/prometheus/client_golang v1.23.0 + github.com/prometheus/client_model v0.6.2 + 
github.com/prometheus/common v0.65.0 + github.com/rakutentech/jwk-go v1.2.0 + github.com/rs/cors v1.11.1 + github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761 + github.com/sirupsen/logrus v1.9.3 + github.com/spf13/cast v1.9.2 + github.com/spf13/cobra v1.10.1 + github.com/spf13/pflag v1.0.10 + github.com/ssoready/hyrumtoken v1.0.0 + github.com/stretchr/testify v1.11.1 + github.com/tidwall/gjson v1.18.0 + github.com/tidwall/sjson v1.2.5 + github.com/urfave/negroni v1.0.0 + go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0 + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 + go.opentelemetry.io/contrib/propagators/b3 v1.37.0 + go.opentelemetry.io/contrib/propagators/jaeger v1.37.0 + go.opentelemetry.io/contrib/samplers/jaegerremote v0.31.0 + go.opentelemetry.io/otel v1.38.0 + go.opentelemetry.io/otel/exporters/jaeger v1.17.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0 + go.opentelemetry.io/otel/exporters/zipkin v1.37.0 + go.opentelemetry.io/otel/sdk v1.38.0 + go.opentelemetry.io/otel/trace v1.38.0 + go.opentelemetry.io/proto/otlp v1.7.1 + go.uber.org/goleak v1.3.0 + go.uber.org/mock v0.5.2 + golang.org/x/crypto v0.45.0 + golang.org/x/mod v0.29.0 + golang.org/x/net v0.47.0 + golang.org/x/oauth2 v0.33.0 + golang.org/x/sync v0.18.0 + google.golang.org/grpc v1.74.2 + google.golang.org/protobuf v1.36.9 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + filippo.io/edwards25519 v1.1.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect + github.com/Masterminds/goutils v1.1.1 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect + github.com/XSAM/otelsql v0.39.0 // indirect + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cenkalti/backoff/v5 v5.0.2 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/containerd/continuity v0.4.5 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/cli v28.3.3+incompatible // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/fatih/color v1.18.0 // indirect + github.com/fatih/structs v1.1.0 // indirect + github.com/felixge/fgprof v0.9.5 // indirect + github.com/felixge/httpsnoop 
v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-openapi/errors v0.22.2 // indirect + github.com/go-openapi/strfmt v0.23.0 // indirect + github.com/go-openapi/swag v0.23.1 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/gobuffalo/envy v1.10.2 // indirect + github.com/gobuffalo/fizz v1.14.4 // indirect + github.com/gobuffalo/flect v1.0.3 // indirect + github.com/gobuffalo/github_flavored_markdown v1.1.4 // indirect + github.com/gobuffalo/helpers v0.6.10 // indirect + github.com/gobuffalo/nulls v0.4.2 // indirect + github.com/gobuffalo/plush/v4 v4.1.22 // indirect + github.com/gobuffalo/plush/v5 v5.0.7 // indirect + github.com/gobuffalo/tags/v3 v3.1.4 // indirect + github.com/gobuffalo/validate/v3 v3.3.3 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/gofrs/flock v0.12.1 // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/css v1.0.1 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/huandu/xstrings v1.5.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jackc/chunkreader/v2 v2.0.1 // indirect + github.com/jackc/pgio v1.0.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgproto3/v2 v2.3.3 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/pgtype v1.14.4 // indirect + github.com/jackc/pgx/v5 v5.7.5 // indirect + github.com/jaegertracing/jaeger-idl v0.5.0 // indirect + github.com/jandelgado/gcov2lcov v1.1.1 // indirect + github.com/joho/godotenv v1.5.1 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect + github.com/lestrrat-go/backoff/v2 v2.0.8 // indirect + github.com/lestrrat-go/blackmagic v1.0.4 // indirect + github.com/lestrrat-go/httpcc v1.0.1 // indirect + github.com/lestrrat-go/iter v1.0.2 // indirect + github.com/lestrrat-go/option v1.0.1 // indirect + github.com/mailru/easyjson v0.9.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/microcosm-cc/bluemonday v1.0.27 // indirect + github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/mitchellh/reflectwalk v1.0.2 // indirect 
+ github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/sys/atomicwriter v0.1.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/term v0.5.2 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/nyaruka/phonenumbers v1.6.5 // indirect + github.com/oklog/ulid v1.3.1 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/opencontainers/runc v1.3.3 // indirect + github.com/openzipkin/zipkin-go v0.4.3 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/procfs v0.17.0 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/segmentio/backo-go v1.1.0 // indirect + github.com/sergi/go-diff v1.4.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect + github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d // indirect + github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect + go.mongodb.org/mongo-driver v1.17.4 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.yaml.in/yaml/v2 v2.4.2 // indirect + golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect + golang.org/x/text v0.31.0 // indirect + golang.org/x/time v0.12.0 // indirect + golang.org/x/tools v0.38.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250811230008-5f3141c8851a // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + sigs.k8s.io/yaml v1.6.0 // indirect +) + +tool ( + github.com/jandelgado/gcov2lcov + go.uber.org/mock/mockgen + golang.org/x/tools/cmd/goimports +) diff --git a/oryx/go.sum b/oryx/go.sum new file mode 100644 index 00000000000..7157a80e0af --- /dev/null +++ b/oryx/go.sum @@ -0,0 +1,755 @@ +code.dny.dev/ssrf v0.2.0 h1:wCBP990rQQ1CYfRpW+YK1+8xhwUjv189AQ3WMo1jQaI= +code.dny.dev/ssrf v0.2.0/go.mod h1:B+91l25OnyaLIeCx0WRJN5qfJ/4/ZTZxRXgm0lj/2w8= +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c 
h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs= +github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= +github.com/XSAM/otelsql v0.39.0 h1:4o374mEIMweaeevL7fd8Q3C710Xi2Jh/c8G4Qy9bvCY= +github.com/XSAM/otelsql v0.39.0/go.mod h1:uMOXLUX+wkuAuP0AR3B45NXX7E9lJS2mERa8gqdU8R0= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/auth0/go-jwt-middleware/v2 v2.3.0 h1:4QREj6cS3d8dS05bEm443jhnqQF97FX9sMBeWqnNRzE= +github.com/auth0/go-jwt-middleware/v2 v2.3.0/go.mod h1:dL4ObBs1/dj4/W4cYxd8rqAdDGXYyd5rqbpMIxcbVrU= +github.com/avast/retry-go/v4 v4.6.1 h1:VkOLRubHdisGrHnTu89g08aQEWEgRU7LVEop3GbIcMk= +github.com/avast/retry-go/v4 v4.6.1/go.mod h1:V6oF8njAwxJ5gRo1Q7Cxab24xs5NCWZBeaHHBklR8mA= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bmatcuk/doublestar/v2 v2.0.4 h1:6I6oUiT/sU27eE2OFcWqBhL1SwjyvQuOssxT4a1yidI= +github.com/bmatcuk/doublestar/v2 v2.0.4/go.mod h1:QMmcs3H2AUQICWhfzLXz+IYln8lRQmTZRptLie8RgRw= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M= +github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= 
+github.com/cenkalti/backoff/v5 v5.0.2 h1:rIfFVxEf1QsI7E1ZHfp/B4DF/6QBAUhmgkxc0H7Zss8= +github.com/cenkalti/backoff/v5 v5.0.2/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= +github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= +github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/cockroachdb/cockroach-go/v2 v2.4.1 h1:ACVT/zXsuK6waRPVYtDQpsM8pPA7IA/3fkgA02RR/Gw= +github.com/cockroachdb/cockroach-go/v2 v2.4.1/go.mod h1:9U179XbCx4qFWtNhc7BiWLPfuyMVQ7qdAhfrwLz1vH0= +github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4= +github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= +github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM= +github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/cli v28.3.3+incompatible h1:fp9ZHAr1WWPGdIWBM1b3zLtgCF+83gRdVMTJsUeiyAo= +github.com/docker/cli v28.3.3+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI= +github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= +github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/evanphx/json-patch/v5 v5.9.11 h1:/8HVnzMq13/3x9TPvjG08wUGqBTmZBsCWzjTM0wiaDU= +github.com/evanphx/json-patch/v5 v5.9.11/go.mod h1:3j+LviiESTElxA4p3EMKAB9HXj3/XEtnUf6OZxqIQTM= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= +github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY= +github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/frankban/quicktest 
v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-jose/go-jose/v3 v3.0.4 h1:Wp5HA7bLQcKnf6YYao/4kpRpVMp/yf6+pJKV8WFSaNY= +github.com/go-jose/go-jose/v3 v3.0.4/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= +github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.22.2 h1:rdxhzcBUazEcGccKqbY1Y7NS8FDcMyIRr0934jrYnZg= +github.com/go-openapi/errors v0.22.2/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= +github.com/go-openapi/jsonpointer v0.21.2 h1:AqQaNADVwq/VnkCmQg6ogE+M3FOsKTytwges0JdwVuA= +github.com/go-openapi/jsonpointer v0.21.2/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= +github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/runtime v0.28.0 h1:gpPPmWSNGo214l6n8hzdXYhPuJcGtziTOgUpvsFWGIQ= +github.com/go-openapi/runtime v0.28.0/go.mod h1:QN7OzcS+XuYmkQLw05akXk0jRH/eZ3kb18+1KwW9gyc= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU= +github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0= +github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= +github.com/go-openapi/validate v0.24.0/go.mod 
h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= +github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/gobuffalo/envy v1.10.2 h1:EIi03p9c3yeuRCFPOKcSfajzkLb3hrRjEpHGI8I2Wo4= +github.com/gobuffalo/envy v1.10.2/go.mod h1:qGAGwdvDsaEtPhfBzb3o0SfDea8ByGn9j8bKmVft9z8= +github.com/gobuffalo/fizz v1.14.4 h1:8uume7joF6niTNWN582IQ2jhGTUoa9g1fiV/tIoGdBs= +github.com/gobuffalo/fizz v1.14.4/go.mod h1:9/2fGNXNeIFOXEEgTPJwiK63e44RjG+Nc4hfMm1ArGM= +github.com/gobuffalo/flect v0.3.0/go.mod h1:5pf3aGnsvqvCj50AVni7mJJF8ICxGZ8HomberC3pXLE= +github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= +github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= +github.com/gobuffalo/github_flavored_markdown v1.1.3/go.mod h1:IzgO5xS6hqkDmUh91BW/+Qxo/qYnvfzoz3A7uLkg77I= +github.com/gobuffalo/github_flavored_markdown v1.1.4 h1:WacrEGPXUDX+BpU1GM/Y0ADgMzESKNWls9hOTG1MHVs= +github.com/gobuffalo/github_flavored_markdown v1.1.4/go.mod h1:Vl9686qrVVQou4GrHRK/KOG3jCZOKLUqV8MMOAYtlso= +github.com/gobuffalo/helpers v0.6.7/go.mod h1:j0u1iC1VqlCaJEEVkZN8Ia3TEzfj/zoXANqyJExTMTA= +github.com/gobuffalo/helpers v0.6.10 h1:puKDCOrJ0EIq5ScnTRgKyvEZ05xQa+gwRGCpgoh6Ek8= +github.com/gobuffalo/helpers v0.6.10/go.mod h1:r52L6VSnByLJFOmURp1irvzgSakk7RodChi1YbGwk8I= +github.com/gobuffalo/httptest v1.5.2 h1:GpGy520SfY1QEmyPvaqmznTpG4gEQqQ82HtHqyNEreM= +github.com/gobuffalo/httptest v1.5.2/go.mod h1:FA23yjsWLGj92mVV74Qtc8eqluc11VqcWr8/C1vxt4g= +github.com/gobuffalo/nulls v0.4.2 h1:GAqBR29R3oPY+WCC7JL9KKk9erchaNuV6unsOSZGQkw= +github.com/gobuffalo/nulls v0.4.2/go.mod h1:EElw2zmBYafU2R9W4Ii1ByIj177wA/pc0JdjtD0EsH8= +github.com/gobuffalo/plush/v4 v4.1.16/go.mod h1:6t7swVsarJ8qSLw1qyAH/KbrcSTwdun2ASEQkOznakg= +github.com/gobuffalo/plush/v4 v4.1.22 h1:bPQr5PsiTg54UGMsfvnIAvFmUfxzD/ri+wbpu7PlmTM= +github.com/gobuffalo/plush/v4 v4.1.22/go.mod h1:WiKHJx3qBvfaDVlrv8zT7NCd3dEMaVR/fVxW4wqV17M= +github.com/gobuffalo/plush/v5 v5.0.7 h1:nI8sIt5tZAN2tCZHeaXkH7HAvxvvk3sJHG2TtrKeSHM= +github.com/gobuffalo/plush/v5 v5.0.7/go.mod h1:C08u/VEqzzPBXFF/yqs40P/5Cvc/zlZsMzhCxXyWJmU= +github.com/gobuffalo/tags/v3 v3.1.4 h1:X/ydLLPhgXV4h04Hp2xlbI2oc5MDaa7eub6zw8oHjsM= 
+github.com/gobuffalo/tags/v3 v3.1.4/go.mod h1:ArRNo3ErlHO8BtdA0REaZxijuWnWzF6PUXngmMXd2I0= +github.com/gobuffalo/validate/v3 v3.3.3 h1:o7wkIGSvZBYBd6ChQoLxkz2y1pfmhbI4jNJYh6PuNJ4= +github.com/gobuffalo/validate/v3 v3.3.3/go.mod h1:YC7FsbJ/9hW/VjQdmXPvFqvRis4vrRYFxr69WiNZw6g= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= +github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= +github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-jsonnet v0.21.0 h1:43Bk3K4zMRP/aAZm9Po2uSEjY6ALCkYUVIcz9HLGMvA= +github.com/google/go-jsonnet v0.21.0/go.mod h1:tCGAu8cpUpEZcdGMmdOu37nh8bGgqubhI5v2iSk3KJQ= +github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= +github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5 h1:xhMrHhTJ6zxu3gA4enFM9MLn9AY7613teCdFnlUVbSQ= 
+github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5/go.mod h1:5hDyRhoBCxViHszMt12TnOpEI4VVi+U8Gm9iphldiMA= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= +github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48= +github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw= +github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/ianlancetaylor/demangle v0.0.0-20230524184225-eabc099b10ab/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf h1:FtEj8sfIcaaBfAKrE1Cwb61YDtYq9JxChK1c7AKce7s= 
+github.com/inhies/go-bytesize v0.0.0-20220417184213-4913239db9cf/go.mod h1:yrqSXGoD/4EKfF26AOGzscPOgTTJcyAwM2rpixWT+t4= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= +github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= +github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod 
h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgtype v1.14.4 h1:fKuNiCumbKTAIxQwXfB/nsrnkEI6bPJrrSiMKgbJ2j8= +github.com/jackc/pgtype v1.14.4/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= +github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs= +github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jaegertracing/jaeger-idl v0.5.0 h1:zFXR5NL3Utu7MhPg8ZorxtCBjHrL3ReM1VoB65FOFGE= +github.com/jaegertracing/jaeger-idl v0.5.0/go.mod h1:ON90zFo9eoyXrt9F/KN8YeF3zxcnujaisMweFY/rg5k= +github.com/jandelgado/gcov2lcov v1.1.1 h1:CHUNoAglvb34DqmMoZchnzDbA3yjpzT8EoUvVqcAY+s= +github.com/jandelgado/gcov2lcov v1.1.1/go.mod h1:tMVUlMVtS1po2SB8UkADWhOT5Y5Q13XOce2AYU69JuI= +github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= +github.com/jmoiron/sqlx v1.4.0 
h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= +github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= +github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/julienschmidt/httprouter v1.3.1-0.20240130105656-484018016424 h1:KsUAkP+Y6n+542zpxWiQDUvOqfh3n429HYleEvq/V7M= +github.com/julienschmidt/httprouter v1.3.1-0.20240130105656-484018016424/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/knadh/koanf/maps v0.1.2 h1:RBfmAW5CnZT+PJ1CVc1QSJKf4Xu9kxfQgYVQSu8hpbo= +github.com/knadh/koanf/maps v0.1.2/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/parsers/json v0.1.0 h1:dzSZl5pf5bBcW0Acnu20Djleto19T0CfHcvZ14NJ6fU= +github.com/knadh/koanf/parsers/json v0.1.0/go.mod h1:ll2/MlXcZ2BfXD6YJcjVFzhG9P0TdJ207aIBKQhV2hY= +github.com/knadh/koanf/parsers/toml v0.1.0 h1:S2hLqS4TgWZYj4/7mI5m1CQQcWurxUz6ODgOub/6LCI= +github.com/knadh/koanf/parsers/toml v0.1.0/go.mod h1:yUprhq6eo3GbyVXFFMdbfZSo928ksS+uo0FFqNMnO18= +github.com/knadh/koanf/parsers/yaml v0.1.0 h1:ZZ8/iGfRLvKSaMEECEBPM1HQslrZADk8fP1XFUxVI5w= +github.com/knadh/koanf/parsers/yaml v0.1.0/go.mod h1:cvbUDC7AL23pImuQP0oRw/hPuccrNBS2bps8asS0CwY= +github.com/knadh/koanf/providers/posflag v0.1.0 h1:mKJlLrKPcAP7Ootf4pBZWJ6J+4wHYujwipe7Ie3qW6U= +github.com/knadh/koanf/providers/posflag v0.1.0/go.mod h1:SYg03v/t8ISBNrMBRMlojH8OsKowbkXV7giIbBVgbz0= +github.com/knadh/koanf/providers/rawbytes v0.1.0 h1:dpzgu2KO6uf6oCb4aP05KDmKmAmI51k5pe8RYKQ0qME= +github.com/knadh/koanf/providers/rawbytes v0.1.0/go.mod h1:mMTB1/IcJ/yE++A2iEZbY1MLygX7vttU+C+S/YmPu9c= +github.com/knadh/koanf/v2 v2.2.2 h1:ghbduIkpFui3L587wavneC9e3WIliCgiCgdxYO/wd7A= +github.com/knadh/koanf/v2 v2.2.2/go.mod h1:abWQc0cBXLSF/PSOMCB/SK+T13NXDsPvOksbpi5e/9Q= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.0/go.mod 
h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/laher/mergefs v0.1.1 h1:nV2bTS57vrmbMxeR6uvJpI8LyGl3QHj4bLBZO3aUV58= +github.com/laher/mergefs v0.1.1/go.mod h1:FSY1hYy94on4Tz60waRMGdO1awwS23BacqJlqf9lJ9Q= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= +github.com/lestrrat-go/backoff/v2 v2.0.8 h1:oNb5E5isby2kiro9AgdHLv5N5tint1AnDVVf2E2un5A= +github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y= +github.com/lestrrat-go/blackmagic v1.0.4 h1:IwQibdnf8l2KoO+qC3uT4OaTWsW7tuRQXy9TRN9QanA= +github.com/lestrrat-go/blackmagic v1.0.4/go.mod h1:6AWFyKNNj0zEXQYfTMPfZrAXUWUfTIZ5ECEUEJaijtw= +github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE= +github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E= +github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI= +github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4= +github.com/lestrrat-go/jwx v1.2.31 h1:/OM9oNl/fzyldpv5HKZ9m7bTywa7COUfg8gujd9nJ54= +github.com/lestrrat-go/jwx v1.2.31/go.mod h1:eQJKoRwWcLg4PfD5CFA5gIZGxhPgoPYq9pZISdxLf0c= +github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= +github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU= +github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= +github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= +github.com/matryer/is v1.4.0 
h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs= +github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50= +github.com/microcosm-cc/bluemonday v1.0.22/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= 
+github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= +github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/nyaruka/phonenumbers v1.6.5 h1:aBCaUhfpRA7hU6fsXk+p7KF1aNx4nQlq9hGeo2qdFg8= +github.com/nyaruka/phonenumbers v1.6.5/go.mod h1:7gjs+Lchqm49adhAKB5cdcng5ZXgt6x7Jgvi0ZorUtU= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU= +github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk= +github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8= +github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opencontainers/runc v1.3.3 h1:qlmBbbhu+yY0QM7jqfuat7M1H3/iXjju3VkP9lkFQr4= +github.com/opencontainers/runc v1.3.3/go.mod h1:D7rL72gfWxVs9cJ2/AayxB0Hlvn9g0gaF1R7uunumSI= +github.com/openzipkin/zipkin-go v0.4.3 h1:9EGwpqkgnwdEIJ+Od7QVSEIH+ocmm5nPat0G7sjsSdg= +github.com/openzipkin/zipkin-go v0.4.3/go.mod h1:M9wCJZFWCo2RiY+o1eBCEMe0Dp2S5LDHcMZmk3RmK7c= +github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= +github.com/ory/analytics-go/v5 v5.0.1 h1:LX8T5B9FN8KZXOtxgN+R3I4THRRVB6+28IKgKBpXmAM= +github.com/ory/analytics-go/v5 v5.0.1/go.mod h1:lWCiCjAaJkKfgR/BN5DCLMol8BjKS1x+4jxBxff/FF0= +github.com/ory/dockertest/v3 v3.12.0 h1:3oV9d0sDzlSQfHtIaB5k6ghUCVMVLpAY8hwrqoCyRCw= +github.com/ory/dockertest/v3 v3.12.0/go.mod h1:aKNDTva3cp8dwOWwb9cWuX84aH5akkxXRvO7KCwWVjE= +github.com/ory/herodot v0.10.7 h1:CETBRP4LboLlQCSVTkyQix/a2bVh1rmNhhfxd45khCI= +github.com/ory/herodot v0.10.7/go.mod h1:j6i246U6iX8TStYNKIVQxb2waweQvtOLi+b/9q+OULg= +github.com/ory/jsonschema/v3 v3.0.9-0.20250317235931-280c5fc7bf0e h1:4tUrC7x4YWRVMFp+c64KACNSGchW1zXo4l6Pa9/1hA8= +github.com/ory/jsonschema/v3 v3.0.9-0.20250317235931-280c5fc7bf0e/go.mod 
h1:XWLxVK4un/iuIcrw+6lCeanbF3NZwO5k6RdLeu/loQk= +github.com/ory/pop/v6 v6.3.2-0.20251203152233-a32233875f7e h1:gsbAteu8HZYnkIF4WVBaxklvF/s5IbcxYcCi6qX93ms= +github.com/ory/pop/v6 v6.3.2-0.20251203152233-a32233875f7e/go.mod h1:PEqjxMcIV87rBhlyDDha76I7/w2W/FHenSq3V3X1A/A= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/peterhellberg/link v1.2.0 h1:UA5pg3Gp/E0F2WdX7GERiNrPQrM1K6CVJUUWfHa4t6c= +github.com/peterhellberg/link v1.2.0/go.mod h1:gYfAh+oJgQu2SrZHg5hROVRQe1ICoK0/HHJTcE0edxc= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA= +github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= +github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= +github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0= +github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw= +github.com/rakutentech/jwk-go v1.2.0 h1:vNJwedPkRR+32V5WGNj0JP4COes93BGERvzQLBjLy4c= +github.com/rakutentech/jwk-go v1.2.0/go.mod h1:pI0bYVntqaJ27RCpaC75MTUacheW0Rk4+8XzWWe1OWM= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= +github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/xid v1.2.1/go.mod 
h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761 h1:0b8DF5kR0PhRoRXDiEEdzrgBc8UqVY4JWLkQJCRsLME= +github.com/seatgeek/logrus-gelf-formatter v0.0.0-20210414080842-5b05eb8ff761/go.mod h1:/THDZYi7F/BsVEcYzYPqdcWFQ+1C2InkawTKfLOAnzg= +github.com/segmentio/analytics-go v3.1.0+incompatible/go.mod h1:C7CYBtQWk4vRk2RyLu0qOcbHJ18E3F1HV2C/8JvKN48= +github.com/segmentio/backo-go v0.0.0-20200129164019-23eae7c10bd3/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc= +github.com/segmentio/backo-go v1.1.0 h1:cJIfHQUdmLsd8t9IXqf5J8SdrOMn9vMa7cIvOavHAhc= +github.com/segmentio/backo-go v1.1.0/go.mod h1:ckenwdf+v/qbyhVdNPWHnqh2YdJBED1O9cidYyM5J18= +github.com/segmentio/conf v1.2.0/go.mod h1:Y3B9O/PqqWqjyxyWWseyj/quPEtMu1zDp/kVbSWWaB0= +github.com/segmentio/go-snakecase v1.1.0/go.mod h1:jk1miR5MS7Na32PZUykG89Arm+1BUSYhuGR6b7+hJto= +github.com/segmentio/objconv v1.0.1/go.mod h1:auayaH5k3137Cl4SoXTgrzQcuQDmvuVtZgS0fb1Ahys= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= +github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d h1:yKm7XZV6j9Ev6lojP2XaIshpT4ymkqhMeSghO5Ps00E= +github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e h1:qpG93cPwA5f7s/ZPBJnGOYQNK/vKsaDaseuKT5Asee8= +github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= +github.com/spf13/cast v1.9.2 h1:SsGfm7M8QOFtEzumm7UZrZdLLquNdzFYfIbEXntcFbE= +github.com/spf13/cast v1.9.2/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= 
+github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/ssoready/hyrumtoken v1.0.0 h1:N/JPJDOuYS7qPSnOvZpPxNVXwtlT3kfzAMEcPrH8ywQ= +github.com/ssoready/hyrumtoken v1.0.0/go.mod h1:h8q768r5Uv6iJKOwsNENIWWUP9kvmLykQox5m3SCpqc= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod 
h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= +github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g= +github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= +go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0 h1:wCeciVlAfb5DC8MQl/DlmAv/FVPNpQgFvI/71+hatuc= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0/go.mod h1:WfEApdZDMlLUAev/0QQpr8EJ/z0VWDKYZ5tF5RH5T1U= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY= +go.opentelemetry.io/contrib/propagators/b3 v1.37.0 h1:0aGKdIuVhy5l4GClAjl72ntkZJhijf2wg1S7b5oLoYA= +go.opentelemetry.io/contrib/propagators/b3 v1.37.0/go.mod h1:nhyrxEJEOQdwR15zXrCKI6+cJK60PXAkJ/jRyfhr2mg= +go.opentelemetry.io/contrib/propagators/jaeger v1.37.0 h1:pW+qDVo0jB0rLsNeaP85xLuz20cvsECUcN7TE+D8YTM= +go.opentelemetry.io/contrib/propagators/jaeger v1.37.0/go.mod h1:x7bd+t034hxLTve1hF9Yn9qQJlO/pP8H5pWIt7+gsFM= +go.opentelemetry.io/contrib/samplers/jaegerremote v0.31.0 h1:l8XCsDh7L6Z7PB+vlw1s4ufNab+ayT2RMNdvDE/UyPc= +go.opentelemetry.io/contrib/samplers/jaegerremote v0.31.0/go.mod h1:XAOSk4bqj5vtoiY08bexeiafzxdXeLlxKFnwscvn8Fc= +go.opentelemetry.io/otel v1.38.0 
h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 h1:Ahq7pZmv87yiyn3jeFz/LekZmPLLdKejuO3NcK9MssM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0/go.mod h1:MJTqhM0im3mRLw1i8uGHnCvUEeS7VwRyxlLC78PA18M= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0 h1:bDMKF3RUSxshZ5OjOTi8rsHGaPKsAt76FaqgvIUySLc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0/go.mod h1:dDT67G/IkA46Mr2l9Uj7HsQVwsjASyV9SjGofsiUZDA= +go.opentelemetry.io/otel/exporters/zipkin v1.37.0 h1:Z2apuaRnHEjzDAkpbWNPiksz1R0/FCIrJSjiMA43zwI= +go.opentelemetry.io/otel/exporters/zipkin v1.37.0/go.mod h1:ofGu/7fG+bpmjZoiPUUmYDJ4vXWxMT57HmGoegx49uw= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4= +go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko= +go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= +go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE= +go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod 
h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 h1:SbTAbRFnd5kjQXbczszQ0hdk3ctwYf3qBNH9jIsGclE= +golang.org/x/exp v0.0.0-20250813145105-42675adae3e6/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.6.0/go.mod 
h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo= +golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU= +golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools 
v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto/googleapis/api v0.0.0-20250811230008-5f3141c8851a h1:DMCgtIAIQGZqJXMVzJF4MV8BlWoJh2ZuFiRdAleyr58= +google.golang.org/genproto/googleapis/api v0.0.0-20250811230008-5f3141c8851a/go.mod h1:y2yVLIE/CSMCPXaHnSKXxu1spLPnglFLegmgdY23uuE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a h1:tPE/Kp+x9dMSwUm/uM0JKK0IfdiJkwAbSMSeZBXXJXc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= +google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/go-jose/go-jose.v2 v2.6.3 h1:nt80fvSDlhKWQgSWyHyy5CfmlQr+asih51R8PTWNKKs= +gopkg.in/go-jose/go-jose.v2 v2.6.3/go.mod h1:zzZDPkNNw/c9IE7Z9jr11mBZQhKQTMzoEEIoEdZlFBI= +gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= +gopkg.in/go-playground/mold.v2 v2.2.0/go.mod h1:XMyyRsGtakkDPbxXbrA5VODo6bUXyvoDjLd5l3T0XoA= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/validator.v2 v2.0.0-20180514200540-135c24b11c19/go.mod h1:o4V0GXN9/CAmCsvJ0oXYZvrZOe7syiDZSN1GWGZTGzc= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs= +sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4= diff --git a/oryx/hasherx/hash_comparator.go b/oryx/hasherx/hash_comparator.go new file mode 100644 index 00000000000..be2745fe27e --- /dev/null +++ b/oryx/hasherx/hash_comparator.go @@ -0,0 +1,227 @@ +package hasherx + +import ( + "context" + "crypto/subtle" + "encoding/base64" + "fmt" + "math" + "regexp" + "strings" + + "github.com/pkg/errors" + "golang.org/x/crypto/argon2" + "golang.org/x/crypto/bcrypt" + "golang.org/x/crypto/pbkdf2" +) + +var ErrUnknownHashAlgorithm = errors.New("unknown hash algorithm") + +// Compare the given password with the given hash. +func Compare(ctx context.Context, password []byte, hash []byte) error { + switch { + case IsBcryptHash(hash): + return CompareBcrypt(ctx, password, hash) + case IsArgon2idHash(hash): + return CompareArgon2id(ctx, password, hash) + case IsArgon2iHash(hash): + return CompareArgon2i(ctx, password, hash) + case IsPbkdf2Hash(hash): + return ComparePbkdf2(ctx, password, hash) + default: + return errors.WithStack(ErrUnknownHashAlgorithm) + } +} + +func CompareBcrypt(_ context.Context, password []byte, hash []byte) error { + if err := validateBcryptPasswordLength(password); err != nil { + return err + } + + err := bcrypt.CompareHashAndPassword(hash, password) + if err != nil { + return err + } + + return nil +} + +func CompareArgon2id(_ context.Context, password []byte, hash []byte) error { + // Extract the parameters, salt and derived key from the encoded password + // hash. + p, salt, hash, err := decodeArgon2idHash(string(hash)) + if err != nil { + return err + } + + mem := uint64(p.Memory) + if mem > math.MaxUint32 { + return errors.WithStack(ErrInvalidHash) + } + + // Derive the key from the other password using the same parameters. + otherHash := argon2.IDKey(password, salt, p.Iterations, uint32(mem), p.Parallelism, p.KeyLength) + + // Check that the contents of the hashed passwords are identical. Note + // that we are using the subtle.ConstantTimeCompare() function for this + // to help prevent timing attacks. + if subtle.ConstantTimeCompare(hash, otherHash) == 1 { + return nil + } + return errors.WithStack(ErrMismatchedHashAndPassword) +} + +func CompareArgon2i(_ context.Context, password []byte, hash []byte) error { + // Extract the parameters, salt and derived key from the encoded password + // hash. + p, salt, hash, err := decodeArgon2idHash(string(hash)) + if err != nil { + return err + } + + mem := uint64(p.Memory) + if mem > math.MaxUint32 { + return errors.WithStack(ErrInvalidHash) + } + + // Derive the key from the other password using the same parameters. + otherHash := argon2.Key(password, salt, p.Iterations, uint32(mem), p.Parallelism, p.KeyLength) + + // Check that the contents of the hashed passwords are identical. 
Note + // that we are using the subtle.ConstantTimeCompare() function for this + // to help prevent timing attacks. + if subtle.ConstantTimeCompare(hash, otherHash) == 1 { + return nil + } + return errors.WithStack(ErrMismatchedHashAndPassword) +} + +func ComparePbkdf2(_ context.Context, password []byte, hash []byte) error { + // Extract the parameters, salt and derived key from the encoded password + // hash. + p, salt, hash, err := decodePbkdf2Hash(string(hash)) + if err != nil { + return err + } + + // Derive the key from the other password using the same parameters. + otherHash := pbkdf2.Key(password, salt, int(p.Iterations), int(p.KeyLength), getPseudorandomFunctionForPbkdf2(p.Algorithm)) + + // Check that the contents of the hashed passwords are identical. Note + // that we are using the subtle.ConstantTimeCompare() function for this + // to help prevent timing attacks. + if subtle.ConstantTimeCompare(hash, otherHash) == 1 { + return nil + } + return errors.WithStack(ErrMismatchedHashAndPassword) +} + +var ( + isBcryptHash = regexp.MustCompile(`^\$2[abzy]?\$`) + isArgon2idHash = regexp.MustCompile(`^\$argon2id\$`) + isArgon2iHash = regexp.MustCompile(`^\$argon2i\$`) + isPbkdf2Hash = regexp.MustCompile(`^\$pbkdf2-sha[0-9]{1,3}\$`) +) + +func IsBcryptHash(hash []byte) bool { + return isBcryptHash.Match(hash) +} + +func IsArgon2idHash(hash []byte) bool { + return isArgon2idHash.Match(hash) +} + +func IsArgon2iHash(hash []byte) bool { + return isArgon2iHash.Match(hash) +} + +func IsPbkdf2Hash(hash []byte) bool { + return isPbkdf2Hash.Match(hash) +} + +func decodeArgon2idHash(encodedHash string) (p *Argon2Config, salt, hash []byte, err error) { + parts := strings.Split(encodedHash, "$") + if len(parts) != 6 { + return nil, nil, nil, ErrInvalidHash + } + + var version int + _, err = fmt.Sscanf(parts[2], "v=%d", &version) + if err != nil { + return nil, nil, nil, err + } + if version != argon2.Version { + return nil, nil, nil, ErrIncompatibleVersion + } + + p = new(Argon2Config) + _, err = fmt.Sscanf(parts[3], "m=%d,t=%d,p=%d", &p.Memory, &p.Iterations, &p.Parallelism) + if err != nil { + return nil, nil, nil, err + } + + salt, err = base64.RawStdEncoding.Strict().DecodeString(parts[4]) + if err != nil { + return nil, nil, nil, err + } + saltLength := uint(len(salt)) + if saltLength > math.MaxUint32 { + return nil, nil, nil, ErrInvalidHash + } + p.SaltLength = uint32(saltLength) + + hash, err = base64.RawStdEncoding.Strict().DecodeString(parts[5]) + if err != nil { + return nil, nil, nil, err + } + keyLength := uint(len(hash)) + if keyLength > math.MaxUint32 { + return nil, nil, nil, ErrInvalidHash + } + p.KeyLength = uint32(keyLength) + + return p, salt, hash, nil +} + +// decodePbkdf2Hash decodes PBKDF2 encoded password hash. 
+// format: $pbkdf2-<algorithm>$i=<iterations>,l=<key length>$<salt>$<hash> +func decodePbkdf2Hash(encodedHash string) (p *PBKDF2Config, salt, hash []byte, err error) { + parts := strings.Split(encodedHash, "$") + if len(parts) != 5 { + return nil, nil, nil, ErrInvalidHash + } + + p = new(PBKDF2Config) + digestParts := strings.SplitN(parts[1], "-", 2) + if len(digestParts) != 2 { + return nil, nil, nil, ErrInvalidHash + } + p.Algorithm = digestParts[1] + + _, err = fmt.Sscanf(parts[2], "i=%d,l=%d", &p.Iterations, &p.KeyLength) + if err != nil { + return nil, nil, nil, err + } + + salt, err = base64.RawStdEncoding.Strict().DecodeString(parts[3]) + if err != nil { + return nil, nil, nil, err + } + saltLength := uint(len(salt)) + if saltLength > math.MaxUint32 { + return nil, nil, nil, ErrInvalidHash + } + p.SaltLength = uint32(saltLength) + + hash, err = base64.RawStdEncoding.Strict().DecodeString(parts[4]) + if err != nil { + return nil, nil, nil, err + } + keyLength := uint(len(hash)) + if keyLength > math.MaxUint32 { + return nil, nil, nil, ErrInvalidHash + } + p.KeyLength = uint32(keyLength) + + return p, salt, hash, nil +} diff --git a/oryx/hasherx/hasher.go b/oryx/hasherx/hasher.go new file mode 100644 index 00000000000..c25472451b6 --- /dev/null +++ b/oryx/hasherx/hasher.go @@ -0,0 +1,20 @@ +package hasherx + +import ( + "context" +) + +// Hasher provides methods for generating and comparing password hashes. +type Hasher interface { + // Generate returns a hash derived from the password or an error if the hash method failed. + Generate(ctx context.Context, password []byte) ([]byte, error) + + // Understands returns whether the given hash can be understood by this hasher. + Understands(hash []byte) bool +} + +type HashProvider interface { + Hasher() Hasher +} + +const tracingComponent = "github.com/ory/kratos/hash" diff --git a/oryx/hasherx/hasher_argon2.go b/oryx/hasherx/hasher_argon2.go new file mode 100644 index 00000000000..238ab395d23 --- /dev/null +++ b/oryx/hasherx/hasher_argon2.go @@ -0,0 +1,118 @@ +package hasherx + +import ( + "bytes" + "context" + "crypto/rand" + "encoding/base64" + "fmt" + "math" + "time" + + "github.com/ory/x/otelx" + + "github.com/inhies/go-bytesize" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + + "github.com/pkg/errors" + "golang.org/x/crypto/argon2" +) + +var ( + ErrInvalidHash = errors.New("the encoded hash is not in the correct format") + ErrIncompatibleVersion = errors.New("incompatible version of argon2") + ErrMismatchedHashAndPassword = errors.New("passwords do not match") +) + +type ( + // Argon2Config is the configuration for an Argon2 hasher. + Argon2Config struct { + // Memory is the amount of memory to use. + Memory bytesize.ByteSize `json:"memory"` + + // Iterations is the number of iterations to use. + Iterations uint32 `json:"iterations"` + + // Parallelism is the number of threads to use. + Parallelism uint8 `json:"parallelism"` + + // SaltLength is the length of the salt to use. + SaltLength uint32 `json:"salt_length"` + + // KeyLength is the length of the key to use. + KeyLength uint32 `json:"key_length"` + + // ExpectedDuration is the expected duration of the hash. + ExpectedDuration time.Duration `json:"expected_duration"` + + // ExpectedDeviation is the expected deviation of the hash. + ExpectedDeviation time.Duration `json:"expected_deviation"` + + // DedicatedMemory is the amount of dedicated memory to use.
+ DedicatedMemory bytesize.ByteSize `json:"dedicated_memory"` + } + // Argon2 is a hasher that uses the Argon2 algorithm. + Argon2 struct { + c Argon2Configurator + } + // Argon2Configurator provides the Argon2 configuration. + Argon2Configurator interface { + HasherArgon2Config(ctx context.Context) *Argon2Config + } +) + +func NewHasherArgon2(c Argon2Configurator) *Argon2 { + return &Argon2{c: c} +} + +func toKB(mem bytesize.ByteSize) (uint32, error) { + kb := uint64(mem / bytesize.KB) + if kb > math.MaxUint32 { + return 0, errors.Errorf("memory %v is too large", mem) + } + return uint32(kb), nil +} + +// Generate generates a hash for the given password. +func (h *Argon2) Generate(ctx context.Context, password []byte) (_ []byte, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hash.Argon2.Generate") + defer otelx.End(span, &err) + p := h.c.HasherArgon2Config(ctx) + span.SetAttributes(attribute.String("argon2.config", fmt.Sprintf("%#v", p))) + + salt := make([]byte, p.SaltLength) + if _, err := rand.Read(salt); err != nil { + return nil, err + } + + mem, err := toKB(p.Memory) + if err != nil { + return nil, err + } + // Pass the plaintext password, salt and parameters to the argon2.IDKey + // function. This will generate a hash of the password using the Argon2id + // variant. + hash := argon2.IDKey(password, salt, p.Iterations, mem, p.Parallelism, p.KeyLength) + + var b bytes.Buffer + if _, err := fmt.Fprintf( + &b, + "$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s", + argon2.Version, mem, p.Iterations, p.Parallelism, + base64.RawStdEncoding.EncodeToString(salt), + base64.RawStdEncoding.EncodeToString(hash), + ); err != nil { + span.RecordError(err) + span.SetStatus(codes.Error, err.Error()) + return nil, errors.WithStack(err) + } + + return b.Bytes(), nil +} + +// Understands checks if the given hash is in the correct format. +func (h *Argon2) Understands(hash []byte) bool { + return IsArgon2idHash(hash) +} diff --git a/oryx/hasherx/hasher_bcrypt.go b/oryx/hasherx/hasher_bcrypt.go new file mode 100644 index 00000000000..6219617099e --- /dev/null +++ b/oryx/hasherx/hasher_bcrypt.go @@ -0,0 +1,69 @@ +package hasherx + +import ( + "context" + + "github.com/ory/x/otelx" + "github.com/pkg/errors" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "golang.org/x/crypto/bcrypt" +) + +// ErrBcryptPasswordLengthReached is returned when the password is longer than 72 bytes. +var ErrBcryptPasswordLengthReached = errors.Errorf("passwords are limited to a maximum length of 72 characters") + +type ( + // Bcrypt is a hasher that uses the bcrypt algorithm. + Bcrypt struct { + c BCryptConfigurator + } + // BCryptConfig is the configuration for the bcrypt hasher. + BCryptConfig struct { + Cost uint32 `json:"cost"` + } + // BCryptConfigurator is the interface that must be implemented by a configuration provider for the bcrypt hasher. + BCryptConfigurator interface { + HasherBcryptConfig(ctx context.Context) *BCryptConfig + } +) + +func NewHasherBcrypt(c BCryptConfigurator) *Bcrypt { + return &Bcrypt{c: c} +} +
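A minimal usage sketch (not part of the changeset above) showing how the BCryptConfigurator interface just introduced might be satisfied and handed to NewHasherBcrypt. It is written as a hypothetical _test.go file in package hasherx so that no module import path has to be guessed; the staticBcryptConfig type and the cost value 12 are illustrative assumptions.

package hasherx

import (
	"context"
	"fmt"
)

// staticBcryptConfig is a hypothetical BCryptConfigurator that always returns a fixed cost.
type staticBcryptConfig struct{ cost uint32 }

func (c staticBcryptConfig) HasherBcryptConfig(context.Context) *BCryptConfig {
	return &BCryptConfig{Cost: c.cost}
}

// ExampleBcrypt_Generate hashes a password and checks that the hasher recognizes its own output.
func ExampleBcrypt_Generate() {
	h := NewHasherBcrypt(staticBcryptConfig{cost: 12})
	digest, err := h.Generate(context.Background(), []byte("correct horse battery staple"))
	if err != nil {
		panic(err)
	}
	fmt.Println(h.Understands(digest)) // digests look like $2a$12$..., so this prints true
	// Output: true
}

+// Generate generates a hash for the given password.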
+func (h *Bcrypt) Generate(ctx context.Context, password []byte) (hash []byte, err error) { + ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "hash.Bcrypt.Generate") + defer otelx.End(span, &err) + + if err := validateBcryptPasswordLength(password); err != nil { + return nil, err + } + + cost := int(h.c.HasherBcryptConfig(ctx).Cost) + span.SetAttributes(attribute.Int("bcrypt.cost", cost)) + hash, err = bcrypt.GenerateFromPassword(password, cost) + if err != nil { + return nil, err + } + + return hash, nil +} + +func validateBcryptPasswordLength(password []byte) error { + // Bcrypt truncates the password to the first 72 bytes, following the OpenBSD implementation, + // so if the password is longer than 72 bytes, the function returns an error. + // See https://en.wikipedia.org/wiki/Bcrypt#User_input + if len(password) > 72 { + return ErrBcryptPasswordLengthReached + } + return nil +} + +// Understands checks if the given hash is in the correct format. +func (h *Bcrypt) Understands(hash []byte) bool { + return IsBcryptHash(hash) +} diff --git a/oryx/hasherx/hasher_pbkdf2.go b/oryx/hasherx/hasher_pbkdf2.go new file mode 100644 index 00000000000..6897c52e240 --- /dev/null +++ b/oryx/hasherx/hasher_pbkdf2.go @@ -0,0 +1,102 @@ +package hasherx + +import ( + "bytes" + "context" + "crypto/rand" + "crypto/sha1" // #nosec G505 - compatibility for imported passwords + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "fmt" + "hash" + + "github.com/ory/x/otelx" + "github.com/pkg/errors" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/codes" + "golang.org/x/crypto/pbkdf2" + "golang.org/x/crypto/sha3" +) + +type ( + // PBKDF2 is a PBKDF2 hasher. + PBKDF2 struct { + c PBKDF2Configurator + } + + // PBKDF2Config is the configuration for a PBKDF2 hasher. + PBKDF2Config struct { + // Algorithm can be one of sha1, sha224, sha256, sha384, sha512 + Algorithm string + // Iterations is the number of iterations to use. + Iterations uint32 + // SaltLength is the length of the salt. + SaltLength uint32 + // KeyLength is the length of the key. + KeyLength uint32 + } + + // PBKDF2Configurator is a configurator for a PBKDF2 hasher. + PBKDF2Configurator interface { + HasherPBKDF2Config(ctx context.Context) *PBKDF2Config + } +) + +// NewHasherPBKDF2 creates a new PBKDF2 hasher. +func NewHasherPBKDF2(c PBKDF2Configurator) *PBKDF2 { + return &PBKDF2{c: c} +} + +// Generate generates a hash for the given password. +func (h *PBKDF2) Generate(ctx context.Context, password []byte) (hash []byte, err error) { + ctx, span := otel.GetTracerProvider().Tracer("").Start(ctx, "hash.PBKDF2.Generate") + defer otelx.End(span, &err) + + conf := h.c.HasherPBKDF2Config(ctx) + salt := make([]byte, conf.SaltLength) + if _, err := rand.Read(salt); err != nil { + return nil, err + } + + key := pbkdf2.Key(password, salt, int(conf.Iterations), int(conf.KeyLength), getPseudorandomFunctionForPbkdf2(conf.Algorithm)) + + var b bytes.Buffer + if _, err := fmt.Fprintf( + &b, + "$pbkdf2-%s$i=%d,l=%d$%s$%s", + conf.Algorithm, + conf.Iterations, + conf.KeyLength, + base64.RawStdEncoding.EncodeToString(salt), + base64.RawStdEncoding.EncodeToString(key), + ); err != nil { + span.RecordError(err) + span.SetStatus(codes.Error, err.Error()) + return nil, errors.WithStack(err) + } + + return b.Bytes(), nil +} +
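A minimal round-trip sketch (not part of the changeset above): it generates a PBKDF2 digest with the hasher defined in this file and verifies it again through Compare from hash_comparator.go, which dispatches on the $pbkdf2-sha256$ prefix. Written as a hypothetical _test.go file in package hasherx; the staticPBKDF2Config type and its parameter values are assumptions chosen only for illustration.

package hasherx

import (
	"context"
	"fmt"
)

// staticPBKDF2Config is a hypothetical PBKDF2Configurator with fixed parameters.
type staticPBKDF2Config struct{}

func (staticPBKDF2Config) HasherPBKDF2Config(context.Context) *PBKDF2Config {
	return &PBKDF2Config{Algorithm: "sha256", Iterations: 100_000, SaltLength: 16, KeyLength: 32}
}

// ExamplePBKDF2_roundTrip hashes a password and verifies it with the generic Compare helper.
func ExamplePBKDF2_roundTrip() {
	h := NewHasherPBKDF2(staticPBKDF2Config{})
	digest, err := h.Generate(context.Background(), []byte("secret"))
	if err != nil {
		panic(err)
	}
	fmt.Println(Compare(context.Background(), []byte("secret"), digest) == nil)
	// Output: true
}

+// Understands checks if the given hash is in the correct format.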
+func (h *PBKDF2) Understands(hash []byte) bool { + return IsPbkdf2Hash(hash) +} + +func getPseudorandomFunctionForPbkdf2(alg string) func() hash.Hash { + switch alg { + case "sha1": + return sha1.New + case "sha224": + return sha3.New224 + case "sha256": + return sha256.New + case "sha384": + return sha3.New384 + case "sha512": + return sha512.New + default: + return sha256.New + } +} diff --git a/oryx/healthx/doc.go b/oryx/healthx/doc.go new file mode 100644 index 00000000000..200b47d32ca --- /dev/null +++ b/oryx/healthx/doc.go @@ -0,0 +1,37 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package healthx providers helpers for returning health status information via HTTP. +package healthx + +import "strings" + +// The health status of the service. +// +// swagger:model healthStatus +type swaggerHealthStatus struct { + // Status always contains "ok". + Status string `json:"status"` +} + +// The not ready status of the service. +// +// swagger:model healthNotReadyStatus +type swaggerNotReadyStatus struct { + // Errors contains a list of errors that caused the not ready status. + Errors map[string]string `json:"errors"` +} + +func (s swaggerNotReadyStatus) Error() string { + var errs []string + for _, err := range s.Errors { + errs = append(errs, err) + } + return strings.Join(errs, "; ") +} + +// swagger:model version +type swaggerVersion struct { + // Version is the service's version. + Version string `json:"version"` +} diff --git a/oryx/healthx/handler.go b/oryx/healthx/handler.go new file mode 100644 index 00000000000..c679cd172d7 --- /dev/null +++ b/oryx/healthx/handler.go @@ -0,0 +1,225 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package healthx + +import ( + "net/http" + + "github.com/ory/herodot" +) + +const ( + // AliveCheckPath is the path where information about the life state of the instance is provided. + AliveCheckPath = "/health/alive" + // ReadyCheckPath is the path where information about the ready state of the instance is provided. + ReadyCheckPath = "/health/ready" + // VersionPath is the path where information about the software version of the instance is provided. + VersionPath = "/version" +) + +// RoutesToObserve returns a string of all the available routes of this module. +func RoutesToObserve() []string { + return []string{ + AliveCheckPath, + ReadyCheckPath, + VersionPath, + } +} + +// ReadyChecker should return an error if the component is not ready yet. +type ReadyChecker func(r *http.Request) error + +// ReadyCheckers is a map of ReadyCheckers. +type ReadyCheckers map[string]ReadyChecker + +// NoopReadyChecker is always ready. +func NoopReadyChecker() error { + return nil +} + +// Handler handles HTTP requests to health and version endpoints. +type Handler struct { + H herodot.Writer + VersionString string + ReadyChecks ReadyCheckers +} + +type options struct { + middleware func(http.Handler) http.Handler +} + +type Options func(*options) + +// NewHandler instantiates a handler. +func NewHandler( + h herodot.Writer, + version string, + readyChecks ReadyCheckers, +) *Handler { + return &Handler{ + H: h, + VersionString: version, + ReadyChecks: readyChecks, + } +} + +type router interface { + Handler(method, path string, handler http.Handler) +} + +// SetHealthRoutes registers this handler's routes for health checking. 
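A ReadyChecker is just a func(*http.Request) error, so a database ping fits naturally. Below is a minimal sketch of wiring one into the healthx handler above; herodot.NewJSONWriter as the writer and github.com/ory/x/healthx as the import path are assumptions about the surrounding application:

package main

import (
	"database/sql"
	"net/http"

	"github.com/ory/herodot"

	"github.com/ory/x/healthx" // assumed import path for the package above
)

// newHealthHandler is a hypothetical constructor used by the surrounding application.
func newHealthHandler(db *sql.DB) *healthx.Handler {
	return healthx.NewHandler(
		herodot.NewJSONWriter(nil), // renders the JSON bodies of the endpoints above
		"v1.0.0",
		healthx.ReadyCheckers{
			// The checker receives the incoming request, so it can honour its context.
			"database": func(r *http.Request) error {
				return db.PingContext(r.Context())
			},
		},
	)
}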
+func (h *Handler) SetHealthRoutes(r router, shareErrors bool, opts ...Options) { + o := &options{} + aliveHandler := h.Alive() + readyHandler := h.Ready(shareErrors) + + for _, opt := range opts { + opt(o) + } + + if o.middleware != nil { + aliveHandler = o.middleware(aliveHandler) + readyHandler = o.middleware(readyHandler) + } + + r.Handler("GET", AliveCheckPath, aliveHandler) + r.Handler("GET", ReadyCheckPath, readyHandler) +} + +// SetVersionRoutes registers this handler's routes for health checking. +func (h *Handler) SetVersionRoutes(r router, opts ...Options) { + o := &options{} + versionHandler := h.Version() + + for _, opt := range opts { + opt(o) + } + + if o.middleware != nil { + versionHandler = o.middleware(versionHandler) + } + + r.Handler("GET", VersionPath, versionHandler) +} + +// Alive returns an ok status if the instance is ready to handle HTTP requests. +// +// swagger:route GET /health/alive health isInstanceAlive +// +// # Check alive status +// +// This endpoint returns a 200 status code when the HTTP server is up running. +// This status does currently not include checks whether the database connection is working. +// +// If the service supports TLS Edge Termination, this endpoint does not require the +// `X-Forwarded-Proto` header to be set. +// +// Be aware that if you are running multiple nodes of this service, the health status will never +// refer to the cluster state, only to a single instance. +// +// Produces: +// - application/json +// - text/plain +// +// Responses: +// 200: healthStatus +// default: unexpectedError +func (h *Handler) Alive() http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + h.H.Write(rw, r, &swaggerHealthStatus{ + Status: "ok", + }) + }) +} + +// swagger:model unexpectedError +// +//nolint:deadcode,unused +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions +type unexpectedError string + +// Ready returns an ok status if the instance is ready to handle HTTP requests and all ReadyCheckers are ok. +// +// swagger:route GET /health/ready health isInstanceReady +// +// # Check readiness status +// +// This endpoint returns a 200 status code when the HTTP server is up running and the environment dependencies (e.g. +// the database) are responsive as well. +// +// If the service supports TLS Edge Termination, this endpoint does not require the +// `X-Forwarded-Proto` header to be set. +// +// Be aware that if you are running multiple nodes of this service, the health status will never +// refer to the cluster state, only to a single instance. +// +// Produces: +// - application/json +// - text/plain +// +// Responses: +// 200: healthStatus +// 503: healthNotReadyStatus +// default: unexpectedError +func (h *Handler) Ready(shareErrors bool) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + var notReady = swaggerNotReadyStatus{ + Errors: map[string]string{}, + } + + for n, c := range h.ReadyChecks { + if err := c(r); err != nil { + if shareErrors { + notReady.Errors[n] = err.Error() + } else { + notReady.Errors[n] = "error may contain sensitive information and was obfuscated" + } + } + } + + if len(notReady.Errors) > 0 { + h.H.WriteErrorCode(rw, r, http.StatusServiceUnavailable, ¬Ready) + return + } + + h.H.Write(rw, r, &swaggerHealthStatus{ + Status: "ok", + }) + }) +} + +// Version returns this service's versions. 
+// +// swagger:route GET /version version getVersion +// +// # Get service version +// +// This endpoint returns the service version typically notated using semantic versioning. +// +// If the service supports TLS Edge Termination, this endpoint does not require the +// `X-Forwarded-Proto` header to be set. +// +// Be aware that if you are running multiple nodes of this service, the health status will never +// refer to the cluster state, only to a single instance. +// +// Produces: +// - application/json +// +// Responses: +// 200: version +func (h *Handler) Version() http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + h.H.Write(rw, r, &swaggerVersion{ + Version: h.VersionString, + }) + }) +} + +// WithMiddleware accepts a http.Handler to be run on the +// route handlers +func WithMiddleware(h func(http.Handler) http.Handler) Options { + return func(o *options) { + o.middleware = h + } +} diff --git a/oryx/healthx/openapi/patch.yaml b/oryx/healthx/openapi/patch.yaml new file mode 100644 index 00000000000..2d4baef3f14 --- /dev/null +++ b/oryx/healthx/openapi/patch.yaml @@ -0,0 +1,112 @@ +- op: replace + path: /paths/~1health~1alive + value: + get: + description: |- + This endpoint returns a HTTP 200 status code when {{.ProjectHumanName}} is accepting incoming + HTTP requests. This status does currently not include checks whether the database connection is working. + + If the service supports TLS Edge Termination, this endpoint does not require the + `X-Forwarded-Proto` header to be set. + + Be aware that if you are running multiple nodes of this service, the health status will never + refer to the cluster state, only to a single instance. + operationId: isAlive + responses: + '200': + content: + application/json: + schema: + required: + - status + type: object + properties: + status: + description: Always "ok". + type: string + description: '{{.ProjectHumanName}} is ready to accept connections.' + default: + content: + text/plain: + schema: + type: string + description: Unexpected error + summary: Check HTTP Server Status + tags: {{ .HealthPathTags | toJson }} +- op: replace + path: /paths/~1health~1ready + value: + get: + operationId: isReady + description: |- + This endpoint returns a HTTP 200 status code when {{.ProjectHumanName}} is up running and the environment dependencies (e.g. + the database) are responsive as well. + + If the service supports TLS Edge Termination, this endpoint does not require the + `X-Forwarded-Proto` header to be set. + + Be aware that if you are running multiple nodes of {{.ProjectHumanName}}, the health status will never + refer to the cluster state, only to a single instance. + responses: + '200': + content: + application/json: + schema: + required: + - status + type: object + properties: + status: + description: Always "ok". + type: string + description: '{{.ProjectHumanName}} is ready to accept requests.' + '503': + content: + application/json: + schema: + required: + - errors + properties: + errors: + additionalProperties: + type: string + description: Errors contains a list of errors that caused the not ready status. + type: object + type: object + description: Ory Kratos is not yet ready to accept requests. 
+ default: + content: + text/plain: + schema: + type: string + description: Unexpected error + summary: Check HTTP Server and Database Status + tags: {{ .HealthPathTags | toJson }} +- op: replace + path: /paths/~1version + value: + get: + description: |- + This endpoint returns the version of {{.ProjectHumanName}}. + + If the service supports TLS Edge Termination, this endpoint does not require the + `X-Forwarded-Proto` header to be set. + + Be aware that if you are running multiple nodes of this service, the version will never + refer to the cluster state, only to a single instance. + operationId: getVersion + responses: + '200': + content: + application/json: + schema: + type: object + required: + - version + properties: + version: + description: The version of {{.ProjectHumanName}}. + type: string + description: Returns the {{.ProjectHumanName}} version. + summary: Return Running Software Version. + tags: {{ .HealthPathTags | toJson }} diff --git a/oryx/httprouterx/router.go b/oryx/httprouterx/router.go new file mode 100644 index 00000000000..65de7bba425 --- /dev/null +++ b/oryx/httprouterx/router.go @@ -0,0 +1,115 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httprouterx + +import ( + "net/http" + "path" + "strings" + + "github.com/ory/x/prometheusx" +) + +const AdminPrefix = "/admin" + +type ( + router struct { + Mux *http.ServeMux + prefix string + metricsManager *prometheusx.MetricsManager + } + RouterAdmin struct{ router } + RouterPublic struct{ router } +) + +// NewRouterAdmin creates a new admin router. +func NewRouterAdmin(metricsManager *prometheusx.MetricsManager) *RouterAdmin { + return &RouterAdmin{router: router{ + Mux: http.NewServeMux(), + metricsManager: metricsManager, + }} +} + +func (r *RouterAdmin) ToPublic() *RouterPublic { + return &RouterPublic{router: router{ + Mux: r.Mux, + metricsManager: r.metricsManager, + }} +} + +// NewRouterPublic returns a public router. +func NewRouterPublic(metricsManager *prometheusx.MetricsManager) *RouterPublic { + return &RouterPublic{router: router{ + Mux: http.NewServeMux(), + metricsManager: metricsManager, + }} +} + +// NewRouterAdminWithPrefix creates a new router with the admin prefix. +func NewRouterAdminWithPrefix(metricsHandler *prometheusx.MetricsManager) *RouterAdmin { + r := NewRouterAdmin(metricsHandler) + r.prefix = AdminPrefix + return r +} + +func (r *router) GET(route string, handle http.HandlerFunc) { + r.handle(http.MethodGet, route, handle) +} + +func (r *router) HEAD(route string, handle http.HandlerFunc) { + r.handle(http.MethodHead, route, handle) +} + +func (r *router) POST(route string, handle http.HandlerFunc) { + r.handle(http.MethodPost, route, handle) +} + +func (r *router) PUT(route string, handle http.HandlerFunc) { + r.handle(http.MethodPut, route, handle) +} + +func (r *router) PATCH(route string, handle http.HandlerFunc) { + r.handle(http.MethodPatch, route, handle) +} + +func (r *router) DELETE(route string, handle http.HandlerFunc) { + r.handle(http.MethodDelete, route, handle) +} + +func (r *router) Handler(method, route string, handler http.Handler) { + r.handle(method, route, handler) +} + +func (r *router) handle(method string, route string, handler http.Handler) { + r.Mux.HandleFunc(method+" "+path.Join(r.prefix, route), func(w http.ResponseWriter, req *http.Request) { + // In order the get the right metrics for the right path, `req.Pattern` must have been filled by the http router. + // This is the case at this point, but not before e.g. 
when the prometheus middleware runs as a negroni middleware: + // the http router has not run yet and `req.Pattern` is empty. + r.metricsManager.ServeHTTP(w, req, handler.ServeHTTP) + }) +} + +func (r *router) ServeHTTP(w http.ResponseWriter, req *http.Request) { r.Mux.ServeHTTP(w, req) } + +func TrimTrailingSlashNegroni(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + r.URL.Path = strings.TrimSuffix(r.URL.Path, "/") + + next(rw, r) +} + +func NoCacheNegroni(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + if r.Method == "GET" { + rw.Header().Set("Cache-Control", "private, no-cache, no-store, must-revalidate") + } + + next(rw, r) +} + +func AddAdminPrefixIfNotPresentNegroni(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + if !strings.HasPrefix(r.URL.Path, AdminPrefix) { + r.URL.Path = path.Join(AdminPrefix, r.URL.Path) + } + + next(rw, r) +} diff --git a/oryx/httpx/assert.go b/oryx/httpx/assert.go new file mode 100644 index 00000000000..c913267077a --- /dev/null +++ b/oryx/httpx/assert.go @@ -0,0 +1,24 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "net/http" +) + +func GetResponseMeta(w http.ResponseWriter) (status, size int) { + switch t := w.(type) { + case interface{ Status() int }: + status = t.Status() + } + + switch t := w.(type) { + case interface{ Size() int }: + size = t.Size() + case interface{ Written() int64 }: + size = int(t.Written()) + } + + return +} diff --git a/oryx/httpx/chan_handler.go b/oryx/httpx/chan_handler.go new file mode 100644 index 00000000000..42b9a20f37b --- /dev/null +++ b/oryx/httpx/chan_handler.go @@ -0,0 +1,21 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import "net/http" + +type chanHandler <-chan http.HandlerFunc + +var _ http.Handler = chanHandler(nil) + +func (c chanHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + (<-c)(w, r) +} + +// NewChanHandler returns a new handler and corresponding channel for sending handler funcs. +// Useful for testing. The argument buf specifies the channel capacity, so pass 0 for a sync handler. 
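Because the admin router above exposes Handler(method, path, handler), it also satisfies the small router interface that healthx expects, so the health and version endpoints can be mounted on it directly. A sketch under the assumption that the packages are importable as github.com/ory/x/...; the MetricsManager is taken as a parameter because its constructor is outside this section:

package main

import (
	"net/http"

	"github.com/ory/x/healthx" // assumed import paths for the packages above
	"github.com/ory/x/httprouterx"
	"github.com/ory/x/prometheusx"
)

// newAdminMux is a hypothetical helper; the MetricsManager and health handler
// are constructed elsewhere in the application.
func newAdminMux(mm *prometheusx.MetricsManager, health *healthx.Handler) http.Handler {
	// Routes end up as "METHOD /admin/<route>" patterns on a plain net/http
	// ServeMux (Go 1.22+ method-aware patterns), wrapped with the metrics manager.
	admin := httprouterx.NewRouterAdminWithPrefix(mm)
	health.SetHealthRoutes(admin, true)
	health.SetVersionRoutes(admin)
	return admin
}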
+func NewChanHandler(buf int) (http.Handler, chan<- http.HandlerFunc) { + c := make(chan http.HandlerFunc, buf) + return chanHandler(c), c +} diff --git a/oryx/httpx/client_info.go b/oryx/httpx/client_info.go new file mode 100644 index 00000000000..1f966e485a5 --- /dev/null +++ b/oryx/httpx/client_info.go @@ -0,0 +1,72 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "net" + "net/http" + "strconv" + "strings" +) + +type GeoLocation struct { + City string + Region string + Country string + Latitude *float64 + Longitude *float64 +} + +func GetClientIPAddressesWithoutInternalIPs(ipAddresses []string) (string, error) { + var res string + + for i := len(ipAddresses) - 1; i >= 0; i-- { + ip := strings.TrimSpace(ipAddresses[i]) + + if !net.ParseIP(ip).IsPrivate() { + res = ip + break + } + } + + return res, nil +} + +func ClientIP(r *http.Request) string { + if trueClientIP := r.Header.Get("True-Client-IP"); trueClientIP != "" { + return trueClientIP + } else if cfConnectingIP := r.Header.Get("Cf-Connecting-IP"); cfConnectingIP != "" { + return cfConnectingIP + } else if realClientIP := r.Header.Get("X-Real-IP"); realClientIP != "" { + return realClientIP + } else if forwardedIP := r.Header.Get("X-Forwarded-For"); forwardedIP != "" { + ip, _ := GetClientIPAddressesWithoutInternalIPs(strings.Split(forwardedIP, ",")) + return ip + } else { + return r.RemoteAddr + } +} + +func parseFloatHeaderValue(headerValue string) *float64 { + if headerValue == "" { + return nil + } + + val, err := strconv.ParseFloat(headerValue, 64) + if err != nil { + return nil + } + + return &val +} + +func ClientGeoLocation(r *http.Request) *GeoLocation { + return &GeoLocation{ + City: r.Header.Get("Cf-Ipcity"), + Region: r.Header.Get("Cf-Region-Code"), + Country: r.Header.Get("Cf-Ipcountry"), + Longitude: parseFloatHeaderValue(r.Header.Get("Cf-Iplongitude")), + Latitude: parseFloatHeaderValue(r.Header.Get("Cf-Iplatitude")), + } +} diff --git a/oryx/httpx/content_type.go b/oryx/httpx/content_type.go new file mode 100644 index 00000000000..6c01c9f4648 --- /dev/null +++ b/oryx/httpx/content_type.go @@ -0,0 +1,28 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "mime" + "net/http" + "slices" + "strings" +) + +// HasContentType determines whether the request `content-type` includes a +// server-acceptable mime-type +// +// Failure should yield an HTTP 415 (`http.StatusUnsupportedMediaType`) +func HasContentType(r *http.Request, mimetypes ...string) bool { + contentType := r.Header.Get("Content-Type") + if contentType == "" { + return slices.Contains(mimetypes, "application/octet-stream") + } + + mediaType, _, err := mime.ParseMediaType(strings.TrimSpace(contentType)) + if err != nil { + return false + } + return slices.Contains(mimetypes, mediaType) +} diff --git a/oryx/httpx/gzip_server.go b/oryx/httpx/gzip_server.go new file mode 100644 index 00000000000..1ee6603c30d --- /dev/null +++ b/oryx/httpx/gzip_server.go @@ -0,0 +1,50 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "compress/gzip" + "fmt" + "io" + "net/http" + "strings" +) + +type CompressionRequestReader struct { + ErrHandler func(w http.ResponseWriter, r *http.Request, err error) +} + +func defaultCompressionErrorHandler(w http.ResponseWriter, r *http.Request, err error) { + http.Error(w, err.Error(), http.StatusBadRequest) +} + +func NewCompressionRequestReader(eh func(w http.ResponseWriter, r 
*http.Request, err error)) *CompressionRequestReader { + if eh == nil { + eh = defaultCompressionErrorHandler + } + + return &CompressionRequestReader{ + ErrHandler: eh, + } +} + +func (c *CompressionRequestReader) ServeHTTP(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + for _, enc := range strings.Split(r.Header.Get("Content-Encoding"), ",") { + switch enc = strings.TrimSpace(enc); enc { + case "gzip": + reader, err := gzip.NewReader(r.Body) + if err != nil { + c.ErrHandler(w, r, err) + return + } + r.Body = io.NopCloser(reader) + case "identity", "": + // nothing to do + default: + c.ErrHandler(w, r, fmt.Errorf("%s content encoding not supported", enc)) + } + } + + next(w, r) +} diff --git a/oryx/httpx/private_ip_validator.go b/oryx/httpx/private_ip_validator.go new file mode 100644 index 00000000000..f644d4c4886 --- /dev/null +++ b/oryx/httpx/private_ip_validator.go @@ -0,0 +1,94 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "fmt" + "net" + "net/netip" + "net/url" + + "code.dny.dev/ssrf" + "github.com/pkg/errors" +) + +// ErrPrivateIPAddressDisallowed is returned when a private IP address is disallowed. +type ErrPrivateIPAddressDisallowed error + +// DisallowPrivateIPAddressesWhenSet is a wrapper for DisallowIPPrivateAddresses which returns valid +// when ipOrHostnameOrURL is empty. +func DisallowPrivateIPAddressesWhenSet(ipOrHostnameOrURL string) error { + if ipOrHostnameOrURL == "" { + return nil + } + return DisallowIPPrivateAddresses(ipOrHostnameOrURL) +} + +// DisallowIPPrivateAddresses returns nil for a domain (with NS lookup), IP, or IPv6 address if it +// does not resolve to a private IP subnet. This is a first level of defense against +// SSRF attacks by disallowing any domain or IP to resolve to a private network range. +// +// Please keep in mind that validations for domains is valid only when looking up. +// A malicious actor could easily update the DSN record post validation to point +// to an internal IP +func DisallowIPPrivateAddresses(ipOrHostnameOrURL string) error { + lookup := func(hostname string) ([]net.IP, error) { + lookup, err := net.LookupIP(hostname) + if err != nil { + if dnsErr := new(net.DNSError); errors.As(err, &dnsErr) && (dnsErr.IsNotFound || dnsErr.IsTemporary) { + // If the hostname does not resolve, we can't validate it. So yeah, + // I guess we're allowing it. + return nil, nil + } + return nil, errors.WithStack(err) + } + return lookup, nil + } + + var ips []net.IP + ip := net.ParseIP(ipOrHostnameOrURL) + if ip == nil { + if result, err := lookup(ipOrHostnameOrURL); err != nil { + return err + } else if result != nil { + ips = append(ips, result...) + } + + if parsed, err := url.Parse(ipOrHostnameOrURL); err == nil { + if result, err := lookup(parsed.Hostname()); err != nil { + return err + } else if result != nil { + ips = append(ips, result...) 
+ } + } + } else { + ips = append(ips, ip) + } + + for _, ip := range ips { + ip, err := netip.ParseAddr(ip.String()) + if err != nil { + return ErrPrivateIPAddressDisallowed(errors.WithStack(err)) // should be unreacheable + } + + if ip.Is4() { + for _, deny := range ssrf.IPv4DeniedPrefixes { + if deny.Contains(ip) { + return ErrPrivateIPAddressDisallowed(fmt.Errorf("%s is not a public IP address", ip)) + } + } + } else { + if !ssrf.IPv6GlobalUnicast.Contains(ip) { + return ErrPrivateIPAddressDisallowed(fmt.Errorf("%s is not a public IP address", ip)) + } + for _, net := range ssrf.IPv6DeniedPrefixes { + if net.Contains(ip) { + return ErrPrivateIPAddressDisallowed(fmt.Errorf("%s is not a public IP address", ip)) + } + } + } + } + + return nil +} diff --git a/oryx/httpx/request.go b/oryx/httpx/request.go new file mode 100644 index 00000000000..b18d1a2e81e --- /dev/null +++ b/oryx/httpx/request.go @@ -0,0 +1,51 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/url" + "strings" + + "github.com/pkg/errors" +) + +// NewRequestJSON returns a new JSON *http.Request. +func NewRequestJSON(method, url string, data interface{}) (*http.Request, error) { + var b bytes.Buffer + if err := json.NewEncoder(&b).Encode(data); err != nil { + return nil, errors.WithStack(err) + } + req, err := http.NewRequest(method, url, &b) + if err != nil { + return nil, errors.WithStack(err) + } + req.Header.Set("Content-Type", "application/json") + return req, nil +} + +// NewRequestForm returns a new POST Form *http.Request. +func NewRequestForm(method, url string, data url.Values) (*http.Request, error) { + req, err := http.NewRequest(method, url, strings.NewReader(data.Encode())) + if err != nil { + return nil, errors.WithStack(err) + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + return req, nil +} + +// MustNewRequest returns a new *http.Request or fatals. +func MustNewRequest(method, url string, body io.Reader, contentType string) *http.Request { + req, err := http.NewRequest(method, url, body) + if err != nil { + panic(err) + } + if contentType != "" { + req.Header.Set("Content-Type", contentType) + } + return req +} diff --git a/oryx/httpx/resilient_client.go b/oryx/httpx/resilient_client.go new file mode 100644 index 00000000000..cac80d74db7 --- /dev/null +++ b/oryx/httpx/resilient_client.go @@ -0,0 +1,138 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "context" + "io" + "log" + "net/http" + "time" + + "golang.org/x/oauth2" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/x/logrusx" +) + +type resilientOptions struct { + c *http.Client + l interface{} + retryWaitMin time.Duration + retryWaitMax time.Duration + retryMax int + noInternalIPs bool + internalIPExceptions []string +} + +func newResilientOptions() *resilientOptions { + connTimeout := time.Minute + return &resilientOptions{ + c: &http.Client{Timeout: connTimeout}, + retryWaitMin: 1 * time.Second, + retryWaitMax: 30 * time.Second, + retryMax: 4, + l: log.New(io.Discard, "", log.LstdFlags), + } +} + +// ResilientOptions is a set of options for the ResilientClient. +type ResilientOptions func(o *resilientOptions) + +// ResilientClientWithMaxRetry sets the maximum number of retries. 
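A typical use of the SSRF guard above is validating user-supplied webhook or DSN targets before ever dialing them. A small sketch with illustrative URLs (the import path is assumed):

package main

import (
	"fmt"

	"github.com/ory/x/httpx" // assumed import path for the package above
)

func main() {
	for _, target := range []string{
		"https://example.com/hooks/login",         // resolves to public IPs: allowed
		"http://169.254.169.254/latest/meta-data", // link-local metadata endpoint: rejected
		"",                                        // unset values pass the *WhenSet variant
	} {
		err := httpx.DisallowPrivateIPAddressesWhenSet(target)
		fmt.Printf("%q -> %v\n", target, err)
	}
}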
+func ResilientClientWithMaxRetry(retryMax int) ResilientOptions { + return func(o *resilientOptions) { + o.retryMax = retryMax + } +} + +// ResilientClientWithMinxRetryWait sets the minimum wait time between retries. +func ResilientClientWithMinxRetryWait(retryWaitMin time.Duration) ResilientOptions { + return func(o *resilientOptions) { + o.retryWaitMin = retryWaitMin + } +} + +// ResilientClientWithMaxRetryWait sets the maximum wait time for a retry. +func ResilientClientWithMaxRetryWait(retryWaitMax time.Duration) ResilientOptions { + return func(o *resilientOptions) { + o.retryWaitMax = retryWaitMax + } +} + +// ResilientClientWithConnectionTimeout sets the connection timeout for the client. +func ResilientClientWithConnectionTimeout(connTimeout time.Duration) ResilientOptions { + return func(o *resilientOptions) { + o.c.Timeout = connTimeout + } +} + +// ResilientClientWithLogger sets the logger to be used by the client. +func ResilientClientWithLogger(l *logrusx.Logger) ResilientOptions { + return func(o *resilientOptions) { + o.l = l + } +} + +// ResilientClientDisallowInternalIPs disallows internal IPs from being used. +func ResilientClientDisallowInternalIPs() ResilientOptions { + return func(o *resilientOptions) { + o.noInternalIPs = true + } +} + +// ResilientClientAllowInternalIPRequestsTo allows requests to the glob-matching URLs even +// if they are internal IPs. +func ResilientClientAllowInternalIPRequestsTo(urlGlobs ...string) ResilientOptions { + return func(o *resilientOptions) { + o.internalIPExceptions = urlGlobs + } +} + +// NewResilientClient creates a new ResilientClient. +func NewResilientClient(opts ...ResilientOptions) *retryablehttp.Client { + o := newResilientOptions() + for _, f := range opts { + f(o) + } + + if o.noInternalIPs { + o.c.Transport = &noInternalIPRoundTripper{ + onWhitelist: allowInternalAllowIPv6, + notOnWhitelist: prohibitInternalAllowIPv6, + internalIPExceptions: o.internalIPExceptions, + } + } else { + o.c.Transport = allowInternalAllowIPv6 + } + + cl := retryablehttp.NewClient() + cl.HTTPClient = o.c + cl.Logger = o.l + cl.RetryWaitMin = o.retryWaitMin + cl.RetryWaitMax = o.retryWaitMax + cl.RetryMax = o.retryMax + cl.CheckRetry = retryablehttp.DefaultRetryPolicy + cl.Backoff = retryablehttp.DefaultBackoff + return cl +} + +// SetOAuth2 modifies the given client to enable OAuth2 authentication. Requests +// with the client should always use the returned context. +// +// client := http.NewResilientClient(opts...) +// ctx, client = httpx.SetOAuth2(ctx, client, oauth2Config, oauth2Token) +// req, err := retryablehttp.NewRequestWithContext(ctx, ...) +// if err != nil { /* ... 
*/ } +// res, err := client.Do(req) +func SetOAuth2(ctx context.Context, cl *retryablehttp.Client, c OAuth2Config, t *oauth2.Token) (context.Context, *retryablehttp.Client) { + ctx = context.WithValue(ctx, oauth2.HTTPClient, cl.HTTPClient) + cl.HTTPClient = c.Client(ctx, t) + return ctx, cl +} + +type OAuth2Config interface { + Client(context.Context, *oauth2.Token) *http.Client +} diff --git a/oryx/httpx/ssrf.go b/oryx/httpx/ssrf.go new file mode 100644 index 00000000000..a217c7dd3c9 --- /dev/null +++ b/oryx/httpx/ssrf.go @@ -0,0 +1,145 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "context" + "net" + "net/http" + "net/http/httptrace" + "net/netip" + "time" + + "code.dny.dev/ssrf" + "github.com/gobwas/glob" + "go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace" + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" +) + +var _ http.RoundTripper = (*noInternalIPRoundTripper)(nil) + +type noInternalIPRoundTripper struct { + onWhitelist, notOnWhitelist http.RoundTripper + internalIPExceptions []string +} + +// NewNoInternalIPRoundTripper creates a RoundTripper that disallows +// non-publicly routable IP addresses, except for URLs matching the given +// exception globs. +// Deprecated: Use ResilientClientDisallowInternalIPs instead. +func NewNoInternalIPRoundTripper(exceptions []string) http.RoundTripper { + return &noInternalIPRoundTripper{ + onWhitelist: allowInternalAllowIPv6, + notOnWhitelist: prohibitInternalAllowIPv6, + internalIPExceptions: exceptions, + } +} + +// RoundTrip implements http.RoundTripper. +func (n noInternalIPRoundTripper) RoundTrip(request *http.Request) (*http.Response, error) { + incoming := IncomingRequestURL(request) + incoming.RawQuery = "" + incoming.RawFragment = "" + for _, exception := range n.internalIPExceptions { + compiled, err := glob.Compile(exception, '.', '/') + if err != nil { + return nil, err + } + if compiled.Match(incoming.String()) { + return n.onWhitelist.RoundTrip(request) + } + } + + return n.notOnWhitelist.RoundTrip(request) +} + +var ( + prohibitInternalAllowIPv6 http.RoundTripper + allowInternalAllowIPv6 http.RoundTripper +) + +func init() { + t, d := newDefaultTransport() + d.Control = ssrf.New( + ssrf.WithAnyPort(), + ssrf.WithNetworks("tcp4", "tcp6"), + ).Safe + prohibitInternalAllowIPv6 = OTELTraceTransport(t) +} + +func init() { + t, d := newDefaultTransport() + d.Control = ssrf.New( + ssrf.WithAnyPort(), + ssrf.WithNetworks("tcp4"), + ).Safe + t.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) { + return d.DialContext(ctx, "tcp4", addr) + } +} + +func init() { + t, d := newDefaultTransport() + d.Control = ssrf.New( + ssrf.WithAnyPort(), + ssrf.WithNetworks("tcp4", "tcp6"), + ssrf.WithAllowedV4Prefixes( + netip.MustParsePrefix("10.0.0.0/8"), // Private-Use (RFC 1918) + netip.MustParsePrefix("127.0.0.0/8"), // Loopback (RFC 1122, Section 3.2.1.3)) + netip.MustParsePrefix("169.254.0.0/16"), // Link Local (RFC 3927) + netip.MustParsePrefix("172.16.0.0/12"), // Private-Use (RFC 1918) + netip.MustParsePrefix("192.168.0.0/16"), // Private-Use (RFC 1918) + ), + ssrf.WithAllowedV6Prefixes( + netip.MustParsePrefix("::1/128"), // Loopback (RFC 4193) + netip.MustParsePrefix("fc00::/7"), // Unique Local (RFC 4193) + ), + ).Safe + allowInternalAllowIPv6 = OTELTraceTransport(t) +} + +func init() { + t, d := newDefaultTransport() + d.Control = ssrf.New( + ssrf.WithAnyPort(), + ssrf.WithNetworks("tcp4"), + 
ssrf.WithAllowedV4Prefixes( + netip.MustParsePrefix("10.0.0.0/8"), // Private-Use (RFC 1918) + netip.MustParsePrefix("127.0.0.0/8"), // Loopback (RFC 1122, Section 3.2.1.3)) + netip.MustParsePrefix("169.254.0.0/16"), // Link Local (RFC 3927) + netip.MustParsePrefix("172.16.0.0/12"), // Private-Use (RFC 1918) + netip.MustParsePrefix("192.168.0.0/16"), // Private-Use (RFC 1918) + ), + ssrf.WithAllowedV6Prefixes( + netip.MustParsePrefix("::1/128"), // Loopback (RFC 4193) + netip.MustParsePrefix("fc00::/7"), // Unique Local (RFC 4193) + ), + ).Safe + t.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) { + return d.DialContext(ctx, "tcp4", addr) + } +} + +func newDefaultTransport() (*http.Transport, *net.Dialer) { + dialer := net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 30 * time.Second, + } + return &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: dialer.DialContext, + ForceAttemptHTTP2: true, + MaxIdleConns: 100, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + }, &dialer +} + +// OTELTraceTransport wraps the given http.Transport with OpenTelemetry instrumentation. +func OTELTraceTransport(t *http.Transport) http.RoundTripper { + return otelhttp.NewTransport(t, otelhttp.WithClientTrace(func(ctx context.Context) *httptrace.ClientTrace { + return otelhttptrace.NewClientTrace(ctx, otelhttptrace.WithoutHeaders(), otelhttptrace.WithoutSubSpans()) + })) +} diff --git a/oryx/httpx/transports.go b/oryx/httpx/transports.go new file mode 100644 index 00000000000..3bb84f40230 --- /dev/null +++ b/oryx/httpx/transports.go @@ -0,0 +1,64 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import "net/http" + +// WrapTransportWithHeader wraps a http.Transport to always use the values from the given header. +func WrapTransportWithHeader(parent http.RoundTripper, h http.Header) *TransportWithHeader { + return &TransportWithHeader{ + RoundTripper: parent, + h: h, + } +} + +// NewTransportWithHeader returns a new http.Transport that always uses the values from the given header. +func NewTransportWithHeader(h http.Header) *TransportWithHeader { + return &TransportWithHeader{ + RoundTripper: http.DefaultTransport, + h: h, + } +} + +// TransportWithHeader is an http.RoundTripper that always uses the values from the given header. +type TransportWithHeader struct { + http.RoundTripper + h http.Header +} + +// RoundTrip implements http.RoundTripper. +func (ct *TransportWithHeader) RoundTrip(req *http.Request) (*http.Response, error) { + for k := range ct.h { + req.Header.Set(k, ct.h.Get(k)) + } + return ct.RoundTripper.RoundTrip(req) +} + +// NewTransportWithHost returns a new http.Transport that always uses the given host. +func NewTransportWithHost(host string) *TransportWithHost { + return &TransportWithHost{ + RoundTripper: http.DefaultTransport, + host: host, + } +} + +// WrapRoundTripperWithHost wraps a http.RoundTripper that always uses the given host. +func WrapRoundTripperWithHost(parent http.RoundTripper, host string) *TransportWithHost { + return &TransportWithHost{ + RoundTripper: parent, + host: host, + } +} + +// TransportWithHost is an http.RoundTripper that always uses the given host. +type TransportWithHost struct { + http.RoundTripper + host string +} + +// RoundTrip implements http.RoundTripper. 
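The resilient-client options above compose into an outbound retryable client. A sketch with illustrative values, assuming the package is importable as github.com/ory/x/httpx:

package main

import (
	"time"

	"github.com/ory/x/httpx" // assumed import path for the package above
)

func main() {
	client := httpx.NewResilientClient(
		httpx.ResilientClientWithMaxRetry(3),
		httpx.ResilientClientWithConnectionTimeout(10*time.Second),
		httpx.ResilientClientDisallowInternalIPs(),
		// Globs are matched against the outgoing URL with query and fragment stripped.
		httpx.ResilientClientAllowInternalIPRequestsTo("https://internal.example.org/**"),
	)

	res, err := client.Get("https://example.com/.well-known/openid-configuration")
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()
	// The client retries transient failures with exponential backoff and refuses
	// to dial private or link-local addresses unless the URL matches an exception.
}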
+func (ct *TransportWithHost) RoundTrip(req *http.Request) (*http.Response, error) { + req.Host = ct.host + return ct.RoundTripper.RoundTrip(req) +} diff --git a/oryx/httpx/url.go b/oryx/httpx/url.go new file mode 100644 index 00000000000..ff206e9ceef --- /dev/null +++ b/oryx/httpx/url.go @@ -0,0 +1,29 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "cmp" + "net/http" + "net/url" +) + +// IncomingRequestURL returns the URL of the incoming HTTP request by looking at the host, TLS, and X-Forwarded-* headers. +func IncomingRequestURL(r *http.Request) *url.URL { + source := *r.URL + source.Host = cmp.Or(source.Host, r.Header.Get("X-Forwarded-Host"), r.Host) + + if proto := r.Header.Get("X-Forwarded-Proto"); len(proto) > 0 { + source.Scheme = proto + } + + if source.Scheme == "" { + source.Scheme = "https" + if r.TLS == nil { + source.Scheme = "http" + } + } + + return &source +} diff --git a/oryx/httpx/wait_for.go b/oryx/httpx/wait_for.go new file mode 100644 index 00000000000..bdad9df9bd4 --- /dev/null +++ b/oryx/httpx/wait_for.go @@ -0,0 +1,53 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package httpx + +import ( + "context" + "io" + "net/http" + "time" + + "github.com/avast/retry-go/v4" + "github.com/pkg/errors" + "github.com/tidwall/gjson" +) + +// WaitForEndpoint waits for the endpoint to be available. +func WaitForEndpoint(ctx context.Context, endpoint string, opts ...retry.Option) error { + return WaitForEndpointWithClient(ctx, http.DefaultClient, endpoint, opts...) +} + +// WaitForEndpointWithClient waits for the endpoint to be available while using the given http.Client. +func WaitForEndpointWithClient(ctx context.Context, client *http.Client, endpoint string, opts ...retry.Option) error { + return retry.Do(func() error { + req, err := http.NewRequestWithContext(ctx, "GET", endpoint, nil) + if err != nil { + return err + } + + res, err := client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + body, err := io.ReadAll(res.Body) + if err != nil { + return err + } + + if gjson.GetBytes(body, "status").String() != "ok" { + return errors.Errorf("status is not yet ok: %s", body) + } + + return nil + }, + append([]retry.Option{ + retry.DelayType(retry.BackOffDelay), + retry.Delay(time.Second), + retry.MaxDelay(time.Second * 2), + retry.Attempts(20), + }, opts...)...) +} diff --git a/oryx/ioutilx/pkger.go b/oryx/ioutilx/pkger.go new file mode 100644 index 00000000000..ebd20128595 --- /dev/null +++ b/oryx/ioutilx/pkger.go @@ -0,0 +1,17 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package ioutilx + +import ( + "io" +) + +// MustReadAll reads a reader or panics. 
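IncomingRequestURL above reconstructs the externally visible URL from the forwarding headers; the following small sketch shows what it yields behind a TLS-terminating proxy (header values are examples, import path assumed):

package main

import (
	"fmt"
	"net/http/httptest"

	"github.com/ory/x/httpx" // assumed import path for the package above
)

func main() {
	r := httptest.NewRequest("GET", "/sessions/whoami", nil)
	r.Header.Set("X-Forwarded-Host", "auth.example.com")
	r.Header.Set("X-Forwarded-Proto", "https")

	// Prints: https://auth.example.com/sessions/whoami
	fmt.Println(httpx.IncomingRequestURL(r).String())
}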
+func MustReadAll(r io.Reader) []byte { + all, err := io.ReadAll(r) + if err != nil { + panic(err) + } + return all +} diff --git a/oryx/ipx/cidr.go b/oryx/ipx/cidr.go new file mode 100644 index 00000000000..341e792cd26 --- /dev/null +++ b/oryx/ipx/cidr.go @@ -0,0 +1,23 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package ipx + +import ( + "iter" + "net/netip" +) + +func Hosts(prefix netip.Prefix) iter.Seq[netip.Addr] { + prefix = prefix.Masked() + return func(yield func(netip.Addr) bool) { + if !prefix.IsValid() { + return + } + for addr := prefix.Addr().Next(); prefix.Contains(addr); addr = addr.Next() { + if !yield(addr) { + return + } + } + } +} diff --git a/oryx/ipx/ip_validator.go b/oryx/ipx/ip_validator.go new file mode 100644 index 00000000000..1a40ba9b5b4 --- /dev/null +++ b/oryx/ipx/ip_validator.go @@ -0,0 +1,100 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package ipx + +import ( + "context" + "net" + "net/url" + "time" + + "golang.org/x/sync/errgroup" + + "github.com/pkg/errors" +) + +// IsAssociatedIPAllowedWhenSet is a wrapper for IsAssociatedIPAllowed which returns valid +// when ipOrHostnameOrURL is empty. +func IsAssociatedIPAllowedWhenSet(ipOrHostnameOrURL string) error { + if ipOrHostnameOrURL == "" { + return nil + } + return IsAssociatedIPAllowed(ipOrHostnameOrURL) +} + +// AreAllAssociatedIPsAllowed fails if one of the pairs is failing. +func AreAllAssociatedIPsAllowed(pairs map[string]string) error { + g := new(errgroup.Group) + for key, ipOrHostnameOrURL := range pairs { + key := key + ipOrHostnameOrURL := ipOrHostnameOrURL + g.Go(func() error { + return errors.Wrapf(IsAssociatedIPAllowed(ipOrHostnameOrURL), "key %s validation is failing", key) + }) + } + return g.Wait() +} + +// IsAssociatedIPAllowed returns nil for a domain (with NS lookup), IP, or IPv6 address if it +// does not resolve to a private IP subnet. This is a first level of defense against +// SSRF attacks by disallowing any domain or IP to resolve to a private network range. +// +// Please keep in mind that validations for domains is valid only when looking up. +// A malicious actor could easily update the DSN record post validation to point +// to an internal IP +func IsAssociatedIPAllowed(ipOrHostnameOrURL string) error { + lookup := func(hostname string) []net.IP { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + defer cancel() + + lookup, err := net.DefaultResolver.LookupIPAddr(ctx, hostname) + if err != nil { + return nil + } + ips := make([]net.IP, len(lookup)) + for i, ip := range lookup { + ips[i] = ip.IP + } + return ips + } + + var ips []net.IP + ip := net.ParseIP(ipOrHostnameOrURL) + if ip == nil { + if result := lookup(ipOrHostnameOrURL); result != nil { + ips = append(ips, result...) + } + + if parsed, err := url.Parse(ipOrHostnameOrURL); err == nil { + if result := lookup(parsed.Hostname()); result != nil { + ips = append(ips, result...) 
+ } + } + } else { + ips = append(ips, ip) + } + + for _, disabled := range []string{ + "127.0.0.0/8", + "10.0.0.0/8", + "172.16.0.0/12", + "192.168.0.0/16", + "fd47:1ed0:805d:59f0::/64", + "fc00::/7", + "::1/128", + } { + _, cidr, err := net.ParseCIDR(disabled) + if err != nil { + return err + } + + for _, ip := range ips { + if cidr.Contains(ip) { + return errors.Errorf("ip %s is in the %s range", ip, disabled) + } + } + } + + return nil +} diff --git a/oryx/josex/encoding.go b/oryx/josex/encoding.go new file mode 100644 index 00000000000..57ff0e48447 --- /dev/null +++ b/oryx/josex/encoding.go @@ -0,0 +1,55 @@ +/*- + * Copyright 2019 Square Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package josex + +import "io" + +// Base64Reader wraps an input stream consisting of either standard or url-safe +// base64 data, and maps it to a raw (unpadded) standard encoding. This can be used +// to read any base64-encoded data as input, whether padded, unpadded, standard or +// url-safe. +type Base64Reader struct { + In io.Reader +} + +func (r Base64Reader) Read(p []byte) (n int, err error) { + n, err = r.In.Read(p) + if err != nil { + return + } + + for i := range n { + switch p[i] { + // Map - to + + case 0x2D: + p[i] = 0x2B + // Map _ to / + case 0x5F: + p[i] = 0x2F + // Strip = + case 0x3D: + n = i + default: + } + } + + if n == 0 { + err = io.EOF + } + + return +} diff --git a/oryx/josex/generate.go b/oryx/josex/generate.go new file mode 100644 index 00000000000..055a4c5cea7 --- /dev/null +++ b/oryx/josex/generate.go @@ -0,0 +1,121 @@ +/*- + * Copyright 2019 Square Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package josex + +import ( + "crypto" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/elliptic" + "crypto/rand" + "crypto/rsa" + "errors" + "fmt" + + "github.com/go-jose/go-jose/v3" +) + +// NewSigningKey generates a keypair for corresponding SignatureAlgorithm. +func NewSigningKey(alg jose.SignatureAlgorithm, bits int) (crypto.PublicKey, crypto.PrivateKey, error) { + switch alg { + case jose.ES256, jose.ES384, jose.ES512, jose.EdDSA: + keylen := map[jose.SignatureAlgorithm]int{ + jose.ES256: 256, + jose.ES384: 384, + jose.ES512: 521, // sic! 
+ jose.EdDSA: 256, + } + if bits != 0 && bits != keylen[alg] { + return nil, nil, errors.New("invalid elliptic curve key size, this algorithm does not support arbitrary size") + } + case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512: + if bits == 0 { + bits = 2048 + } + if bits < 2048 { + return nil, nil, errors.New("invalid key size for RSA key, 2048 or more is required") + } + } + switch alg { + case jose.ES256: + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, nil, err + } + return key.Public(), key, err + case jose.ES384: + key, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader) + if err != nil { + return nil, nil, err + } + return key.Public(), key, err + case jose.ES512: + key, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + if err != nil { + return nil, nil, err + } + return key.Public(), key, err + case jose.EdDSA: + pub, key, err := ed25519.GenerateKey(rand.Reader) + return pub, key, err + case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512: + key, err := rsa.GenerateKey(rand.Reader, bits) + if err != nil { + return nil, nil, err + } + return key.Public(), key, err + default: + return nil, nil, fmt.Errorf("unknown algorithm %s for signing key", alg) + } +} + +// NewEncryptionKey generates a keypair for corresponding KeyAlgorithm. +func NewEncryptionKey(alg jose.KeyAlgorithm, bits int) (crypto.PublicKey, crypto.PrivateKey, error) { + switch alg { + case jose.RSA1_5, jose.RSA_OAEP, jose.RSA_OAEP_256: + if bits == 0 { + bits = 2048 + } + if bits < 2048 { + return nil, nil, errors.New("invalid key size for RSA key, 2048 or more is required") + } + key, err := rsa.GenerateKey(rand.Reader, bits) + if err != nil { + return nil, nil, err + } + return key.Public(), key, err + case jose.ECDH_ES, jose.ECDH_ES_A128KW, jose.ECDH_ES_A192KW, jose.ECDH_ES_A256KW: + var crv elliptic.Curve + switch bits { + case 0, 256: + crv = elliptic.P256() + case 384: + crv = elliptic.P384() + case 521: + crv = elliptic.P521() + default: + return nil, nil, errors.New("invalid elliptic curve key size, use one of 256, 384, or 521") + } + key, err := ecdsa.GenerateKey(crv, rand.Reader) + if err != nil { + return nil, nil, err + } + return key.Public(), key, err + default: + return nil, nil, fmt.Errorf("unknown algorithm %s for encryption key", alg) + } +} diff --git a/oryx/josex/public.go b/oryx/josex/public.go new file mode 100644 index 00000000000..667a2cbbbd2 --- /dev/null +++ b/oryx/josex/public.go @@ -0,0 +1,29 @@ +package josex + +import ( + "crypto" + + "github.com/go-jose/go-jose/v3" +) + +// ToPublicKey returns the public key of the given private key. +func ToPublicKey(k *jose.JSONWebKey) jose.JSONWebKey { + if key := k.Public(); key.Key != nil { + return key + } + + // HSM workaround - jose does not understand crypto.Signer / HSM so we need to manually + // extract the public key. + switch key := k.Key.(type) { + case crypto.Signer: + newKey := *k + newKey.Key = key.Public() + return newKey + case jose.OpaqueSigner: + newKey := *k + newKey.Key = key.Public().Key + return newKey + } + + return jose.JSONWebKey{} +} diff --git a/oryx/josex/utils.go b/oryx/josex/utils.go new file mode 100644 index 00000000000..036f36aaa54 --- /dev/null +++ b/oryx/josex/utils.go @@ -0,0 +1,103 @@ +/*- + * Copyright 2019 Square Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package josex + +import ( + "crypto/x509" + "encoding/pem" + "errors" + "fmt" + + "github.com/go-jose/go-jose/v3" +) + +// LoadJSONWebKey returns a *jose.JSONWebKey for a given JSON string. +func LoadJSONWebKey(json []byte, pub bool) (*jose.JSONWebKey, error) { + var jwk jose.JSONWebKey + err := jwk.UnmarshalJSON(json) + if err != nil { + return nil, err + } + if !jwk.Valid() { + return nil, errors.New("invalid JWK key") + } + if jwk.IsPublic() != pub { + return nil, errors.New("priv/pub JWK key mismatch") + } + return &jwk, nil +} + +// LoadPublicKey loads a public key from PEM/DER/JWK-encoded data. +func LoadPublicKey(data []byte) (interface{}, error) { + input := data + + block, _ := pem.Decode(data) + if block != nil { + input = block.Bytes + } + + // Try to load SubjectPublicKeyInfo + pub, err0 := x509.ParsePKIXPublicKey(input) + if err0 == nil { + return pub, nil + } + + cert, err1 := x509.ParseCertificate(input) + if err1 == nil { + return cert.PublicKey, nil + } + + jwk, err2 := LoadJSONWebKey(data, true) + if err2 == nil { + return jwk, nil + } + + return nil, fmt.Errorf("square/go-jose: parse error, got '%s', '%s' and '%s'", err0, err1, err2) +} + +// LoadPrivateKey loads a private key from PEM/DER/JWK-encoded data. +func LoadPrivateKey(data []byte) (interface{}, error) { + input := data + + block, _ := pem.Decode(data) + if block != nil { + input = block.Bytes + } + + var priv interface{} + priv, err0 := x509.ParsePKCS1PrivateKey(input) + if err0 == nil { + return priv, nil + } + + priv, err1 := x509.ParsePKCS8PrivateKey(input) + if err1 == nil { + return priv, nil + } + + priv, err2 := x509.ParseECPrivateKey(input) + if err2 == nil { + return priv, nil + } + + jwk, err3 := LoadJSONWebKey(input, false) + if err3 == nil { + return jwk, nil + } + + return nil, fmt.Errorf("square/go-jose: parse error, got '%s', '%s', '%s' and '%s'", err0, err1, err2, err3) +} diff --git a/oryx/jsonnetsecure/cmd.go b/oryx/jsonnetsecure/cmd.go new file mode 100644 index 00000000000..33bd28d1130 --- /dev/null +++ b/oryx/jsonnetsecure/cmd.go @@ -0,0 +1,104 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetsecure + +import ( + "bufio" + "fmt" + "io" + + "github.com/pkg/errors" + "github.com/spf13/cobra" +) + +const ( + GiB uint64 = 1024 * 1024 * 1024 + // Generous limit on virtual memory including the peak memory allocated by the Go runtime, the Jsonnet VM, + // and the Jsonnet script. + // This number was acquired by running: + // Found by trial and error with: + // `ulimit -Sv 1048576 && echo '{"Snippet": "{user_id: std.repeat(\'a\', 1000)}"}' | kratos jsonnet -0` + // NOTE: Ideally we'd like to limit RSS but that is not possible on Linux with `ulimit/setrlimit(2)` - only with cgroups. 
+ virtualMemoryLimitBytes = 2 * GiB +) + +func NewJsonnetCmd() *cobra.Command { + var null bool + cmd := &cobra.Command{ + Use: "jsonnet", + Short: "Run Jsonnet as a CLI command", + Hidden: true, + RunE: func(cmd *cobra.Command, args []string) error { + + // This could fail because current limits are lower than what we tried to set, + // so we still continue in this case. + SetVirtualMemoryLimit(virtualMemoryLimitBytes) + + if null { + return scan(cmd.OutOrStdout(), cmd.InOrStdin()) + } + + input, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return errors.Wrap(err, "failed to read from stdin") + } + + json, err := eval(input) + if err != nil { + return errors.Wrap(err, "failed to evaluate jsonnet") + } + + if _, err := io.WriteString(cmd.OutOrStdout(), json); err != nil { + return errors.Wrap(err, "failed to write json output") + } + return nil + }, + } + cmd.Flags().BoolVarP(&null, "null", "0", false, + `Read multiple snippets and parameters from stdin separated by null bytes. +Output will be in the same order as inputs, separated by null bytes. +Evaluation errors will also be reported to stdout, separated by null bytes. +Non-recoverable errors are written to stderr and the program will terminate with a non-zero exit code.`) + + return cmd +} + +func scan(w io.Writer, r io.Reader) error { + scanner := bufio.NewScanner(r) + scanner.Split(splitNull) + for scanner.Scan() { + json, err := eval(scanner.Bytes()) + if err != nil { + json = fmt.Sprintf("ERROR: %s", err) + } + if _, err := fmt.Fprintf(w, "%s%c", json, 0); err != nil { + return errors.Wrap(err, "failed to write json output") + } + } + return errors.Wrap(scanner.Err(), "failed to read from stdin") +} + +func eval(input []byte) (json string, err error) { + var params processParameters + if err := params.Decode(input); err != nil { + return "", err + } + + vm := MakeSecureVM() + + for _, it := range params.ExtCodes { + vm.ExtCode(it.Key, it.Value) + } + for _, it := range params.ExtVars { + vm.ExtVar(it.Key, it.Value) + } + for _, it := range params.TLACodes { + vm.TLACode(it.Key, it.Value) + } + for _, it := range params.TLAVars { + vm.TLAVar(it.Key, it.Value) + } + + return vm.EvaluateAnonymousSnippet(params.Filename, params.Snippet) +} diff --git a/oryx/jsonnetsecure/cmd/root.go b/oryx/jsonnetsecure/cmd/root.go new file mode 100644 index 00000000000..09b45d34e30 --- /dev/null +++ b/oryx/jsonnetsecure/cmd/root.go @@ -0,0 +1,22 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package main + +import ( + "context" + "fmt" + "os" + + "github.com/ory/x/jsonnetsecure" +) + +func main() { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + if err := jsonnetsecure.NewJsonnetCmd().ExecuteContext(ctx); err != nil { + fmt.Println(err) + os.Exit(-1) + } +} diff --git a/oryx/jsonnetsecure/jsonnet.go b/oryx/jsonnetsecure/jsonnet.go new file mode 100644 index 00000000000..0aa8dafb116 --- /dev/null +++ b/oryx/jsonnetsecure/jsonnet.go @@ -0,0 +1,133 @@ +package jsonnetsecure + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "os" + "os/exec" + "path" + "runtime" + "testing" + + "github.com/google/go-jsonnet" +) + +type ( + VM interface { + EvaluateAnonymousSnippet(filename string, snippet string) (json string, formattedErr error) + ExtCode(key string, val string) + ExtVar(key string, val string) + TLACode(key string, val string) + TLAVar(key string, val string) + } + + kv struct { + Key, Value string + } + processParameters struct { + 
Filename, Snippet string + TLACodes, TLAVars, ExtCodes, ExtVars []kv + } + + vmOptions struct { + jsonnetBinaryPath string + args []string + ctx context.Context + pool *pool + } + + Option func(o *vmOptions) +) + +func (pp *processParameters) EncodeTo(w io.Writer) error { + return json.NewEncoder(w).Encode(pp) +} + +func (pp *processParameters) Decode(d []byte) error { + return json.Unmarshal(d, pp) +} + +func newVMOptions() *vmOptions { + jsonnetBinaryPath, _ := os.Executable() + return &vmOptions{ + jsonnetBinaryPath: jsonnetBinaryPath, + ctx: context.Background(), + } +} + +func WithContext(ctx context.Context) Option { + return func(o *vmOptions) { + o.ctx = ctx + } +} + +func WithProcessPool(p Pool) Option { + return func(o *vmOptions) { + pool, _ := p.(*pool) + o.pool = pool + } +} + +func WithJsonnetBinary(jsonnetBinaryPath string) Option { + return func(o *vmOptions) { + o.jsonnetBinaryPath = jsonnetBinaryPath + } +} + +func WithProcessArgs(args ...string) Option { + return func(o *vmOptions) { + o.args = args + } +} + +func MakeSecureVM(opts ...Option) VM { + options := newVMOptions() + for _, o := range opts { + o(options) + } + + if options.pool != nil { + return NewProcessPoolVM(options) + } else { + vm := jsonnet.MakeVM() + vm.Importer(new(ErrorImporter)) + return vm + } +} + +// ErrorImporter errors when calling "import". +type ErrorImporter struct{} + +// Import fetches data from a map entry. +// All paths are treated as absolute keys. +func (importer *ErrorImporter) Import(importedFrom, importedPath string) (contents jsonnet.Contents, foundAt string, err error) { + return jsonnet.Contents{}, "", fmt.Errorf("import not available %v", importedPath) +} + +func JsonnetTestBinary(t testing.TB) string { + t.Helper() + + // We can force the usage of a given jsonnet executable. + // Useful to test different versions, or run the tests under wine. + if s := os.Getenv("ORY_JSONNET_PATH"); s != "" { + return s + } + + var stderr bytes.Buffer + // Using `t.TempDir()` results in permissions errors on Windows, sometimes. + outPath := path.Join(os.TempDir(), "jsonnet") + if runtime.GOOS == "windows" { + outPath = outPath + ".exe" + } + cmd := exec.Command("go", "build", "-o", outPath, "github.com/ory/x/jsonnetsecure/cmd") + cmd.Stderr = &stderr + + if err := cmd.Run(); err != nil || stderr.Len() != 0 { + t.Fatalf("building the Go binary returned error: %v\n%s", err, stderr.String()) + } + + return outPath +} diff --git a/oryx/jsonnetsecure/jsonnet_pool.go b/oryx/jsonnetsecure/jsonnet_pool.go new file mode 100644 index 00000000000..5a3a3e51e75 --- /dev/null +++ b/oryx/jsonnetsecure/jsonnet_pool.go @@ -0,0 +1,284 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetsecure + +// Known limitations/edge cases: +// - The child process exiting early (e.g. crashing) or getting killed (e.g. reaching some OS limit) +// is not detected and no error will be returned in this case from `eval()`. +// - Misbehaving jsonnet scripts in the middle of a batch being passed to the child process for evaluation may result in +// no error (as mentioned above), and other valid scripts in this batch may result +// in an error (because the output from the child process is truncated). 
+// +// Possible remediations: +// - Do not pass a batch of scripts to a worker, only pass one script at a time (to isolate misbehaving scripts) +// - Validate that the output is valid JSON (to detect truncated output) +// - Detect the child process exiting (to return an error) + +import ( + "bufio" + "context" + "encoding/json" + "io" + "math" + "os/exec" + "strings" + "time" + + "github.com/jackc/puddle/v2" + "github.com/pkg/errors" + "go.opentelemetry.io/otel/attribute" + semconv "go.opentelemetry.io/otel/semconv/v1.27.0" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/otelx" +) + +const ( + KiB = 1024 + jsonnetOutputLimit = 512 * KiB + jsonnetErrLimit = 1 * KiB +) + +type ( + processPoolVM struct { + path string + args []string + ctx context.Context + params processParameters + pool *pool + } + Pool interface { + Close() + private() + } + pool struct { + puddle *puddle.Pool[worker] + } + worker struct { + cmd *exec.Cmd + stdin chan<- []byte + stdout <-chan string + stderr <-chan string + } + contextKeyType string +) + +var ( + ErrProcessPoolClosed = errors.New("jsonnetsecure: process pool closed") + + _ VM = (*processPoolVM)(nil) + _ Pool = (*pool)(nil) + + contextValuePath contextKeyType = "argc" + contextValueArgs contextKeyType = "argv" +) + +func NewProcessPool(size int) Pool { + size = max(5, min(size, math.MaxInt32)) + pud, err := puddle.NewPool(&puddle.Config[worker]{ + MaxSize: int32(size), //nolint:gosec // disable G115 // because of the previous min/max, 5 <= size <= math.MaxInt32 + Constructor: newWorker, + Destructor: worker.destroy, + }) + if err != nil { + panic(err) // this should never happen, see implementation of puddle.NewPool + } + for range size { + // warm pool + go pud.CreateResource(context.Background()) + } + go func() { + for { + time.Sleep(10 * time.Second) + for _, proc := range pud.AcquireAllIdle() { + if proc.Value().cmd.ProcessState != nil { + proc.Destroy() + } else { + proc.Release() + } + } + } + }() + return &pool{pud} +} + +func (*pool) private() {} + +func (p *pool) Close() { + p.puddle.Close() +} + +func newWorker(ctx context.Context) (_ worker, err error) { + tracer := trace.SpanFromContext(ctx).TracerProvider().Tracer("") + ctx, span := tracer.Start(ctx, "jsonnetsecure.newWorker") + defer otelx.End(span, &err) + + path, _ := ctx.Value(contextValuePath).(string) + if path == "" { + return worker{}, errors.New("newWorker: missing binary path in context") + } + args, _ := ctx.Value(contextValueArgs).([]string) + cmd := exec.Command(path, append(args, "-0")...) 
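+ // The appended -0 flag (see NewJsonnetCmd) puts the child into null-delimited
+ // batch mode: requests and responses are exchanged as null-terminated chunks,
+ // so one long-lived process can serve many evaluations over a single stdin/stdout pair.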
+ cmd.Env = []string{"GOMAXPROCS=1"} + cmd.WaitDelay = 100 * time.Millisecond + + span.SetAttributes(semconv.ProcessCommand(cmd.Path), semconv.ProcessCommandArgs(cmd.Args...)) + + stdin, err := cmd.StdinPipe() + if err != nil { + return worker{}, errors.Wrap(err, "newWorker: failed to create stdin pipe") + } + + in := make(chan []byte, 1) + go func(c <-chan []byte) { + for input := range c { + if _, err := stdin.Write(append(input, 0)); err != nil { + stdin.Close() + return + } + } + }(in) + + stdout, err := cmd.StdoutPipe() + if err != nil { + return worker{}, errors.Wrap(err, "newWorker: failed to create stdout pipe") + } + stderr, err := cmd.StderrPipe() + if err != nil { + return worker{}, errors.Wrap(err, "newWorker: failed to create stderr pipe") + } + + if err := cmd.Start(); err != nil { + return worker{}, errors.Wrap(err, "newWorker: failed to start process") + } + + span.SetAttributes(semconv.ProcessPID(cmd.Process.Pid)) + + scan := func(c chan<- string, r io.Reader) { + defer close(c) + // NOTE: `bufio.Scanner` has its own internal limit of 64 KiB. + scanner := bufio.NewScanner(r) + + scanner.Split(splitNull) + for scanner.Scan() { + c <- scanner.Text() + } + if err := scanner.Err(); err != nil { + c <- "ERROR: scan: " + err.Error() + } + } + out := make(chan string, 1) + go scan(out, stdout) + errs := make(chan string, 1) + go scan(errs, stderr) + + w := worker{ + cmd: cmd, + stdin: in, + stdout: out, + stderr: errs, + } + _, err = w.eval(ctx, []byte("{}")) // warm up + if err != nil { + w.destroy() + return worker{}, errors.Wrap(err, "newWorker: warm up failed") + } + + return w, nil +} + +func (w worker) destroy() { + close(w.stdin) + w.cmd.Process.Kill() + w.cmd.Wait() +} + +func (w worker) eval(ctx context.Context, processParams []byte) (output string, err error) { + tracer := trace.SpanFromContext(ctx).TracerProvider().Tracer("") + ctx, span := tracer.Start(ctx, "jsonnetsecure.worker.eval", trace.WithAttributes( + semconv.ProcessPID(w.cmd.Process.Pid))) + defer otelx.End(span, &err) + + select { + case <-ctx.Done(): + return "", ctx.Err() + case w.stdin <- processParams: + break + } + + select { + case <-ctx.Done(): + return "", ctx.Err() + case output := <-w.stdout: + return output, nil + case err := <-w.stderr: + return "", errors.New(err) + } +} + +func (vm *processPoolVM) EvaluateAnonymousSnippet(filename string, snippet string) (_ string, err error) { + tracer := trace.SpanFromContext(vm.ctx).TracerProvider().Tracer("") + ctx, span := tracer.Start(vm.ctx, "jsonnetsecure.processPoolVM.EvaluateAnonymousSnippet", trace.WithAttributes(attribute.String("filename", filename))) + defer otelx.End(span, &err) + + params := vm.params + params.Filename = filename + params.Snippet = snippet + pp, err := json.Marshal(params) + if err != nil { + return "", errors.Wrap(err, "jsonnetsecure: marshal") + } + + ctx = context.WithValue(ctx, contextValuePath, vm.path) + ctx = context.WithValue(ctx, contextValueArgs, vm.args) + worker, err := vm.pool.puddle.Acquire(ctx) + if err != nil { + return "", errors.Wrap(err, "jsonnetsecure: acquire") + } + + ctx, cancel := context.WithTimeout(ctx, 1*time.Second) + defer cancel() + result, err := worker.Value().eval(ctx, pp) + if err != nil { + worker.Destroy() + return "", errors.Wrap(err, "jsonnetsecure: eval") + } else { + worker.Release() + } + + if strings.HasPrefix(result, "ERROR: ") { + return "", errors.New("jsonnetsecure: " + result) + } + + return result, nil +} + +func NewProcessPoolVM(opts *vmOptions) VM { + ctx := opts.ctx + if ctx == 
nil { + ctx = context.Background() + } + return &processPoolVM{ + path: opts.jsonnetBinaryPath, + args: opts.args, + ctx: ctx, + pool: opts.pool, + } +} + +func (vm *processPoolVM) ExtCode(key string, val string) { + vm.params.ExtCodes = append(vm.params.ExtCodes, kv{key, val}) +} + +func (vm *processPoolVM) ExtVar(key string, val string) { + vm.params.ExtVars = append(vm.params.ExtVars, kv{key, val}) +} + +func (vm *processPoolVM) TLACode(key string, val string) { + vm.params.TLACodes = append(vm.params.TLACodes, kv{key, val}) +} + +func (vm *processPoolVM) TLAVar(key string, val string) { + vm.params.TLAVars = append(vm.params.TLAVars, kv{key, val}) +} diff --git a/oryx/jsonnetsecure/limit_unix.go b/oryx/jsonnetsecure/limit_unix.go new file mode 100644 index 00000000000..ee6ecc73e60 --- /dev/null +++ b/oryx/jsonnetsecure/limit_unix.go @@ -0,0 +1,29 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build !windows + +package jsonnetsecure + +import ( + "fmt" + "runtime/debug" + "syscall" + + "github.com/pkg/errors" +) + +func SetVirtualMemoryLimit(limitBytes uint64) error { + // Tell the Go runtime about the limit. + debug.SetMemoryLimit(int64(limitBytes)) //nolint:gosec // The number is a compile-time constant. + + lim := syscall.Rlimit{ + Cur: limitBytes, + Max: limitBytes, + } + err := syscall.Setrlimit(syscall.RLIMIT_AS, &lim) + if err != nil { + return errors.WithStack(fmt.Errorf("failed to set virtual memory limit: %v\n", err)) + } + return nil +} diff --git a/oryx/jsonnetsecure/limit_windows.go b/oryx/jsonnetsecure/limit_windows.go new file mode 100644 index 00000000000..3557c7a20b2 --- /dev/null +++ b/oryx/jsonnetsecure/limit_windows.go @@ -0,0 +1,16 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build windows + +package jsonnetsecure + +import "runtime/debug" + +func SetVirtualMemoryLimit(limitBytes uint64) error { + // Tell the Go runtime about the limit. + debug.SetMemoryLimit(int64(limitBytes)) //nolint:gosec // The number is a compile-time constant. + + // TODO No OS limit for now. Apparently there is a Windows-specific equivalent (Job control)? + return nil +} diff --git a/oryx/jsonnetsecure/null.go b/oryx/jsonnetsecure/null.go new file mode 100644 index 00000000000..42d6e921aa2 --- /dev/null +++ b/oryx/jsonnetsecure/null.go @@ -0,0 +1,22 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetsecure + +import "bytes" + +func splitNull(data []byte, atEOF bool) (advance int, token []byte, err error) { + // Look for a null byte; if found, return the position after it, + // the data before it, and no error. + if i := bytes.IndexByte(data, 0); i >= 0 { + return i + 1, data[0:i], nil + } + + // If we're at EOF, we have a final, non-terminated word. Return it. + if atEOF && len(data) != 0 { + return len(data), data, nil + } + + // Request more data. + return 0, nil, nil +} diff --git a/oryx/jsonnetsecure/provider.go b/oryx/jsonnetsecure/provider.go new file mode 100644 index 00000000000..4e4092615d5 --- /dev/null +++ b/oryx/jsonnetsecure/provider.go @@ -0,0 +1,60 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetsecure + +import ( + "context" + "os" + "runtime" + "testing" +) + +type ( + VMProvider interface { + // JsonnetVM creates a new secure process-isolated Jsonnet VM whose + // execution is bound to the provided context, i.e., + // cancelling the context will terminate the VM process. 
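+ // An illustrative call site (the provider value, ctx, and snippet are
+ // assumptions, not part of this package) could look like:
+ //
+ //	vm, err := provider.JsonnetVM(ctx)
+ //	if err != nil {
+ //		return err
+ //	}
+ //	out, err := vm.EvaluateAnonymousSnippet("payload.jsonnet", snippet)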
+ JsonnetVM(context.Context) (VM, error) + } + + // TestProvider provides a secure VM by running go build on github.com/ory/x/jsonnetsecure/cmd + TestProvider struct { + jsonnetBinary string + pool Pool + } + + // DefaultProvider provides a secure VM by calling the currently + // running the current binary with the provided subcommand. + DefaultProvider struct { + Subcommand string + Pool Pool + } +) + +func NewTestProvider(t testing.TB) *TestProvider { + pool := NewProcessPool(runtime.GOMAXPROCS(0)) + t.Cleanup(pool.Close) + return &TestProvider{JsonnetTestBinary(t), pool} +} + +func (p *TestProvider) JsonnetVM(ctx context.Context) (VM, error) { + return MakeSecureVM( + WithContext(ctx), + WithProcessPool(p.pool), + WithJsonnetBinary(p.jsonnetBinary), + ), nil +} + +func (p *DefaultProvider) JsonnetVM(ctx context.Context) (VM, error) { + self, err := os.Executable() + if err != nil { + return nil, err + } + return MakeSecureVM( + WithContext(ctx), + WithJsonnetBinary(self), + WithProcessArgs(p.Subcommand), + WithProcessPool(p.Pool), + ), nil +} diff --git a/oryx/jsonnetsecure/stub/import.jsonnet b/oryx/jsonnetsecure/stub/import.jsonnet new file mode 100644 index 00000000000..02b09a7b54f --- /dev/null +++ b/oryx/jsonnetsecure/stub/import.jsonnet @@ -0,0 +1 @@ +{ foo: 'bar' } diff --git a/oryx/jsonnetx/format.go b/oryx/jsonnetx/format.go new file mode 100644 index 00000000000..0bcd084e422 --- /dev/null +++ b/oryx/jsonnetx/format.go @@ -0,0 +1,80 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetx + +import ( + "fmt" + "os" + + "github.com/bmatcuk/doublestar/v2" + "github.com/google/go-jsonnet/formatter" + "github.com/spf13/cobra" + + "github.com/ory/x/cmdx" +) + +// FormatCommand represents the format command +// Deprecated: use NewFormatCommand instead. +var FormatCommand = NewFormatCommand() + +func NewFormatCommand() *cobra.Command { + var verbose, write bool + cmd := &cobra.Command{ + Use: "format path/to/files/*.jsonnet [more/files.jsonnet, [supports/**/{foo,bar}.jsonnet]]", + Long: `Formats JSONNet files using the official JSONNet formatter. + +Use -w or --write to write output back to files instead of stdout. 
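+
+For example, to rewrite every matching file in place (the glob is only a placeholder):
+
+  format 'services/**/*.jsonnet' --write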
+ +` + GlobHelp, + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + for _, pattern := range args { + files, err := doublestar.Glob(pattern) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Glob pattern %q is not valid: %s\n", pattern, err) + return cmdx.FailSilently(cmd) + } + + for _, file := range files { + if fi, err := os.Stat(file); err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Matching file %q could not be opened: %s\n", file, err) + return cmdx.FailSilently(cmd) + } else if fi.IsDir() { + continue + } + + if verbose { + fmt.Printf("Processing file: %s\n", file) + } + + //#nosec G304 -- false positive + content, err := os.ReadFile(file) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Unable to read file %q: %s\n", file, err) + return cmdx.FailSilently(cmd) + } + + output, err := formatter.Format(file, string(content), formatter.DefaultOptions()) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "File %q could not be formatted: %s", file, err) + } + + if write { + err := os.WriteFile(file, []byte(output), 0644) // #nosec + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Unable to write file %q: %s\n", file, err) + return cmdx.FailSilently(cmd) + } + } else { + fmt.Println(output) + } + } + } + return nil + }, + } + cmd.Flags().BoolVarP(&write, "write", "w", false, "Write formatted output back to file.") + cmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "Verbose output.") + return cmd +} diff --git a/oryx/jsonnetx/lint.go b/oryx/jsonnetx/lint.go new file mode 100644 index 00000000000..543aed90a9a --- /dev/null +++ b/oryx/jsonnetx/lint.go @@ -0,0 +1,67 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetx + +import ( + "fmt" + "os" + + "github.com/bmatcuk/doublestar/v2" + "github.com/google/go-jsonnet" + "github.com/google/go-jsonnet/linter" + "github.com/spf13/cobra" + + "github.com/ory/x/cmdx" +) + +// LintCommand represents the lint command +// Deprecated: use NewLintCommand instead. +var LintCommand = NewLintCommand() + +func NewLintCommand() *cobra.Command { + var verbose bool + cmd := &cobra.Command{ + Use: "lint path/to/files/*.jsonnet [more/files.jsonnet, [supports/**/{foo,bar}.jsonnet]]", + Long: `Lints JSONNet files using the official JSONNet linter and exits with a status code of 1 when issues are detected. 
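+
+For example (the glob is only a placeholder):
+
+  lint 'services/**/*.jsonnet'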
+ +` + GlobHelp, + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + for _, pattern := range args { + files, err := doublestar.Glob(pattern) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Glob pattern %q is not valid: %s\n", pattern, err) + return cmdx.FailSilently(cmd) + } + + for _, file := range files { + if fi, err := os.Stat(file); err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Matching file %q could not be opened: %s\n", file, err) + return cmdx.FailSilently(cmd) + } else if fi.IsDir() { + continue + } + + if verbose { + fmt.Printf("Processing file: %s\n", file) + } + + //#nosec G304 -- false positive + content, err := os.ReadFile(file) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Unable to read file %q: %s\n", file, err) + return cmdx.FailSilently(cmd) + } + + if linter.LintSnippet(jsonnet.MakeVM(), cmd.ErrOrStderr(), []linter.Snippet{{FileName: file, Code: string(content)}}) { + return cmdx.FailSilently(cmd) + } + } + } + return nil + }, + } + cmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "Verbose output.") + return cmd +} diff --git a/oryx/jsonnetx/root.go b/oryx/jsonnetx/root.go new file mode 100644 index 00000000000..bd05f17de59 --- /dev/null +++ b/oryx/jsonnetx/root.go @@ -0,0 +1,56 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonnetx + +import ( + "github.com/spf13/cobra" +) + +const GlobHelp = `Glob patterns supports the following special terms in the patterns: + + Special Terms | Meaning + ------------- | ------- + '*' | matches any sequence of non-path-separators + '**' | matches any sequence of characters, including path separators + '?' | matches any single non-path-separator character + '[class]' | matches any single non-path-separator character against a class of characters ([see below](#character-classes)) + '{alt1,...}' | matches a sequence of characters if one of the comma-separated alternatives matches + + Any character with a special meaning can be escaped with a backslash ('\'). + + #### Character Classes + + Character classes support the following: + + Class | Meaning + ---------- | ------- + '[abc]' | matches any single character within the set + '[a-z]' | matches any single character in the range + '[^class]' | matches any single character which does *not* match the class +` + +// RootCommand represents the jsonnet command +// Deprecated: use NewRootCommand instead. +var RootCommand = &cobra.Command{ + Use: "jsonnet", + Short: "Helpers for linting and formatting JSONNet code", +} + +// RegisterCommandRecursive adds all jsonnet helpers to the RootCommand +// Deprecated: use NewRootCommand instead. +func RegisterCommandRecursive(parent *cobra.Command) { + parent.AddCommand(RootCommand) + + RootCommand.AddCommand(FormatCommand) + RootCommand.AddCommand(LintCommand) +} + +func NewRootCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "jsonnet", + Short: "Helpers for linting and formatting JSONNet code", + } + cmd.AddCommand(NewFormatCommand(), NewLintCommand()) + return cmd +} diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=0.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=0.json new file mode 100644 index 00000000000..f9ee1c467ac --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=0.json @@ -0,0 +1,3534 @@ +[ + { + "Title": "Access Rules", + "Description": "Configure access rules. 
All sub-keys support configuration reloading without restarting.", + "Examples": null, + "Name": "access_rules", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Repositories", + "Description": "Locations (list of URLs) where access rules should be fetched from on boot. It is expected that the documents at those locations return a JSON or YAML Array containing ORY Oathkeeper Access Rules:\n\n- If the URL Scheme is `file://`, the access rules (an array of access rules is expected) will be fetched from the local file system.\n- If the URL Scheme is `inline://`, the access rules (an array of access rules is expected) are expected to be a base64 encoded (with padding!) JSON/YAML string (base64_encode(`[{\"id\":\"foo-rule\",\"authenticators\":[....]}]`)).\n- If the URL Scheme is `http://` or `https://`, the access rules (an array of access rules is expected) will be fetched from the provided HTTP(s) location.", + "Examples": [ + "[\"file://path/to/rules.json\",\"inline://W3siaWQiOiJmb28tcnVsZSIsImF1dGhlbnRpY2F0b3JzIjpbXX1d\",\"https://path-to-my-rules/rules.json\"]" + ], + "Name": "access_rules.repositories", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "access_rules.repositories.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Authenticators", + "Description": "For more information on authenticators head over to: https://www.ory.sh/docs/oathkeeper/pipeline/authn", + "Examples": null, + "Name": "authenticators", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Anonymous", + "Description": "The [`anonymous` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#anonymous).", + "Examples": null, + "Name": "authenticators.anonymous", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Anonymous Authenticator Configuration", + "Description": "This section is optional when the authenticator is disabled.", + "Examples": null, + "Name": "authenticators.anonymous.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + 
}, + { + "Title": "Anonymous Subject", + "Description": "Sets the anonymous username.", + "Examples": [ + "guest", + "anon", + "anonymous", + "unknown" + ], + "Name": "authenticators.anonymous.config.subject", + "Default": "anonymous", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.anonymous.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Cookie Session", + "Description": "The [`cookie_session` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#cookie_session).", + "Examples": null, + "Name": "authenticators.cookie_session", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Cookie Session Authenticator Configuration", + "Description": "This section is optional when the authenticator is disabled.", + "Examples": null, + "Name": "authenticators.cookie_session.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Session Check URL", + "Description": "The origin to proxy requests to. If the response is a 200 with body `{ \"subject\": \"...\", \"extra\": {} }`. 
The request will pass the subject through successfully, otherwise it will be marked as unauthorized.\n\n\u003eIf this authenticator is enabled, this value is required.", + "Examples": [ + "https://session-store-host" + ], + "Name": "authenticators.cookie_session.config.check_session_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Only Cookies", + "Description": "A list of possible cookies to look for on incoming requests, and will fallthrough to the next authenticator if none of the passed cookies are set on the request.", + "Examples": null, + "Name": "authenticators.cookie_session.config.only", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.cookie_session.config.only.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.cookie_session.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "JSON Web Token (jwt)", + "Description": "The [`jwt` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#jwt).", + "Examples": null, + "Name": "authenticators.jwt", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "JWT Authenticator Configuration", + "Description": "This section is optional when the authenticator is disabled.", + "Examples": null, + "Name": "authenticators.jwt.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.allowed_algorithms", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.allowed_algorithms.#", + "Default": null, + 
"Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "JSON Web Key URLs", + "Description": "URLs where ORY Oathkeeper can retrieve JSON Web Keys from for validating the JSON Web Token. Usually something like \"https://my-keys.com/.well-known/jwks.json\". The response of that endpoint must return a JSON Web Key Set (JWKS).\n\n\u003eIf this authenticator is enabled, this value is required.", + "Examples": [ + "https://my-website.com/.well-known/jwks.json", + "https://my-other-website.com/.well-known/jwks.json", + "file://path/to/local/jwks.json" + ], + "Name": "authenticators.jwt.config.jwks_urls", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.jwks_urls.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Required Token Scope", + "Description": "An array of OAuth 2.0 scopes that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header did not request that specific scope, the request is denied.", + "Examples": null, + "Name": "authenticators.jwt.config.required_scope", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.required_scope.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Scope Strategy", + "Description": "Sets the strategy validation algorithm.", + "Examples": null, + "Name": "authenticators.jwt.config.scope_strategy", + "Default": "none", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "hierarchic", + "exact", + "wildcard", + "none" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Intended Audience", + "Description": "An array of audiences that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header is not intended for any of the requested audiences, the request is denied.", + "Examples": null, + "Name": "authenticators.jwt.config.target_audience", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + 
"Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.target_audience.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.token_from", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Header", + "Description": "The header (case insensitive) that must contain a token for request authentication. It can't be set along with query_parameter.", + "Examples": null, + "Name": "authenticators.jwt.config.token_from.header", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Query Parameter", + "Description": "The query parameter (case sensitive) that must contain a token for request authentication. It can't be set along with header.", + "Examples": null, + "Name": "authenticators.jwt.config.token_from.query_parameter", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.trusted_issuers", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.jwt.config.trusted_issuers.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.jwt.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "No Operation (noop)", + "Description": "The [`noop` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#noop).", + "Examples": null, + "Name": 
"authenticators.noop", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.noop.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Client Credentials", + "Description": "The [`oauth2_client_credentials` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#oauth2_client_credentials).", + "Examples": null, + "Name": "authenticators.oauth2_client_credentials", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Client Credentials Authenticator Configuration", + "Description": "This section is optional when the authenticator is disabled.", + "Examples": null, + "Name": "authenticators.oauth2_client_credentials.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Request Permissions (Token Scope)", + "Description": "Scopes is an array of OAuth 2.0 scopes that are required when accessing an endpoint protected by this rule.\n If the token used in the Authorization header did not request that specific scope, the request is denied.", + "Examples": null, + "Name": "authenticators.oauth2_client_credentials.config.required_scope", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.oauth2_client_credentials.config.required_scope.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "The OAuth 2.0 Token Endpoint that will be used to validate the client credentials.\n\n\u003eIf this authenticator is enabled, this value is required.", + "Examples": [ + "https://my-website.com/oauth2/token" + ], + "Name": "authenticators.oauth2_client_credentials.config.token_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + 
"CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.oauth2_client_credentials.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Token Introspection", + "Description": "The [`oauth2_introspection` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#oauth2_introspection).", + "Examples": null, + "Name": "authenticators.oauth2_introspection", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Introspection Authenticator Configuration", + "Description": "This section is optional when the authenticator is disabled.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Introspection URL", + "Description": "The OAuth 2.0 Token Introspection endpoint URL.\n\n\u003eIf this authenticator is enabled, this value is required.", + "Examples": [ + "https://my-website.com/oauth2/introspection" + ], + "Name": "authenticators.oauth2_introspection.config.introspection_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Pre-Authorization", + "Description": "Enable pre-authorization in cases where the OAuth 2.0 Token Introspection endpoint is protected by OAuth 2.0 Bearer Tokens that can be retrieved using the OAuth 2.0 Client Credentials grant.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.pre_authorization", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Client ID", + "Description": "The OAuth 2.0 Client ID to be used for the OAuth 2.0 Client Credentials Grant.\n\n\u003eIf pre-authorization is enabled, this value is required.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.pre_authorization.client_id", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Client Secret", + "Description": "The OAuth 2.0 Client Secret to be used for the OAuth 2.0 Client Credentials 
Grant.\n\n\u003eIf pre-authorization is enabled, this value is required.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.pre_authorization.client_secret", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.pre_authorization.enabled", + "Default": null, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": [ + true + ], + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Scope", + "Description": "The OAuth 2.0 Scope to be requested during the OAuth 2.0 Client Credentials Grant.", + "Examples": [ + [ + "[\"foo\", \"bar\"]" + ] + ], + "Name": "authenticators.oauth2_introspection.config.pre_authorization.scope", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.pre_authorization.scope.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "OAuth 2.0 Token URL", + "Description": "The OAuth 2.0 Token Endpoint where the OAuth 2.0 Client Credentials Grant will be performed.\n\n\u003eIf pre-authorization is enabled, this value is required.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.pre_authorization.token_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Required Scope", + "Description": "An array of OAuth 2.0 scopes that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header did not request that specific scope, the request is denied.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.required_scope", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.required_scope.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + 
"MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Scope Strategy", + "Description": "Sets the strategy validation algorithm.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.scope_strategy", + "Default": "none", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "hierarchic", + "exact", + "wildcard", + "none" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Target Audience", + "Description": "An array of audiences that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header is not intended for any of the requested audiences, the request is denied.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.target_audience", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.target_audience.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Token From", + "Description": "The location of the token.\n If not configured, the token will be received from a default location - 'Authorization' header.\n One and only one location (header or query) must be specified.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.token_from", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Header", + "Description": "The header (case insensitive) that must contain a token for request authentication.\n It can't be set along with query_parameter.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.token_from.header", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Query Parameter", + "Description": "The query parameter (case sensitive) that must contain a token for request authentication.\n It can't be set along with header.", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.token_from.query_parameter", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Trusted Issuers", + "Description": "The token must have been issued by one of the issuers listed in this array.", + 
"Examples": null, + "Name": "authenticators.oauth2_introspection.config.trusted_issuers", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authenticators.oauth2_introspection.config.trusted_issuers.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.oauth2_introspection.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Unauthorized", + "Description": "The [`unauthorized` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#unauthorized).", + "Examples": null, + "Name": "authenticators.unauthorized", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authenticators.unauthorized.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Authorizers", + "Description": "For more information on authorizers head over to: https://www.ory.sh/docs/oathkeeper/pipeline/authz", + "Examples": null, + "Name": "authorizers", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allow", + "Description": "The [`allow` authorizer](https://www.ory.sh/docs/oathkeeper/pipeline/authz#allow).", + "Examples": null, + "Name": "authorizers.allow", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authorizers.allow.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + 
"Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Deny", + "Description": "The [`deny` authorizer](https://www.ory.sh/docs/oathkeeper/pipeline/authz#allow).", + "Examples": null, + "Name": "authorizers.deny", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authorizers.deny.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "ORY Keto Access Control Policies Engine", + "Description": "The [`keto_engine_acp_ory` authorizer](https://www.ory.sh/docs/oathkeeper/pipeline/authz#keto_engine_acp_ory).", + "Examples": null, + "Name": "authorizers.keto_engine_acp_ory", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "ORY Keto Access Control Policy Authorizer Configuration", + "Description": "This section is optional when the authorizer is disabled.", + "Examples": null, + "Name": "authorizers.keto_engine_acp_ory.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Base URL", + "Description": "The base URL of ORY Keto.\n\n\u003eIf this authorizer is enabled, this value is required.", + "Examples": [ + "http://my-keto/" + ], + "Name": "authorizers.keto_engine_acp_ory.config.base_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authorizers.keto_engine_acp_ory.config.flavor", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authorizers.keto_engine_acp_ory.config.required_action", + "Default": "unset", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authorizers.keto_engine_acp_ory.config.required_resource", + "Default": 
"unset", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "authorizers.keto_engine_acp_ory.config.subject", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "authorizers.keto_engine_acp_ory.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Log", + "Description": "Configure logging using the following options. Logging will always be sent to stdout and stderr.", + "Examples": null, + "Name": "log", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Format", + "Description": "The log format can either be text or JSON.", + "Examples": null, + "Name": "log.format", + "Default": "text", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "text", + "json" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Level", + "Description": "Debug enables stack traces on errors. 
Can also be set using environment variable LOG_LEVEL.", + "Examples": null, + "Name": "log.level", + "Default": "info", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "panic", + "fatal", + "error", + "warn", + "info", + "debug" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Mutators", + "Description": "For more information on mutators head over to: https://www.ory.sh/docs/oathkeeper/pipeline/mutator", + "Examples": null, + "Name": "mutators", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Cookie", + "Description": "The [`cookie` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#cookie).", + "Examples": null, + "Name": "mutators.cookie", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Cookie Mutator Configuration", + "Description": "This section is optional when the mutator is disabled.", + "Examples": null, + "Name": "mutators.cookie.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.cookie.config.cookies", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "mutators.cookie.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Header", + "Description": "The [`header` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#header).", + "Examples": null, + "Name": "mutators.header", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Header Mutator Configuration", + "Description": "This section is optional when the mutator is disabled.", + "Examples": null, + "Name": "mutators.header.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + 
"Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.header.config.headers", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "mutators.header.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Hydrator", + "Description": "The [`hydrator` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#hydrator).", + "Examples": null, + "Name": "mutators.hydrator", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Hydrator Mutator Configuration", + "Description": "This section is optional when the mutator is disabled.", + "Examples": null, + "Name": "mutators.hydrator.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.auth", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.auth.basic", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.auth.basic.password", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + 
"Examples": null, + "Name": "mutators.hydrator.config.api.auth.basic.username", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.retry", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.retry.delay_in_milliseconds", + "Default": 3, + "Type": 0, + "TypeHint": 3, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": "0", + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.retry.number_of_retries", + "Default": 100, + "Type": 0, + "TypeHint": 2, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": "0", + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.hydrator.config.api.url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "mutators.hydrator.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "ID Token (JSON Web Token)", + "Description": "The [`id_token` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#id_token).", + "Examples": null, + "Name": "mutators.id_token", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "ID Token Mutator Configuration", + "Description": "This section is optional when the mutator is disabled.", + "Examples": null, + "Name": "mutators.id_token.config", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "mutators.id_token.config.claims", + "Default": null, + "Type": 
"", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Issuer URL", + "Description": "Sets the \"iss\" value of the ID Token.\n\n\u003eIf this mutator is enabled, this value is required.", + "Examples": null, + "Name": "mutators.id_token.config.issuer_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "JSON Web Key URL", + "Description": "Sets the URL where keys should be fetched from. Supports remote locations (http, https) as well as local filesystem paths.\n\n\u003eIf this mutator is enabled, this value is required.", + "Examples": [ + "https://fetch-keys/from/this/location.json", + "file:///from/this/absolute/location.json", + "file://../from/this/relative/location.json" + ], + "Name": "mutators.id_token.config.jwks_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Expire After", + "Description": "Sets the time-to-live of the JSON Web Token.", + "Examples": [ + "1h", + "1m", + "30s" + ], + "Name": "mutators.id_token.config.ttl", + "Default": "1m", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "mutators.id_token.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "No Operation (noop)", + "Description": "The [`noop` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#noop).", + "Examples": null, + "Name": "mutators.noop", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enabled", + "Description": "En-/disables this component.", + "Examples": [ + true + ], + "Name": "mutators.noop.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Profiling", + "Description": "Enables CPU or memory profiling if set. 
For more details on profiling Go programs read [Profiling Go Programs](https://blog.golang.org/profiling-go-programs).", + "Examples": null, + "Name": "profiling", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "cpu", + "mem" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP(s)", + "Description": "", + "Examples": null, + "Name": "serve", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP REST API", + "Description": "", + "Examples": null, + "Name": "serve.api", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Cross Origin Resource Sharing (CORS)", + "Description": "Configure [Cross Origin Resource Sharing (CORS)](http://www.w3.org/TR/cors/) using the following options.", + "Examples": null, + "Name": "serve.api.cors", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allow HTTP Credentials", + "Description": "Indicates whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "Examples": null, + "Name": "serve.api.cors.allow_credentials", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed Request HTTP Headers", + "Description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "Examples": null, + "Name": "serve.api.cors.allowed_headers", + "Default": [ + "Authorization", + "Content-Type" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.api.cors.allowed_headers.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed HTTP Methods", + "Description": "A list of methods the client is allowed to use with cross-domain requests.", + "Examples": null, + "Name": "serve.api.cors.allowed_methods", + "Default": [ + "GET", + "POST", + "PUT", + "PATCH", + "DELETE" + ], + "Type": [], + 
"TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.api.cors.allowed_methods.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "GET", + "HEAD", + "POST", + "PUT", + "DELETE", + "CONNECT", + "TRACE", + "PATCH" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed Origins", + "Description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). Usage of wildcards implies a small performance penality. Only one wildcard can be used per origin.", + "Examples": [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ], + "Name": "serve.api.cors.allowed_origins", + "Default": [ + "*" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.api.cors.allowed_origins.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enable Debugging", + "Description": "Set to true to debug server side CORS issues.", + "Examples": null, + "Name": "serve.api.cors.debug", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enable CORS", + "Description": "If set to true, CORS will be enabled and preflight-requests (OPTION) will be answered.", + "Examples": null, + "Name": "serve.api.cors.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed Response HTTP Headers", + "Description": "Indicates which headers are safe to expose to the API of a CORS API specification", + "Examples": null, + "Name": "serve.api.cors.exposed_headers", + "Default": [ + "Content-Type" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.api.cors.exposed_headers.#", + 
"Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Maximum Age", + "Description": "Indicates how long (in seconds) the results of a preflight request can be cached. The default is 0 which stands for no max age.", + "Examples": null, + "Name": "serve.api.cors.max_age", + "Default": 0, + "Type": 0, + "TypeHint": 2, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Host", + "Description": "The network interface to listen on.", + "Examples": [ + "localhost", + "127.0.0.1" + ], + "Name": "serve.api.host", + "Default": "", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Port", + "Description": "The port to listen on.", + "Examples": null, + "Name": "serve.api.port", + "Default": 4456, + "Type": 0, + "TypeHint": 2, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTPS", + "Description": "Configure HTTP over TLS (HTTPS). All options can also be set using environment variables by replacing dots (`.`) with underscores (`_`) and uppercasing the key. For example, `some.prefix.tls.key.path` becomes `export SOME_PREFIX_TLS_KEY_PATH`. If all keys are left undefined, TLS will be disabled.", + "Examples": null, + "Name": "serve.api.tls", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.api.tls.cert", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Base64 Encoded Inline", + "Description": "The base64 string of the PEM-encoded file content. Can be generated using for example `base64 -i path/to/file.pem`.", + "Examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." 
+ ], + "Name": "serve.api.tls.cert.base64", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Path to PEM-encoded Fle", + "Description": "", + "Examples": [ + "path/to/file.pem" + ], + "Name": "serve.api.tls.cert.path", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.api.tls.key", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Base64 Encoded Inline", + "Description": "The base64 string of the PEM-encoded file content. Can be generated using for example `base64 -i path/to/file.pem`.", + "Examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." + ], + "Name": "serve.api.tls.key.base64", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Path to PEM-encoded Fle", + "Description": "", + "Examples": [ + "path/to/file.pem" + ], + "Name": "serve.api.tls.key.path", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Reverse Proxy", + "Description": "", + "Examples": null, + "Name": "serve.proxy", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Cross Origin Resource Sharing (CORS)", + "Description": "Configure [Cross Origin Resource Sharing (CORS)](http://www.w3.org/TR/cors/) using the following options.", + "Examples": null, + "Name": "serve.proxy.cors", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allow HTTP Credentials", + "Description": "Indicates whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", + "Examples": null, + "Name": "serve.proxy.cors.allow_credentials", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + 
"MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed Request HTTP Headers", + "Description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "Examples": null, + "Name": "serve.proxy.cors.allowed_headers", + "Default": [ + "Authorization", + "Content-Type" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.proxy.cors.allowed_headers.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed HTTP Methods", + "Description": "A list of methods the client is allowed to use with cross-domain requests.", + "Examples": null, + "Name": "serve.proxy.cors.allowed_methods", + "Default": [ + "GET", + "POST", + "PUT", + "PATCH", + "DELETE" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.proxy.cors.allowed_methods.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "GET", + "HEAD", + "POST", + "PUT", + "DELETE", + "CONNECT", + "TRACE", + "PATCH" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed Origins", + "Description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). Usage of wildcards implies a small performance penality. 
Only one wildcard can be used per origin.", + "Examples": [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ], + "Name": "serve.proxy.cors.allowed_origins", + "Default": [ + "*" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.proxy.cors.allowed_origins.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enable Debugging", + "Description": "Set to true to debug server side CORS issues.", + "Examples": null, + "Name": "serve.proxy.cors.debug", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Enable CORS", + "Description": "If set to true, CORS will be enabled and preflight-requests (OPTION) will be answered.", + "Examples": null, + "Name": "serve.proxy.cors.enabled", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Allowed Response HTTP Headers", + "Description": "Indicates which headers are safe to expose to the API of a CORS API specification", + "Examples": null, + "Name": "serve.proxy.cors.exposed_headers", + "Default": [ + "Content-Type" + ], + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.proxy.cors.exposed_headers.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Maximum Age", + "Description": "Indicates how long (in seconds) the results of a preflight request can be cached. The default is 0 which stands for no max age.", + "Examples": null, + "Name": "serve.proxy.cors.max_age", + "Default": 0, + "Type": 0, + "TypeHint": 2, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Host", + "Description": "The network interface to listen on. 
Leave empty to listen on all interfaces.", + "Examples": [ + "localhost", + "127.0.0.1" + ], + "Name": "serve.proxy.host", + "Default": "", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Port", + "Description": "The port to listen on.", + "Examples": null, + "Name": "serve.proxy.port", + "Default": 4455, + "Type": 0, + "TypeHint": 2, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Timeouts", + "Description": "Control the reverse proxy's HTTP timeouts.", + "Examples": null, + "Name": "serve.proxy.timeout", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Idle Timeout", + "Description": " The maximum amount of time to wait for any action of a request session, reading data or writing the response.", + "Examples": [ + "5s", + "5m", + "5h" + ], + "Name": "serve.proxy.timeout.idle", + "Default": "120s", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Read Timeout", + "Description": "The maximum duration for reading the entire request, including the body.", + "Examples": [ + "5s", + "5m", + "5h" + ], + "Name": "serve.proxy.timeout.read", + "Default": "5s", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTP Write Timeout", + "Description": "The maximum duration before timing out writes of the response. Increase this parameter to prevent unexpected closing a client connection if an upstream request is responding slowly.", + "Examples": [ + "5s", + "5m", + "5h" + ], + "Name": "serve.proxy.timeout.write", + "Default": "120s", + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "HTTPS", + "Description": "Configure HTTP over TLS (HTTPS). All options can also be set using environment variables by replacing dots (`.`) with underscores (`_`) and uppercasing the key. For example, `some.prefix.tls.key.path` becomes `export SOME_PREFIX_TLS_KEY_PATH`. 
If all keys are left undefined, TLS will be disabled.", + "Examples": null, + "Name": "serve.proxy.tls", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.proxy.tls.cert", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Base64 Encoded Inline", + "Description": "The base64 string of the PEM-encoded file content. Can be generated using for example `base64 -i path/to/file.pem`.", + "Examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." + ], + "Name": "serve.proxy.tls.cert.base64", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Path to PEM-encoded Fle", + "Description": "", + "Examples": [ + "path/to/file.pem" + ], + "Name": "serve.proxy.tls.cert.path", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "serve.proxy.tls.key", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Base64 Encoded Inline", + "Description": "The base64 string of the PEM-encoded file content. Can be generated using for example `base64 -i path/to/file.pem`.", + "Examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." 
+ ], + "Name": "serve.proxy.tls.key.base64", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Path to PEM-encoded Fle", + "Description": "", + "Examples": [ + "path/to/file.pem" + ], + "Name": "serve.proxy.tls.key.path", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=1.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=1.json new file mode 100644 index 00000000000..f1b57215b74 --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=1.json @@ -0,0 +1,65 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers", + "Default": null, + "Type": [], + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#.id", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=2.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=2.json new file mode 100644 index 00000000000..cf25af42ec3 --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=2.json @@ -0,0 +1,23 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "dsn", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=3.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=3.json new file mode 100644 index 00000000000..88d75673ba9 --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=3.json @@ -0,0 +1,305 @@ +[ + { + "Title": "OpenID Connect and OAuth2 Providers", + "Description": "A list and configuration of OAuth2 and OpenID Connect providers ORY Kratos should integrate with.", + "Examples": null, + "Name": "providers", + "Default": null, + "Type": [], + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + 
"MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": [ + "https://accounts.google.com/o/oauth2/v2/auth" + ], + "Name": "providers.#.auth_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#.client_id", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#.client_secret", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": [ + "google" + ], + "Name": "providers.#.id", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": [ + "https://accounts.google.com" + ], + "Name": "providers.#.issuer_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Jsonnet Mapper URL", + "Description": "The URL where the jsonnet source is located for mapping the provider's data to ORY Kratos data.", + "Examples": [ + "file://path/to/oidc.jsonnet", + "https://foo.bar.com/path/to/oidc.jsonnet", + "base64://bG9jYWwgc3ViamVjdCA9I..." 
+ ], + "Name": "providers.#.mapper_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Provider", + "Description": "Can be one of github, gitlab, generic, google, microsoft, discord.", + "Examples": [ + "google" + ], + "Name": "providers.#.provider", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": [ + "github", + "gitlab", + "generic", + "google", + "microsoft", + "discord" + ], + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#.scope", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": [ + "offline_access", + "profile" + ], + "Name": "providers.#.scope.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "Azure AD Tenant", + "Description": "The Azure AD Tenant to use for authentication.", + "Examples": [ + "common", + "organizations", + "consumers", + "8eaef023-2b34-4da1-9baa-8bc8c9d6a490", + "contoso.onmicrosoft.com" + ], + "Name": "providers.#.tenant", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": [ + "https://www.googleapis.com/oauth2/v4/token" + ], + "Name": "providers.#.token_url", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "uri", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=4.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=4.json new file mode 100644 index 00000000000..6513d10c84a --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=4.json @@ -0,0 +1,86 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar", + "Default": "asdf", + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": true, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "foo", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": 
false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "list", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "list.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=5.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=5.json new file mode 100644 index 00000000000..6513d10c84a --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=5.json @@ -0,0 +1,86 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar", + "Default": "asdf", + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": true, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "foo", + "Default": false, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "list", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "list.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=6.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=6.json new file mode 100644 index 00000000000..52c81f205a9 --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=6.json @@ -0,0 +1,46 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": { + "foobar": "bar" + } + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "foo", + "Default": null, + "Type": false, + "TypeHint": 4, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + 
"MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=7.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=7.json new file mode 100644 index 00000000000..7deafa75002 --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=7.json @@ -0,0 +1,44 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=8.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=8.json new file mode 100644 index 00000000000..4dbf7fffebb --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=8.json @@ -0,0 +1,65 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "baz", + "Default": null, + "Type": [], + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "baz.#", + "Default": null, + "Type": [], + "TypeHint": 8, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "baz.#.#", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPaths-case=9.json b/oryx/jsonschemax/.snapshots/TestListPaths-case=9.json new file mode 100644 index 00000000000..78536e106a2 --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPaths-case=9.json @@ -0,0 +1,65 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "baz", + "Default": null, + "Type": [], + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "baz.#", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": 
null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "baz.#.foo", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/.snapshots/TestListPathsWithRecursion-case=0.json b/oryx/jsonschemax/.snapshots/TestListPathsWithRecursion-case=0.json new file mode 100644 index 00000000000..3e460cbec3f --- /dev/null +++ b/oryx/jsonschemax/.snapshots/TestListPathsWithRecursion-case=0.json @@ -0,0 +1,233 @@ +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar.foo", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar.foo.bar", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar.foo.bar.foo", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar.foo.bar.foos", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": 10, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar.foo.bars", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "email", + "Pattern": ".*", + 
"Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bar.foos", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": 10, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foo.bars", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "email", + "Pattern": ".*", + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": true, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "bar.foos", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": 1, + "MaxLength": 10, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] diff --git a/oryx/jsonschemax/README.md b/oryx/jsonschemax/README.md new file mode 100644 index 00000000000..5306c69451c --- /dev/null +++ b/oryx/jsonschemax/README.md @@ -0,0 +1,120 @@ +# JSON Schema Helpers + +This package contains utilities for working with JSON Schemas. + +## Listing all Possible JSON Schema Paths + +Using `jsonschemax.ListPaths()` you can get a list of all possible JSON paths in +a JSON Schema. + +```go +package main + +import ( + "bytes" + "fmt" + "github.com/ory/jsonschema/v3" + "github.com/ory/x/jsonschemax" +) + +var schema = "..." + +func main() { + c := jsonschema.NewCompiler() + _ = c.AddResource("test.json", bytes.NewBufferString(schema)) + paths, _ := jsonschemax.ListPaths("test.json", c) + fmt.Printf("%+v", paths) +} +``` + +All keys are delimited using `.`. Please note that arrays are denoted with `#` +when `ListPathsWithArraysIncluded` is used. 
For example, the JSON Schema + +```json +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "providers": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + } + } + } + } + } +} +``` + +Results in paths: + +```json +[ + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers", + "Default": null, + "Type": [], + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#", + "Default": null, + "Type": {}, + "TypeHint": 5, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + }, + { + "Title": "", + "Description": "", + "Examples": null, + "Name": "providers.#.id", + "Default": null, + "Type": "", + "TypeHint": 1, + "Format": "", + "Pattern": null, + "Enum": null, + "Constant": null, + "ReadOnly": false, + "MinLength": -1, + "MaxLength": -1, + "Required": false, + "Minimum": null, + "Maximum": null, + "MultipleOf": null, + "CustomProperties": null + } +] +``` diff --git a/oryx/jsonschemax/error.go b/oryx/jsonschemax/error.go new file mode 100644 index 00000000000..f8b04828781 --- /dev/null +++ b/oryx/jsonschemax/error.go @@ -0,0 +1,40 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonschemax + +import ( + "github.com/ory/jsonschema/v3" +) + +// ErrorType is the schema error type. +type ErrorType int + +const ( + // ErrorTypeMissing represents a validation that failed because a value is missing. + ErrorTypeMissing ErrorType = iota + 1 +) + +// Error represents a schema error. +type Error struct { + // Type is the error type. + Type ErrorType + + // DocumentPointer is the JSON Pointer in the document. + DocumentPointer string + + // SchemaPointer is the JSON Pointer in the schema. + SchemaPointer string + + // DocumentFieldName is a pointer to the document in dot-notation: fo.bar.baz + DocumentFieldName string +} + +// NewFromSanthoshError converts github.com/santhosh-tekuri/jsonschema.ValidationError to Error. 
+func NewFromSanthoshError(validationError jsonschema.ValidationError) *Error { + return &Error{ + // DocumentPointer: JSONPointerToDotNotation(validationError.InstancePtr), + // SchemaPointer: JSONPointerToDotNotation(validationError.SchemaPtr), + // DocumentFieldName: JSONPointerToDotNotation(validationError.InstancePtr), + } +} diff --git a/oryx/jsonschemax/keys.go b/oryx/jsonschemax/keys.go new file mode 100644 index 00000000000..ab9638c1396 --- /dev/null +++ b/oryx/jsonschemax/keys.go @@ -0,0 +1,447 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonschemax + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "math/big" + "regexp" + "slices" + "sort" + "strings" + + "github.com/pkg/errors" + + "github.com/ory/jsonschema/v3" +) + +type ( + byName []Path + PathEnhancer interface { + EnhancePath(Path) map[string]interface{} + } + TypeHint int +) + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name < s[j].Name } + +const ( + String TypeHint = iota + 1 + Float + Int + Bool + JSON + Nil + + BoolSlice + StringSlice + IntSlice + FloatSlice +) + +// Path represents a JSON Schema Path. +type Path struct { + // Title of the path. + Title string + + // Description of the path. + Description string + + // Examples of the path. + Examples []interface{} + + // Name is the JSON path name. + Name string + + // Default is the default value of that path. + Default interface{} + + // Type is a prototype (e.g. float64(0)) of the path type. + Type interface{} + + TypeHint + + // Format is the format of the path if defined + Format string + + // Pattern is the pattern of the path if defined + Pattern *regexp.Regexp + + // Enum are the allowed enum values + Enum []interface{} + + // first element in slice is constant value. note: slice is used to capture nil constant. + Constant []interface{} + + // ReadOnly is whether the value is readonly + ReadOnly bool + + // -1 if not specified + MinLength int + MaxLength int + + // Required if set indicates this field is required. + Required bool + + Minimum *big.Float + Maximum *big.Float + + MultipleOf *big.Float + + CustomProperties map[string]interface{} +} + +// ListPathsBytes works like ListPathsWithRecursion but prepares the JSON Schema itself. +func ListPathsBytes(ctx context.Context, raw json.RawMessage, maxRecursion int16) ([]Path, error) { + compiler := jsonschema.NewCompiler() + compiler.ExtractAnnotations = true + id := fmt.Sprintf("%x.json", sha256.Sum256(raw)) + if err := compiler.AddResource(id, bytes.NewReader(raw)); err != nil { + return nil, err + } + compiler.ExtractAnnotations = true + return runPathsFromCompiler(ctx, id, compiler, maxRecursion, false) +} + +// ListPathsWithRecursion will follow circular references until maxRecursion is reached, without +// returning an error. +func ListPathsWithRecursion(ctx context.Context, ref string, compiler *jsonschema.Compiler, maxRecursion uint8) ([]Path, error) { + return runPathsFromCompiler(ctx, ref, compiler, int16(maxRecursion), false) +} + +// ListPaths lists all paths of a JSON Schema. Will return an error +// if circular references are found. +func ListPaths(ctx context.Context, ref string, compiler *jsonschema.Compiler) ([]Path, error) { + return runPathsFromCompiler(ctx, ref, compiler, -1, false) +} + +// ListPathsWithArraysIncluded lists all paths of a JSON Schema. 
Will return an error +// if circular references are found. +// Includes arrays with `#`. +func ListPathsWithArraysIncluded(ctx context.Context, ref string, compiler *jsonschema.Compiler) ([]Path, error) { + return runPathsFromCompiler(ctx, ref, compiler, -1, true) +} + +// ListPathsWithInitializedSchema loads the paths from the schema without compiling it. +// +// You MUST ensure that the compiler was using `ExtractAnnotations = true`. +func ListPathsWithInitializedSchema(schema *jsonschema.Schema) ([]Path, error) { + return runPaths(schema, -1, false) +} + +// ListPathsWithInitializedSchemaAndArraysIncluded loads the paths from the schema without compiling it. +// +// You MUST ensure that the compiler was using `ExtractAnnotations = true`. +// Includes arrays with `#`. +func ListPathsWithInitializedSchemaAndArraysIncluded(schema *jsonschema.Schema) ([]Path, error) { + return runPaths(schema, -1, true) +} + +func runPathsFromCompiler(ctx context.Context, ref string, compiler *jsonschema.Compiler, maxRecursion int16, includeArrays bool) ([]Path, error) { + if compiler == nil { + compiler = jsonschema.NewCompiler() + } + + compiler.ExtractAnnotations = true + + schema, err := compiler.Compile(ctx, ref) + if err != nil { + return nil, errors.WithStack(err) + } + + return runPaths(schema, maxRecursion, includeArrays) +} + +func runPaths(schema *jsonschema.Schema, maxRecursion int16, includeArrays bool) ([]Path, error) { + pointers := map[string]bool{} + paths, err := listPaths(schema, nil, nil, pointers, 0, maxRecursion, includeArrays) + if err != nil { + return nil, errors.WithStack(err) + } + + sort.Stable(paths) + return makeUnique(paths) +} + +func makeUnique(in byName) (byName, error) { + cache := make(map[string]Path) + for _, p := range in { + vc, ok := cache[p.Name] + if !ok { + cache[p.Name] = p + continue + } + + if fmt.Sprintf("%T", p.Type) != fmt.Sprintf("%T", p.Type) { + return nil, errors.Errorf("multiple types %+v are not supported for path: %s", []interface{}{p.Type, vc.Type}, p.Name) + } + + if vc.Default == nil { + cache[p.Name] = p + } + } + + k := 0 + out := make([]Path, len(cache)) + for _, v := range cache { + out[k] = v + k++ + } + + paths := byName(out) + sort.Sort(paths) + return paths, nil +} + +func appendPointer(in map[string]bool, pointer *jsonschema.Schema) map[string]bool { + out := make(map[string]bool) + for k, v := range in { + out[k] = v + } + out[fmt.Sprintf("%p", pointer)] = true + return out +} + +func listPaths(schema *jsonschema.Schema, parent *jsonschema.Schema, parents []string, pointers map[string]bool, currentRecursion int16, maxRecursion int16, includeArrays bool) (byName, error) { + var pathType interface{} + var pathTypeHint TypeHint + var paths []Path + _, isCircular := pointers[fmt.Sprintf("%p", schema)] + + if len(schema.Constant) > 0 { + switch schema.Constant[0].(type) { + case float64, json.Number: + pathType = float64(0) + pathTypeHint = Float + case int8, int16, int, int64: + pathType = int64(0) + pathTypeHint = Int + case string: + pathType = "" + pathTypeHint = String + case bool: + pathType = false + pathTypeHint = Bool + default: + pathType = schema.Constant[0] + pathTypeHint = JSON + } + } else if len(schema.Types) == 1 { + switch schema.Types[0] { + case "null": + pathType = nil + pathTypeHint = Nil + case "boolean": + pathType = false + pathTypeHint = Bool + case "number": + pathType = float64(0) + pathTypeHint = Float + case "integer": + pathType = float64(0) + pathTypeHint = Int + case "string": + pathType = "" + pathTypeHint = 
String + case "array": + pathType = []interface{}{} + if schema.Items != nil { + var itemSchemas []*jsonschema.Schema + switch t := schema.Items.(type) { + case []*jsonschema.Schema: + itemSchemas = t + case *jsonschema.Schema: + itemSchemas = []*jsonschema.Schema{t} + } + var types []string + for _, is := range itemSchemas { + types = append(types, is.Types...) + if is.Ref != nil { + types = append(types, is.Ref.Types...) + } + } + types = slices.Compact(types) + if len(types) == 1 { + switch types[0] { + case "boolean": + pathType = []bool{} + pathTypeHint = BoolSlice + case "number": + pathType = []float64{} + pathTypeHint = FloatSlice + case "integer": + pathType = []float64{} + pathTypeHint = IntSlice + case "string": + pathType = []string{} + pathTypeHint = StringSlice + default: + pathType = []interface{}{} + pathTypeHint = JSON + } + } + } + case "object": + pathType = map[string]interface{}{} + pathTypeHint = JSON + } + } else if len(schema.Types) > 2 { + pathType = nil + pathTypeHint = JSON + } + + var def interface{} = schema.Default + if v, ok := def.(json.Number); ok { + def, _ = v.Float64() + } + + if (pathType != nil || schema.Default != nil) && len(parents) > 0 { + name := parents[len(parents)-1] + var required bool + if parent != nil { + for _, r := range parent.Required { + if r == name { + required = true + break + } + } + } + + path := Path{ + Name: strings.Join(parents, "."), + Default: def, + Type: pathType, + TypeHint: pathTypeHint, + Format: schema.Format, + Pattern: schema.Pattern, + Enum: schema.Enum, + Constant: schema.Constant, + MinLength: schema.MinLength, + MaxLength: schema.MaxLength, + Minimum: schema.Minimum, + Maximum: schema.Maximum, + MultipleOf: schema.MultipleOf, + ReadOnly: schema.ReadOnly, + Title: schema.Title, + Description: schema.Description, + Examples: schema.Examples, + Required: required, + } + + for _, e := range schema.Extensions { + if enhancer, ok := e.(PathEnhancer); ok { + path.CustomProperties = enhancer.EnhancePath(path) + } + } + paths = append(paths, path) + } + + if isCircular { + if maxRecursion == -1 { + return nil, errors.Errorf("detected circular dependency in schema path: %s", strings.Join(parents, ".")) + } else if currentRecursion > maxRecursion { + return paths, nil + } + currentRecursion++ + } + + if schema.Ref != nil { + path, err := listPaths(schema.Ref, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + if schema.Not != nil { + path, err := listPaths(schema.Not, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + if schema.If != nil { + path, err := listPaths(schema.If, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + if schema.Then != nil { + path, err := listPaths(schema.Then, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + if schema.Else != nil { + path, err := listPaths(schema.Else, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) 
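+		// Conditional subschemas (not/if/then/else) are flattened into the same
+		// path list; the composition keywords (allOf/anyOf/oneOf), object
+		// properties, and array items below are traversed the same way.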
+ } + + for _, sub := range schema.AllOf { + path, err := listPaths(sub, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + for _, sub := range schema.AnyOf { + path, err := listPaths(sub, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + for _, sub := range schema.OneOf { + path, err := listPaths(sub, schema, parents, appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + for name, sub := range schema.Properties { + path, err := listPaths(sub, schema, append(parents, name), appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + + if schema.Items != nil && includeArrays { + switch t := schema.Items.(type) { + case []*jsonschema.Schema: + for _, sub := range t { + path, err := listPaths(sub, schema, append(parents, "#"), appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + case *jsonschema.Schema: + path, err := listPaths(t, schema, append(parents, "#"), appendPointer(pointers, schema), currentRecursion, maxRecursion, includeArrays) + if err != nil { + return nil, err + } + paths = append(paths, path...) + } + } + + return paths, nil +} diff --git a/oryx/jsonschemax/pointer.go b/oryx/jsonschemax/pointer.go new file mode 100644 index 00000000000..f0c279fddc5 --- /dev/null +++ b/oryx/jsonschemax/pointer.go @@ -0,0 +1,31 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonschemax + +import ( + "net/url" + "strings" + + "github.com/pkg/errors" +) + +// JSONPointerToDotNotation converts JSON Pointer "#/foo/bar" to dot-notation "foo.bar". 
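+// Escaped sequences "~1" and "~0" are decoded to "/" and "~", and literal dots in a
+// segment are escaped as "\." so they survive the dot-delimited join. Pointers that do
+// not start with "#/" are returned unchanged together with an error.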
+func JSONPointerToDotNotation(pointer string) (string, error) { + if !strings.HasPrefix(pointer, "#/") { + return pointer, errors.Errorf("remote JSON pointers are not supported: %s", pointer) + } + + var path []string + for _, item := range strings.Split(strings.TrimPrefix(pointer, "#/"), "/") { + item = strings.Replace(item, "~1", "/", -1) + item = strings.Replace(item, "~0", "~", -1) + item, err := url.PathUnescape(item) + if err != nil { + return "", err + } + path = append(path, strings.ReplaceAll(item, ".", "\\.")) + } + + return strings.Join(path, "."), nil +} diff --git a/oryx/jsonschemax/print.go b/oryx/jsonschemax/print.go new file mode 100644 index 00000000000..45c1b91e452 --- /dev/null +++ b/oryx/jsonschemax/print.go @@ -0,0 +1,72 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonschemax + +import ( + "errors" + "fmt" + "io" + "strings" + + "github.com/tidwall/gjson" + + "github.com/ory/jsonschema/v3" +) + +func FormatValidationErrorForCLI(w io.Writer, conf []byte, err error) { + if err == nil { + return + } + + if e := new(jsonschema.ValidationError); errors.As(err, &e) { + _, _ = fmt.Fprintln(w, "The configuration contains values or keys which are invalid:") + pointer, validation := FormatError(e) + + if pointer == "#" { + if len(e.Causes) == 0 { + _, _ = fmt.Fprintln(w, "(root)") + _, _ = fmt.Fprintln(w, "^-- "+validation) + _, _ = fmt.Fprintln(w, "") + } + } else { + spaces := make([]string, len(pointer)+3) + _, _ = fmt.Fprintf(w, "%s: %+v", pointer, gjson.GetBytes(conf, pointer).Value()) + _, _ = fmt.Fprintln(w, "") + _, _ = fmt.Fprintf(w, "%s^-- %s", strings.Join(spaces, " "), validation) + _, _ = fmt.Fprintln(w, "") + _, _ = fmt.Fprintln(w, "") + } + + for _, cause := range e.Causes { + FormatValidationErrorForCLI(w, conf, cause) + } + return + } +} + +func FormatError(e *jsonschema.ValidationError) (string, string) { + var ( + err error + pointer string + message string + ) + + pointer = e.InstancePtr + message = e.Message + switch ctx := e.Context.(type) { + case *jsonschema.ValidationErrorContextRequired: + if len(ctx.Missing) > 0 { + message = "one or more required properties are missing" + pointer = ctx.Missing[0] + } + } + + // We can ignore the error as it will simply echo the pointer. + pointer, err = JSONPointerToDotNotation(pointer) + if err != nil { + pointer = e.InstancePtr + } + + return pointer, message +} diff --git a/oryx/jsonschemax/stub/.config.yaml b/oryx/jsonschemax/stub/.config.yaml new file mode 100644 index 00000000000..2367e5d1c3f --- /dev/null +++ b/oryx/jsonschemax/stub/.config.yaml @@ -0,0 +1,3 @@ +dsn: memory +items: + - id: 1 diff --git a/oryx/jsonschemax/stub/.oathkeeper.schema.json b/oryx/jsonschemax/stub/.oathkeeper.schema.json new file mode 100644 index 00000000000..7126e15bc83 --- /dev/null +++ b/oryx/jsonschemax/stub/.oathkeeper.schema.json @@ -0,0 +1,1073 @@ +{ + "$id": "https://raw.githubusercontent.com/ory/oathkeeper/v0.32.1-beta.1/.schemas/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ORY Oathkeeper Configuration", + "type": "object", + "definitions": { + "tlsxSource": { + "type": "object", + "additionalProperties": false, + "properties": { + "path": { + "title": "Path to PEM-encoded Fle", + "type": "string", + "examples": ["path/to/file.pem"] + }, + "base64": { + "title": "Base64 Encoded Inline", + "description": "The base64 string of the PEM-encoded file content. 
Can be generated using for example `base64 -i path/to/file.pem`.", + "type": "string", + "examples": [ + "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tXG5NSUlEWlRDQ0FrMmdBd0lCQWdJRVY1eE90REFOQmdr..." + ] + } + } + }, + "tlsx": { + "title": "HTTPS", + "description": "Configure HTTP over TLS (HTTPS). All options can also be set using environment variables by replacing dots (`.`) with underscores (`_`) and uppercasing the key. For example, `some.prefix.tls.key.path` becomes `export SOME_PREFIX_TLS_KEY_PATH`. If all keys are left undefined, TLS will be disabled.", + "type": "object", + "additionalProperties": false, + "properties": { + "key": { + "title": "Private Key (PEM)", + "allOf": [ + { + "$ref": "#/definitions/tlsxSource" + } + ] + }, + "cert": { + "title": "TLS Certificate (PEM)", + "allOf": [ + { + "$ref": "#/definitions/tlsxSource" + } + ] + } + } + }, + "cors": { + "title": "Cross Origin Resource Sharing (CORS)", + "description": "Configure [Cross Origin Resource Sharing (CORS)](http://www.w3.org/TR/cors/) using the following options.", + "type": "object", + "properties": { + "enabled": { + "type": "boolean", + "default": false, + "title": "Enable CORS", + "description": "If set to true, CORS will be enabled and preflight-requests (OPTION) will be answered." + }, + "allowed_origins": { + "title": "Allowed Origins", + "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). Usage of wildcards implies a small performance penality. Only one wildcard can be used per origin.", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "default": ["*"], + "uniqueItems": true, + "examples": [ + "https://example.com", + "https://*.example.com", + "https://*.foo.example.com" + ] + }, + "allowed_methods": { + "type": "array", + "title": "Allowed HTTP Methods", + "description": "A list of methods the client is allowed to use with cross-domain requests.", + "items": { + "type": "string", + "enum": [ + "GET", + "HEAD", + "POST", + "PUT", + "DELETE", + "CONNECT", + "TRACE", + "PATCH" + ] + }, + "uniqueItems": true, + "default": ["GET", "POST", "PUT", "PATCH", "DELETE"] + }, + "allowed_headers": { + "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", + "title": "Allowed Request HTTP Headers", + "type": "array", + "items": { + "type": "string" + }, + "minLength": 1, + "uniqueItems": true, + "default": ["Authorization", "Content-Type"] + }, + "exposed_headers": { + "description": "Indicates which headers are safe to expose to the API of a CORS API specification", + "title": "Allowed Response HTTP Headers", + "type": "array", + "items": { + "type": "string" + }, + "minLength": 1, + "uniqueItems": true, + "default": ["Content-Type"] + }, + "allow_credentials": { + "type": "boolean", + "title": "Allow HTTP Credentials", + "default": false, + "description": "Indicates whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates." + }, + "max_age": { + "type": "number", + "default": 0, + "title": "Maximum Age", + "description": "Indicates how long (in seconds) the results of a preflight request can be cached. The default is 0 which stands for no max age." 
+ }, + "debug": { + "type": "boolean", + "default": false, + "title": "Enable Debugging", + "description": "Set to true to debug server side CORS issues." + } + }, + "additionalProperties": false + }, + "handlerSwitch": { + "title": "Enabled", + "type": "boolean", + "default": false, + "examples": [true], + "description": "En-/disables this component." + }, + "scopeStrategy": { + "title": "Scope Strategy", + "type": "string", + "enum": ["hierarchic", "exact", "wildcard", "none"], + "default": "none", + "description": "Sets the strategy validation algorithm." + }, + "configAuthenticatorsAnonymous": { + "type": "object", + "title": "Anonymous Authenticator Configuration", + "description": "This section is optional when the authenticator is disabled.", + "properties": { + "subject": { + "type": "string", + "title": "Anonymous Subject", + "examples": ["guest", "anon", "anonymous", "unknown"], + "default": "anonymous", + "description": "Sets the anonymous username." + } + }, + "additionalProperties": false + }, + "configAuthenticatorsCookieSession": { + "type": "object", + "title": "Cookie Session Authenticator Configuration", + "description": "This section is optional when the authenticator is disabled.", + "properties": { + "check_session_url": { + "title": "Session Check URL", + "type": "string", + "format": "uri", + "description": "The origin to proxy requests to. If the response is a 200 with body `{ \"subject\": \"...\", \"extra\": {} }`. The request will pass the subject through successfully, otherwise it will be marked as unauthorized.\n\n>If this authenticator is enabled, this value is required.", + "examples": ["https://session-store-host"] + }, + "only": { + "type": "array", + "items": { + "type": "string", + "additionalItems": false + }, + "title": "Only Cookies", + "description": "A list of possible cookies to look for on incoming requests, and will fallthrough to the next authenticator if none of the passed cookies are set on the request." + } + }, + "required": ["check_session_url"], + "additionalProperties": false + }, + "configAuthenticatorsJwt": { + "type": "object", + "title": "JWT Authenticator Configuration", + "description": "This section is optional when the authenticator is disabled.", + "required": ["jwks_urls"], + "properties": { + "required_scope": { + "type": "array", + "title": "Required Token Scope", + "description": "An array of OAuth 2.0 scopes that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header did not request that specific scope, the request is denied.", + "items": { + "type": "string" + } + }, + "target_audience": { + "title": "Intended Audience", + "type": "array", + "description": "An array of audiences that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header is not intended for any of the requested audiences, the request is denied.", + "items": { + "type": "string" + } + }, + "trusted_issuers": { + "type": "array", + "items": { + "type": "string" + } + }, + "allowed_algorithms": { + "type": "array", + "items": { + "type": "string" + } + }, + "jwks_urls": { + "title": "JSON Web Key URLs", + "type": "array", + "items": { + "type": "string", + "format": "uri" + }, + "description": "URLs where ORY Oathkeeper can retrieve JSON Web Keys from for validating the JSON Web Token. Usually something like \"https://my-keys.com/.well-known/jwks.json\". 
The response of that endpoint must return a JSON Web Key Set (JWKS).\n\n>If this authenticator is enabled, this value is required.", + "examples": [ + "https://my-website.com/.well-known/jwks.json", + "https://my-other-website.com/.well-known/jwks.json", + "file://path/to/local/jwks.json" + ] + }, + "scope_strategy": { + "$ref": "#/definitions/scopeStrategy" + }, + "token_from": { + "title": "Token From", + "description": "The location of the token.\n If not configured, the token will be received from a default location - 'Authorization' header.\n One and only one location (header or query) must be specified.", + "oneOf": [ + { + "type": "object", + "required": ["header"], + "properties": { + "header": { + "title": "Header", + "type": "string", + "description": "The header (case insensitive) that must contain a token for request authentication. It can't be set along with query_parameter." + } + } + }, + { + "type": "object", + "required": ["query_parameter"], + "properties": { + "query_parameter": { + "title": "Query Parameter", + "type": "string", + "description": "The query parameter (case sensitive) that must contain a token for request authentication. It can't be set along with header." + } + } + } + ] + } + }, + "additionalProperties": false + }, + "configAuthenticatorsOauth2ClientCredentials": { + "type": "object", + "title": "OAuth 2.0 Client Credentials Authenticator Configuration", + "description": "This section is optional when the authenticator is disabled.", + "properties": { + "token_url": { + "type": "string", + "description": "The OAuth 2.0 Token Endpoint that will be used to validate the client credentials.\n\n>If this authenticator is enabled, this value is required.", + "format": "uri", + "examples": ["https://my-website.com/oauth2/token"] + }, + "required_scope": { + "type": "array", + "title": "Request Permissions (Token Scope)", + "description": "Scopes is an array of OAuth 2.0 scopes that are required when accessing an endpoint protected by this rule.\n If the token used in the Authorization header did not request that specific scope, the request is denied.", + "items": { + "type": "string" + } + } + }, + "required": ["token_url"], + "additionalProperties": false + }, + "configAuthenticatorsOauth2Introspection": { + "type": "object", + "title": "OAuth 2.0 Introspection Authenticator Configuration", + "description": "This section is optional when the authenticator is disabled.", + "properties": { + "introspection_url": { + "type": "string", + "format": "uri", + "examples": ["https://my-website.com/oauth2/introspection"], + "title": "OAuth 2.0 Introspection URL", + "description": "The OAuth 2.0 Token Introspection endpoint URL.\n\n>If this authenticator is enabled, this value is required." + }, + "scope_strategy": { + "$ref": "#/definitions/scopeStrategy" + }, + "pre_authorization": { + "title": "Pre-Authorization", + "description": "Enable pre-authorization in cases where the OAuth 2.0 Token Introspection endpoint is protected by OAuth 2.0 Bearer Tokens that can be retrieved using the OAuth 2.0 Client Credentials grant.", + "type": "object", + "additionalProperties": false, + "required": ["client_id", "client_secret", "token_url"], + "properties": { + "enabled": { + "const": true + }, + "client_id": { + "type": "string", + "title": "OAuth 2.0 Client ID", + "description": "The OAuth 2.0 Client ID to be used for the OAuth 2.0 Client Credentials Grant.\n\n>If pre-authorization is enabled, this value is required." 
+ }, + "client_secret": { + "type": "string", + "title": "OAuth 2.0 Client Secret", + "description": "The OAuth 2.0 Client Secret to be used for the OAuth 2.0 Client Credentials Grant.\n\n>If pre-authorization is enabled, this value is required." + }, + "token_url": { + "type": "string", + "format": "uri", + "title": "OAuth 2.0 Token URL", + "description": "The OAuth 2.0 Token Endpoint where the OAuth 2.0 Client Credentials Grant will be performed.\n\n>If pre-authorization is enabled, this value is required." + }, + "scope": { + "type": "array", + "items": { + "type": "string" + }, + "title": "OAuth 2.0 Scope", + "description": "The OAuth 2.0 Scope to be requested during the OAuth 2.0 Client Credentials Grant.", + "examples": [["[\"foo\", \"bar\"]"]] + } + } + }, + "required_scope": { + "title": "Required Scope", + "description": "An array of OAuth 2.0 scopes that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header did not request that specific scope, the request is denied.", + "type": "array", + "items": { + "type": "string" + } + }, + "target_audience": { + "title": "Target Audience", + "description": "An array of audiences that are required when accessing an endpoint protected by this handler.\n If the token used in the Authorization header is not intended for any of the requested audiences, the request is denied.", + "type": "array", + "items": { + "type": "string" + } + }, + "trusted_issuers": { + "title": "Trusted Issuers", + "description": "The token must have been issued by one of the issuers listed in this array.", + "type": "array", + "items": { + "type": "string" + } + }, + "token_from": { + "title": "Token From", + "description": "The location of the token.\n If not configured, the token will be received from a default location - 'Authorization' header.\n One and only one location (header or query) must be specified.", + "type": "object", + "oneOf": [ + { + "required": ["header"], + "properties": { + "header": { + "title": "Header", + "type": "string", + "description": "The header (case insensitive) that must contain a token for request authentication.\n It can't be set along with query_parameter." + } + } + }, + { + "required": ["query_parameter"], + "properties": { + "query_parameter": { + "title": "Query Parameter", + "type": "string", + "description": "The query parameter (case sensitive) that must contain a token for request authentication.\n It can't be set along with header." 
+ } + } + } + ] + } + }, + "required": ["introspection_url"], + "additionalProperties": false + }, + "configAuthorizersKetoEngineAcpOry": { + "type": "object", + "title": "ORY Keto Access Control Policy Authorizer Configuration", + "description": "This section is optional when the authorizer is disabled.", + "properties": { + "base_url": { + "title": "Base URL", + "type": "string", + "format": "uri", + "description": "The base URL of ORY Keto.\n\n>If this authorizer is enabled, this value is required.", + "examples": ["http://my-keto/"] + }, + "required_action": { + "type": "string", + "default": "unset" + }, + "required_resource": { + "type": "string", + "default": "unset" + }, + "subject": { + "type": "string" + }, + "flavor": { + "type": "string" + } + }, + "required": ["base_url", "required_action", "required_resource"], + "additionalProperties": false + }, + "configMutatorsCookie": { + "type": "object", + "title": "Cookie Mutator Configuration", + "description": "This section is optional when the mutator is disabled.", + "required": ["cookies"], + "properties": { + "cookies": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "configMutatorsHeader": { + "type": "object", + "title": "Header Mutator Configuration", + "description": "This section is optional when the mutator is disabled.", + "required": ["headers"], + "properties": { + "headers": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "configMutatorsHydrator": { + "type": "object", + "title": "Hydrator Mutator Configuration", + "description": "This section is optional when the mutator is disabled.", + "properties": { + "api": { + "additionalProperties": false, + "required": ["url"], + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri" + }, + "auth": { + "type": "object", + "additionalProperties": false, + "properties": { + "basic": { + "required": ["username", "password"], + "type": "object", + "additionalProperties": false, + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string" + } + } + } + } + }, + "retry": { + "type": "object", + "additionalProperties": false, + "properties": { + "number_of_retries": { + "type": "number", + "minimum": 0, + "default": 100 + }, + "delay_in_milliseconds": { + "type": "integer", + "minimum": 0, + "default": 3 + } + } + } + } + } + }, + "required": ["api"], + "additionalProperties": false + }, + "configMutatorsIdToken": { + "type": "object", + "title": "ID Token Mutator Configuration", + "description": "This section is optional when the mutator is disabled.", + "required": ["jwks_url", "issuer_url"], + "properties": { + "claims": { + "type": "string" + }, + "issuer_url": { + "type": "string", + "title": "Issuer URL", + "description": "Sets the \"iss\" value of the ID Token.\n\n>If this mutator is enabled, this value is required." + }, + "jwks_url": { + "type": "string", + "format": "uri", + "title": "JSON Web Key URL", + "description": "Sets the URL where keys should be fetched from. 
Supports remote locations (http, https) as well as local filesystem paths.\n\n>If this mutator is enabled, this value is required.", + "examples": [ + "https://fetch-keys/from/this/location.json", + "file:///from/this/absolute/location.json", + "file://../from/this/relative/location.json" + ] + }, + "ttl": { + "type": "string", + "title": "Expire After", + "description": "Sets the time-to-live of the JSON Web Token.", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "default": "1m", + "examples": ["1h", "1m", "30s"] + } + }, + "additionalProperties": false + } + }, + "properties": { + "serve": { + "title": "HTTP(s)", + "additionalProperties": false, + "type": "object", + "properties": { + "api": { + "type": "object", + "title": "HTTP REST API", + "additionalProperties": false, + "properties": { + "port": { + "type": "number", + "default": 4456, + "title": "Port", + "description": "The port to listen on." + }, + "host": { + "type": "string", + "default": "", + "examples": ["localhost", "127.0.0.1"], + "title": "Host", + "description": "The network interface to listen on." + }, + "cors": { + "$ref": "#/definitions/cors" + }, + "tls": { + "$ref": "#/definitions/tlsx" + } + } + }, + "proxy": { + "type": "object", + "title": "HTTP Reverse Proxy", + "additionalProperties": false, + "properties": { + "port": { + "type": "number", + "default": 4455, + "title": "Port", + "description": "The port to listen on." + }, + "host": { + "type": "string", + "default": "", + "examples": ["localhost", "127.0.0.1"], + "title": "Host", + "description": "The network interface to listen on. Leave empty to listen on all interfaces." + }, + "timeout": { + "title": "HTTP Timeouts", + "description": "Control the reverse proxy's HTTP timeouts.", + "type": "object", + "additionalProperties": false, + "properties": { + "read": { + "title": "HTTP Read Timeout", + "type": "string", + "default": "5s", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "description": "The maximum duration for reading the entire request, including the body.", + "examples": ["5s", "5m", "5h"] + }, + "write": { + "title": "HTTP Write Timeout", + "type": "string", + "default": "120s", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "description": "The maximum duration before timing out writes of the response. Increase this parameter to prevent unexpected closing a client connection if an upstream request is responding slowly.", + "examples": ["5s", "5m", "5h"] + }, + "idle": { + "title": "HTTP Idle Timeout", + "type": "string", + "default": "120s", + "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", + "description": " The maximum amount of time to wait for any action of a request session, reading data or writing the response.", + "examples": ["5s", "5m", "5h"] + } + } + }, + "cors": { + "$ref": "#/definitions/cors" + }, + "tls": { + "$ref": "#/definitions/tlsx" + } + } + } + } + }, + "access_rules": { + "title": "Access Rules", + "description": "Configure access rules. All sub-keys support configuration reloading without restarting.", + "type": "object", + "additionalProperties": false, + "properties": { + "repositories": { + "title": "Repositories", + "description": "Locations (list of URLs) where access rules should be fetched from on boot. 
It is expected that the documents at those locations return a JSON or YAML Array containing ORY Oathkeeper Access Rules:\n\n- If the URL Scheme is `file://`, the access rules (an array of access rules is expected) will be fetched from the local file system.\n- If the URL Scheme is `inline://`, the access rules (an array of access rules is expected) are expected to be a base64 encoded (with padding!) JSON/YAML string (base64_encode(`[{\"id\":\"foo-rule\",\"authenticators\":[....]}]`)).\n- If the URL Scheme is `http://` or `https://`, the access rules (an array of access rules is expected) will be fetched from the provided HTTP(s) location.", + "type": "array", + "items": { + "type": "string", + "format": "uri" + }, + "examples": [ + "[\"file://path/to/rules.json\",\"inline://W3siaWQiOiJmb28tcnVsZSIsImF1dGhlbnRpY2F0b3JzIjpbXX1d\",\"https://path-to-my-rules/rules.json\"]" + ] + } + } + }, + "authenticators": { + "title": "Authenticators", + "type": "object", + "description": "For more information on authenticators head over to: https://www.ory.sh/docs/oathkeeper/pipeline/authn", + "additionalProperties": false, + "properties": { + "anonymous": { + "title": "Anonymous", + "description": "The [`anonymous` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#anonymous).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configAuthenticatorsAnonymous" + } + } + }, + "noop": { + "title": "No Operation (noop)", + "description": "The [`noop` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#noop).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + } + } + }, + "unauthorized": { + "title": "Unauthorized", + "description": "The [`unauthorized` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#unauthorized).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + } + } + }, + "cookie_session": { + "title": "Cookie Session", + "description": "The [`cookie_session` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#cookie_session).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configAuthenticatorsCookieSession" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + }, + "jwt": { + "title": "JSON Web Token (jwt)", + "description": "The [`jwt` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#jwt).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configAuthenticatorsJwt" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + }, + "oauth2_client_credentials": { + "title": "OAuth 2.0 Client Credentials", + "description": "The [`oauth2_client_credentials` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#oauth2_client_credentials).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": 
"#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configAuthenticatorsOauth2ClientCredentials" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + }, + "oauth2_introspection": { + "title": "OAuth 2.0 Token Introspection", + "description": "The [`oauth2_introspection` authenticator](https://www.ory.sh/docs/oathkeeper/pipeline/authn#oauth2_introspection).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configAuthenticatorsOauth2Introspection" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + } + } + }, + "authorizers": { + "title": "Authorizers", + "type": "object", + "description": "For more information on authorizers head over to: https://www.ory.sh/docs/oathkeeper/pipeline/authz", + "additionalProperties": false, + "properties": { + "allow": { + "title": "Allow", + "description": "The [`allow` authorizer](https://www.ory.sh/docs/oathkeeper/pipeline/authz#allow).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + } + } + }, + "deny": { + "title": "Deny", + "description": "The [`deny` authorizer](https://www.ory.sh/docs/oathkeeper/pipeline/authz#allow).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + } + } + }, + "keto_engine_acp_ory": { + "title": "ORY Keto Access Control Policies Engine", + "description": "The [`keto_engine_acp_ory` authorizer](https://www.ory.sh/docs/oathkeeper/pipeline/authz#keto_engine_acp_ory).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configAuthorizersKetoEngineAcpOry" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + } + } + }, + "mutators": { + "title": "Mutators", + "type": "object", + "description": "For more information on mutators head over to: https://www.ory.sh/docs/oathkeeper/pipeline/mutator", + "additionalProperties": false, + "properties": { + "noop": { + "title": "No Operation (noop)", + "description": "The [`noop` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#noop).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + } + } + }, + "cookie": { + "title": "HTTP Cookie", + "description": "The [`cookie` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#cookie).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configMutatorsCookie" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + }, + "header": { + "title": "HTTP Header", + "description": "The [`header` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#header).", + "type": "object", + "additionalProperties": 
false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configMutatorsHeader" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + }, + "hydrator": { + "title": "Hydrator", + "description": "The [`hydrator` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#hydrator).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configMutatorsHydrator" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + }, + "id_token": { + "title": "ID Token (JSON Web Token)", + "description": "The [`id_token` mutator](https://www.ory.sh/docs/oathkeeper/pipeline/mutator#id_token).", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "$ref": "#/definitions/handlerSwitch" + }, + "config": { + "$ref": "#/definitions/configMutatorsIdToken" + } + }, + "oneOf": [ + { + "properties": { + "enabled": { + "const": true + } + }, + "required": ["config"] + }, + { + "properties": { + "enabled": { + "const": false + } + } + } + ] + } + } + }, + "log": { + "title": "Log", + "description": "Configure logging using the following options. Logging will always be sent to stdout and stderr.", + "type": "object", + "properties": { + "level": { + "type": "string", + "default": "info", + "enum": ["panic", "fatal", "error", "warn", "info", "debug"], + "title": "Level", + "description": "Debug enables stack traces on errors. Can also be set using environment variable LOG_LEVEL." + }, + "format": { + "type": "string", + "default": "text", + "enum": ["text", "json"], + "title": "Format", + "description": "The log format can either be text or JSON." + } + }, + "additionalProperties": false + }, + "profiling": { + "title": "Profiling", + "description": "Enables CPU or memory profiling if set. 
For more details on profiling Go programs read [Profiling Go Programs](https://blog.golang.org/profiling-go-programs).", + "type": "string", + "enum": ["cpu", "mem"] + } + }, + "required": [], + "additionalProperties": false +} diff --git a/oryx/jsonschemax/stub/config.schema.json b/oryx/jsonschemax/stub/config.schema.json new file mode 100644 index 00000000000..537e6ac034a --- /dev/null +++ b/oryx/jsonschemax/stub/config.schema.json @@ -0,0 +1,12 @@ +{ + "$id": "https://example.com/config.schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "config", + "type": "object", + "properties": { + "dsn": { + "type": "string" + } + }, + "required": ["dsn"] +} diff --git a/oryx/jsonschemax/stub/json/.project-stub-name.json b/oryx/jsonschemax/stub/json/.project-stub-name.json new file mode 100644 index 00000000000..798f6ebc0ff --- /dev/null +++ b/oryx/jsonschemax/stub/json/.project-stub-name.json @@ -0,0 +1,7 @@ +{ + "serve": { + "admin": { + "port": 1 + } + } +} diff --git a/oryx/jsonschemax/stub/nested-array.schema.json b/oryx/jsonschemax/stub/nested-array.schema.json new file mode 100644 index 00000000000..b70c935517f --- /dev/null +++ b/oryx/jsonschemax/stub/nested-array.schema.json @@ -0,0 +1,105 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "providers": { + "title": "OpenID Connect and OAuth2 Providers", + "description": "A list and configuration of OAuth2 and OpenID Connect providers ORY Kratos should integrate with.", + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "examples": ["google"] + }, + "provider": { + "title": "Provider", + "description": "Can be one of github, gitlab, generic, google, microsoft, discord.", + "type": "string", + "enum": [ + "github", + "gitlab", + "generic", + "google", + "microsoft", + "discord" + ], + "examples": ["google"] + }, + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + }, + "issuer_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com"] + }, + "auth_url": { + "type": "string", + "format": "uri", + "examples": ["https://accounts.google.com/o/oauth2/v2/auth"] + }, + "token_url": { + "type": "string", + "format": "uri", + "examples": ["https://www.googleapis.com/oauth2/v4/token"] + }, + "mapper_url": { + "title": "Jsonnet Mapper URL", + "description": "The URL where the jsonnet source is located for mapping the provider's data to ORY Kratos data.", + "type": "string", + "format": "uri", + "examples": [ + "file://path/to/oidc.jsonnet", + "https://foo.bar.com/path/to/oidc.jsonnet", + "base64://bG9jYWwgc3ViamVjdCA9I..." 
+ ] + }, + "scope": { + "type": "array", + "items": { + "type": "string", + "examples": ["offline_access", "profile"] + } + }, + "tenant": { + "title": "Azure AD Tenant", + "description": "The Azure AD Tenant to use for authentication.", + "type": "string", + "examples": [ + "common", + "organizations", + "consumers", + "8eaef023-2b34-4da1-9baa-8bc8c9d6a490", + "contoso.onmicrosoft.com" + ] + } + }, + "additionalProperties": false, + "required": [], + "if": { + "properties": { + "provider": { + "const": "microsoft" + } + }, + "required": ["provider"] + }, + "then": { + "required": ["tenant"] + }, + "else": { + "not": { + "properties": { + "tenant": {} + }, + "required": ["tenant"] + } + } + } + } + } +} diff --git a/oryx/jsonschemax/stub/nested-simple-array.schema.json b/oryx/jsonschemax/stub/nested-simple-array.schema.json new file mode 100644 index 00000000000..7bfe7a8f089 --- /dev/null +++ b/oryx/jsonschemax/stub/nested-simple-array.schema.json @@ -0,0 +1,16 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "providers": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + } + } + } + } + } +} diff --git a/oryx/jsonschemax/stub/toml/.project-stub-name.toml b/oryx/jsonschemax/stub/toml/.project-stub-name.toml new file mode 100644 index 00000000000..5db36ed5872 --- /dev/null +++ b/oryx/jsonschemax/stub/toml/.project-stub-name.toml @@ -0,0 +1,4 @@ +[serve] + + [serve.admin] + port = "2" \ No newline at end of file diff --git a/oryx/jsonschemax/stub/yaml/.project-stub-name.yaml b/oryx/jsonschemax/stub/yaml/.project-stub-name.yaml new file mode 100644 index 00000000000..f41a4a0c8d3 --- /dev/null +++ b/oryx/jsonschemax/stub/yaml/.project-stub-name.yaml @@ -0,0 +1,4 @@ +# serve controls the configuration for the http(s) daemon +serve: + admin: + port: 3 diff --git a/oryx/jsonschemax/stub/yml/.project-stub-name.yml b/oryx/jsonschemax/stub/yml/.project-stub-name.yml new file mode 100644 index 00000000000..ccd7c3b0d94 --- /dev/null +++ b/oryx/jsonschemax/stub/yml/.project-stub-name.yml @@ -0,0 +1,3 @@ +serve: + admin: + port: 4 diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=1.json.json b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=1.json.json new file mode 100644 index 00000000000..810c96eeeb7 --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=1.json.json @@ -0,0 +1 @@ +"foo" diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=2.json.json b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=2.json.json new file mode 100644 index 00000000000..fab1a3b622b --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=2.json.json @@ -0,0 +1,3 @@ +{ + "some": "key" +} diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=3.json.json b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=3.json.json new file mode 100644 index 00000000000..7306c235b04 --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=3.json.json @@ -0,0 +1,3 @@ +{ + "some_key": 1234 +} diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=4.json.json b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=4.json.json new file mode 100644 index 00000000000..2d0d20a9033 --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=4.json.json @@ -0,0 +1,15 @@ +{ + "nested": { + "object": { + "source": "base64://aGVsbG8gd29ybGQ=" + }, + "array": [ + { + "nested": { + "source": 
"base64://aGVsbG8gd29ybGQ=" + } + }, + "base64://aGVsbG8gd29ybGQ=" + ] + } +} diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=5.json.json b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=5.json.json new file mode 100644 index 00000000000..bfa283bed8c --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=5.json.json @@ -0,0 +1 @@ +"https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=6.json.json b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=6.json.json new file mode 100644 index 00000000000..a167c0b21d9 --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-fixtures-fixture=6.json.json @@ -0,0 +1,15 @@ +{ + "nested": { + "object": { + "ignore_this_key": "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + }, + "array": [ + { + "nested": { + "source": "base64://aGVsbG8gd29ybGQ=" + } + }, + "base64://aGVsbG8gd29ybGQ=" + ] + } +} diff --git a/oryx/jsonx/.snapshots/TestEmbedSources-only_embeds_base64.json b/oryx/jsonx/.snapshots/TestEmbedSources-only_embeds_base64.json new file mode 100644 index 00000000000..f056437c9e6 --- /dev/null +++ b/oryx/jsonx/.snapshots/TestEmbedSources-only_embeds_base64.json @@ -0,0 +1,4 @@ +{ + "key": "https://foobar.com", + "bar": "base64://YXNkZg==" +} diff --git a/oryx/jsonx/debug.go b/oryx/jsonx/debug.go new file mode 100644 index 00000000000..9b738f1dd04 --- /dev/null +++ b/oryx/jsonx/debug.go @@ -0,0 +1,76 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "encoding/json" + "fmt" + "slices" +) + +// Anonymize takes a JSON byte array and anonymizes its content by +// recursively replacing all values with a string indicating their type. +// +// It recurses into nested objects and arrays, but ignores the "schemas" and "id". +func Anonymize(data []byte, except ...string) []byte { + obj := make(map[string]any) + if err := json.Unmarshal(data, &obj); err != nil { + return []byte(fmt.Sprintf(`{"error": "invalid JSON", "message": %q}`, err.Error())) + } + + anonymize(obj, except...) 
+ + out, err := json.MarshalIndent(obj, "", " ") + if err != nil { + return []byte(fmt.Sprintf(`{"error": "could not marshal JSON shape", "message": %q}`, err.Error())) + } + + return out +} + +func anonymize(obj map[string]any, except ...string) { + for k, v := range obj { + if slices.Contains(except, k) { + continue + } + + switch v := v.(type) { + case []any: + for elIdx, el := range v { + switch el := el.(type) { + case map[string]any: + anonymize(el) + v[elIdx] = el + default: + v[elIdx] = jsonType(el) + } + } + + case map[string]any: + anonymize(v) + obj[k] = v + default: + obj[k] = jsonType(v) + } + } +} + +func jsonType(v any) string { + switch v := v.(type) { + case string: + return "string" + case float64: + return "number" + case bool: + return "boolean" + case nil: + return "null" + case []any: + return "array" + case map[string]any: + return "object" + default: + return fmt.Sprintf("%T", v) + } +} diff --git a/oryx/jsonx/decoder.go b/oryx/jsonx/decoder.go new file mode 100644 index 00000000000..d7a00a1af53 --- /dev/null +++ b/oryx/jsonx/decoder.go @@ -0,0 +1,16 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "encoding/json" + "io" +) + +// NewStrictDecoder is a shorthand for json.Decoder.DisallowUnknownFields +func NewStrictDecoder(b io.Reader) *json.Decoder { + d := json.NewDecoder(b) + d.DisallowUnknownFields() + return d +} diff --git a/oryx/jsonx/embed.go b/oryx/jsonx/embed.go new file mode 100644 index 00000000000..ae4f94cf1a8 --- /dev/null +++ b/oryx/jsonx/embed.go @@ -0,0 +1,113 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "encoding/base64" + "encoding/json" + "net/url" + "slices" + "strconv" + "strings" + + "github.com/tidwall/gjson" + "github.com/tidwall/sjson" + + "github.com/ory/x/osx" +) + +type options struct { + ignoreKeys []string + onlySchemes []string +} + +type OptionsModifier func(*options) + +func newOptions(o []OptionsModifier) *options { + opt := &options{} + for _, f := range o { + f(opt) + } + return opt +} + +func WithIgnoreKeys(keys ...string) OptionsModifier { + return func(o *options) { + o.ignoreKeys = keys + } +} + +func WithOnlySchemes(scheme ...string) OptionsModifier { + return func(o *options) { + o.onlySchemes = scheme + } +} + +func EmbedSources(in json.RawMessage, opts ...OptionsModifier) (out json.RawMessage, err error) { + out = make([]byte, len(in)) + copy(out, in) + if err := embed(gjson.ParseBytes(in), nil, &out, newOptions(opts)); err != nil { + return nil, err + } + return out, nil +} + +func embed(parsed gjson.Result, parents []string, result *json.RawMessage, o *options) (err error) { + if parsed.IsObject() { + parsed.ForEach(func(k, v gjson.Result) bool { + err = embed(v, append(parents, strings.ReplaceAll(k.String(), ".", "\\.")), result, o) + return err == nil + }) + if err != nil { + return err + } + } else if parsed.IsArray() { + for kk, vv := range parsed.Array() { + if err = embed(vv, append(parents, strconv.Itoa(kk)), result, o); err != nil { + return err + } + } + } else if parsed.Type != gjson.String { + return nil + } + + if len(parents) > 0 && slices.Contains(o.ignoreKeys, parents[len(parents)-1]) { + return nil + } + + loc, err := url.ParseRequestURI(parsed.String()) + if err != nil { + // Not a URL, return + return nil + } + + if len(o.onlySchemes) == 0 { + if loc.Scheme != "file" && loc.Scheme != "http" && loc.Scheme != "https" && loc.Scheme != "base64" { + // 
Not a known pattern, ignore + return nil + } + } else if !slices.Contains(o.onlySchemes, loc.Scheme) { + // Not a known pattern, ignore + return nil + } + + contents, err := osx.ReadFileFromAllSources(loc.String()) + if err != nil { + return err + } + + encoded := base64.StdEncoding.EncodeToString(contents) + key := strings.Join(parents, ".") + if key == "" { + key = "@" + } + + interim, err := sjson.SetBytes(*result, key, "base64://"+encoded) + if err != nil { + return err + } + + *result = interim + return +} diff --git a/oryx/jsonx/fixture/embed/1.json b/oryx/jsonx/fixture/embed/1.json new file mode 100644 index 00000000000..810c96eeeb7 --- /dev/null +++ b/oryx/jsonx/fixture/embed/1.json @@ -0,0 +1 @@ +"foo" diff --git a/oryx/jsonx/fixture/embed/2.json b/oryx/jsonx/fixture/embed/2.json new file mode 100644 index 00000000000..fab1a3b622b --- /dev/null +++ b/oryx/jsonx/fixture/embed/2.json @@ -0,0 +1,3 @@ +{ + "some": "key" +} diff --git a/oryx/jsonx/fixture/embed/3.json b/oryx/jsonx/fixture/embed/3.json new file mode 100644 index 00000000000..7306c235b04 --- /dev/null +++ b/oryx/jsonx/fixture/embed/3.json @@ -0,0 +1,3 @@ +{ + "some_key": 1234 +} diff --git a/oryx/jsonx/fixture/embed/4.json b/oryx/jsonx/fixture/embed/4.json new file mode 100644 index 00000000000..279ceab5f36 --- /dev/null +++ b/oryx/jsonx/fixture/embed/4.json @@ -0,0 +1,15 @@ +{ + "nested": { + "object": { + "source": "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + }, + "array": [ + { + "nested": { + "source": "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + } + }, + "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + ] + } +} diff --git a/oryx/jsonx/fixture/embed/5.json b/oryx/jsonx/fixture/embed/5.json new file mode 100644 index 00000000000..bfa283bed8c --- /dev/null +++ b/oryx/jsonx/fixture/embed/5.json @@ -0,0 +1 @@ +"https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" diff --git a/oryx/jsonx/fixture/embed/6.json b/oryx/jsonx/fixture/embed/6.json new file mode 100644 index 00000000000..1fde753186c --- /dev/null +++ b/oryx/jsonx/fixture/embed/6.json @@ -0,0 +1,15 @@ +{ + "nested": { + "object": { + "ignore_this_key": "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + }, + "array": [ + { + "nested": { + "source": "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + } + }, + "https://gist.githubusercontent.com/aeneasr/eb4612d295f613ee44bada6e30e2a856/raw/29edbda41bcb27492a1ac56926e03dee9480708f/hello-world.txt" + ] + } +} diff --git a/oryx/jsonx/flatten.go b/oryx/jsonx/flatten.go new file mode 100644 index 00000000000..e4e04bafb55 --- /dev/null +++ b/oryx/jsonx/flatten.go @@ -0,0 +1,39 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "encoding/json" + "strconv" + "strings" + + "github.com/tidwall/gjson" +) + +// Flatten flattens a JSON object using dot notation. 
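+// For example (illustrative), {"a":{"b":[1,2]}} flattens to {"a.b.0": 1, "a.b.1": 2};
+// literal dots in object keys are escaped as "\.", and non-object input yields a nil map.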
+func Flatten(raw json.RawMessage) map[string]interface{} { + parsed := gjson.ParseBytes(raw) + if !parsed.IsObject() { + return nil + } + + flattened := make(map[string]interface{}) + flatten(parsed, nil, flattened) + return flattened +} + +func flatten(parsed gjson.Result, parents []string, flattened map[string]interface{}) { + if parsed.IsObject() { + parsed.ForEach(func(k, v gjson.Result) bool { + flatten(v, append(parents, strings.ReplaceAll(k.String(), ".", "\\.")), flattened) + return true + }) + } else if parsed.IsArray() { + for kk, vv := range parsed.Array() { + flatten(vv, append(parents, strconv.Itoa(kk)), flattened) + } + } else { + flattened[strings.Join(parents, ".")] = parsed.Value() + } +} diff --git a/oryx/jsonx/get.go b/oryx/jsonx/get.go new file mode 100644 index 00000000000..025961d8f3d --- /dev/null +++ b/oryx/jsonx/get.go @@ -0,0 +1,77 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "reflect" + "strings" + + "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" +) + +func jsonKey(f reflect.StructField) *string { + if jsonTag := f.Tag.Get("json"); jsonTag != "" { + if jsonTag == "-" { + return nil + } + return &strings.Split(jsonTag, ",")[0] + } else if f.Anonymous { + return nil + } else if f.IsExported() { + return &f.Name + } + return nil +} + +// AllValidJSONKeys returns all JSON keys from the struct or *struct type. +// It does not return keys from nested slices, but embedded/nested structs. +func AllValidJSONKeys(s interface{}) (keys []string) { + t := reflect.TypeOf(s) + v := reflect.ValueOf(s) + if t.Kind() == reflect.Ptr { + t = t.Elem() + v = v.Elem() + } + for i := range t.NumField() { + f := t.Field(i) + jKey := jsonKey(f) + if k := f.Type.Kind(); k == reflect.Struct || k == reflect.Ptr { + subKeys := AllValidJSONKeys(v.Field(i).Interface()) + for _, subKey := range subKeys { + if jKey != nil { + keys = append(keys, *jKey+"."+subKey) + } else { + keys = append(keys, subKey) + } + } + } else if jKey != nil { + keys = append(keys, *jKey) + } + } + return keys +} + +// ParseEnsureKeys returns a result that has the GetRequireValidKey function. +func ParseEnsureKeys(original interface{}, raw []byte) *Result { + return &Result{ + keys: AllValidJSONKeys(original), + result: gjson.ParseBytes(raw), + } +} + +type Result struct { + result gjson.Result + keys []string +} + +// GetRequireValidKey ensures that the key is valid before returning the result. 
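+// Valid keys are the dot-notation paths produced by AllValidJSONKeys, so a field tagged
+// json:"url" inside a struct field tagged json:"server" is addressed as "server.url"
+// (illustrative); unknown keys fail the test via require.Contains.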
+func (r *Result) GetRequireValidKey(t require.TestingT, key string) gjson.Result { + require.Contains(t, r.keys, key) + return r.result.Get(key) +} + +func GetRequireValidKey(t require.TestingT, original interface{}, raw []byte, key string) gjson.Result { + return ParseEnsureKeys(original, raw).GetRequireValidKey(t, key) +} diff --git a/oryx/jsonx/helpers.go b/oryx/jsonx/helpers.go new file mode 100644 index 00000000000..594e793f99e --- /dev/null +++ b/oryx/jsonx/helpers.go @@ -0,0 +1,22 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestMarshalJSONString(t *testing.T, i interface{}) string { + out, err := json.Marshal(i) + require.NoError(t, err) + return string(out) +} + +// Deprecated: this function does nothing helpful +func TestUnmarshalJSON(t *testing.T, in []byte, i interface{}) { + require.NoError(t, json.Unmarshal(in, i)) +} diff --git a/oryx/jsonx/patch.go b/oryx/jsonx/patch.go new file mode 100644 index 00000000000..f3816c1603e --- /dev/null +++ b/oryx/jsonx/patch.go @@ -0,0 +1,99 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jsonx + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + + jsonpatch "github.com/evanphx/json-patch/v5" + "github.com/gobwas/glob" + "github.com/pkg/errors" + + "github.com/ory/x/pointerx" +) + +var opAllowList = map[string]struct{}{ + "add": {}, + "remove": {}, + "replace": {}, +} + +func isUnsupported(op jsonpatch.Operation) bool { + _, ok := opAllowList[op.Kind()] + + return !ok +} + +func isElementAccess(path string) bool { + if path == "" { + return false + } + elements := strings.Split(path, "/") + lastElement := elements[len(elements)-1:][0] + if lastElement == "-" { + return true + } + if _, err := strconv.Atoi(lastElement); err == nil { + return true + } + + return false +} + +// ApplyJSONPatch applies a JSON patch to an object and returns the modified +// object. The original object is not modified. It returns an error if the patch +// is invalid or if the patch includes paths that are denied. denyPaths is a +// list of path globs (interpreted with [glob.Compile] that are not allowed to +// be patched. +func ApplyJSONPatch[T any](p json.RawMessage, object T, denyPaths ...string) (result T, err error) { + patch, err := jsonpatch.DecodePatch(p) + if err != nil { + return result, errors.WithStack(err) + } + + denyPattern := fmt.Sprintf("{%s}", strings.ToLower(strings.Join(denyPaths, ","))) + matcher, err := glob.Compile(denyPattern, '/') + if err != nil { + return result, errors.WithStack(err) + } + + for _, op := range patch { + // Some operations are buggy, see https://github.com/evanphx/json-patch/pull/158 + if isUnsupported(op) { + return result, errors.Errorf("unsupported operation: %s", op.Kind()) + } + path, err := op.Path() + if err != nil { + return result, errors.Errorf("error parsing patch operations: %v", err) + } + if matcher.Match(strings.ToLower(path)) { + return result, errors.Errorf("patch includes denied path: %s", path) + } + + // JSON patch officially rejects replacing paths that don't exist, but we want to be more tolerant. + // Therefore, we will ensure that all paths that we want to replace exist in the original document. 
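+ // We do that by rewriting every "replace" whose path is not an array element access
+ // (e.g. /foo/0 or /foo/-) into an "add": per RFC 6902 an "add" on an existing object
+ // member replaces its value, and EnsurePathExistsOnAdd (set below) creates missing paths.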
+ if op.Kind() == "replace" && !isElementAccess(path) { + op["op"] = pointerx.Ptr(json.RawMessage(`"add"`)) + } + } + + original, err := json.Marshal(object) + if err != nil { + return result, errors.WithStack(err) + } + + options := jsonpatch.NewApplyOptions() + options.EnsurePathExistsOnAdd = true + + modified, err := patch.ApplyWithOptions(original, options) + if err != nil { + return result, errors.WithStack(err) + } + + err = json.Unmarshal(modified, &result) + return result, errors.WithStack(err) +} diff --git a/oryx/jsonx/stub/random.json b/oryx/jsonx/stub/random.json new file mode 100644 index 00000000000..101ee9dd941 --- /dev/null +++ b/oryx/jsonx/stub/random.json @@ -0,0 +1,64 @@ +{ + "floating": [ + -1273085434, + 953442581, + { + "ready": "silent", + "worker": "situation", + "joy": "difference", + "probably": -413625494, + "gray": { + "parent": "pull", + "shore": -738396277, + "usually": 1050049449, + "hold": [ + [ + 181518765, + [ + { + "steam": { + "box": false, + "cry": 1463961818, + "appropriate": 1249911539, + "through": 695239749, + "ago": true, + "entirely": -851427469 + }, + "leather": "across", + "flies": -1571371799, + "over": 512666854, + "thank": true, + "shaking": true + }, + "hit", + -648178744.4899056, + 1225027271, + -1481507228, + true + ], + -2114582277, + 1390060204.9360588, + 1615602630.9049141, + "darkness" + ], + 63427197.713988304, + -580344963.961421, + "stems", + 1016960217.612642, + 1240918909 + ], + "buy": true, + "wonder": false + }, + "little": "cloud" + }, + "grade", + false, + "thou" + ], + "wagon": -1722583702, + "shop": 1294397217, + "spend": "greatest", + "product": "whale", + "fall": "to" +} diff --git a/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_multiple_source_urls-case=succeeds_with_forced_kid.json b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_multiple_source_urls-case=succeeds_with_forced_kid.json new file mode 100644 index 00000000000..ecb9a86abc0 --- /dev/null +++ b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_multiple_source_urls-case=succeeds_with_forced_kid.json @@ -0,0 +1,7 @@ +{ + "alg": "HS256", + "k": "Y2hhbmdlbWVjaGFuZ2VtZWNoYW5nZW1lY2hhbmdlbWU", + "kid": "8d5f5ad0674ec2f2960b1a34f33370a0f71471fa0e3ef0c0a692977d276dafe8", + "kty": "oct", + "use": "sig" +} diff --git a/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_cache.json b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_cache.json new file mode 100644 index 00000000000..f81e76cc303 --- /dev/null +++ b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_cache.json @@ -0,0 +1,7 @@ +{ + "alg": "HS256", + "k": "Y2hhbmdlbWVjaGFuZ2VtZWNoYW5nZW1lY2hhbmdlbWU", + "kid": "7d5f5ad0674ec2f2960b1a34f33370a0f71471fa0e3ef0c0a692977d276dafe8", + "kty": "oct", + "use": "sig" +} diff --git a/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_cache_and_TTL.json b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_cache_and_TTL.json new file mode 100644 index 00000000000..f81e76cc303 --- /dev/null +++ b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_cache_and_TTL.json @@ -0,0 +1,7 @@ +{ + "alg": "HS256", + "k": "Y2hhbmdlbWVjaGFuZ2VtZWNoYW5nZW1lY2hhbmdlbWU", + "kid": "7d5f5ad0674ec2f2960b1a34f33370a0f71471fa0e3ef0c0a692977d276dafe8", + "kty": "oct", + "use": "sig" +} diff --git a/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_forced_key.json 
b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_forced_key.json new file mode 100644 index 00000000000..f81e76cc303 --- /dev/null +++ b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=with_forced_key.json @@ -0,0 +1,7 @@ +{ + "alg": "HS256", + "k": "Y2hhbmdlbWVjaGFuZ2VtZWNoYW5nZW1lY2hhbmdlbWU", + "kid": "7d5f5ad0674ec2f2960b1a34f33370a0f71471fa0e3ef0c0a692977d276dafe8", + "kty": "oct", + "use": "sig" +} diff --git a/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=without_cache.json b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=without_cache.json new file mode 100644 index 00000000000..f81e76cc303 --- /dev/null +++ b/oryx/jwksx/.snapshots/TestFetcherNext-case=resolve_single_source_url-case=without_cache.json @@ -0,0 +1,7 @@ +{ + "alg": "HS256", + "k": "Y2hhbmdlbWVjaGFuZ2VtZWNoYW5nZW1lY2hhbmdlbWU", + "kid": "7d5f5ad0674ec2f2960b1a34f33370a0f71471fa0e3ef0c0a692977d276dafe8", + "kty": "oct", + "use": "sig" +} diff --git a/oryx/jwksx/fetcher.go b/oryx/jwksx/fetcher.go new file mode 100644 index 00000000000..4b476a30e0b --- /dev/null +++ b/oryx/jwksx/fetcher.go @@ -0,0 +1,74 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwksx + +import ( + "encoding/json" + "net/http" + "sync" + + "github.com/go-jose/go-jose/v3" + "github.com/pkg/errors" +) + +// Fetcher is a small helper for fetching JSON Web Keys from remote endpoints. +// +// DEPRECATED: Use FetcherNext instead. +type Fetcher struct { + sync.RWMutex + remote string + c *http.Client + keys map[string]jose.JSONWebKey +} + +// NewFetcher returns a new fetcher that can download JSON Web Keys from remote endpoints. +// +// DEPRECATED: Use FetcherNext instead. +func NewFetcher(remote string) *Fetcher { + return &Fetcher{ + remote: remote, + c: http.DefaultClient, + keys: make(map[string]jose.JSONWebKey), + } +} + +// GetKey retrieves a JSON Web Key from the cache, fetches it from a remote if it is not yet cached or returns an error. +// +// DEPRECATED: Use FetcherNext instead. 
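+// When the requested kid is not cached, the complete JWK set is downloaded from the
+// remote endpoint, every contained key is cached, and the lookup is retried once.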
+func (f *Fetcher) GetKey(kid string) (*jose.JSONWebKey, error) { + f.RLock() + if k, ok := f.keys[kid]; ok { + f.RUnlock() + return &k, nil + } + f.RUnlock() + + res, err := f.c.Get(f.remote) + if err != nil { + return nil, errors.WithStack(err) + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, errors.Errorf("expected status code 200 but got %d when requesting %s", res.StatusCode, f.remote) + } + + var set jose.JSONWebKeySet + if err := json.NewDecoder(res.Body).Decode(&set); err != nil { + return nil, errors.WithStack(err) + } + + for _, k := range set.Keys { + f.Lock() + f.keys[k.KeyID] = k + f.Unlock() + } + + f.RLock() + defer f.RUnlock() + if k, ok := f.keys[kid]; ok { + return &k, nil + } + + return nil, errors.Errorf("unable to find JSON Web Key with ID: %s", kid) +} diff --git a/oryx/jwksx/fetcher_v2.go b/oryx/jwksx/fetcher_v2.go new file mode 100644 index 00000000000..bb382e2e2a6 --- /dev/null +++ b/oryx/jwksx/fetcher_v2.go @@ -0,0 +1,169 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwksx + +import ( + "context" + "crypto/sha256" + "time" + + "github.com/ory/herodot" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/ory/x/fetcher" + + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/otelx" + + "github.com/dgraph-io/ristretto/v2" + "github.com/lestrrat-go/jwx/jwk" + "github.com/pkg/errors" + "golang.org/x/sync/errgroup" +) + +var ErrUnableToFindKeyID = errors.New("specified JWK kid can not be found in the JWK sets") + +type ( + fetcherNextOptions struct { + forceKID string + cacheTTL time.Duration + useCache bool + httpClient *retryablehttp.Client + } + // FetcherNext is a JWK fetcher that can be used to fetch JWKs from multiple locations. + FetcherNext struct { + cache *ristretto.Cache[[]byte, jwk.Set] + } + // FetcherNextOption is a functional option for the FetcherNext. + FetcherNextOption func(*fetcherNextOptions) +) + +// NewFetcherNext returns a new FetcherNext instance. +func NewFetcherNext(cache *ristretto.Cache[[]byte, jwk.Set]) *FetcherNext { + return &FetcherNext{ + cache: cache, + } +} + +// WithForceKID forces the key ID to be used. Required when multiple JWK sets are configured. +func WithForceKID(kid string) FetcherNextOption { + return func(o *fetcherNextOptions) { + o.forceKID = kid + } +} + +// WithCacheTTL sets the cache TTL. If not set, the TTL is unlimited. +func WithCacheTTL(ttl time.Duration) FetcherNextOption { + return func(o *fetcherNextOptions) { + o.cacheTTL = ttl + } +} + +// WithCacheEnabled enables the cache. +func WithCacheEnabled() FetcherNextOption { + return func(o *fetcherNextOptions) { + o.useCache = true + } +} + +// WithHTTPClient will use the given HTTP client to fetch the JSON Web Keys. +func WithHTTPClient(c *retryablehttp.Client) FetcherNextOption { + return func(o *fetcherNextOptions) { + o.httpClient = c + } +} + +func (f *FetcherNext) ResolveKey(ctx context.Context, locations string, modifiers ...FetcherNextOption) (jwk.Key, error) { + return f.ResolveKeyFromLocations(ctx, []string{locations}, modifiers...) 
+} + +func (f *FetcherNext) ResolveKeyFromLocations(ctx context.Context, locations []string, modifiers ...FetcherNextOption) (jwk.Key, error) { + opts := new(fetcherNextOptions) + for _, m := range modifiers { + m(opts) + } + + if len(locations) > 1 && opts.forceKID == "" { + return nil, errors.Errorf("a key ID must be specified when multiple JWK sets are configured") + } + + set := jwk.NewSet() + eg := new(errgroup.Group) + for k := range locations { + location := locations[k] + eg.Go(func() error { + remoteSet, err := f.fetch(ctx, location, opts) + if err != nil { + return err + } + + iterator := remoteSet.Iterate(ctx) + for iterator.Next(ctx) { + // Pair().Value is always of type jwk.Key when generated by Iterate. + set.Add(iterator.Pair().Value.(jwk.Key)) + } + + return nil + }) + } + + if err := eg.Wait(); err != nil { + return nil, err + } + + if opts.forceKID != "" { + key, found := set.LookupKeyID(opts.forceKID) + if !found { + return nil, errors.WithStack(ErrUnableToFindKeyID) + } + + return key, nil + } + + // No KID was forced? Use the first key we can find. + key, found := set.Get(0) + if !found { + return nil, errors.WithStack(ErrUnableToFindKeyID) + } + + return key, nil +} + +// fetch fetches the JWK set from the given location and if enabled, may use the cache to look up the JWK set. +func (f *FetcherNext) fetch(ctx context.Context, location string, opts *fetcherNextOptions) (_ jwk.Set, err error) { + tracer := trace.SpanFromContext(ctx).TracerProvider().Tracer("") + ctx, span := tracer.Start(ctx, "jwksx.FetcherNext.fetch", trace.WithAttributes(attribute.String("location", location))) + defer otelx.End(span, &err) + + cacheKey := sha256.Sum256([]byte(location)) + if opts.useCache { + if result, found := f.cache.Get(cacheKey[:]); found { + return result, nil + } + } + + var fopts []fetcher.Modifier + if opts.httpClient != nil { + fopts = append(fopts, fetcher.WithClient(opts.httpClient)) + } + + result, err := fetcher.NewFetcher(fopts...).FetchContext(ctx, location) + if err != nil { + return nil, err + } + + set, err := jwk.ParseReader(result) + if err != nil { + return nil, errors.WithStack(herodot.ErrBadRequest.WithReason("failed to parse JWK set").WithWrap(err)) + } + + if opts.useCache { + f.cache.SetWithTTL(cacheKey[:], set, 1, opts.cacheTTL) + } + + return set, nil +} diff --git a/oryx/jwksx/generator.go b/oryx/jwksx/generator.go new file mode 100644 index 00000000000..7fabb1176d1 --- /dev/null +++ b/oryx/jwksx/generator.go @@ -0,0 +1,129 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwksx + +import ( + "crypto" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "io" + + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" + "github.com/pkg/errors" + "golang.org/x/crypto/ed25519" +) + +// GenerateSigningKeys generates a JSON Web Key Set for signing. +func GenerateSigningKeys(id, alg string, bits int) (*jose.JSONWebKeySet, error) { + if id == "" { + id = uuid.Must(uuid.NewV4()).String() + } + + key, err := generate(jose.SignatureAlgorithm(alg), bits) + if err != nil { + return nil, err + } + + return &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + { + Algorithm: alg, + Use: "sig", + Key: key, + KeyID: id, + Certificates: []*x509.Certificate{}, + }, + }, + }, nil +} + +// GenerateSigningKeysAvailableAlgorithms lists available algorithms that are supported by GenerateSigningKeys. 
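+// Each entry can be passed as the alg argument of GenerateSigningKeys; for example
+// (illustrative), GenerateSigningKeys("", "ES256", 0) yields a key set with a single
+// P-256 signing key and a random UUID as its key ID.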
+func GenerateSigningKeysAvailableAlgorithms() []string { + return []string{ + string(jose.HS256), string(jose.HS384), string(jose.HS512), + string(jose.ES256), string(jose.ES384), string(jose.ES512), string(jose.EdDSA), + string(jose.RS256), string(jose.RS384), string(jose.RS512), string(jose.PS256), string(jose.PS384), string(jose.PS512), + } +} + +// generate generates keypair for corresponding SignatureAlgorithm. +func generate(alg jose.SignatureAlgorithm, bits int) (crypto.PrivateKey, error) { + switch alg { + case jose.ES256, jose.ES384, jose.ES512, jose.EdDSA: + keylen := map[jose.SignatureAlgorithm]int{ + jose.ES256: 256, + jose.ES384: 384, + jose.ES512: 521, // sic! + jose.EdDSA: 256, + } + if bits != 0 && bits != keylen[alg] { + return nil, errors.Errorf(`jwksx: "%s" does not support arbitrary key length`, alg) + } + case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512: + if bits == 0 { + bits = 2048 + } + if bits < 2048 { + return nil, errors.Errorf(`jwksx: key size must be at least 2048 bit for algorithm "%s"`, alg) + } + case jose.HS256: + if bits == 0 { + bits = 256 + } + if bits < 256 { + return nil, errors.Errorf(`jwksx: key size must be at least 256 bit for algorithm "%s"`, alg) + } + case jose.HS384: + if bits == 0 { + bits = 384 + } + if bits < 384 { + return nil, errors.Errorf(`jwksx: key size must be at least 384 bit for algorithm "%s"`, alg) + } + case jose.HS512: + if bits == 0 { + bits = 1024 + } + if bits < 512 { + return nil, errors.Errorf(`jwksx: key size must be at least 512 bit for algorithm "%s"`, alg) + } + } + + switch alg { + case jose.ES256: + // The cryptographic operations are implemented using constant-time algorithms. + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + return key, errors.Wrapf(err, "jwks: unable to generate key") + case jose.ES384: + // NB: The cryptographic operations do not use constant-time algorithms. + key, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader) + return key, errors.Wrapf(err, "jwks: unable to generate key") + case jose.ES512: + // NB: The cryptographic operations do not use constant-time algorithms.
+ key, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + return key, errors.Wrapf(err, "jwks: unable to generate key") + case jose.EdDSA: + _, key, err := ed25519.GenerateKey(rand.Reader) + return key, errors.Wrapf(err, "jwks: unable to generate key") + case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512: + key, err := rsa.GenerateKey(rand.Reader, bits) + return key, errors.Wrapf(err, "jwks: unable to generate key") + case jose.HS256, jose.HS384, jose.HS512: + if bits%8 != 0 { + return nil, errors.Errorf(`jwksx: key size must be a multiple of 8 for algorithm "%s" but got: %d`, alg, bits) + } + + key := make([]byte, bits/8) + if _, err := io.ReadFull(rand.Reader, key); err != nil { + return nil, errors.Wrapf(err, "jwks: unable to generate key") + } + return key, nil + default: + return nil, errors.Errorf(`jwksx: available algorithms are "%+v" but unknown algorithm was requested: "%s"`, GenerateSigningKeysAvailableAlgorithms(), alg) + } +} diff --git a/oryx/jwtmiddleware/middleware.go b/oryx/jwtmiddleware/middleware.go new file mode 100644 index 00000000000..fb56594d62f --- /dev/null +++ b/oryx/jwtmiddleware/middleware.go @@ -0,0 +1,158 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwtmiddleware + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + + "github.com/golang-jwt/jwt/v5" + "github.com/pkg/errors" + + "github.com/ory/herodot" + + jwtmiddleware "github.com/auth0/go-jwt-middleware/v2" + "github.com/urfave/negroni" + + "github.com/ory/x/jwksx" +) + +// Deprecated: use jwtmiddleware.ContextKey{} instead. +var SessionContextKey = jwtmiddleware.ContextKey{} + +type Middleware struct { + o *middlewareOptions + wku string + jm *jwtmiddleware.JWTMiddleware +} + +type middlewareOptions struct { + Debug bool + ExcludePaths []string + SigningMethod jwt.SigningMethod + ErrorWriter herodot.Writer +} + +type MiddlewareOption func(*middlewareOptions) + +func SessionFromContext(ctx context.Context) (json.RawMessage, error) { + raw := ctx.Value(jwtmiddleware.ContextKey{}) + if raw == nil { + return nil, errors.WithStack(herodot.ErrUnauthorized.WithReasonf("Could not find credentials in the request.")) + } + + token, ok := raw.(*jwt.Token) + if !ok { + return nil, errors.WithStack(herodot.ErrInternalServerError.WithDebugf(`Expected context key "%s" to transport value of type *jwt.MapClaims but got type: %T`, SessionContextKey, raw)) + } + + session, err := json.Marshal(token.Claims) + if err != nil { + return nil, errors.WithStack(herodot.ErrInternalServerError.WithDebugf("Unable to encode session data: %s", err)) + } + + return session, nil +} + +func MiddlewareDebugEnabled() MiddlewareOption { + return func(o *middlewareOptions) { + o.Debug = true + } +} + +func MiddlewareExcludePaths(paths ...string) MiddlewareOption { + return func(o *middlewareOptions) { + o.ExcludePaths = append(o.ExcludePaths, paths...) 
+ } +} + +func MiddlewareAllowSigningMethod(method jwt.SigningMethod) MiddlewareOption { + return func(o *middlewareOptions) { + o.SigningMethod = method + } +} + +func MiddlewareErrorWriter(w herodot.Writer) MiddlewareOption { + return func(o *middlewareOptions) { + o.ErrorWriter = w + } +} + +func NewMiddleware( + wellKnownURL string, + opts ...MiddlewareOption, +) *Middleware { + c := &middlewareOptions{ + SigningMethod: jwt.SigningMethodES256, + ErrorWriter: herodot.NewJSONWriter(nil), + } + + for _, o := range opts { + o(c) + } + jc := jwksx.NewFetcher(wellKnownURL) + return &Middleware{ + o: c, + wku: wellKnownURL, + jm: jwtmiddleware.New( + func(ctx context.Context, rawToken string) (any, error) { + return jwt.NewParser( + jwt.WithValidMethods([]string{c.SigningMethod.Alg()}), + ).Parse(rawToken, func(token *jwt.Token) (interface{}, error) { + if raw, ok := token.Header["kid"]; !ok { + return nil, errors.New(`jwt from authorization HTTP header is missing value for "kid" in token header`) + } else if kid, ok := raw.(string); !ok { + return nil, fmt.Errorf(`jwt from authorization HTTP header is expecting string value for "kid" in tokenWithoutKid header but got: %T`, raw) + } else if k, err := jc.GetKey(kid); err != nil { + return nil, err + } else { + return k.Key, nil + } + }) + }, + jwtmiddleware.WithCredentialsOptional(false), + jwtmiddleware.WithTokenExtractor(func(r *http.Request) (string, error) { + // wrapping the extractor to get a herodot.ErrorContainer + token, err := jwtmiddleware.AuthHeaderTokenExtractor(r) + if err != nil { + return "", herodot.ErrUnauthorized.WithReason(err.Error()) + } + return token, nil + }), + jwtmiddleware.WithErrorHandler(func(w http.ResponseWriter, r *http.Request, err error) { + switch { + case errors.Is(err, jwtmiddleware.ErrJWTInvalid): + reason := "The token is invalid or expired." + if err := errors.Unwrap(err); err != nil { + reason = err.Error() + } + c.ErrorWriter.WriteError(w, r, errors.WithStack(herodot.ErrUnauthorized.WithReason(reason))) + case errors.Is(err, jwtmiddleware.ErrJWTMissing): + c.ErrorWriter.WriteError(w, r, errors.WithStack(herodot.ErrUnauthorized.WithReason("The token is missing."))) + default: + c.ErrorWriter.WriteError(w, r, err) + } + }), + ), + } +} + +// Deprecated: use Middleware as a negroni.Handler directly instead. 
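+// A *Middleware already satisfies negroni.Handler through its ServeHTTP method and can
+// be registered with negroni's Use or With directly.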
+func (h *Middleware) NegroniHandler() negroni.Handler { + return negroni.HandlerFunc(h.ServeHTTP) +} + +func (h *Middleware) ServeHTTP(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + for _, excluded := range h.o.ExcludePaths { + if strings.HasPrefix(r.URL.Path, excluded) { + next(w, r) + return + } + } + + h.jm.CheckJWT(next).ServeHTTP(w, r) +} diff --git a/oryx/jwtmiddleware/stub/jwks.json b/oryx/jwtmiddleware/stub/jwks.json new file mode 100644 index 00000000000..57d130c401d --- /dev/null +++ b/oryx/jwtmiddleware/stub/jwks.json @@ -0,0 +1,10 @@ +{ + "use": "sig", + "kty": "EC", + "kid": "b71ff5bd-a016-4ac0-9f3f-a172552578ea", + "crv": "P-256", + "alg": "ES256", + "x": "7fVj_SeCx3TnkHANRWrpEho9BcYkU953LHUvKsSF5Wo", + "y": "2A9D_AAFPiJQLSJQ_h600Fy9jUrg9Q88gNPPZwHDb7o", + "d": "sRl-e-tGEVsNBF8FgEado9NAEipxhAFXGMryWDgbUMo" +} diff --git a/oryx/jwtx/claims.go b/oryx/jwtx/claims.go new file mode 100644 index 00000000000..7bbbca9cb0f --- /dev/null +++ b/oryx/jwtx/claims.go @@ -0,0 +1,80 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package jwtx + +import ( + "time" + + "github.com/pkg/errors" + + "github.com/ory/x/mapx" +) + +// Claims represents a JSON Web Token's standard claims. +type Claims struct { + // Audience identifies the recipients that the JWT is intended for. + Audience []string `json:"aud"` + + // Issuer identifies the principal that issued the JWT. + Issuer string `json:"iss"` + + // Subject identifies the principal that is the subject of the JWT. + Subject string `json:"sub"` + + // ExpiresAt identifies the expiration time on or after which the JWT must not be accepted for processing. + ExpiresAt time.Time `json:"exp"` + + // IssuedAt identifies the time at which the JWT was issued. + IssuedAt time.Time `json:"iat"` + + // NotBefore identifies the time before which the JWT must not be accepted for processing. + NotBefore time.Time `json:"nbf"` + + // JTI provides a unique identifier for the JWT. + JTI string `json:"jti"` +} + +// ParseMapStringInterfaceClaims converts map[string]interface{} to *Claims. +func ParseMapStringInterfaceClaims(claims map[string]interface{}) *Claims { + c := make(map[interface{}]interface{}) + for k, v := range claims { + c[k] = v + } + return ParseMapInterfaceInterfaceClaims(c) +} + +// ParseMapInterfaceInterfaceClaims converts map[interface{}]interface{} to *Claims.
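+// The "aud" claim may be a plain string or a list of strings; both forms are normalized
+// to a []string. Missing or unparsable time claims are left as zero time.Time values.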
+func ParseMapInterfaceInterfaceClaims(claims map[interface{}]interface{}) *Claims { + result := &Claims{ + Issuer: mapx.GetStringDefault(claims, "iss", ""), + Subject: mapx.GetStringDefault(claims, "sub", ""), + JTI: mapx.GetStringDefault(claims, "jti", ""), + } + + if aud, err := mapx.GetString(claims, "aud"); err == nil { + result.Audience = []string{aud} + } else if errors.Is(err, mapx.ErrKeyCanNotBeTypeAsserted) { + if aud, err := mapx.GetStringSlice(claims, "aud"); err == nil { + result.Audience = aud + } else { + result.Audience = []string{} + } + } else { + result.Audience = []string{} + } + + if exp, err := mapx.GetTime(claims, "exp"); err == nil { + result.ExpiresAt = exp + } + + if iat, err := mapx.GetTime(claims, "iat"); err == nil { + result.IssuedAt = iat + } + + if nbf, err := mapx.GetTime(claims, "nbf"); err == nil { + result.NotBefore = nbf + } + + return result +} diff --git a/oryx/logrusx/config.schema.json b/oryx/logrusx/config.schema.json new file mode 100644 index 00000000000..568ea9063db --- /dev/null +++ b/oryx/logrusx/config.schema.json @@ -0,0 +1,43 @@ +{ + "$id": "ory://logging-config", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Log", + "description": "Configure logging using the following options. Logs will always be sent to stdout and stderr.", + "type": "object", + "properties": { + "level": { + "title": "Level", + "description": "The level of log entries to show. Debug enables stack traces on errors.", + "type": "string", + "default": "info", + "enum": ["panic", "fatal", "error", "warn", "info", "debug", "trace"] + }, + "format": { + "title": "Log Format", + "description": "The output format of log messages.", + "type": "string", + "default": "text", + "enum": ["json", "json_pretty", "gelf", "text"] + }, + "leak_sensitive_values": { + "type": "boolean", + "title": "Leak Sensitive Log Values", + "description": "If set will leak sensitive values (e.g. emails) in the logs.", + "default": false + }, + "redaction_text": { + "type": "string", + "title": "Sensitive log value redaction text", + "description": "Text to use, when redacting sensitive log value." 
+ }, + "additional_redacted_headers": { + "type": "array", + "title": "Additional redacted headers", + "description": "List of HTTP headers which will be redacted.", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false +} diff --git a/oryx/logrusx/helper.go b/oryx/logrusx/helper.go new file mode 100644 index 00000000000..39d75aa0b7c --- /dev/null +++ b/oryx/logrusx/helper.go @@ -0,0 +1,262 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package logrusx + +import ( + "context" + "errors" + "fmt" + "log" + "net/http" + "net/url" + "reflect" + "strings" + + "github.com/sirupsen/logrus" + + "go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace" + "go.opentelemetry.io/otel/propagation" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/x/errorsx" +) + +type ( + Logger struct { + *logrus.Entry + leakSensitive bool + redactionText string + additionalRedactedHeaders map[string]struct{} + opts []Option + } + Provider interface { + Logger() *Logger + } +) + +var opts = otelhttptrace.WithPropagators(propagation.NewCompositeTextMapPropagator(propagation.TraceContext{}, propagation.Baggage{})) + +func (l *Logger) Logrus() *logrus.Logger { + return l.Entry.Logger +} + +func (l *Logger) NewEntry() *Logger { + ll := *l + ll.Entry = logrus.NewEntry(l.Logger) + return &ll +} + +func (l *Logger) WithContext(ctx context.Context) *Logger { + ll := *l + ll.Entry = l.Logger.WithContext(ctx) + return &ll +} + +func (l *Logger) HTTPHeadersRedacted(h http.Header) map[string]interface{} { + headers := map[string]interface{}{} + + for key, value := range h { + switch keyLower := strings.ToLower(key); keyLower { + case "authorization", "cookie", "set-cookie", "x-session-token": + headers[keyLower] = l.maybeRedact(value) + case "location": + locationURL, err := url.Parse(h.Get("Location")) + if err != nil { + headers[keyLower] = l.maybeRedact(value) + continue + } + if l.leakSensitive { + headers[keyLower] = locationURL.String() + } else { + locationURL.RawQuery = "" + locationURL.Fragment = "" + headers[keyLower] = locationURL.Redacted() + } + default: + if _, ok := l.additionalRedactedHeaders[keyLower]; ok { + headers[keyLower] = l.maybeRedact(value) + continue + } + headers[keyLower] = h.Get(key) + } + } + + return headers +} + +func (l *Logger) WithRequest(r *http.Request) *Logger { + headers := l.HTTPHeadersRedacted(r.Header) + if ua := r.UserAgent(); len(ua) > 0 { + headers["user-agent"] = ua + } + + scheme := "https" + if r.TLS == nil { + scheme = "http" + } + + ll := l.WithField("http_request", map[string]interface{}{ + "remote": r.RemoteAddr, + "method": r.Method, + "path": r.URL.EscapedPath(), + "query": l.maybeRedact(r.URL.RawQuery), + "scheme": scheme, + "host": r.Host, + "headers": headers, + }) + + spanCtx := trace.SpanContextFromContext(r.Context()) + if !spanCtx.IsValid() { + _, _, spanCtx = otelhttptrace.Extract(r.Context(), r, opts) + } + if spanCtx.IsValid() { + traces := make(map[string]string, 2) + if spanCtx.HasTraceID() { + traces["trace_id"] = spanCtx.TraceID().String() + } + if spanCtx.HasSpanID() { + traces["span_id"] = spanCtx.SpanID().String() + } + ll = ll.WithField("otel", traces) + } + return ll +} + +func (l *Logger) WithSpanFromContext(ctx context.Context) *Logger { + spanCtx := trace.SpanContextFromContext(ctx) + if !spanCtx.IsValid() { + return l + } + + traces := make(map[string]string, 2) + if spanCtx.HasTraceID() { + traces["trace_id"] = spanCtx.TraceID().String() + } + if 
spanCtx.HasSpanID() { + traces["span_id"] = spanCtx.SpanID().String() + } + return l.WithField("otel", traces) +} + +func (l *Logger) Logf(level logrus.Level, format string, args ...interface{}) { + if !l.leakSensitive { + for i, arg := range args { + switch urlArg := arg.(type) { + case url.URL: + urlCopy := url.URL{Scheme: urlArg.Scheme, Host: urlArg.Host, Path: urlArg.Path} + args[i] = urlCopy + case *url.URL: + urlCopy := url.URL{Scheme: urlArg.Scheme, Host: urlArg.Host, Path: urlArg.Path} + args[i] = &urlCopy + default: + continue + } + } + } + l.Entry.Logf(level, format, args...) +} + +func (l *Logger) Tracef(format string, args ...interface{}) { + l.Logf(logrus.TraceLevel, format, args...) +} + +func (l *Logger) Debugf(format string, args ...interface{}) { + l.Logf(logrus.DebugLevel, format, args...) +} + +func (l *Logger) Infof(format string, args ...interface{}) { + l.Logf(logrus.InfoLevel, format, args...) +} + +func (l *Logger) Warnf(format string, args ...interface{}) { + l.Logf(logrus.WarnLevel, format, args...) +} + +func (l *Logger) Errorf(format string, args ...interface{}) { + l.Logf(logrus.ErrorLevel, format, args...) +} + +func (l *Logger) Fatalf(format string, args ...interface{}) { + l.Logf(logrus.FatalLevel, format, args...) + l.Entry.Logger.Exit(1) +} + +func (l *Logger) Panicf(format string, args ...interface{}) { + l.Logf(logrus.PanicLevel, format, args...) +} + +func (l *Logger) WithFields(f logrus.Fields) *Logger { + ll := *l + ll.Entry = l.Entry.WithFields(f) + return &ll +} + +func (l *Logger) WithField(key string, value interface{}) *Logger { + ll := *l + ll.Entry = l.Entry.WithField(key, value) + return &ll +} + +func (l *Logger) maybeRedact(value interface{}) interface{} { + if fmt.Sprintf("%v", value) == "" || value == nil { + return nil + } + if !l.leakSensitive { + return l.redactionText + } + return value +} + +func (l *Logger) WithSensitiveField(key string, value interface{}) *Logger { + return l.WithField(key, l.maybeRedact(value)) +} + +func (l *Logger) WithError(err error) *Logger { + if err == nil { + return l + } + + ctx := map[string]interface{}{"message": err.Error()} + if l.Entry.Logger.IsLevelEnabled(logrus.DebugLevel) { + if e, ok := err.(errorsx.StackTracer); ok { + ctx["stack_trace"] = fmt.Sprintf("%+v", e.StackTrace()) + } else { + ctx["stack_trace"] = fmt.Sprintf("stack trace could not be recovered from error type %s", reflect.TypeOf(err)) + } + } + if c := errorsx.ReasonCarrier(nil); errors.As(err, &c) { + ctx["reason"] = c.Reason() + } + if c := errorsx.RequestIDCarrier(nil); errors.As(err, &c) && c.RequestID() != "" { + ctx["request_id"] = c.RequestID() + } + if c := errorsx.DetailsCarrier(nil); errors.As(err, &c) && c.Details() != nil { + ctx["details"] = c.Details() + } + if c := errorsx.StatusCarrier(nil); errors.As(err, &c) && c.Status() != "" { + ctx["status"] = c.Status() + } + if c := errorsx.StatusCodeCarrier(nil); errors.As(err, &c) && c.StatusCode() != 0 { + ctx["status_code"] = c.StatusCode() + } + if c := errorsx.DebugCarrier(nil); errors.As(err, &c) { + ctx["debug"] = c.Debug() + } + + return l.WithField("error", ctx) +} + +func (l *Logger) StdLogger(lvl logrus.Level) *log.Logger { + return log.New(writer{l.Logger, lvl}, "", 0) +} + +type writer struct { + l *logrus.Logger + lvl logrus.Level +} + +func (w writer) Write(p []byte) (n int, err error) { + w.l.Log(w.lvl, strings.TrimSuffix(string(p), "\n")) + return len(p), nil +} diff --git a/oryx/logrusx/logrus.go b/oryx/logrusx/logrus.go new file mode 100644 index 
00000000000..40adfa96be7 --- /dev/null +++ b/oryx/logrusx/logrus.go @@ -0,0 +1,272 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package logrusx + +import ( + "bytes" + "cmp" + _ "embed" + "io" + "net/http" + "os" + "strings" + "testing" + "time" + + "github.com/sirupsen/logrus" + + gelf "github.com/seatgeek/logrus-gelf-formatter" + + "github.com/ory/x/stringsx" +) + +type ( + options struct { + l *logrus.Logger + level *logrus.Level + formatter logrus.Formatter + format string + reportCaller bool + exitFunc func(int) + leakSensitive bool + redactionText string + additionalRedactedHeaders []string + hooks []logrus.Hook + c configurator + } + Option func(*options) + nullConfigurator struct{} + configurator interface { + Bool(key string) bool + String(key string) string + Strings(path string) []string + } +) + +//go:embed config.schema.json +var ConfigSchema string + +const ConfigSchemaID = "ory://logging-config" + +// AddConfigSchema adds the logging schema to the compiler. +// The interface is specified instead of `jsonschema.Compiler` to allow the use of any jsonschema library fork or version. +func AddConfigSchema(c interface { + AddResource(url string, r io.Reader) error +}, +) error { + return c.AddResource(ConfigSchemaID, bytes.NewBufferString(ConfigSchema)) +} + +func newLogger(parent *logrus.Logger, o *options) *logrus.Logger { + l := parent + if l == nil { + l = logrus.New() + } + + if o.exitFunc != nil { + l.ExitFunc = o.exitFunc + } + + for _, hook := range o.hooks { + l.AddHook(hook) + } + + setLevel(l, o) + setFormatter(l, o) + + l.ReportCaller = o.reportCaller || l.IsLevelEnabled(logrus.TraceLevel) + return l +} + +func setLevel(l *logrus.Logger, o *options) { + if o.level != nil { + l.Level = *o.level + } else { + var err error + l.Level, err = logrus.ParseLevel(cmp.Or( + o.c.String("log.level"), + os.Getenv("LOG_LEVEL"))) + if err != nil { + l.Level = logrus.InfoLevel + } + } +} + +func setFormatter(l *logrus.Logger, o *options) { + if o.formatter != nil { + l.Formatter = o.formatter + } else { + var unknownFormat bool // we first have to set the formatter before we can complain about the unknown format + + format := stringsx.SwitchExact(cmp.Or(o.format, o.c.String("log.format"), os.Getenv("LOG_FORMAT"))) + switch { + case format.AddCase("json"): + l.Formatter = &logrus.JSONFormatter{PrettyPrint: false, TimestampFormat: time.RFC3339Nano, DisableHTMLEscape: true} + case format.AddCase("json_pretty"): + l.Formatter = &logrus.JSONFormatter{PrettyPrint: true, TimestampFormat: time.RFC3339Nano, DisableHTMLEscape: true} + case format.AddCase("gelf"): + l.Formatter = new(gelf.GelfFormatter) + default: + unknownFormat = true + fallthrough + case format.AddCase("text", ""): + l.Formatter = &logrus.TextFormatter{ + DisableQuote: true, + DisableTimestamp: false, + FullTimestamp: true, + } + } + + if unknownFormat { + l.WithError(format.ToUnknownCaseErr()).Warn("got unknown \"log.format\", falling back to \"text\"") + } + } +} + +func ForceLevel(level logrus.Level) Option { + return func(o *options) { + o.level = &level + } +} + +func ForceFormatter(formatter logrus.Formatter) Option { + return func(o *options) { + o.formatter = formatter + } +} + +func WithConfigurator(c configurator) Option { + return func(o *options) { + o.c = c + } +} + +func ForceFormat(format string) Option { + return func(o *options) { + o.format = format + } +} + +func WithHook(hook logrus.Hook) Option { + return func(o *options) { + o.hooks 
= append(o.hooks, hook) + } +} + +func WithExitFunc(exitFunc func(int)) Option { + return func(o *options) { + o.exitFunc = exitFunc + } +} + +func ReportCaller(reportCaller bool) Option { + return func(o *options) { + o.reportCaller = reportCaller + } +} + +func UseLogger(l *logrus.Logger) Option { + return func(o *options) { + o.l = l + } +} + +func LeakSensitive() Option { + return func(o *options) { + o.leakSensitive = true + } +} + +func RedactionText(text string) Option { + return func(o *options) { + o.redactionText = text + } +} + +func WithAdditionalRedactedHeaders(headers []string) Option { + return func(o *options) { + o.additionalRedactedHeaders = headers + } +} + +func toHeaderMap(headers []string) map[string]struct{} { + m := make(map[string]struct{}, len(headers)) + for _, h := range headers { + m[strings.ToLower(h)] = struct{}{} + } + return m +} + +func (c *nullConfigurator) Bool(_ string) bool { + return false +} + +func (c *nullConfigurator) String(_ string) string { + return "" +} + +func (c *nullConfigurator) Strings(_ string) []string { + return []string{} +} + +func newOptions(opts []Option) *options { + o := new(options) + o.c = new(nullConfigurator) + for _, f := range opts { + f(o) + } + return o +} + +// New creates a new logger with all the important fields set. +func New(name string, version string, opts ...Option) *Logger { + o := newOptions(opts) + return &Logger{ + opts: opts, + leakSensitive: o.leakSensitive || o.c.Bool("log.leak_sensitive_values"), + redactionText: cmp.Or(o.redactionText, `Value is sensitive and has been redacted. To see the value set config key "log.leak_sensitive_values = true" or environment variable "LOG_LEAK_SENSITIVE_VALUES=true".`), + additionalRedactedHeaders: toHeaderMap(func() []string { + if len(o.additionalRedactedHeaders) > 0 { + return o.additionalRedactedHeaders + } + return o.c.Strings("log.additional_redacted_headers") + }()), + Entry: newLogger(o.l, o).WithFields(logrus.Fields{ + "audience": "application", "service_name": name, "service_version": version, + }), + } +} + +func NewT(t testing.TB, opts ...Option) *Logger { + opts = append(opts, LeakSensitive(), WithExitFunc(func(code int) { + t.Fatalf("Logger exited with code %d", code) + })) + l := New(t.Name(), "test", opts...) + l.Logger.Out = t.Output() + return l +} + +func (l *Logger) UseConfig(c configurator) { + l.leakSensitive = l.leakSensitive || c.Bool("log.leak_sensitive_values") + l.redactionText = cmp.Or(c.String("log.redaction_text"), l.redactionText) + newHeaders := toHeaderMap(c.Strings("log.additional_redacted_headers")) + for k := range newHeaders { + l.additionalRedactedHeaders[k] = struct{}{} + } + o := newOptions(append(l.opts, WithConfigurator(c))) + setLevel(l.Entry.Logger, o) + setFormatter(l.Entry.Logger, o) +} + +func (l *Logger) ReportError(r *http.Request, code int, err error, args ...interface{}) { + logger := l.WithError(err).WithRequest(r).WithField("http_response", map[string]interface{}{ + "status_code": code, + }) + switch { + case code < 500: + logger.Info(args...) + default: + logger.Error(args...) + } +} diff --git a/oryx/mapx/type_assert.go b/oryx/mapx/type_assert.go new file mode 100644 index 00000000000..39a5b6bca04 --- /dev/null +++ b/oryx/mapx/type_assert.go @@ -0,0 +1,180 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package mapx + +import ( + "encoding/json" + "errors" + "math" + "time" +) + +// ErrKeyDoesNotExist is returned when the key does not exist in the map. 
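+// It is a sentinel error; callers should match it with errors.Is.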
+var ErrKeyDoesNotExist = errors.New("key is not present in map") + +// ErrKeyCanNotBeTypeAsserted is returned when the key can not be type asserted. +var ErrKeyCanNotBeTypeAsserted = errors.New("key could not be type asserted") + +// GetString returns a string for a given key in values. +func GetString[K comparable](values map[K]any, key K) (string, error) { + if v, ok := values[key]; !ok { + return "", ErrKeyDoesNotExist + } else if sv, ok := v.(string); !ok { + return "", ErrKeyCanNotBeTypeAsserted + } else { + return sv, nil + } +} + +// GetStringSlice returns a string slice for a given key in values. +func GetStringSlice[K comparable](values map[K]any, key K) ([]string, error) { + v, ok := values[key] + if !ok { + return nil, ErrKeyDoesNotExist + } + switch v := v.(type) { + case []string: + return v, nil + case []any: + vs := make([]string, len(v)) + for k, v := range v { + var ok bool + vs[k], ok = v.(string) + if !ok { + return nil, ErrKeyCanNotBeTypeAsserted + } + } + return vs, nil + } + return nil, ErrKeyCanNotBeTypeAsserted +} + +// GetTime returns a string slice for a given key in values. +func GetTime[K comparable](values map[K]any, key K) (time.Time, error) { + v, ok := values[key] + if !ok { + return time.Time{}, ErrKeyDoesNotExist + } + + switch v := v.(type) { + case time.Time: + return v, nil + case int64: + return time.Unix(v, 0), nil + case int32: + return time.Unix(int64(v), 0), nil + case int: + return time.Unix(int64(v), 0), nil + case float64: + if v < math.MinInt64 || v > math.MaxInt64 { + return time.Time{}, errors.New("value is out of range") + } + return time.Unix(int64(v), 0), nil + case float32: + if v < math.MinInt64 || v > math.MaxInt64 { + return time.Time{}, errors.New("value is out of range") + } + return time.Unix(int64(v), 0), nil + } + + return time.Time{}, ErrKeyCanNotBeTypeAsserted +} + +// GetInt64 returns an int64 for a given key in values. +func GetInt64[K comparable](values map[K]any, key K) (int64, error) { + v, ok := values[key] + if !ok { + return 0, ErrKeyDoesNotExist + } + switch v := v.(type) { + case json.Number: + return v.Int64() + case int64: + return v, nil + case int: + return int64(v), nil + case int32: + return int64(v), nil + case uint: + vv := uint64(v) + if vv > math.MaxInt64 { + return 0, errors.New("value is out of range") + } + return int64(vv), nil + case uint32: + return int64(v), nil + case uint64: + if v > math.MaxInt64 { + return 0, errors.New("value is out of range") + } + return int64(v), nil + } + return 0, ErrKeyCanNotBeTypeAsserted +} + +// GetInt32 returns an int32 for a given key in values. +func GetInt32[K comparable](values map[K]any, key K) (int32, error) { + v, err := GetInt64(values, key) + if err != nil { + return 0, err + } + if v > math.MaxInt32 || v < math.MinInt32 { + return 0, errors.New("value is out of range") + } + return int32(v), nil +} + +// GetInt returns an int for a given key in values. +func GetInt[K comparable](values map[K]any, key K) (int, error) { + v, err := GetInt64(values, key) + if err != nil { + return 0, err + } + if v > math.MaxInt || v < math.MinInt { + return 0, errors.New("value is out of range") + } + return int(v), nil +} + +// GetFloat64Default returns a float64 or the default value for a given key in values. +func GetFloat64Default[K comparable](values map[K]any, key K, defaultValue float64) float64 { + f, err := GetFloat64(values, key) + if err != nil { + return defaultValue + } + return f +} + +// GetFloat64 returns a float64 for a given key in values. 
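+// json.Number values are converted via their Float64 method; anything other than
+// json.Number, float32, or float64 returns ErrKeyCanNotBeTypeAsserted.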
+func GetFloat64[K comparable](values map[K]any, key K) (float64, error) { + v, ok := values[key] + if !ok { + return 0, ErrKeyDoesNotExist + } + switch v := v.(type) { + case json.Number: + return v.Float64() + case float32: + return float64(v), nil + case float64: + return v, nil + } + return 0, ErrKeyCanNotBeTypeAsserted +} + +// GetStringDefault returns a string or the default value for a given key in values. +func GetStringDefault[K comparable](values map[K]any, key K, defaultValue string) string { + if s, err := GetString(values, key); err == nil { + return s + } + return defaultValue +} + +// GetStringSliceDefault returns a string slice or the default value for a given key in values. +func GetStringSliceDefault[K comparable](values map[K]any, key K, defaultValue []string) []string { + if s, err := GetStringSlice(values, key); err == nil { + return s + } + return defaultValue +} diff --git a/oryx/metricsx/metrics.go b/oryx/metricsx/metrics.go new file mode 100644 index 00000000000..25aab63074a --- /dev/null +++ b/oryx/metricsx/metrics.go @@ -0,0 +1,84 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package metricsx + +import ( + "runtime" + "sync" +) + +// MemoryStatistics is a JSON-able version of runtime.MemStats +type MemoryStatistics struct { + sync.Mutex + // Alloc is bytes of allocated heap objects. + Alloc uint64 `json:"alloc"` + // TotalAlloc is cumulative bytes allocated for heap objects. + TotalAlloc uint64 `json:"totalAlloc"` + // Sys is the total bytes of memory obtained from the OS. + Sys uint64 `json:"sys"` + // Lookups is the number of pointer lookups performed by the + // runtime. + Lookups uint64 `json:"lookups"` + // Mallocs is the cumulative count of heap objects allocated. + // The number of live objects is Mallocs - Frees. + Mallocs uint64 `json:"mallocs"` + // Frees is the cumulative count of heap objects freed. + Frees uint64 `json:"frees"` + // HeapAlloc is bytes of allocated heap objects. + HeapAlloc uint64 `json:"heapAlloc"` + // HeapSys is bytes of heap memory obtained from the OS. + HeapSys uint64 `json:"heapSys"` + // HeapIdle is bytes in idle (unused) spans. + HeapIdle uint64 `json:"heapIdle"` + // HeapInuse is bytes in in-use spans. + HeapInuse uint64 `json:"heapInuse"` + // HeapReleased is bytes of physical memory returned to the OS. + HeapReleased uint64 `json:"heapReleased"` + // HeapObjects is the number of allocated heap objects. + HeapObjects uint64 `json:"heapObjects"` + // NumGC is the number of completed GC cycles. + NumGC uint32 `json:"numGC"` +} + +// ToMap converts to a map[string]interface{}. +func (ms *MemoryStatistics) ToMap() map[string]interface{} { + return map[string]interface{}{ + "alloc": ms.Alloc, + "totalAlloc": ms.TotalAlloc, + "sys": ms.Sys, + "lookups": ms.Lookups, + "mallocs": ms.Mallocs, + "frees": ms.Frees, + "heapAlloc": ms.HeapAlloc, + "heapSys": ms.HeapSys, + "heapIdle": ms.HeapIdle, + "heapInuse": ms.HeapInuse, + "heapReleased": ms.HeapReleased, + "heapObjects": ms.HeapObjects, + "numGC": ms.NumGC, + "nonInteraction": 1, + } +} + +// Update takes the most recent stats from runtime. 
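+// It calls runtime.ReadMemStats, which briefly stops the world, so it should only run
+// at a coarse interval (see Options.MemoryInterval) rather than per request.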
+func (ms *MemoryStatistics) Update() { + var m runtime.MemStats + runtime.ReadMemStats(&m) + + ms.Lock() + defer ms.Unlock() + ms.Alloc = m.Alloc + ms.TotalAlloc = m.TotalAlloc + ms.Sys = m.Sys + ms.Lookups = m.Lookups + ms.Mallocs = m.Mallocs + ms.Frees = m.Frees + ms.HeapAlloc = m.HeapAlloc + ms.HeapSys = m.HeapSys + ms.HeapIdle = m.HeapIdle + ms.HeapInuse = m.HeapInuse + ms.HeapReleased = m.HeapReleased + ms.HeapObjects = m.HeapObjects + ms.NumGC = m.NumGC +} diff --git a/oryx/metricsx/middleware.go b/oryx/metricsx/middleware.go new file mode 100644 index 00000000000..61ce67ee7ef --- /dev/null +++ b/oryx/metricsx/middleware.go @@ -0,0 +1,384 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package metricsx + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "math" + "net/http" + "net/url" + "os" + "runtime" + "strconv" + "strings" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + + "github.com/gofrs/uuid" + "github.com/spf13/cobra" + + "github.com/ory/x/cmdx" + "github.com/ory/x/configx" + "github.com/ory/x/httpx" + "github.com/ory/x/logrusx" + "github.com/ory/x/resilience" + "github.com/ory/x/urlx" + + "github.com/ory/analytics-go/v5" +) + +const ( + XForwardedHostHeader = "X-Forwarded-Host" + AuthorityHeader = ":authority" +) + +var ( + instance *Service + lock sync.Mutex + knownHeaders = []string{AuthorityHeader, XForwardedHostHeader} +) + +// Service helps with providing context on metrics. +type Service struct { + optOut bool + instanceId string + + o *Options + + c analytics.Client + l *logrusx.Logger + + mem *MemoryStatistics +} + +// Hash returns a hashed string of the value. +func Hash(value string) string { + sha := sha256.Sum256([]byte(value)) + return hex.EncodeToString(sha[:]) +} + +// Options configures the metrics service. +type Options struct { + // Service represents the service name, for example "ory-hydra". + Service string + + // DeploymentId represents the cluster id, typically a hash of some unique configuration properties. + DeploymentId string + + // DBDialect specifies the database dialect in use (e.g., "postgres", "mysql", "sqlite"). + DBDialect string + + // When this instance was started + StartTime time.Time + + // IsDevelopment should be true if we assume that we're in a development environment. + IsDevelopment bool + + // WriteKey is the segment API key. + WriteKey string + + // WhitelistedPaths represents a list of paths that can be transmitted in clear text to segment. + WhitelistedPaths []string + + // BuildVersion represents the build version. + BuildVersion string + + // BuildHash represents the build git hash. + BuildHash string + + // BuildTime represents the build time. + BuildTime string + + // Hostname is a public URL configured for the service, used to derive hosted name for telemetry. + Hostname string + + // Config overrides the analytics.Config. If nil, sensible defaults will be used. + Config *analytics.Config + + // MemoryInterval sets how often memory statistics should be transmitted. Defaults to every 12 hours. + MemoryInterval time.Duration +} + +type void struct{} + +func (v *void) Logf(format string, args ...interface{}) { +} + +func (v *void) Errorf(format string, args ...interface{}) { +} + +// New returns a new metrics service. If one has been instantiated already, no new instance will be created. 
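A hedged wiring sketch for the constructor that follows; the cobra command, logger, config provider, and write key are placeholders and not part of this change:

    svc := New(serveCmd, logger, cfgProvider, &Options{
        Service:      "ory-hydra",
        WriteKey:     "segment-write-key",
        BuildVersion: "v2.0.0",
    })
    defer svc.Close()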
+func New( + cmd *cobra.Command, + l *logrusx.Logger, + c *configx.Provider, + o *Options, +) *Service { + lock.Lock() + defer lock.Unlock() + + if instance != nil { + return instance + } + + o.StartTime = time.Now() + + if o.BuildTime == "" { + o.BuildTime = "unknown" + } + + if o.BuildVersion == "" { + o.BuildVersion = "unknown" + } + + if o.BuildHash == "" { + o.BuildHash = "unknown" + } + + if o.Config == nil { + o.Config = &analytics.Config{ + Interval: time.Hour * 6, + } + } + + o.Config.Logger = new(void) + + if o.MemoryInterval < time.Minute { + o.MemoryInterval = time.Hour * 12 + } + + segment, err := analytics.NewWithConfig(o.WriteKey, *o.Config) + if err != nil { + l.WithError(err).Fatalf("Unable to initialise software quality assurance features.") + return nil + } + + optOut, err := cmd.Flags().GetBool("sqa-opt-out") + if err != nil { + cmdx.Must(err, `Unable to get command line flag "sqa-opt-out": %s`, err) + } + + if !optOut { + optOut = c.Bool("sqa.opt_out") + } + + if !optOut { + optOut = c.Bool("sqa_opt_out") + } + + if !optOut { + optOut, _ = strconv.ParseBool(os.Getenv("SQA_OPT_OUT")) + } + + if !optOut { + optOut, _ = strconv.ParseBool(os.Getenv("SQA-OPT-OUT")) + } + + if !optOut { + l.Info("Software quality assurance features are enabled. Learn more at: https://www.ory.sh/docs/ecosystem/sqa") + } + + m := &Service{ + optOut: optOut, + instanceId: uuid.Must(uuid.NewV4()).String(), + o: o, + c: segment, + l: l, + mem: new(MemoryStatistics), + } + + instance = m + + go m.Identify() + go m.Track() + + return m +} + +// Identify enables reporting to segment. +func (sw *Service) Identify() { + IdentifySend(sw, true) + + // User has not opt-out then make identify to be sent every 6 hours + if !sw.optOut { + for range time.Tick(time.Hour * 6) { + IdentifySend(sw, false) + } + } +} + +func IdentifySend(sw *Service, startup bool) { + if err := resilience.Retry(sw.l, time.Minute*5, time.Hour*6, func() error { + return sw.c.Enqueue(analytics.Identify{ + InstanceId: sw.instanceId, + DeploymentId: sw.o.DeploymentId, + Project: sw.o.Service, + + DatabaseDialect: sw.o.DBDialect, + ProductVersion: sw.o.BuildVersion, + ProductBuild: sw.o.BuildHash, + UptimeDeployment: 0, + UptimeInstance: math.Round(time.Since(sw.o.StartTime).Seconds()), + IsDevelopment: sw.o.IsDevelopment, + IsOptOut: sw.optOut, + Startup: startup, + }) + }); err != nil { + sw.l.WithError(err).Debug("Could not commit anonymized environment information") + } +} + +// Track commits memory statistics to segment. +func (sw *Service) Track() { + if sw.optOut { + return + } + + for { + sw.mem.Update() + if err := sw.c.Enqueue(analytics.Track{ + InstanceId: sw.instanceId, + DeploymentId: sw.o.DeploymentId, + Project: sw.o.Service, + + CPU: runtime.NumCPU(), + OsName: runtime.GOOS, + OsArchitecture: runtime.GOARCH, + Alloc: sw.mem.Alloc, + TotalAlloc: sw.mem.TotalAlloc, + Frees: sw.mem.Frees, + Mallocs: sw.mem.Mallocs, + Lookups: sw.mem.Lookups, + Sys: sw.mem.Sys, + NumGC: sw.mem.NumGC, + HeapAlloc: sw.mem.HeapAlloc, + HeapInuse: sw.mem.HeapInuse, + HeapIdle: sw.mem.HeapIdle, + HeapObjects: sw.mem.HeapObjects, + HeapReleased: sw.mem.HeapReleased, + HeapSys: sw.mem.HeapSys, + }); err != nil { + sw.l.WithError(err).Debug("Could not commit anonymized telemetry data") + } + time.Sleep(sw.o.MemoryInterval) + } +} + +// ServeHTTP is a middleware for sending meta information to segment. 
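The handler signature below matches negroni-style middleware. A minimal sketch, assuming github.com/urfave/negroni, an existing router, and the svc value from New (all placeholders, not implied by this change):

    n := negroni.New()
    n.UseFunc(svc.ServeHTTP) // reports anonymized request metadata
    n.UseHandler(router)
    _ = http.ListenAndServe(":4444", n)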
+func (sw *Service) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + var start time.Time + if !sw.optOut { + start = time.Now() + } + + next(rw, r) + + if sw.optOut { + return + } + + latency := time.Since(start).Milliseconds() + path := sw.anonymizePath(r.URL.Path) + host := urlx.ExtractPublicAddress(sw.o.Hostname, r.Header.Get(XForwardedHostHeader), r.Host) + + // Collecting request info + stat, _ := httpx.GetResponseMeta(rw) + + if err := sw.c.Enqueue(analytics.Page{ + InstanceId: sw.instanceId, + DeploymentId: sw.o.DeploymentId, + Project: sw.o.Service, + UrlHost: host, + UrlPath: path, + RequestCode: stat, + RequestLatency: int(latency), + }); err != nil { + sw.l.WithError(err).Debug("Could not commit anonymized telemetry data") + // do nothing... + } +} + +func (sw *Service) UnaryInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + var start time.Time + if !sw.optOut { + start = time.Now() + } + + resp, err := handler(ctx, req) + + if sw.optOut { + return resp, err + } + + latency := time.Since(start).Milliseconds() + + hosts := []string{sw.o.Hostname} + if md, ok := metadata.FromIncomingContext(ctx); ok { + for _, h := range knownHeaders { + if v := md.Get(h); len(v) > 0 { + hosts = append(hosts, v[0]) + } + } + } + host := urlx.ExtractPublicAddress(hosts...) + + if err := sw.c.Enqueue(analytics.Page{ + InstanceId: sw.instanceId, + DeploymentId: sw.o.DeploymentId, + Project: sw.o.Service, + UrlHost: host, + UrlPath: info.FullMethod, + RequestCode: int(status.Code(err)), + RequestLatency: int(latency), + }); err != nil { + sw.l.WithError(err).Debug("Could not commit anonymized telemetry data") + // do nothing... + } + + return resp, err +} + +func (sw *Service) StreamInterceptor(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + // this needs a bit of thought, but we don't have streaming RPCs currently anyway + sw.l.Info("The telemetry stream interceptor is not yet implemented!") + return handler(srv, stream) +} + +func (sw *Service) Close() error { + return sw.c.Close() +} + +func (sw *Service) anonymizePath(path string) string { + paths := sw.o.WhitelistedPaths + path = strings.ToLower(path) + + for _, p := range paths { + p = strings.ToLower(p) + if path == p { + return p + } else if len(path) > len(p) && path[:len(p)+1] == p+"/" { + return p + } + } + + return "/" +} + +func (sw *Service) anonymizeQuery(query url.Values, salt string) string { + for _, q := range query { + for i, s := range q { + if s != "" { + s = Hash(s + "|" + salt) + q[i] = s + } + } + } + return query.Encode() +} diff --git a/oryx/migratest/refresh.go b/oryx/migratest/refresh.go new file mode 100644 index 00000000000..68583c5ca30 --- /dev/null +++ b/oryx/migratest/refresh.go @@ -0,0 +1,23 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build refresh +// +build refresh + +package migratest + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func WriteFixtureOnError(t *testing.T, err error, actual interface{}, location string) { + content, err := json.MarshalIndent(actual, "", " ") + require.NoError(t, err) + require.NoError(t, os.MkdirAll(filepath.Dir(location), 0777)) + require.NoError(t, os.WriteFile(location, content, 0666)) +} diff --git a/oryx/migratest/run.go b/oryx/migratest/run.go new file mode 100644 index 00000000000..0b8d4d800d4 
--- /dev/null +++ b/oryx/migratest/run.go @@ -0,0 +1,41 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package migratest + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func ContainsExpectedIds(t *testing.T, path string, ids []string) { + files, err := os.ReadDir(path) + require.NoError(t, err) + + for _, f := range files { + if filepath.Ext(f.Name()) == ".json" { + expected := strings.TrimSuffix(filepath.Base(f.Name()), ".json") + assert.Contains(t, ids, expected) + } + } +} + +func CompareWithFixture(t *testing.T, actual interface{}, prefix string, id string) { + location := filepath.Join("fixtures", prefix, id+".json") + //#nosec G304 -- false positive + expected, err := os.ReadFile(location) + WriteFixtureOnError(t, err, actual, location) + + actualJSON, err := json.Marshal(actual) + require.NoError(t, err) + + if !assert.JSONEq(t, string(expected), string(actualJSON)) { + WriteFixtureOnError(t, nil, actual, location) + } +} diff --git a/oryx/migratest/strict.go b/oryx/migratest/strict.go new file mode 100644 index 00000000000..3ff9d503cb1 --- /dev/null +++ b/oryx/migratest/strict.go @@ -0,0 +1,17 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build !refresh +// +build !refresh + +package migratest + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func WriteFixtureOnError(t *testing.T, err error, actual interface{}, location string) { + require.NoError(t, err) +} diff --git a/oryx/modx/version.go b/oryx/modx/version.go new file mode 100644 index 00000000000..f61be39e9c9 --- /dev/null +++ b/oryx/modx/version.go @@ -0,0 +1,34 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package modx + +import ( + "github.com/pkg/errors" + "golang.org/x/mod/modfile" +) + +// FindVersion returns the version for a module given the contents of a go.mod file. +func FindVersion(gomod []byte, module string) (string, error) { + m, err := modfile.Parse("go.mod", gomod, nil) + if err != nil { + return "", err + } + + for _, r := range m.Require { + if r.Mod.Path == module { + return r.Mod.Version, nil + } + } + + return "", errors.Errorf("no go.mod entry found for: %s", module) +} + +// MustFindVersion returns the version for a module given the contents of a go.mod file or panics. 
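For illustration only (the go.mod path and module path are placeholders), the helpers in this file are used roughly like this:

    gomod, err := os.ReadFile("go.mod")
    if err != nil {
        log.Fatal(err)
    }
    version, err := FindVersion(gomod, "golang.org/x/mod")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(version) // the version pinned in go.mod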
+func MustFindVersion(gomod []byte, module string) string { + v, err := FindVersion(gomod, module) + if err != nil { + panic(err) + } + return v +} diff --git a/oryx/networkx/listener.go b/oryx/networkx/listener.go new file mode 100644 index 00000000000..eadfd78438e --- /dev/null +++ b/oryx/networkx/listener.go @@ -0,0 +1,31 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package networkx + +import ( + "net" + "strings" + + "github.com/ory/x/configx" +) + +func AddressIsUnixSocket(address string) bool { + return strings.HasPrefix(address, "unix:") +} + +func MakeListener(address string, socketPermission *configx.UnixPermission) (net.Listener, error) { + if AddressIsUnixSocket(address) { + addr := strings.TrimPrefix(address, "unix:") + l, err := net.Listen("unix", addr) + if err != nil { + return nil, err + } + err = socketPermission.SetPermission(addr) + if err != nil { + return nil, err + } + return l, nil + } + return net.Listen("tcp", address) +} diff --git a/oryx/networkx/manager.go b/oryx/networkx/manager.go new file mode 100644 index 00000000000..c4512d953d5 --- /dev/null +++ b/oryx/networkx/manager.go @@ -0,0 +1,55 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package networkx + +import ( + "context" + "embed" + + "github.com/pkg/errors" + + "github.com/ory/pop/v6" + "github.com/ory/x/logrusx" + "github.com/ory/x/sqlcon" +) + +// Migrations of the network manager. Apply by merging with your local migrations using +// fsx.Merge() and then passing all to the migration box. +// +//go:embed migrations/sql/*.sql +var Migrations embed.FS + +type Manager struct { + c *pop.Connection +} + +// Deprecated: use networkx.Determine directly instead +func NewManager( + c *pop.Connection, + _ *logrusx.Logger, +) *Manager { + return &Manager{ + c: c, + } +} + +// Deprecated: use networkx.Determine directly instead +func (m *Manager) Determine(ctx context.Context) (*Network, error) { + return Determine(m.c.WithContext(ctx)) +} + +func Determine(c *pop.Connection) (*Network, error) { + var p Network + if err := sqlcon.HandleError(c.Q().Order("created_at ASC").First(&p)); err != nil { + if errors.Is(err, sqlcon.ErrNoRows) { + np := NewNetwork() + if err := c.Create(np); err != nil { + return nil, err + } + return np, nil + } + return nil, err + } + return &p, nil +} diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.cockroach.down.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.cockroach.down.sql new file mode 100644 index 00000000000..9996f5ade48 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "networks"; \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.cockroach.up.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.cockroach.up.sql new file mode 100644 index 00000000000..b095b180614 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.cockroach.up.sql @@ -0,0 +1,6 @@ +CREATE TABLE "networks" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.mysql.down.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.mysql.down.sql new file mode 100644 index 00000000000..beb6b149b3c --- /dev/null +++ 
b/oryx/networkx/migrations/sql/20150100000001000000_networks.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `networks`; \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.mysql.up.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.mysql.up.sql new file mode 100644 index 00000000000..0ba5bfcf926 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.mysql.up.sql @@ -0,0 +1,6 @@ +CREATE TABLE `networks` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.postgres.down.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.postgres.down.sql new file mode 100644 index 00000000000..9996f5ade48 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "networks"; \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.postgres.up.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.postgres.up.sql new file mode 100644 index 00000000000..b095b180614 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.postgres.up.sql @@ -0,0 +1,6 @@ +CREATE TABLE "networks" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.sqlite3.down.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.sqlite3.down.sql new file mode 100644 index 00000000000..9996f5ade48 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "networks"; \ No newline at end of file diff --git a/oryx/networkx/migrations/sql/20150100000001000000_networks.sqlite3.up.sql b/oryx/networkx/migrations/sql/20150100000001000000_networks.sqlite3.up.sql new file mode 100644 index 00000000000..f808e33ebd1 --- /dev/null +++ b/oryx/networkx/migrations/sql/20150100000001000000_networks.sqlite3.up.sql @@ -0,0 +1,5 @@ +CREATE TABLE "networks" ( +"id" TEXT PRIMARY KEY, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/networkx/migrations/templates/20150100000001_networks.down.fizz b/oryx/networkx/migrations/templates/20150100000001_networks.down.fizz new file mode 100644 index 00000000000..e6e32ac2412 --- /dev/null +++ b/oryx/networkx/migrations/templates/20150100000001_networks.down.fizz @@ -0,0 +1 @@ +drop_table("networks") diff --git a/oryx/networkx/migrations/templates/20150100000001_networks.up.fizz b/oryx/networkx/migrations/templates/20150100000001_networks.up.fizz new file mode 100644 index 00000000000..52cd06914fd --- /dev/null +++ b/oryx/networkx/migrations/templates/20150100000001_networks.up.fizz @@ -0,0 +1,3 @@ +create_table("networks") { + t.Column("id", "uuid", {primary: true}) +} diff --git a/oryx/networkx/network.go b/oryx/networkx/network.go new file mode 100644 index 00000000000..e9be9276aee --- /dev/null +++ b/oryx/networkx/network.go @@ -0,0 +1,30 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package networkx + +import ( + "time" + + "github.com/gofrs/uuid" +) + +type Network struct { + ID uuid.UUID `json:"id" db:"id"` + + // CreatedAt is a helper struct field for gobuffalo.pop. 
+ CreatedAt time.Time `json:"-" db:"created_at"` + + // UpdatedAt is a helper struct field for gobuffalo.pop. + UpdatedAt time.Time `json:"-" db:"updated_at"` +} + +func (p Network) TableName() string { + return "networks" +} + +func NewNetwork() *Network { + return &Network{ + ID: uuid.Must(uuid.NewV4()), + } +} diff --git a/oryx/openapix/doc.go b/oryx/openapix/doc.go new file mode 100644 index 00000000000..654b27fa4c3 --- /dev/null +++ b/oryx/openapix/doc.go @@ -0,0 +1,6 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package openapix contains definitions commonly used in Ory's APIs +// such as pagination, JSON patches, and more. +package openapix diff --git a/oryx/openapix/jsonpatch.go b/oryx/openapix/jsonpatch.go new file mode 100644 index 00000000000..38769068fcf --- /dev/null +++ b/oryx/openapix/jsonpatch.go @@ -0,0 +1,42 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package openapix + +// A JSONPatchDocument request +// +// swagger:model jsonPatchDocument +type JSONPatchDocument []JSONPatch + +// A JSONPatch document as defined by RFC 6902 +// +// swagger:model jsonPatch +type JSONPatch struct { + // The operation to be performed. One of "add", "remove", "replace", "move", "copy", or "test". + // + // required: true + // example: replace + Op string `json:"op"` + + // The path to the target location. Uses JSON pointer notation. + // + // Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). + // + // required: true + // example: /name + Path string `json:"path"` + + // The value to be used within the operations. + // + // Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). + // + // example: foobar + Value interface{} `json:"value"` + + // This field is used together with operation "move" and uses JSON Pointer notation. + // + // Learn more [about JSON Pointers](https://datatracker.ietf.org/doc/html/rfc6901#section-5). + // + // example: /name + From string `json:"from"` +} diff --git a/x/pagination.go b/oryx/openapix/pagination.go similarity index 55% rename from x/pagination.go rename to oryx/openapix/pagination.go index 37717bf4db1..aab32436766 100644 --- a/x/pagination.go +++ b/oryx/openapix/pagination.go @@ -1,17 +1,10 @@ -// Copyright © 2022 Ory Corp +// Copyright © 2023 Ory Corp // SPDX-License-Identifier: Apache-2.0 -package x +package openapix -import ( - "net/http" - "net/url" - - "github.com/ory/x/pagination/tokenpagination" -) - -// swagger:model paginationHeaders -type PaginationHeaders struct { +// swagger:model tokenPaginationHeaders +type TokenPaginationHeaders struct { // The link header contains pagination links. // // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). @@ -25,8 +18,8 @@ type PaginationHeaders struct { XTotalCount string `json:"x-total-count"` } -// swagger:model pagination -type PaginationParams struct { +// swagger:model tokenPagination +type TokenPaginationParams struct { // Items per page // // This is the number of items per page to return.
@@ -50,20 +43,3 @@ type PaginationParams struct { // min: 1 PageToken string `json:"page_token"` } - -const paginationMaxItems = 1000 -const paginationDefaultItems = 250 - -var paginator = &tokenpagination.TokenPaginator{ - MaxItems: paginationMaxItems, - DefaultItems: paginationDefaultItems, -} - -// ParsePagination parses limit and page from *http.Request with given limits and defaults. -func ParsePagination(r *http.Request) (page, itemsPerPage int) { - return paginator.ParsePagination(r) -} - -func PaginationHeader(w http.ResponseWriter, u *url.URL, total int64, page, itemsPerPage int) { - tokenpagination.PaginationHeader(w, u, total, page, itemsPerPage) -} diff --git a/oryx/osx/env.go b/oryx/osx/env.go new file mode 100644 index 00000000000..e5462ac2ca0 --- /dev/null +++ b/oryx/osx/env.go @@ -0,0 +1,14 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package osx + +import ( + "cmp" + "os" +) + +// GetenvDefault returns an environment variable or the default value if it is empty. +func GetenvDefault(key string, def string) string { + return cmp.Or(os.Getenv(key), def) +} diff --git a/oryx/osx/file.go b/oryx/osx/file.go new file mode 100644 index 00000000000..bdb2d307ae2 --- /dev/null +++ b/oryx/osx/file.go @@ -0,0 +1,221 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package osx + +import ( + "encoding/base64" + "io" + "net/url" + "os" + "strings" + + "github.com/hashicorp/go-retryablehttp" + + "github.com/pkg/errors" + + "github.com/ory/x/httpx" +) + +type options struct { + disableFileLoader bool + disableHTTPLoader bool + disableBase64Loader bool + base64enc *base64.Encoding + disableResilientBase64Loader bool + hc *retryablehttp.Client +} + +type Option func(o *options) + +func (o *options) apply(opts []Option) *options { + for _, f := range opts { + f(o) + } + return o +} + +func newOptions() *options { + return &options{ + disableFileLoader: false, + disableHTTPLoader: false, + disableBase64Loader: false, + base64enc: base64.RawURLEncoding, + hc: httpx.NewResilientClient(), + } +} + +// WithDisabledFileLoader disables the file loader. +func WithDisabledFileLoader() Option { + return func(o *options) { + o.disableFileLoader = true + } +} + +// WithEnabledFileLoader enables the file loader. +func WithEnabledFileLoader() Option { + return func(o *options) { + o.disableFileLoader = false + } +} + +// WithDisabledHTTPLoader disables the HTTP loader. +func WithDisabledHTTPLoader() Option { + return func(o *options) { + o.disableHTTPLoader = true + } +} + +// WithEnabledHTTPLoader enables the HTTP loader. +func WithEnabledHTTPLoader() Option { + return func(o *options) { + o.disableHTTPLoader = false + } +} + +// WithDisabledBase64Loader disables the base64 loader. +func WithDisabledBase64Loader() Option { + return func(o *options) { + o.disableBase64Loader = true + } +} + +// WithEnabledBase64Loader enables the base64 loader. +func WithEnabledBase64Loader() Option { + return func(o *options) { + o.disableBase64Loader = false + } +} + +// WithBase64Encoding sets the base64 encoding. +func WithBase64Encoding(enc *base64.Encoding) Option { + return func(o *options) { + o.base64enc = enc + } +} + +// WithoutResilientBase64Encoding disables the resilient base64 decoding fallback so that only the configured encoding is used. +func WithoutResilientBase64Encoding() Option { + return func(o *options) { + o.disableResilientBase64Loader = true + } +} + +// WithHTTPClient sets the HTTP client.
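The options above (and WithHTTPClient, which follows) are consumed by ReadFileFromAllSources and RestrictedReadFile further down. A hedged sketch; the URL and file path are placeholders:

    jwks, err := ReadFileFromAllSources("https://example.com/jwks.json",
        WithHTTPClient(httpx.NewResilientClient()))
    // RestrictedReadFile starts with every loader disabled:
    secret, err := RestrictedReadFile("file:///etc/ory/secret", WithEnabledFileLoader())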
+func WithHTTPClient(hc *retryablehttp.Client) Option { + return func(o *options) { + o.hc = hc + } +} + +// RestrictedReadFile works similar to ReadFileFromAllSources but has all +// sources disabled per default. You need to enable the loaders you wish to use +// explicitly. +func RestrictedReadFile(source string, opts ...Option) (bytes []byte, err error) { + o := newOptions() + o.disableFileLoader = true + o.disableBase64Loader = true + o.disableHTTPLoader = true + return readFile(source, o.apply(opts)) +} + +// ReadFileFromAllSources reads a file from base64, http, https, and file sources. +// +// Using options, you can disable individual loaders. For example, the following will +// return an error: +// +// ReadFileFromAllSources("https://foo.bar/baz.txt", WithDisabledHTTPLoader()) +// +// Possible formats are: +// +// - /path/to/file +// - file:///path/to/file +// - https://host.com/path/to/file +// - http://host.com/path/to/file +// - base64:// +// +// For more options, check: +// +// - WithDisabledFileLoader +// - WithDisabledHTTPLoader +// - WithDisabledBase64Loader +// - WithBase64Encoding +// - WithHTTPClient +func ReadFileFromAllSources(source string, opts ...Option) (bytes []byte, err error) { + return readFile(source, newOptions().apply(opts)) +} + +func readFile(source string, o *options) (bytes []byte, err error) { + parsed, err := url.Parse(source) + if err != nil { + return nil, errors.Wrap(err, "failed to parse URL") + } + + switch parsed.Scheme { + case "": + if o.disableFileLoader { + return nil, errors.New("file loader disabled") + } + + //#nosec G304 -- false positive + bytes, err = os.ReadFile(source) + if err != nil { + return nil, errors.Wrap(err, "unable to read the file") + } + case "file": + if o.disableFileLoader { + return nil, errors.New("file loader disabled") + } + + //#nosec G304 -- false positive + bytes, err = os.ReadFile(parsed.Host + parsed.Path) + if err != nil { + return nil, errors.Wrap(err, "unable to read the file") + } + case "http", "https": + if o.disableHTTPLoader { + return nil, errors.New("http(s) loader disabled") + } + resp, err := o.hc.Get(parsed.String()) + if err != nil { + return nil, errors.Wrap(err, "unable to load remote file") + } + defer resp.Body.Close() + + bytes, err = io.ReadAll(resp.Body) + if err != nil { + return nil, errors.Wrap(err, "unable to read the HTTP response body") + } + case "base64": + if o.disableBase64Loader { + return nil, errors.New("base64 loader disabled") + } + + if o.disableResilientBase64Loader { + bytes, err = o.base64enc.DecodeString(strings.TrimPrefix(source, "base64://")) + if err != nil { + return nil, errors.Wrap(err, "unable to base64 decode the location") + } + return bytes, nil + } + + for _, enc := range []*base64.Encoding{ + base64.StdEncoding, + base64.URLEncoding, + base64.RawURLEncoding, + base64.RawStdEncoding, + } { + bytes, err = enc.DecodeString(strings.TrimPrefix(source, "base64://")) + if err == nil { + return bytes, nil + } + } + + return nil, errors.Wrap(err, "unable to base64 decode the location") + default: + return nil, errors.Errorf("unsupported source `%s`", parsed.Scheme) + } + + return bytes, nil + +} diff --git a/oryx/osx/stub/text.txt b/oryx/osx/stub/text.txt new file mode 100644 index 00000000000..95d09f2b101 --- /dev/null +++ b/oryx/osx/stub/text.txt @@ -0,0 +1 @@ +hello world \ No newline at end of file diff --git a/oryx/otelx/attribute.go b/oryx/otelx/attribute.go new file mode 100644 index 00000000000..c84489b753c --- /dev/null +++ b/oryx/otelx/attribute.go @@ -0,0 
+1,54 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "database/sql" + "fmt" + + "go.opentelemetry.io/otel/attribute" +) + +const nullString = "" + +func StringAttrs(attrs map[string]string) []attribute.KeyValue { + s := make([]attribute.KeyValue, 0, len(attrs)) + for k, v := range attrs { + s = append(s, attribute.String(k, v)) + } + return s +} + +func AutoInt[I int | int32 | int64](k string, v I) attribute.KeyValue { + // Internally, the OpenTelemetry SDK uses int64 for all integer values anyway. + return attribute.Int64(k, int64(v)) +} + +func Nullable[V any, VN *V | sql.Null[V], A func(string, V) attribute.KeyValue](a A, k string, v VN) attribute.KeyValue { + switch v := any(v).(type) { + case *V: + if v == nil { + return attribute.String(k, nullString) + } + return a(k, *v) + case sql.Null[V]: + if !v.Valid { + return attribute.String(k, nullString) + } + return a(k, v.V) + } + // This should never happen, as the type switch above is exhaustive to the generic type VN. + return attribute.String(k, fmt.Sprintf("", v)) +} + +func NullString[V *string | sql.Null[string]](k string, v V) attribute.KeyValue { + return Nullable(attribute.String, k, v) +} + +func NullStringer(k string, v fmt.Stringer) attribute.KeyValue { + if v == nil { + return attribute.String(k, nullString) + } + return attribute.String(k, v.String()) +} diff --git a/oryx/otelx/config.go b/oryx/otelx/config.go new file mode 100644 index 00000000000..812fe2a363f --- /dev/null +++ b/oryx/otelx/config.go @@ -0,0 +1,66 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "bytes" + _ "embed" + "io" +) + +type JaegerConfig struct { + LocalAgentAddress string `json:"local_agent_address"` + Sampling JaegerSampling `json:"sampling"` +} + +type ZipkinConfig struct { + ServerURL string `json:"server_url"` + Sampling ZipkinSampling `json:"sampling"` +} + +type OTLPConfig struct { + ServerURL string `json:"server_url"` + Insecure bool `json:"insecure"` + Sampling OTLPSampling `json:"sampling"` + AuthorizationHeader string `json:"authorization_header"` +} + +type JaegerSampling struct { + ServerURL string `json:"server_url"` + TraceIdRatio float64 `json:"trace_id_ratio"` +} + +type ZipkinSampling struct { + SamplingRatio float64 `json:"sampling_ratio"` +} + +type OTLPSampling struct { + SamplingRatio float64 `json:"sampling_ratio"` +} + +type ProvidersConfig struct { + Jaeger JaegerConfig `json:"jaeger"` + Zipkin ZipkinConfig `json:"zipkin"` + OTLP OTLPConfig `json:"otlp"` +} + +type Config struct { + ServiceName string `json:"service_name"` + DeploymentEnvironment string `json:"deployment_environment"` + Provider string `json:"provider"` + Providers ProvidersConfig `json:"providers"` +} + +//go:embed config.schema.json +var ConfigSchema string + +const ConfigSchemaID = "ory://tracing-config" + +// AddConfigSchema adds the tracing schema to the compiler. +// The interface is specified instead of `jsonschema.Compiler` to allow the use of any jsonschema library fork or version. 
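A hedged sketch of registering the schema, assuming github.com/santhosh-tekuri/jsonschema/v5 as the compiler; any implementation with a compatible AddResource method works:

    compiler := jsonschema.NewCompiler()
    if err := AddConfigSchema(compiler); err != nil {
        log.Fatal(err)
    }
    schema, err := compiler.Compile(ConfigSchemaID) // validate tracing configs against this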
+func AddConfigSchema(c interface { + AddResource(url string, r io.Reader) error +}) error { + return c.AddResource(ConfigSchemaID, bytes.NewBufferString(ConfigSchema)) +} diff --git a/oryx/otelx/config.schema.json b/oryx/otelx/config.schema.json new file mode 100644 index 00000000000..1a668f31dc0 --- /dev/null +++ b/oryx/otelx/config.schema.json @@ -0,0 +1,152 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "ory://tracing-config", + "type": "object", + "additionalProperties": false, + "description": "Configure distributed tracing using OpenTelemetry", + "properties": { + "provider": { + "type": "string", + "description": "Set this to the tracing backend you wish to use. Supports Jaeger, Zipkin, and OTEL.", + "enum": ["jaeger", "otel", "zipkin"], + "examples": ["jaeger"] + }, + "service_name": { + "type": "string", + "description": "Specifies the service name to use on the tracer.", + "examples": ["Ory Hydra", "Ory Kratos", "Ory Keto", "Ory Oathkeeper"] + }, + "deployment_environment": { + "type": "string", + "description": "Specifies the deployment environment to use on the tracer.", + "examples": ["development", "staging", "production"] + }, + "providers": { + "type": "object", + "additionalProperties": false, + "properties": { + "jaeger": { + "type": "object", + "additionalProperties": false, + "description": "Configures the jaeger tracing backend.", + "properties": { + "local_agent_address": { + "type": "string", + "description": "The address of the jaeger-agent where spans should be sent to.", + "anyOf": [ + { + "title": "IPv6 Address and Port", + "pattern": "^\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\\]:([0-9]*)$" + }, + { + "title": "IPv4 Address and Port", + "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}:([0-9]*)$" + }, + { + "title": "Hostname and Port", + "pattern": "^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\\-]*[a-zA-Z0-9])\\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\\-]*[A-Za-z0-9]):([0-9]*)$" + } + ], + "examples": ["127.0.0.1:6831"] + }, + "sampling": { + "type": "object", + "propertyNames": { + "enum": ["server_url", "trace_id_ratio"] + }, + "additionalProperties": false, + "properties": { + "server_url": { + "type": "string", + "description": "The address of jaeger-agent's HTTP sampling server", + "format": "uri", + "examples": ["http://localhost:5778/sampling"] + }, + "trace_id_ratio": { + "type": "number", + "description": "Trace Id ratio sample", + "examples": [0.5] + } + } + } + } + }, + "zipkin": { + "type": "object", + "additionalProperties": false, + "description": "Configures the zipkin tracing backend.", + "properties": { + "server_url": { + "type": "string", + "description": "The address of the Zipkin server where spans should be sent to.", + "format": "uri", + "examples": ["http://localhost:9411/api/v2/spans"] + }, + "sampling": { + "type": "object", + "propertyNames": { + "enum": ["sampling_ratio"] + }, + "additionalProperties": false, + 
"properties": { + "sampling_ratio": { + "type": "number", + "description": "Sampling ratio for spans.", + "examples": [0.4] + } + } + } + } + }, + "otlp": { + "type": "object", + "additionalProperties": false, + "description": "Configures the OTLP tracing backend.", + "properties": { + "server_url": { + "type": "string", + "description": "The endpoint of the OTLP exporter (HTTP) where spans should be sent to.", + "anyOf": [ + { + "title": "IPv6 Address and Port", + "pattern": "^\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\\]:([0-9]*)$" + }, + { + "title": "IPv4 Address and Port", + "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}:([0-9]*)$" + }, + { + "title": "Hostname and Port", + "pattern": "^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\\-]*[a-zA-Z0-9])\\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\\-]*[A-Za-z0-9]):([0-9]*)$" + } + ], + "examples": ["localhost:4318"] + }, + "insecure": { + "type": "boolean", + "description": "Will use HTTP if set to true; defaults to HTTPS." + }, + "sampling": { + "type": "object", + "propertyNames": { + "enum": ["sampling_ratio"] + }, + "additionalProperties": false, + "properties": { + "sampling_ratio": { + "type": "number", + "description": "Sampling ratio for spans.", + "examples": [0.4] + } + } + }, + "authorization_header": { + "type": "string", + "examples": ["Bearer 2389s8fs9d8fus9f"] + } + } + } + } + } + } +} diff --git a/oryx/otelx/jaeger.go b/oryx/otelx/jaeger.go new file mode 100644 index 00000000000..bc9f1c7e13d --- /dev/null +++ b/oryx/otelx/jaeger.go @@ -0,0 +1,88 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "net" + + "go.opentelemetry.io/contrib/propagators/b3" + jaegerPropagator "go.opentelemetry.io/contrib/propagators/jaeger" + "go.opentelemetry.io/contrib/samplers/jaegerremote" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/jaeger" + "go.opentelemetry.io/otel/propagation" + "go.opentelemetry.io/otel/sdk/resource" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + semconv "go.opentelemetry.io/otel/semconv/v1.27.0" + "go.opentelemetry.io/otel/trace" +) + +// SetupJaeger configures and returns a Jaeger tracer. +// +// The returned tracer will by default attempt to send spans to a local Jaeger agent. +// Optionally, [otelx.JaegerConfig.LocalAgentAddress] can be set to specify a different target. +// +// By default, unless a parent sampler has taken a sampling decision, every span is sampled. +// [otelx.JaegerSampling.TraceIdRatio] may be used to customize the sampling probability, +// optionally alongside [otelx.JaegerSampling.ServerURL] to consult a remote server +// for the sampling strategy to be used. 
+func SetupJaeger(t *Tracer, tracerName string, c *Config) (trace.Tracer, error) { + host, port, err := net.SplitHostPort(c.Providers.Jaeger.LocalAgentAddress) + if err != nil { + return nil, err + } + + exp, err := jaeger.New( + jaeger.WithAgentEndpoint( + jaeger.WithAgentHost(host), jaeger.WithAgentPort(port), + ), + ) + if err != nil { + return nil, err + } + + tpOpts := []sdktrace.TracerProviderOption{ + sdktrace.WithBatcher(exp), + sdktrace.WithResource(resource.NewWithAttributes( + semconv.SchemaURL, + semconv.ServiceName(c.ServiceName), + semconv.DeploymentEnvironmentName(c.DeploymentEnvironment))), + } + + samplingServerURL := c.Providers.Jaeger.Sampling.ServerURL + traceIdRatio := c.Providers.Jaeger.Sampling.TraceIdRatio + + sampler := sdktrace.TraceIDRatioBased(traceIdRatio) + + if samplingServerURL != "" { + sampler = jaegerremote.New( + "jaegerremote", + jaegerremote.WithSamplingServerURL(samplingServerURL), + jaegerremote.WithInitialSampler(sampler), + ) + } + + // Respect any sampling decision taken by the client. + sampler = sdktrace.ParentBased(sampler) + tpOpts = append(tpOpts, sdktrace.WithSampler(sampler)) + + tp := sdktrace.NewTracerProvider(tpOpts...) + otel.SetTracerProvider(tp) + + // At the moment, software across our cloud stack only support Zipkin (B3) + // and Jaeger propagation formats. Proposals for standardized formats for + // context propagation are in the works (ref: https://www.w3.org/TR/trace-context/ + // and https://www.w3.org/TR/baggage/). + // + // Simply add propagation.TraceContext{} and propagation.Baggage{} + // here to enable those as well. + prop := propagation.NewCompositeTextMapPropagator( + propagation.TraceContext{}, + jaegerPropagator.Jaeger{}, + b3.New(b3.WithInjectEncoding(b3.B3MultipleHeader|b3.B3SingleHeader)), + propagation.Baggage{}, + ) + otel.SetTextMapPropagator(prop) + return tp.Tracer(tracerName), nil +} diff --git a/oryx/otelx/middleware.go b/oryx/otelx/middleware.go new file mode 100644 index 00000000000..360be1e2d72 --- /dev/null +++ b/oryx/otelx/middleware.go @@ -0,0 +1,71 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "cmp" + "context" + "net/http" + "strings" + + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" +) + +var WithDefaultFilters otelhttp.Option = otelhttp.WithFilter(func(r *http.Request) bool { + return !(strings.HasPrefix(r.URL.Path, "/health") || + strings.HasPrefix(r.URL.Path, "/admin/health") || + strings.HasPrefix(r.URL.Path, "/metrics") || + strings.HasPrefix(r.URL.Path, "/admin/metrics")) +}) + +type contextKey int + +const callbackContextKey contextKey = iota + +func SpanNameRecorderMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + defer func() { + cb, _ := r.Context().Value(callbackContextKey).(func(string)) + if cb == nil { + return + } + if r.Pattern != "" { + cb(r.Pattern) + } + }() + next.ServeHTTP(w, r) + }) +} + +func SpanNameRecorderNegroniFunc(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + defer func() { + cb, _ := r.Context().Value(callbackContextKey).(func(string)) + if cb == nil { + return + } + if r.Pattern != "" { + cb(r.Pattern) + } + }() + next(w, r) +} + +func NewMiddleware(next http.Handler, operation string, opts ...otelhttp.Option) http.Handler { + myOpts := []otelhttp.Option{ + WithDefaultFilters, + otelhttp.WithSpanNameFormatter(func(operation string, r *http.Request) string { + return cmp.Or(r.Pattern, operation, r.Method+" 
"+r.URL.Path) + }), + } + handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + callback := func(s string) { + r.Pattern = cmp.Or(r.Pattern, s) + } + ctx := context.WithValue(r.Context(), callbackContextKey, callback) + r2 := r.WithContext(ctx) + next.ServeHTTP(w, r2) + r.Pattern = cmp.Or(r2.Pattern, r.Pattern) // best-effort in case callback never is called + }) + return otelhttp.NewHandler(handler, operation, append(myOpts, opts...)...) +} diff --git a/oryx/otelx/otel.go b/oryx/otelx/otel.go new file mode 100644 index 00000000000..4431b9450f5 --- /dev/null +++ b/oryx/otelx/otel.go @@ -0,0 +1,109 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "go.opentelemetry.io/otel/trace" + "go.opentelemetry.io/otel/trace/embedded" + "go.opentelemetry.io/otel/trace/noop" + + "github.com/ory/x/logrusx" + "github.com/ory/x/stringsx" +) + +type Tracer struct { + tracer trace.Tracer +} + +// Creates a new tracer. If name is empty, a default tracer name is used +// instead. See: https://godocs.io/go.opentelemetry.io/otel/sdk/trace#TracerProvider.Tracer +func New(name string, l *logrusx.Logger, c *Config) (*Tracer, error) { + t := &Tracer{} + + if err := t.setup(name, l, c); err != nil { + return nil, err + } + + return t, nil +} + +// Creates a new no-op tracer. +func NewNoop(_ *logrusx.Logger, c *Config) *Tracer { + tp := noop.NewTracerProvider() + t := &Tracer{tracer: tp.Tracer("")} + return t +} + +// setup constructs the tracer based on the given configuration. +func (t *Tracer) setup(name string, l *logrusx.Logger, c *Config) error { + switch f := stringsx.SwitchExact(c.Provider); { + case f.AddCase("jaeger"): + tracer, err := SetupJaeger(t, name, c) + if err != nil { + return err + } + + t.tracer = tracer + l.Infof("Jaeger tracer configured! Sending spans to %s", c.Providers.Jaeger.LocalAgentAddress) + case f.AddCase("zipkin"): + tracer, err := SetupZipkin(t, name, c) + if err != nil { + return err + } + + t.tracer = tracer + l.Infof("Zipkin tracer configured! Sending spans to %s", c.Providers.Zipkin.ServerURL) + case f.AddCase("otel"): + tracer, err := SetupOTLP(t, name, c) + if err != nil { + return err + } + + t.tracer = tracer + l.Infof("OTLP tracer configured! Sending spans to %s", c.Providers.OTLP.ServerURL) + case f.AddCase(""): + l.Infof("No tracer configured - skipping tracing setup") + t.tracer = noop.NewTracerProvider().Tracer(name) + default: + return f.ToUnknownCaseErr() + } + + return nil +} + +// IsLoaded returns true if the tracer has been loaded. +func (t *Tracer) IsLoaded() bool { + if t == nil || t.tracer == nil { + return false + } + return true +} + +// Tracer returns the underlying OpenTelemetry tracer. +func (t *Tracer) Tracer() trace.Tracer { + return t.tracer +} + +// WithOTLP returns a new tracer with the underlying OpenTelemetry Tracer +// replaced. +func (t *Tracer) WithOTLP(other trace.Tracer) *Tracer { + return &Tracer{other} +} + +// Provider returns a TracerProvider which in turn yields this tracer unmodified. +func (t *Tracer) Provider() trace.TracerProvider { + return tracerProvider{t: t.Tracer()} +} + +type tracerProvider struct { + embedded.TracerProvider + t trace.Tracer +} + +var _ trace.TracerProvider = tracerProvider{} + +// Tracer implements trace.TracerProvider. 
+func (tp tracerProvider) Tracer(name string, options ...trace.TracerOption) trace.Tracer { + return tp.t +} diff --git a/oryx/otelx/otlp.go b/oryx/otelx/otlp.go new file mode 100644 index 00000000000..f5c3d7d0750 --- /dev/null +++ b/oryx/otelx/otlp.go @@ -0,0 +1,68 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "context" + + "go.opentelemetry.io/contrib/propagators/b3" + jaegerPropagator "go.opentelemetry.io/contrib/propagators/jaeger" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + "go.opentelemetry.io/otel/propagation" + "go.opentelemetry.io/otel/sdk/resource" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + semconv "go.opentelemetry.io/otel/semconv/v1.27.0" + "go.opentelemetry.io/otel/trace" +) + +func SetupOTLP(t *Tracer, tracerName string, c *Config) (trace.Tracer, error) { + ctx := context.Background() + + clientOpts := []otlptracehttp.Option{ + otlptracehttp.WithEndpoint(c.Providers.OTLP.ServerURL), + } + + if c.Providers.OTLP.Insecure { + clientOpts = append(clientOpts, otlptracehttp.WithInsecure()) + } + + if c.Providers.OTLP.AuthorizationHeader != "" { + clientOpts = append(clientOpts, + otlptracehttp.WithHeaders(map[string]string{"Authorization": c.Providers.OTLP.AuthorizationHeader}), + ) + } + + exp, err := otlptrace.New( + ctx, otlptracehttp.NewClient(clientOpts...), + ) + if err != nil { + return nil, err + } + + tpOpts := []sdktrace.TracerProviderOption{ + sdktrace.WithBatcher(exp), + sdktrace.WithResource(resource.NewWithAttributes( + semconv.SchemaURL, + semconv.ServiceName(c.ServiceName), + semconv.DeploymentEnvironmentName(c.DeploymentEnvironment), + )), + sdktrace.WithSampler(sdktrace.ParentBased(sdktrace.TraceIDRatioBased( + c.Providers.OTLP.Sampling.SamplingRatio, + ))), + } + + tp := sdktrace.NewTracerProvider(tpOpts...) 
+ otel.SetTracerProvider(tp) + + otel.SetTextMapPropagator(propagation.NewCompositeTextMapPropagator( + propagation.TraceContext{}, + jaegerPropagator.Jaeger{}, + b3.New(b3.WithInjectEncoding(b3.B3MultipleHeader|b3.B3SingleHeader)), + propagation.Baggage{}, + )) + + return tp.Tracer(tracerName), nil +} diff --git a/oryx/otelx/semconv/context.go b/oryx/otelx/semconv/context.go new file mode 100644 index 00000000000..a67bfd42f7b --- /dev/null +++ b/oryx/otelx/semconv/context.go @@ -0,0 +1,53 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package semconv + +import ( + "context" + "net/http" + + "go.opentelemetry.io/otel/attribute" + + "github.com/ory/x/httpx" +) + +type contextKey int + +const contextKeyAttributes contextKey = iota + +func ContextWithAttributes(ctx context.Context, attrs ...attribute.KeyValue) context.Context { + existing, _ := ctx.Value(contextKeyAttributes).([]attribute.KeyValue) + return context.WithValue(ctx, contextKeyAttributes, append(existing, attrs...)) +} + +func AttributesFromContext(ctx context.Context) []attribute.KeyValue { + fromCtx, _ := ctx.Value(contextKeyAttributes).([]attribute.KeyValue) + uniq := make(map[attribute.Key]struct{}) + attrs := make([]attribute.KeyValue, 0) + for i := len(fromCtx) - 1; i >= 0; i-- { + if _, ok := uniq[fromCtx[i].Key]; !ok { + uniq[fromCtx[i].Key] = struct{}{} + attrs = append(attrs, fromCtx[i]) + } + } + reverse(attrs) + return attrs +} + +func Middleware(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + ctx := ContextWithAttributes(r.Context(), + append( + AttrGeoLocation(*httpx.ClientGeoLocation(r)), + AttrClientIP(httpx.ClientIP(r)), + )..., + ) + + next(rw, r.WithContext(ctx)) +} + +func reverse[S ~[]E, E any](s S) { + for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } +} diff --git a/oryx/otelx/semconv/deprecated.go b/oryx/otelx/semconv/deprecated.go new file mode 100644 index 00000000000..4a2615aeed7 --- /dev/null +++ b/oryx/otelx/semconv/deprecated.go @@ -0,0 +1,38 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package semconv + +import ( + "context" + + otelattr "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" +) + +// NewDeprecatedFeatureUsedEvent creates a new event indicating that a deprecated feature was used. +// It returns the event name and a trace.EventOption that can be used to +// add the event to a span. 
+// +// span.AddEvent(NewDeprecatedFeatureUsedEvent(ctx, "deprecated-feature-id", otelattr.String("key", "value"))) +func NewDeprecatedFeatureUsedEvent(ctx context.Context, deprecatedCodeFeatureID string, attrs ...otelattr.KeyValue) (string, trace.EventOption) { + return DeprecatedFeatureUsed.String(), + trace.WithAttributes( + append( + append( + attrs, + AttributesFromContext(ctx)..., + ), + AttrDeprecatedFeatureID(deprecatedCodeFeatureID), + )..., + ) +} + +const ( + AttributeKeyDeprecatedCodePathIDAttributeKey AttributeKey = "DeprecatedFeatureID" + DeprecatedFeatureUsed Event = "DeprecatedFeatureUsed" +) + +func AttrDeprecatedFeatureID(id string) otelattr.KeyValue { + return otelattr.String(AttributeKeyDeprecatedCodePathIDAttributeKey.String(), id) +} diff --git a/oryx/otelx/semconv/events.go b/oryx/otelx/semconv/events.go new file mode 100644 index 00000000000..7e9759a30ac --- /dev/null +++ b/oryx/otelx/semconv/events.go @@ -0,0 +1,104 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package semconv contains OpenTelemetry semantic convention constants. +package semconv + +import ( + "github.com/gofrs/uuid" + otelattr "go.opentelemetry.io/otel/attribute" + + "github.com/ory/x/httpx" +) + +type Event string + +func (e Event) String() string { + return string(e) +} + +type AttributeKey string + +func (a AttributeKey) String() string { + return string(a) +} + +const ( + AttributeKeyIdentityID AttributeKey = "IdentityID" + AttributeKeyNID AttributeKey = "ProjectID" + AttributeKeyClientIP AttributeKey = "ClientIP" + AttributeKeyGeoLocationCity AttributeKey = "GeoLocationCity" + AttributeKeyGeoLocationRegion AttributeKey = "GeoLocationRegion" + AttributeKeyGeoLocationCountry AttributeKey = "GeoLocationCountry" + AttributeKeyGeoLocationLatitude AttributeKey = "GeoLocationLatitude" + AttributeKeyGeoLocationLongitude AttributeKey = "GeoLocationLongitude" + AttributeKeyWorkspace AttributeKey = "WorkspaceID" + AttributeKeySubscriptionID AttributeKey = "SubscriptionID" + AttributeKeyProjectEnvironment AttributeKey = "ProjectEnvironment" + AttributeKeyWorkspaceAPIKeyID AttributeKey = "WorkspaceAPIKeyID" + AttributeKeyProjectAPIKeyID AttributeKey = "ProjectAPIKeyID" +) + +func AttrIdentityID[V string | uuid.UUID](val V) otelattr.KeyValue { + return otelattr.String(AttributeKeyIdentityID.String(), uuidOrString(val)) +} + +func AttrNID(val uuid.UUID) otelattr.KeyValue { + return otelattr.String(AttributeKeyNID.String(), val.String()) +} + +func AttrWorkspace(val uuid.UUID) otelattr.KeyValue { + return otelattr.String(AttributeKeyWorkspace.String(), val.String()) +} + +func AttrSubscription(val uuid.UUID) otelattr.KeyValue { + return otelattr.String(AttributeKeySubscriptionID.String(), val.String()) +} + +func AttrProjectEnvironment(val string) otelattr.KeyValue { + return otelattr.String(AttributeKeyProjectEnvironment.String(), val) +} + +func AttrClientIP(val string) otelattr.KeyValue { + return otelattr.String(AttributeKeyClientIP.String(), val) +} + +func AttrGeoLocation(val httpx.GeoLocation) []otelattr.KeyValue { + geoLocationAttributes := make([]otelattr.KeyValue, 0, 3) + + if val.City != "" { + geoLocationAttributes = append(geoLocationAttributes, otelattr.String(AttributeKeyGeoLocationCity.String(), val.City)) + } + if val.Country != "" { + geoLocationAttributes = append(geoLocationAttributes, otelattr.String(AttributeKeyGeoLocationCountry.String(), val.Country)) + } + if val.Region != "" { + geoLocationAttributes = 
append(geoLocationAttributes, otelattr.String(AttributeKeyGeoLocationRegion.String(), val.Region)) + } + if val.Latitude != nil { + geoLocationAttributes = append(geoLocationAttributes, otelattr.Float64(AttributeKeyGeoLocationLatitude.String(), *val.Latitude)) + } + if val.Longitude != nil { + geoLocationAttributes = append(geoLocationAttributes, otelattr.Float64(AttributeKeyGeoLocationLongitude.String(), *val.Longitude)) + } + + return geoLocationAttributes +} + +func AttrWorkspaceAPIKeyID[V string | uuid.UUID](val V) otelattr.KeyValue { + return otelattr.String(AttributeKeyWorkspaceAPIKeyID.String(), uuidOrString(val)) +} + +func AttrProjectAPIKeyID[V string | uuid.UUID](val V) otelattr.KeyValue { + return otelattr.String(AttributeKeyProjectAPIKeyID.String(), uuidOrString(val)) +} + +func uuidOrString[V string | uuid.UUID](val V) string { + switch val := any(val).(type) { + case string: + return val + case uuid.UUID: + return val.String() + } + panic("unreachable") +} diff --git a/oryx/otelx/semconv/warning.go b/oryx/otelx/semconv/warning.go new file mode 100644 index 00000000000..79f9ae4c197 --- /dev/null +++ b/oryx/otelx/semconv/warning.go @@ -0,0 +1,38 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package semconv + +import ( + "context" + + otelattr "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" +) + +// NewWarning creates a new warning event with the given ID and attributes. +// It returns the event name and a trace.EventOption that can be used to +// add the event to a span. +// +// span.AddEvent(NewWarning(ctx, "warning-id", otelattr.String("key", "value"))) +func NewWarning(ctx context.Context, id string, attrs ...otelattr.KeyValue) (string, trace.EventOption) { + return Warning.String(), + trace.WithAttributes( + append( + append( + attrs, + AttributesFromContext(ctx)..., + ), + otelattr.String(AttributeWarningID.String(), id), + )..., + ) +} + +const ( + Warning Event = "Warning" + AttributeWarningID AttributeKey = "WarningID" +) + +func AttrWarningID(id string) otelattr.KeyValue { + return otelattr.String(AttributeWarningID.String(), id) +} diff --git a/oryx/otelx/withspan.go b/oryx/otelx/withspan.go new file mode 100644 index 00000000000..5fda63de021 --- /dev/null +++ b/oryx/otelx/withspan.go @@ -0,0 +1,148 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "context" + "errors" + "fmt" + "reflect" + "runtime" + "strings" + + pkgerrors "github.com/pkg/errors" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + semconv "go.opentelemetry.io/otel/semconv/v1.27.0" + "go.opentelemetry.io/otel/trace" +) + +// WithSpan wraps execution of f in a span identified by name. +// +// If f returns an error or panics, the span status will be set to the error +// state. The error (or panic) will be propagated unmodified. +// +// f will be wrapped in a child span by default. To make a new root span +// instead, pass the trace.WithNewRoot() option. +func WithSpan(ctx context.Context, name string, f func(context.Context) error, opts ...trace.SpanStartOption) (err error) { + ctx, span := trace.SpanFromContext(ctx).TracerProvider().Tracer("").Start(ctx, name, opts...) 
+ defer func() { + defer span.End() + if r := recover(); r != nil { + setErrorStatusPanic(span, r) + panic(r) + } else if err != nil { + span.SetStatus(codes.Error, err.Error()) + setErrorTags(span, err) + } + }() + return f(ctx) +} + +// End finishes span, and automatically sets the error state if *err is not nil +// or during panicking. +// +// Usage: +// +// func Divide(ctx context.Context, numerator, denominator int) (ratio int, err error) { +// ctx, span := tracer.Start(ctx, "Divide") +// defer otelx.End(span, &err) +// if denominator == 0 { +// return 0, errors.New("cannot divide by zero") +// } +// return numerator / denominator, nil +// } +// +// During a panic, we don't fully conform to OpenTelemetry's semantic +// conventions because that would require us to emit a span event to attach the +// stacktrace and error type, and we don't want to do that. Instead, we set the +// tags on the span directly. +// https://opentelemetry.io/docs/specs/semconv/exceptions/exceptions-spans/ +// +// For improved compatibility with Datadog, we also set some additional tags as +// documented here: +// https://docs.datadoghq.com/standard-attributes/?product=apm&search=error +func End(span trace.Span, err *error) { + defer span.End() + if r := recover(); r != nil { + setErrorStatusPanic(span, r) + panic(r) + } + if err == nil || *err == nil { + return + } + span.SetStatus(codes.Error, (*err).Error()) + setErrorTags(span, *err) +} + +func setErrorStatusPanic(span trace.Span, recovered any) { + span.SetAttributes( + // OpenTelemetry says to add these attributes to an event, not the span + // itself. We don't want to do that, so we're adding them to the span + // directly. + semconv.ExceptionEscaped(true), + // OpenTelemetry describes "exception.stacktrace" We don't love that, + // though, so we're using "error.stack" instead, like DataDog). 
+ attribute.String("error.stack", stacktrace()), + ) + if t := reflect.TypeOf(recovered); t != nil { + span.SetAttributes(semconv.ExceptionType(t.String())) + } + switch e := recovered.(type) { + case error: + span.SetStatus(codes.Error, "panic: "+e.Error()) + setErrorTags(span, e) + case string, fmt.Stringer: + span.SetStatus(codes.Error, fmt.Sprintf("panic: %v", e)) + default: + span.SetStatus(codes.Error, "panic") + case nil: + // nothing + } +} + +func setErrorTags(span trace.Span, err error) { + span.SetAttributes( + attribute.String("error", err.Error()), + attribute.String("error.message", err.Error()), // DataDog compat + attribute.String("error.type", fmt.Sprintf("%T", errors.Unwrap(err))), // the innermost error type is the most useful here + ) + if e := interface{ StackTrace() pkgerrors.StackTrace }(nil); errors.As(err, &e) { + span.SetAttributes(attribute.String("error.stack", fmt.Sprintf("%+v", e.StackTrace()))) + } + if e := interface{ Reason() string }(nil); errors.As(err, &e) { + span.SetAttributes(attribute.String("error.reason", e.Reason())) + } + if e := interface{ Debug() string }(nil); errors.As(err, &e) { + span.SetAttributes(attribute.String("error.debug", e.Debug())) + } + if e := interface{ ID() string }(nil); errors.As(err, &e) { + span.SetAttributes(attribute.String("error.id", e.ID())) + } + if e := interface{ Details() map[string]interface{} }(nil); errors.As(err, &e) { + for k, v := range e.Details() { + span.SetAttributes(attribute.String("error.details."+k, fmt.Sprintf("%v", v))) + } + } +} + +func stacktrace() string { + pc := make([]uintptr, 5) + n := runtime.Callers(4, pc) + if n == 0 { + return "" + } + pc = pc[:n] + frames := runtime.CallersFrames(pc) + + var builder strings.Builder + for { + frame, more := frames.Next() + fmt.Fprintf(&builder, "%s\n\t%s:%d\n", frame.Function, frame.File, frame.Line) + if !more { + break + } + } + return builder.String() +} diff --git a/oryx/otelx/zipkin.go b/oryx/otelx/zipkin.go new file mode 100644 index 00000000000..59922c4a662 --- /dev/null +++ b/oryx/otelx/zipkin.go @@ -0,0 +1,37 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package otelx + +import ( + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/zipkin" + "go.opentelemetry.io/otel/sdk/resource" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + semconv "go.opentelemetry.io/otel/semconv/v1.27.0" + "go.opentelemetry.io/otel/trace" +) + +func SetupZipkin(t *Tracer, tracerName string, c *Config) (trace.Tracer, error) { + exp, err := zipkin.New(c.Providers.Zipkin.ServerURL) + if err != nil { + return nil, err + } + + tpOpts := []sdktrace.TracerProviderOption{ + sdktrace.WithBatcher(exp), + sdktrace.WithResource(resource.NewWithAttributes( + semconv.SchemaURL, + semconv.ServiceName(c.ServiceName), + semconv.DeploymentEnvironmentName(c.DeploymentEnvironment), + )), + sdktrace.WithSampler(sdktrace.ParentBased(sdktrace.TraceIDRatioBased( + c.Providers.Zipkin.Sampling.SamplingRatio, + ))), + } + + tp := sdktrace.NewTracerProvider(tpOpts...) 
+ otel.SetTracerProvider(tp) + + return tp.Tracer(tracerName), nil +} diff --git a/oryx/package-lock.json b/oryx/package-lock.json new file mode 100644 index 00000000000..17684a20877 --- /dev/null +++ b/oryx/package-lock.json @@ -0,0 +1,664 @@ +{ + "name": "x", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "devDependencies": { + "license-checker": "^25.0.1", + "ory-prettier-styles": "1.3.0", + "prettier": "2.8.8" + } + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true, + "license": "ISC" + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true, + "license": "MIT" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/debuglog": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz", + "integrity": "sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==", + "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, + "license": "ISC", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/hasown": { + 
"version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true, + "license": "ISC" + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/license-checker": { + "version": "25.0.1", + "resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz", + "integrity": "sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "chalk": "^2.4.1", + "debug": "^3.1.0", + "mkdirp": "^0.5.1", + "nopt": "^4.0.1", + "read-installed": "~4.0.3", + "semver": "^5.5.0", + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0", + "spdx-satisfies": "^4.0.0", + "treeify": "^1.1.0" + }, + "bin": { + "license-checker": "bin/license-checker" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + 
"license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nopt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", + "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", + "dev": true, + "license": "ISC", + "dependencies": { + "abbrev": "1", + "osenv": "^0.1.4" + }, + "bin": { + "nopt": "bin/nopt.js" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==", + "dev": true, + "license": "ISC" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/ory-prettier-styles": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ory-prettier-styles/-/ory-prettier-styles-1.3.0.tgz", + "integrity": "sha512-Vfn0G6CyLaadwcCamwe1SQCf37ZQfBDgMrhRI70dE/2fbE3Q43/xu7K5c32I5FGt/EliroWty5yBjmdkj0eWug==", + "dev": true + }, + "node_modules/os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + "dependencies": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } + }, + 
"node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/read-installed": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/read-installed/-/read-installed-4.0.3.tgz", + "integrity": "sha512-O03wg/IYuV/VtnK2h/KXEt9VIbMUFbk3ERG0Iu4FhLZw0EP0T9znqrYDGn6ncbEsXUFaUjiVAWXHzxwt3lhRPQ==", + "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", + "dependencies": { + "debuglog": "^1.0.1", + "read-package-json": "^2.0.0", + "readdir-scoped-modules": "^1.0.0", + "semver": "2 || 3 || 4 || 5", + "slide": "~1.1.3", + "util-extend": "^1.0.1" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.2" + } + }, + "node_modules/read-package-json": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.2.tgz", + "integrity": "sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==", + "deprecated": "This package is no longer supported. 
Please use @npmcli/package-json instead.", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.1", + "json-parse-even-better-errors": "^2.3.0", + "normalize-package-data": "^2.0.0", + "npm-normalize-package-bin": "^1.0.0" + } + }, + "node_modules/readdir-scoped-modules": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", + "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "license": "ISC", + "dependencies": { + "debuglog": "^1.0.1", + "dezalgo": "^1.0.0", + "graceful-fs": "^4.1.2", + "once": "^1.3.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/slide": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", + "integrity": "sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "*" + } + }, + "node_modules/spdx-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/spdx-compare/-/spdx-compare-1.0.0.tgz", + "integrity": "sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-find-index": "^1.0.2", + "spdx-expression-parse": "^3.0.0", + "spdx-ranges": "^2.0.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true, + "license": "CC-BY-3.0" + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.22", + "resolved": 
"https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", + "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/spdx-ranges": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.1.1.tgz", + "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==", + "dev": true, + "license": "(MIT AND CC-BY-3.0)" + }, + "node_modules/spdx-satisfies": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-4.0.1.tgz", + "integrity": "sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-compare": "^1.0.0", + "spdx-expression-parse": "^3.0.0", + "spdx-ranges": "^2.0.0" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/treeify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/treeify/-/treeify-1.1.0.tgz", + "integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/util-extend": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz", + "integrity": "sha512-mLs5zAK+ctllYBj+iAQvlDCwoxU/WDOUaJkcFudeiAX6OajC6BKXJUa9a+tbtkC11dz2Ufb7h0lyvIOVn4LADA==", + "dev": true, + "license": "MIT" + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + } + } +} diff --git a/oryx/package.go b/oryx/package.go new file mode 100644 index 00000000000..5b760d6bc9b --- /dev/null +++ b/oryx/package.go @@ -0,0 +1,4 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package x diff --git a/oryx/package.json b/oryx/package.json new file mode 100644 index 00000000000..c1d3461c628 --- /dev/null +++ b/oryx/package.json @@ -0,0 +1,9 @@ +{ + "private": true, + "prettier": 
"ory-prettier-styles", + "devDependencies": { + "license-checker": "^25.0.1", + "ory-prettier-styles": "1.3.0", + "prettier": "2.8.8" + } +} diff --git a/oryx/pagination/README.md b/oryx/pagination/README.md new file mode 100644 index 00000000000..69ba0e0c343 --- /dev/null +++ b/oryx/pagination/README.md @@ -0,0 +1,29 @@ +# pagination + +A simple helper for dealing with pagination. + +``` +go get github.com/ory/pagination +``` + +## Example + +```go +package main + +import ( + "github.com/ory/pagination" + "net/http" + "net/url" + "fmt" +) + +func main() { + u, _ := url.Parse("http://localhost/foo?offset=0&limit=10") + limit, offset := pagination.Parse(&http.Request{URL: u}, 5, 5, 10) + + items := []string{"a", "b", "c", "d"} + start, end := pagination.Index(limit, offset, len(items)) + fmt.Printf("Got items: %v", items[start:end]) +} +``` diff --git a/oryx/pagination/header.go b/oryx/pagination/header.go new file mode 100644 index 00000000000..17533e5b551 --- /dev/null +++ b/oryx/pagination/header.go @@ -0,0 +1,96 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pagination + +import ( + "fmt" + "math" + "net/http" + "net/url" + "strconv" + "strings" +) + +func header(u *url.URL, rel string, limit, offset int64) string { + q := u.Query() + q.Set("limit", fmt.Sprintf("%d", limit)) + q.Set("offset", fmt.Sprintf("%d", offset)) + u.RawQuery = q.Encode() + return fmt.Sprintf("<%s>; rel=\"%s\"", u.String(), rel) +} + +type formatter func(location *url.URL, rel string, itemsPerPage int64, offset int64) string + +// HeaderWithFormatter adds an HTTP header for pagination which uses a custom formatter for generating the URL links. +func HeaderWithFormatter(w http.ResponseWriter, u *url.URL, total int64, page, itemsPerPage int, f formatter) { + if itemsPerPage <= 0 { + itemsPerPage = 1 + } + + itemsPerPage64 := int64(itemsPerPage) + offset := int64(page) * itemsPerPage64 + + // lastOffset will either equal the offset required to contain the remainder, + // or the limit. + var lastOffset int64 + if total%itemsPerPage64 == 0 { + lastOffset = total - itemsPerPage64 + } else { + lastOffset = (total / itemsPerPage64) * itemsPerPage64 + } + + w.Header().Set("X-Total-Count", strconv.FormatInt(total, 10)) + + // Check for last page + if offset >= lastOffset { + if total == 0 { + w.Header().Set("Link", strings.Join([]string{ + f(u, "first", itemsPerPage64, 0), + f(u, "next", itemsPerPage64, ((offset/itemsPerPage64)+1)*itemsPerPage64), + f(u, "prev", itemsPerPage64, ((offset/itemsPerPage64)-1)*itemsPerPage64), + }, ",")) + return + } + + if total <= itemsPerPage64 { + w.Header().Set("link", f(u, "first", total, 0)) + return + } + + w.Header().Set("Link", strings.Join([]string{ + f(u, "first", itemsPerPage64, 0), + f(u, "prev", itemsPerPage64, lastOffset-itemsPerPage64), + }, ",")) + return + } + + if offset < itemsPerPage64 { + w.Header().Set("Link", strings.Join([]string{ + f(u, "next", itemsPerPage64, itemsPerPage64), + f(u, "last", itemsPerPage64, lastOffset), + }, ",")) + return + } + + w.Header().Set("Link", strings.Join([]string{ + f(u, "first", itemsPerPage64, 0), + f(u, "next", itemsPerPage64, ((offset/itemsPerPage64)+1)*itemsPerPage64), + f(u, "prev", itemsPerPage64, ((offset/itemsPerPage64)-1)*itemsPerPage64), + f(u, "last", itemsPerPage64, lastOffset), + }, ",")) +} + +// Header adds an http header for pagination using a responsewriter where backwards compatibility is required. 
+// The header will contain links any combination of the first, last, next, or previous (prev) pages in a paginated list (given a limit and an offset, and optionally a total). +// If total is not set, then no "last" page will be calculated. +// If no limit is provided, then it will default to 1. +func Header(w http.ResponseWriter, u *url.URL, total int, limit, offset int) { + var page int + if limit == 0 { + limit = 1 + } + + page = int(math.Floor(float64(offset) / float64(limit))) + HeaderWithFormatter(w, u, int64(total), page, limit, header) +} diff --git a/oryx/pagination/items.go b/oryx/pagination/items.go new file mode 100644 index 00000000000..6094f2a7aec --- /dev/null +++ b/oryx/pagination/items.go @@ -0,0 +1,12 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pagination + +// MaxItemsPerPage is used to prevent DoS attacks against large lists by limiting the items per page to 500. +func MaxItemsPerPage(max, is int) int { + if is > max { + return max + } + return is +} diff --git a/oryx/pagination/keysetpagination/header.go b/oryx/pagination/keysetpagination/header.go new file mode 100644 index 00000000000..0b04c773fb2 --- /dev/null +++ b/oryx/pagination/keysetpagination/header.go @@ -0,0 +1,112 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "cmp" + "fmt" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/pkg/errors" +) + +// Pagination Request Parameters +// +// The `Link` HTTP header contains multiple links (`first`, `next`) formatted as: +// `; rel="first"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model keysetPaginationRequestParameters +type RequestParameters struct { + // Items per Page + // + // This is the number of items per page to return. + // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 1000 + PageSize int `json:"page_size"` + + // Next Page Token + // + // The next page token. + // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + PageToken string `json:"page_token"` +} + +// Pagination Response Header +// +// The `Link` HTTP header contains multiple links (`first`, `next`) formatted as: +// `; rel="first"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model keysetPaginationResponseHeaders +type ResponseHeaders struct { + // The Link HTTP Header + // + // The `Link` header contains a comma-delimited list of links to the following pages: + // + // - first: The first page of results. + // - next: The next page of results. + // + // Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples: + // + // ; rel="next" + // + Link string `json:"link"` +} + +func header(u *url.URL, rel, token string, size int) string { + q := u.Query() + q.Set("page_token", token) + q.Set("page_size", strconv.Itoa(size)) + u.RawQuery = q.Encode() + return fmt.Sprintf("<%s>; rel=\"%s\"", u.String(), rel) +} + +// Header adds the Link header for the page encoded by the paginator. 
+// It contains links to the first and next page, if one exists. +func Header(w http.ResponseWriter, u *url.URL, p *Paginator) { + size := p.Size() + link := []string{header(u, "first", p.defaultToken.Encode(), size)} + if !p.isLast { + link = append(link, header(u, "next", p.Token().Encode(), size)) + } + w.Header().Set("Link", strings.Join(link, ",")) +} + +// Parse returns the pagination options from the URL query. +func Parse(q url.Values, p PageTokenConstructor) ([]Option, error) { + var opts []Option + if pt := cmp.Or(q["page_token"]...); pt != "" { + pageToken, err := url.QueryUnescape(pt) + if err != nil { + return nil, errors.WithStack(err) + } + parsed, err := p(pageToken) + if err != nil { + return nil, errors.WithStack(err) + } + opts = append(opts, WithToken(parsed)) + } + if ps := cmp.Or(q["page_size"]...); ps != "" { + size, err := strconv.Atoi(ps) + if err != nil { + return nil, errors.WithStack(err) + } + opts = append(opts, WithSize(size)) + } + return opts, nil +} diff --git a/oryx/pagination/keysetpagination/page_token.go b/oryx/pagination/keysetpagination/page_token.go new file mode 100644 index 00000000000..c88167855f6 --- /dev/null +++ b/oryx/pagination/keysetpagination/page_token.go @@ -0,0 +1,74 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "encoding/base64" + "fmt" + "strings" +) + +type PageToken = interface { + Parse(string) map[string]string + Encode() string +} + +var _ PageToken = new(StringPageToken) +var _ PageToken = new(MapPageToken) + +type StringPageToken string + +func (s StringPageToken) Parse(idField string) map[string]string { + return map[string]string{idField: string(s)} +} + +func (s StringPageToken) Encode() string { + return string(s) +} + +func NewStringPageToken(s string) (PageToken, error) { + return StringPageToken(s), nil +} + +type MapPageToken map[string]string + +func (m MapPageToken) Parse(_ string) map[string]string { + return map[string]string(m) +} + +const pageTokenColumnDelim = "/" + +func (m MapPageToken) Encode() string { + elems := make([]string, 0, len(m)) + for k, v := range m { + elems = append(elems, fmt.Sprintf("%s=%s", k, v)) + } + + // For now: use Base64 instead of URL escaping, as the Timestamp format we need to use can contain a `+` sign, + // which represents a space in URLs, so it's not properly encoded by the Go library. 
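+	// Illustration (values are made up): {"id": "42", "created_at": "2023-01-01T10:00:00+02:00"} is joined into
+	// "id=42/created_at=2023-01-01T10:00:00+02:00" (in unspecified map order) and then base64-encoded.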
+ return base64.RawStdEncoding.EncodeToString([]byte(strings.Join(elems, pageTokenColumnDelim))) +} + +func NewMapPageToken(s string) (PageToken, error) { + b, err := base64.RawStdEncoding.DecodeString(s) + if err != nil { + return nil, err + } + tokens := strings.Split(string(b), pageTokenColumnDelim) + + r := map[string]string{} + + for _, p := range tokens { + if columnName, value, found := strings.Cut(p, "="); found { + r[columnName] = value + } + } + + return MapPageToken(r), nil +} + +var _ PageTokenConstructor = NewMapPageToken +var _ PageTokenConstructor = NewStringPageToken + +type PageTokenConstructor = func(string) (PageToken, error) diff --git a/oryx/pagination/keysetpagination/paginator.go b/oryx/pagination/keysetpagination/paginator.go new file mode 100644 index 00000000000..8f47562f454 --- /dev/null +++ b/oryx/pagination/keysetpagination/paginator.go @@ -0,0 +1,258 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "errors" + "fmt" + + "github.com/ory/pop/v6" + "github.com/ory/pop/v6/columns" +) + +type ( + Item = interface{ PageToken() PageToken } + + Order string + + columnOrdering struct { + name string + order Order + } + Paginator struct { + token, defaultToken PageToken + size, defaultSize, maxSize int + isLast bool + additionalColumn columnOrdering + } + Option func(*Paginator) *Paginator +) + +var ErrUnknownOrder = errors.New("unknown order") + +const ( + OrderDescending Order = "DESC" + OrderAscending Order = "ASC" + + DefaultSize = 100 + DefaultMaxSize = 500 +) + +func (o Order) extract() (string, string, error) { + switch o { + case OrderAscending: + return ">", string(o), nil + case OrderDescending: + return "<", string(o), nil + default: + return "", "", ErrUnknownOrder + } +} + +func (p *Paginator) Token() PageToken { + if p.token == nil { + return p.defaultToken + } + return p.token +} + +func (p *Paginator) Size() int { + size := p.size + if size <= 0 { + size = p.defaultSize + if size == 0 { + size = 100 + } + } + if size > p.maxSize { + size = p.maxSize + } + return size +} + +func (p *Paginator) IsLast() bool { + return p.isLast +} + +func (p *Paginator) ToOptions() []Option { + opts := make([]Option, 0, 7) + if p.token != nil { + opts = append(opts, WithToken(p.token)) + } + if p.defaultToken != nil { + opts = append(opts, WithDefaultToken(p.defaultToken)) + } + if p.size > 0 { + opts = append(opts, WithSize(p.size)) + } + if p.defaultSize != DefaultSize { + opts = append(opts, WithDefaultSize(p.defaultSize)) + } + if p.maxSize != DefaultMaxSize { + opts = append(opts, WithMaxSize(p.maxSize)) + } + if p.additionalColumn.name != "" { + opts = append(opts, WithColumn(p.additionalColumn.name, p.additionalColumn.order)) + } + if p.isLast { + opts = append(opts, withIsLast(p.isLast)) + } + return opts +} + +func (p *Paginator) multipleOrderFieldsQuery(q *pop.Query, idField string, cols map[string]*columns.Column, quoteAndContextualize func(string) string) { + tokenParts := p.Token().Parse(idField) + idValue := tokenParts[idField] + + column, ok := cols[p.additionalColumn.name] + if !ok { + q.Where(fmt.Sprintf(`%s > ?`, quoteAndContextualize(idField)), idValue) + return + } + + quoteName := quoteAndContextualize(column.Name) + + value, ok := tokenParts[column.Name] + + if !ok { + q.Where(fmt.Sprintf(`%s > ?`, quoteAndContextualize(idField)), idValue) + return + } + + sign, keyword, err := p.additionalColumn.order.extract() + if err != nil { + q.Where(fmt.Sprintf(`%s > 
?`, quoteAndContextualize(idField)), idValue) + return + } + + q. + Where(fmt.Sprintf("(%s %s ? OR (%s = ? AND %s > ?))", quoteName, sign, quoteName, quoteAndContextualize(idField)), value, value, idValue). + Order(fmt.Sprintf("%s %s", quoteName, keyword)) + +} + +// Paginate returns a function that paginates a pop.Query. +// Usage: +// +// q := c.Where("foo = ?", foo).Scope(keysetpagination.Paginate[MyItemType](paginator)) +// +// This function works regardless of whether your type implements the Item +// interface with pointer or value receivers. To understand the type parameters, +// see this document: +// https://go.googlesource.com/proposal/+/refs/heads/master/design/43651-type-parameters.md#pointer-method-example +func Paginate[I any, PI interface { + Item + *I +}](p *Paginator) pop.ScopeFunc { + model := pop.Model{Value: new(I)} + id := model.IDField() + tableName := model.Alias() + return func(q *pop.Query) *pop.Query { + quote := q.Connection.Dialect.Quote + eid := quote(tableName) + "." + quote(id) + + quoteAndContextualize := func(name string) string { + return quote(tableName) + "." + quote(name) + } + p.multipleOrderFieldsQuery(q, id, model.Columns().Cols, quoteAndContextualize) + + return q. + Limit(p.Size() + 1). + // we always need to order by the id field last + Order(fmt.Sprintf(`%s ASC`, eid)) + } +} + +// Result removes the last item (if applicable) and returns the paginator for the next page. +// +// This function works regardless of whether your type implements the Item +// interface with pointer or value receivers. To understand the type parameters, +// see this document: +// https://go.googlesource.com/proposal/+/refs/heads/master/design/43651-type-parameters.md#pointer-method-example +func Result[I any, PI interface { + Item + *I +}](items []I, p *Paginator) ([]I, *Paginator) { + if len(items) > p.Size() { + items = items[:p.Size()] + return items, &Paginator{ + token: PI(&items[len(items)-1]).PageToken(), + defaultToken: p.defaultToken, + size: p.size, + defaultSize: p.defaultSize, + maxSize: p.maxSize, + } + } + return items, &Paginator{ + defaultToken: p.defaultToken, + size: p.size, + defaultSize: p.defaultSize, + maxSize: p.maxSize, + isLast: true, + } +} + +func WithDefaultToken(t PageToken) Option { + return func(opts *Paginator) *Paginator { + opts.defaultToken = t + return opts + } +} + +func WithDefaultSize(size int) Option { + return func(opts *Paginator) *Paginator { + opts.defaultSize = size + return opts + } +} + +func WithMaxSize(size int) Option { + return func(opts *Paginator) *Paginator { + opts.maxSize = size + return opts + } +} + +func WithToken(t PageToken) Option { + return func(opts *Paginator) *Paginator { + opts.token = t + return opts + } +} + +func WithSize(size int) Option { + return func(opts *Paginator) *Paginator { + opts.size = size + return opts + } +} + +func WithColumn(name string, order Order) Option { + return func(opts *Paginator) *Paginator { + opts.additionalColumn = columnOrdering{ + name: name, + order: order, + } + return opts + } +} + +func withIsLast(isLast bool) Option { + return func(opts *Paginator) *Paginator { + opts.isLast = isLast + return opts + } +} + +func GetPaginator(modifiers ...Option) *Paginator { + opts := &Paginator{ + // these can still be overridden by the modifiers, but they should never be unset + maxSize: DefaultMaxSize, + defaultSize: DefaultSize, + } + for _, f := range modifiers { + opts = f(opts) + } + return opts +} diff --git a/oryx/pagination/keysetpagination/parse_header.go 
b/oryx/pagination/keysetpagination/parse_header.go new file mode 100644 index 00000000000..8be68b031cc --- /dev/null +++ b/oryx/pagination/keysetpagination/parse_header.go @@ -0,0 +1,44 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "net/http" + "net/url" + + "github.com/peterhellberg/link" +) + +// PaginationResult represents a parsed result of the link HTTP header. +type PaginationResult struct { + // NextToken is the next page token. If it's empty, there is no next page. + NextToken string + + // FirstToken is the first page token. + FirstToken string +} + +// ParseHeader parses the response header's Link. +func ParseHeader(r *http.Response) *PaginationResult { + links := link.ParseResponse(r) + return &PaginationResult{ + NextToken: findRel(links, "next"), + FirstToken: findRel(links, "first"), + } +} + +func findRel(links link.Group, rel string) string { + for idx, l := range links { + if idx == rel { + parsed, err := url.Parse(l.URI) + if err != nil { + continue + } + + return parsed.Query().Get("page_token") + } + } + + return "" +} diff --git a/oryx/pagination/keysetpagination_v2/page_token.go b/oryx/pagination/keysetpagination_v2/page_token.go new file mode 100644 index 00000000000..29cc11accb6 --- /dev/null +++ b/oryx/pagination/keysetpagination_v2/page_token.go @@ -0,0 +1,117 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "encoding/json" + "time" + + "github.com/gofrs/uuid" + "github.com/pkg/errors" + "github.com/ssoready/hyrumtoken" + + "github.com/ory/herodot" +) + +var fallbackEncryptionKey = &[32]byte{} + +type ( + PageToken struct { + testNow func() time.Time + cols []Column + } + jsonPageToken = struct { + ExpiresAt time.Time `json:"e"` + Cols []jsonColumn `json:"c"` + } + jsonColumn = struct { + Name string `json:"n"` + Order Order `json:"o"` + ValueAny any `json:"v"` + ValueTime time.Time `json:"vt"` + ValueUUID uuid.UUID `json:"vu"` + ValueInt int64 `json:"vi"` + } + Column struct { + Name string + Order Order + Value any + } +) + +func (t PageToken) Columns() []Column { return t.cols } + +// Encrypt encrypts the page token using the first key in the provided keyset. +// It uses a fallback key if no keys are provided. 
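+//
+// A minimal usage sketch (the column value and the keys variable are illustrative):
+//
+//	token := NewPageToken(Column{Name: "id", Order: OrderAscending, Value: "0001"})
+//	encrypted := token.Encrypt(keys) // hand out to clients as the ?page_token= query value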
+func (t PageToken) Encrypt(keys [][32]byte) string { + key := fallbackEncryptionKey + if len(keys) > 0 { + key = &keys[0] + } + return hyrumtoken.Marshal(key, t) +} + +func (t PageToken) MarshalJSON() ([]byte, error) { + now := time.Now + if t.testNow != nil { + now = t.testNow + } + toEncode := jsonPageToken{ + ExpiresAt: now().Add(time.Hour).UTC(), + Cols: make([]jsonColumn, len(t.cols)), + } + for i, col := range t.cols { + toEncode.Cols[i] = jsonColumn{ + Name: col.Name, + Order: col.Order, + } + switch v := col.Value.(type) { + case time.Time: + toEncode.Cols[i].ValueTime = v + case uuid.UUID: + toEncode.Cols[i].ValueUUID = v + case int64: + toEncode.Cols[i].ValueInt = v + default: + toEncode.Cols[i].ValueAny = v + } + } + return json.Marshal(toEncode) +} + +var ErrPageTokenExpired = herodot.ErrBadRequest.WithReason("page token expired, do not persist page tokens") + +func (t *PageToken) UnmarshalJSON(data []byte) error { + rawToken := jsonPageToken{} + if err := json.Unmarshal(data, &rawToken); err != nil { + return err + } + t.cols = make([]Column, len(rawToken.Cols)) + for i, col := range rawToken.Cols { + t.cols[i] = Column{ + Name: col.Name, + Order: col.Order, + } + switch { + case col.ValueAny != nil: + t.cols[i].Value = col.ValueAny + case !col.ValueTime.IsZero(): + t.cols[i].Value = col.ValueTime + case col.ValueUUID != uuid.Nil: + t.cols[i].Value = col.ValueUUID + case col.ValueInt != 0: + t.cols[i].Value = col.ValueInt + } + } + now := time.Now + if t.testNow != nil { + now = t.testNow + } + if rawToken.ExpiresAt.Before(now().UTC()) { + return errors.WithStack(ErrPageTokenExpired) + } + return nil +} + +func NewPageToken(cols ...Column) PageToken { return PageToken{cols: cols} } diff --git a/oryx/pagination/keysetpagination_v2/paginator.go b/oryx/pagination/keysetpagination_v2/paginator.go new file mode 100644 index 00000000000..8a1d7de5be0 --- /dev/null +++ b/oryx/pagination/keysetpagination_v2/paginator.go @@ -0,0 +1,150 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "cmp" + "reflect" + + "github.com/jmoiron/sqlx/reflectx" +) + +type ( + Paginator struct { + token, defaultToken PageToken + size, defaultSize, maxSize int + isLast bool + } + Option func(*Paginator) +) + +const ( + DefaultSize = 100 + DefaultMaxSize = 500 +) + +var dbStructTagMapper = reflectx.NewMapper("db") + +func (p *Paginator) DefaultToken() PageToken { return p.defaultToken } +func (p *Paginator) IsLast() bool { return p.isLast } + +func (p *Paginator) PageToken() PageToken { + if p.token.cols != nil { + return p.token + } + return p.defaultToken +} + +func (p *Paginator) Size() int { + defaultSize := cmp.Or(p.defaultSize, DefaultSize) + maxSize := cmp.Or(p.maxSize, DefaultMaxSize) + + size := p.size + if size <= 0 { + size = defaultSize + } + if size > maxSize { + size = maxSize + } + + return size +} + +func (p *Paginator) ToOptions() []Option { + opts := make([]Option, 0, 6) + if p.token.cols != nil { + opts = append(opts, WithToken(p.token)) + } + if p.defaultToken.cols != nil { + opts = append(opts, WithDefaultToken(p.defaultToken)) + } + if p.size > 0 { + opts = append(opts, WithSize(p.size)) + } + if p.defaultSize != DefaultSize { + opts = append(opts, WithDefaultSize(p.defaultSize)) + } + if p.maxSize != DefaultMaxSize { + opts = append(opts, WithMaxSize(p.maxSize)) + } + if p.isLast { + opts = append(opts, withIsLast(p.isLast)) + } + return opts +} + +// Result removes the last item (if applicable) and returns 
the paginator for the next page. +func Result[I any](items []I, p *Paginator) ([]I, *Paginator) { + return ResultFunc(items, p, func(last I, colName string) any { + lastItemVal := reflect.ValueOf(last) + return dbStructTagMapper.FieldByName(lastItemVal, colName).Interface() + }) +} + +// ResultFunc removes the last item (if applicable) and returns the paginator for the next page. +// The extractor function is used to extract the column values from the last item. +func ResultFunc[I any](items []I, p *Paginator, extractor func(last I, colName string) any) ([]I, *Paginator) { + if len(items) <= p.Size() { + return items, &Paginator{ + isLast: true, + + defaultToken: p.defaultToken, + size: p.size, + defaultSize: p.defaultSize, + maxSize: p.maxSize, + } + } + + items = items[:p.Size()] + lastItem := items[len(items)-1] + + currentCols := p.PageToken().Columns() + newCols := make([]Column, len(currentCols)) + for i, col := range currentCols { + newCols[i] = Column{ + Name: col.Name, + Order: col.Order, + Value: extractor(lastItem, col.Name), + } + } + + return items, &Paginator{ + token: NewPageToken(newCols...), + defaultToken: p.defaultToken, + size: p.size, + defaultSize: p.defaultSize, + maxSize: p.maxSize, + } +} + +func WithSize(size int) Option { + return func(p *Paginator) { p.size = size } +} +func WithDefaultSize(size int) Option { + return func(p *Paginator) { p.defaultSize = size } +} +func WithMaxSize(size int) Option { + return func(p *Paginator) { p.maxSize = size } +} +func WithToken(t PageToken) Option { + return func(p *Paginator) { p.token = t } +} +func WithDefaultToken(t PageToken) Option { + return func(p *Paginator) { p.defaultToken = t } +} +func withIsLast(isLast bool) Option { + return func(p *Paginator) { p.isLast = isLast } +} + +func NewPaginator(modifiers ...Option) *Paginator { + p := &Paginator{ + // these can still be overridden by the modifiers, but they should never be unset + maxSize: DefaultMaxSize, + defaultSize: DefaultSize, + } + for _, f := range modifiers { + f(p) + } + return p +} diff --git a/oryx/pagination/keysetpagination_v2/parse_header.go b/oryx/pagination/keysetpagination_v2/parse_header.go new file mode 100644 index 00000000000..a9847f10a62 --- /dev/null +++ b/oryx/pagination/keysetpagination_v2/parse_header.go @@ -0,0 +1,35 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "net/http" + "net/url" + + "github.com/peterhellberg/link" +) + +// ParseHeader parses the response header's Link and returns the first and next page tokens. 
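+//
+// A minimal usage sketch (resp is assumed to be the *http.Response of a paginated list call):
+//
+//	first, next, isLast := ParseHeader(resp)
+//	if !isLast {
+//		// request the following page using next as the page_token query parameter
+//	}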
+func ParseHeader(r *http.Response) (first, next string, isLast bool) { + links := link.ParseResponse(r) + first, _ = findRel(links, "first") + next, hasNext := findRel(links, "next") + return first, next, !hasNext +} + +func findRel(links link.Group, rel string) (string, bool) { + for idx, l := range links { + if idx == rel { + parsed, err := url.Parse(l.URI) + if err != nil { + continue + } + q := parsed.Query() + + return q.Get("page_token"), q.Has("page_token") + } + } + + return "", false +} diff --git a/oryx/pagination/keysetpagination_v2/query_builder.go b/oryx/pagination/keysetpagination_v2/query_builder.go new file mode 100644 index 00000000000..6e63b60bf8b --- /dev/null +++ b/oryx/pagination/keysetpagination_v2/query_builder.go @@ -0,0 +1,97 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "fmt" + "strings" + + "github.com/ory/pop/v6" +) + +type Order int + +const ( + OrderAscending Order = iota + OrderDescending +) + +func (o Order) extract() (string, string) { + switch o { + case OrderAscending: + return ">", "ASC" + case OrderDescending: + return "<", "DESC" + default: + panic(fmt.Sprintf("keyset pagination: unknown order %d", o)) + } +} + +// Paginate returns a function that paginates a pop.Query. +// Usage: +// +// q := c.Where("foo = ?", foo).Scope(keysetpagination.Paginate[MyItemType](paginator)) +func Paginate[I any](p *Paginator) pop.ScopeFunc { + model := pop.Model{Value: *new(I)} + tableName := model.Alias() + return func(q *pop.Query) *pop.Query { + quoteAndContextualize := func(name string) string { + quote := q.Connection.Dialect.Quote + return quote(tableName) + "." + quote(name) + } + where, args, order := BuildWhereAndOrder(p.PageToken().Columns(), quoteAndContextualize) + // IMPORTANT: Ensures correct query logic by grouping conditions. + // Without parentheses, `WHERE otherCond AND pageCond1 OR pageCond2` would be + // evaluated as `(otherCond = ? AND pageCond1) OR pageCond2`, potentially returning + // rows that do not match `otherCond`. + // We fix it by forcing the query to be: `WHERE otherCond AND (paginationCond1 OR paginationCond2)`. + where = "(" + where + ")" + + return q. + Where(where, args...). + Order(order). + Limit(p.Size() + 1) + } +} + +func BuildWhereAndOrder(columns []Column, quote func(string) string) (string, []any, string) { + var whereBuilder, orderByBuilder, prevEqual strings.Builder + args := make([]any, 0, len(columns)*(len(columns)+1)/2) + prevEqualArgs := make([]any, 0, len(columns)) + + whereBuilder.WriteRune('(') + + for i, part := range columns { + column := quote(part.Name) + sign, keyword := part.Order.extract() + + // Build query + if i > 0 { + whereBuilder.WriteString(") OR (") + } + whereBuilder.WriteString(prevEqual.String()) + if prevEqual.Len() > 0 { + whereBuilder.WriteString(" AND ") + } + whereBuilder.WriteString(fmt.Sprintf("%s %s ?", column, sign)) + + // Build orderBy + if i > 0 { + orderByBuilder.WriteString(", ") + } + orderByBuilder.WriteString(column + " " + keyword) + + // Update prevEqual + if i > 0 { + prevEqual.WriteString(" AND ") + } + prevEqual.WriteString(fmt.Sprintf("%s = ?", column)) + prevEqualArgs = append(prevEqualArgs, part.Value) + args = append(args, prevEqualArgs...) 
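+		// Illustration: for two ascending columns c1, c2 the finished clause is "(c1 > ?) OR (c1 = ? AND c2 > ?)"
+		// with args [v1, v1, v2]; the comparison sign per column follows its Order.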
+ } + + whereBuilder.WriteRune(')') + + return whereBuilder.String(), args, orderByBuilder.String() +} diff --git a/oryx/pagination/keysetpagination_v2/request_params.go b/oryx/pagination/keysetpagination_v2/request_params.go new file mode 100644 index 00000000000..98b20ddb209 --- /dev/null +++ b/oryx/pagination/keysetpagination_v2/request_params.go @@ -0,0 +1,124 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package keysetpagination + +import ( + "cmp" + "fmt" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/pkg/errors" + "github.com/ssoready/hyrumtoken" +) + +// Pagination Request Parameters +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model keysetPaginationRequestParameters +type RequestParameters struct { + // Items per Page + // + // This is the number of items per page to return. + // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 1000 + PageSize int `json:"page_size"` + + // Next Page Token + // + // The next page token. + // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + PageToken string `json:"page_token"` +} + +// Pagination Response Header +// +// The `Link` HTTP header contains multiple links (`first`, `next`) formatted as: +// `; rel="first"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model keysetPaginationResponseHeaders +type ResponseHeaders struct { + // The Link HTTP Header + // + // The `Link` header contains a comma-delimited list of links to the following pages: + // + // - first: The first page of results. + // - next: The next page of results. + // + // Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples: + // + // ; rel="next" + // + Link string `json:"link"` +} + +// SetLinkHeader adds the Link header for the page encoded by the paginator. +// It contains links to the first and next page, if one exists. +func SetLinkHeader(w http.ResponseWriter, keys [][32]byte, u *url.URL, p *Paginator) { + size := p.Size() + link := []string{linkPart(u, "first", p.DefaultToken().Encrypt(keys), size)} + if !p.isLast { + link = append(link, linkPart(u, "next", p.PageToken().Encrypt(keys), size)) + } + w.Header().Set("Link", strings.Join(link, ",")) +} + +func linkPart(u *url.URL, rel, token string, size int) string { + q := u.Query() + q.Set("page_token", token) + q.Set("page_size", strconv.Itoa(size)) + u.RawQuery = q.Encode() + return fmt.Sprintf("<%s>; rel=%q", u.String(), rel) +} + +// ParseQueryParams extracts the pagination options from the URL query. 
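+//
+// A minimal usage sketch (keys, r, and defaultToken are assumed to exist in the calling handler):
+//
+//	opts, err := ParseQueryParams(keys, r.URL.Query())
+//	if err != nil {
+//		// respond with a bad request error
+//	}
+//	paginator := NewPaginator(append(opts, WithDefaultToken(defaultToken))...)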
+func ParseQueryParams(keys [][32]byte, q url.Values) ([]Option, error) { + var opts []Option + if t := cmp.Or(q["page_token"]...); t != "" { + raw, err := url.QueryUnescape(t) + if err != nil { + return nil, errors.WithStack(err) + } + token, err := ParsePageToken(keys, raw) + if err != nil { + return nil, err + } + opts = append(opts, WithToken(token)) + } + if s := cmp.Or(q["page_size"]...); s != "" { + size, err := strconv.Atoi(s) + if err != nil { + return nil, errors.WithStack(err) + } + opts = append(opts, WithSize(size)) + } + return opts, nil +} + +// ParsePageToken parses a page token from the given raw string using the provided keys. +// It panics if no keys are provided. +func ParsePageToken(keys [][32]byte, raw string) (t PageToken, err error) { + for i := range keys { + err = errors.WithStack(hyrumtoken.Unmarshal(&keys[i], raw, &t)) + if err == nil { + return + } + } + // as a last resort, try the fallback key + err = hyrumtoken.Unmarshal(fallbackEncryptionKey, raw, &t) + return t, errors.WithStack(err) +} diff --git a/oryx/pagination/limit.go b/oryx/pagination/limit.go new file mode 100644 index 00000000000..85d80e2593d --- /dev/null +++ b/oryx/pagination/limit.go @@ -0,0 +1,16 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package pagination provides helpers for dealing with pagination. +package pagination + +// Index uses limit, offset, and a slice's length to compute start and end indices for said slice. +func Index(limit, offset, length int) (start, end int) { + if offset > length { + return length, length + } else if limit+offset > length { + return offset, length + } + + return offset, offset + limit +} diff --git a/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_next_and_last,_but_not_previous_or_first_if_at_the_beginning.json b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_next_and_last,_but_not_previous_or_first_if_at_the_beginning.json new file mode 100644 index 00000000000..6edb5858595 --- /dev/null +++ b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_next_and_last,_but_not_previous_or_first_if_at_the_beginning.json @@ -0,0 +1,5 @@ +[ + "\u003chttp://example.com?page=1\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiI1MCIsInYiOjJ9\u0026per_page=50\u003e", + "rel=\"next\",\u003chttp://example.com?page=2\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIxMDAiLCJ2IjoyfQ\u0026per_page=50\u003e", + "rel=\"last\"" +] diff --git a/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_only_first_if_the_limits_provided_exceeds_the_number_of_clients_found.json b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_only_first_if_the_limits_provided_exceeds_the_number_of_clients_found.json new file mode 100644 index 00000000000..e8b628924a5 --- /dev/null +++ b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_only_first_if_the_limits_provided_exceeds_the_number_of_clients_found.json @@ -0,0 +1,4 @@ +[ + "\u003chttp://example.com?page=0\u0026page_size=5\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u0026per_page=5\u003e", + "rel=\"first\"" +] diff --git a/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_and_last_if_in_the_middle.json b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_and_last_if_in_the_middle.json new file mode 100644 index 00000000000..62b145733b2 --- /dev/null +++ 
b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_and_last_if_in_the_middle.json @@ -0,0 +1,7 @@ +[ + "\u003chttp://example.com?page=0\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u0026per_page=50\u003e", + "rel=\"first\",\u003chttp://example.com?page=4\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIyMDAiLCJ2IjoyfQ\u0026per_page=50\u003e", + "rel=\"next\",\u003chttp://example.com?page=2\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIxMDAiLCJ2IjoyfQ\u0026per_page=50\u003e", + "rel=\"prev\",\u003chttp://example.com?page=5\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIyNTAiLCJ2IjoyfQ\u0026per_page=50\u003e", + "rel=\"last\"" +] diff --git a/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_but_not_last_if_in_the_middle_and_no_total_was_provided.json b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_but_not_last_if_in_the_middle_and_no_total_was_provided.json new file mode 100644 index 00000000000..c8797e2b8a2 --- /dev/null +++ b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_but_not_last_if_in_the_middle_and_no_total_was_provided.json @@ -0,0 +1,6 @@ +[ + "\u003chttp://example.com?page=0\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u0026per_page=50\u003e", + "rel=\"first\",\u003chttp://example.com?page=4\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIyMDAiLCJ2IjoyfQ\u0026per_page=50\u003e", + "rel=\"next\",\u003chttp://example.com?page=2\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIxMDAiLCJ2IjoyfQ\u0026per_page=50\u003e", + "rel=\"prev\"" +] diff --git a/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous_and_first_but_not_next_or_last_if_at_the_end.json b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous_and_first_but_not_next_or_last_if_at_the_end.json new file mode 100644 index 00000000000..d7f309297da --- /dev/null +++ b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Create_previous_and_first_but_not_next_or_last_if_at_the_end.json @@ -0,0 +1,5 @@ +[ + "\u003chttp://example.com?page=0\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u0026per_page=50\u003e", + "rel=\"first\",\u003chttp://example.com?page=1\u0026page_size=50\u0026page_token=eyJvZmZzZXQiOiI1MCIsInYiOjJ9\u0026per_page=50\u003e", + "rel=\"prev\"" +] diff --git a/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Header_should_default_limit_to_1_no_limit_was_provided.json b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Header_should_default_limit_to_1_no_limit_was_provided.json new file mode 100644 index 00000000000..bf1395ccbe6 --- /dev/null +++ b/oryx/pagination/migrationpagination/.snapshots/TestPaginationHeader-Header_should_default_limit_to_1_no_limit_was_provided.json @@ -0,0 +1,7 @@ +[ + "\u003chttp://example.com?page=0\u0026page_size=1\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u0026per_page=1\u003e", + "rel=\"first\",\u003chttp://example.com?page=21\u0026page_size=1\u0026page_token=eyJvZmZzZXQiOiIyMSIsInYiOjJ9\u0026per_page=1\u003e", + "rel=\"next\",\u003chttp://example.com?page=19\u0026page_size=1\u0026page_token=eyJvZmZzZXQiOiIxOSIsInYiOjJ9\u0026per_page=1\u003e", + "rel=\"prev\",\u003chttp://example.com?page=99\u0026page_size=1\u0026page_token=eyJvZmZzZXQiOiI5OSIsInYiOjJ9\u0026per_page=1\u003e", + "rel=\"last\"" +] diff --git 
a/oryx/pagination/migrationpagination/header.go b/oryx/pagination/migrationpagination/header.go new file mode 100644 index 00000000000..47663cedb78 --- /dev/null +++ b/oryx/pagination/migrationpagination/header.go @@ -0,0 +1,92 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package migrationpagination + +// swagger:model mixedPaginationRequestParameters +type RequestParameters struct { + // Deprecated Items per Page + // + // DEPRECATED: Please use `page_token` instead. This parameter will be removed in the future. + // + // This is the number of items per page. + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 1000 + PerPage int `json:"per_page"` + + // Deprecated Pagination Page + // + // DEPRECATED: Please use `page_token` instead. This parameter will be removed in the future. + // + // This value is currently an integer, but it is not sequential. The value is not the page number, but a + // reference. The next page can be any number and some numbers might return an empty list. + // + // For example, page 2 might not follow after page 1. And even if page 3 and 5 exist, but page 4 might not exist. + // The first page can be retrieved by omitting this parameter. Following page pointers will be returned in the + // `Link` header. + // + // required: false + // in: query + Page int `json:"page"` + + // Page Size + // + // This is the number of items per page to return. For details on pagination please head over to the + // [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 500 + PageSize int `json:"page_size"` + + // Next Page Token + // + // The next page token. For details on pagination please head over to the + // [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + // default: 1 + // min: 1 + PageToken string `json:"page_token"` +} + +// Pagination Response Header +// +// The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: +// `; rel="{page}"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model mixedPagePaginationResponseHeaders +type ResponseHeaderAnnotation struct { + // The Link HTTP Header + // + // The `Link` header contains a comma-delimited list of links to the following pages: + // + // - first: The first page of results. + // - next: The next page of results. + // - prev: The previous page of results. + // - last: The last page of results. + // + // Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. + // + // The header value may look like follows: + // + // ; rel="first",; rel="next",; rel="prev",; rel="last" + Link string `json:"link"` + + // The X-Total-Count HTTP Header + // + // The `X-Total-Count` header contains the total number of items in the collection. + // + // DEPRECATED: This header will be removed eventually. Please use the `Link` header + // instead to check whether you are on the last page. 
+ TotalCount int `json:"x-total-count"` +} diff --git a/oryx/pagination/migrationpagination/pagination.go b/oryx/pagination/migrationpagination/pagination.go new file mode 100644 index 00000000000..09073ab47e6 --- /dev/null +++ b/oryx/pagination/migrationpagination/pagination.go @@ -0,0 +1,48 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package migrationpagination + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/ory/x/pagination" + "github.com/ory/x/pagination/pagepagination" + "github.com/ory/x/pagination/tokenpagination" +) + +type Paginator struct { + p *pagepagination.PagePaginator + t *tokenpagination.TokenPaginator +} + +func NewPaginator(p *pagepagination.PagePaginator, t *tokenpagination.TokenPaginator) *Paginator { + return &Paginator{p: p, t: t} +} + +func NewDefaultPaginator() *Paginator { + return &Paginator{p: new(pagepagination.PagePaginator), t: new(tokenpagination.TokenPaginator)} +} + +func (p *Paginator) ParsePagination(r *http.Request) (page, itemsPerPage int) { + if r.URL.Query().Has("page_token") || r.URL.Query().Has("page_size") { + return p.t.ParsePagination(r) + } + return p.p.ParsePagination(r) +} + +func header(u *url.URL, rel string, itemsPerPage, offset int64) string { + q := u.Query() + q.Set("page_size", fmt.Sprintf("%d", itemsPerPage)) + q.Set("page_token", tokenpagination.Encode(offset)) + q.Set("per_page", fmt.Sprintf("%d", itemsPerPage)) + q.Set("page", fmt.Sprintf("%d", offset/itemsPerPage)) + u.RawQuery = q.Encode() + return fmt.Sprintf("<%s>; rel=\"%s\"", u.String(), rel) +} + +func PaginationHeader(w http.ResponseWriter, u *url.URL, total int64, page, itemsPerPage int) { + pagination.HeaderWithFormatter(w, u, total, page, itemsPerPage, header) +} diff --git a/oryx/pagination/pagepagination/header.go b/oryx/pagination/pagepagination/header.go new file mode 100644 index 00000000000..64ab0653b9d --- /dev/null +++ b/oryx/pagination/pagepagination/header.go @@ -0,0 +1,84 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pagepagination + +// Pagination Request Parameters +// +// The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: +// `; rel="{page}"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model pagePaginationRequestParameters +type RequestParameters struct { + // Legacy Items per Page + // + // A DEPRECATED alias for `page_size`. Please transition to using `page_size` going forward. + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 1000 + PerPage int `json:"per_page"` + + // Legacy Pagination Page + // + // A DEPRECATED alias for `page_token`. Please transition to using `page_token` going forward. + // + // required: false + // in: query + // default: 1 + // min: 1 + Page int `json:"page"` + + // Items per Page + // + // This is the number of items per page to return. For details on pagination please head over to the + // [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 500 + PageSize int `json:"page_size"` + + // Next Page Token + // + // The next page token. For details on pagination please head over to the + // [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). 
+ // + // required: false + // in: query + // default: 1 + // min: 1 + PageToken string `json:"page_token"` +} + +// swagger:model pagePaginationResponseHeaders +type ResponseHeaderAnnotation struct { + // The Link HTTP Header + // + // The `Link` header contains a comma-delimited list of links to the following pages: + // + // - first: The first page of results. + // - next: The next page of results. + // - prev: The previous page of results. + // - last: The last page of results. + // + // Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. + // + // This header will include the `per_page` and `page` parameters for legacy reasons, but these parameters will eventually be removed. + // + // Example: Link: ; rel="first",; rel="next",; rel="prev",; rel="last" + Link string `json:"link"` + + // The X-Total-Count HTTP Header + // + // The `X-Total-Count` header contains the total number of items in the collection. + // + // Example: 123 + TotalCount int `json:"x-total-count"` +} diff --git a/oryx/pagination/pagepagination/pagination.go b/oryx/pagination/pagepagination/pagination.go new file mode 100644 index 00000000000..a7a370337bd --- /dev/null +++ b/oryx/pagination/pagepagination/pagination.go @@ -0,0 +1,79 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pagepagination + +import ( + "fmt" + "net/http" + "net/url" + "strconv" + + "github.com/ory/x/pagination" +) + +type PagePaginator struct { + MaxItems int + DefaultItems int +} + +func (p *PagePaginator) defaults() { + if p.MaxItems == 0 { + p.MaxItems = 1000 + } + + if p.DefaultItems == 0 { + p.DefaultItems = 250 + } +} + +// ParsePagination parses limit and page from *http.Request with given limits and defaults. +func (p *PagePaginator) ParsePagination(r *http.Request) (page, itemsPerPage int) { + p.defaults() + + if offsetParam := r.URL.Query().Get("page"); offsetParam == "" { + page = 0 + } else { + if offset, err := strconv.ParseInt(offsetParam, 10, 0); err != nil { + page = 0 + } else { + page = int(offset) + } + } + + if limitParam := r.URL.Query().Get("per_page"); limitParam == "" { + itemsPerPage = p.DefaultItems + } else { + if limit, err := strconv.ParseInt(limitParam, 10, 0); err != nil { + itemsPerPage = p.DefaultItems + } else { + itemsPerPage = int(limit) + } + } + + if itemsPerPage > p.MaxItems { + itemsPerPage = p.MaxItems + } + + if itemsPerPage < 1 { + itemsPerPage = 1 + } + + if page < 0 { + page = 0 + } + + return +} + +func header(u *url.URL, rel string, limit, offset int64) string { + q := u.Query() + q.Set("per_page", fmt.Sprintf("%d", limit)) + q.Set("page", fmt.Sprintf("%d", offset/limit)) + u.RawQuery = q.Encode() + return fmt.Sprintf("<%s>; rel=\"%s\"", u.String(), rel) +} + +func PaginationHeader(w http.ResponseWriter, u *url.URL, total int64, page, itemsPerPage int) { + pagination.HeaderWithFormatter(w, u, total, page, itemsPerPage, header) +} diff --git a/oryx/pagination/parse.go b/oryx/pagination/parse.go new file mode 100644 index 00000000000..54f051cb863 --- /dev/null +++ b/oryx/pagination/parse.go @@ -0,0 +1,48 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pagination + +import ( + "net/http" + "strconv" +) + +// Parse parses limit and offset from *http.Request with given limits and defaults. 
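+//
+// Illustrative call (editor's addition; the numeric limits and the items slice
+// are assumptions, not taken from the original file):
+//
+//	limit, offset := pagination.Parse(r, 250, 0, 1000)
+//	start, end := pagination.Index(limit, offset, len(items))
+//	page := items[start:end]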
+func Parse(r *http.Request, defaultLimit, defaultOffset, maxLimit int) (int, int) { + var offset, limit int + + if offsetParam := r.URL.Query().Get("offset"); offsetParam == "" { + offset = defaultOffset + } else { + if offset64, err := strconv.ParseInt(offsetParam, 10, 64); err != nil { + offset = defaultOffset + } else { + offset = int(offset64) + } + } + + if limitParam := r.URL.Query().Get("limit"); limitParam == "" { + limit = defaultLimit + } else { + if limit64, err := strconv.ParseInt(limitParam, 10, 64); err != nil { + limit = defaultLimit + } else { + limit = int(limit64) + } + } + + if limit > maxLimit { + limit = maxLimit + } + + if limit < 0 { + limit = 0 + } + + if offset < 0 { + offset = 0 + } + + return limit, offset +} diff --git a/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_next_and_last,_but_not_previous_or_first_if_at_the_beginning.json b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_next_and_last,_but_not_previous_or_first_if_at_the_beginning.json new file mode 100644 index 00000000000..2ef4cf13e33 --- /dev/null +++ b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_next_and_last,_but_not_previous_or_first_if_at_the_beginning.json @@ -0,0 +1,5 @@ +[ + "\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiI1MCIsInYiOjJ9\u003e", + "rel=\"next\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIxMDAiLCJ2IjoyfQ\u003e", + "rel=\"last\"" +] diff --git a/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_only_first_if_the_limits_provided_exceeds_the_number_of_clients_found.json b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_only_first_if_the_limits_provided_exceeds_the_number_of_clients_found.json new file mode 100644 index 00000000000..77b678382c5 --- /dev/null +++ b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_only_first_if_the_limits_provided_exceeds_the_number_of_clients_found.json @@ -0,0 +1,4 @@ +[ + "\u003chttp://example.com?page_size=5\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u003e", + "rel=\"first\"" +] diff --git a/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_and_last_if_in_the_middle.json b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_and_last_if_in_the_middle.json new file mode 100644 index 00000000000..821898e1eac --- /dev/null +++ b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_and_last_if_in_the_middle.json @@ -0,0 +1,7 @@ +[ + "\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u003e", + "rel=\"first\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIyMDAiLCJ2IjoyfQ\u003e", + "rel=\"next\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIxMDAiLCJ2IjoyfQ\u003e", + "rel=\"prev\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIyNTAiLCJ2IjoyfQ\u003e", + "rel=\"last\"" +] diff --git a/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_but_not_last_if_in_the_middle_and_no_total_was_provided.json b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_but_not_last_if_in_the_middle_and_no_total_was_provided.json new file mode 100644 index 00000000000..c131e472c7e --- /dev/null +++ 
b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous,_next,_first,_but_not_last_if_in_the_middle_and_no_total_was_provided.json @@ -0,0 +1,6 @@ +[ + "\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u003e", + "rel=\"first\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIyMDAiLCJ2IjoyfQ\u003e", + "rel=\"next\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIxMDAiLCJ2IjoyfQ\u003e", + "rel=\"prev\"" +] diff --git a/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous_and_first_but_not_next_or_last_if_at_the_end.json b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous_and_first_but_not_next_or_last_if_at_the_end.json new file mode 100644 index 00000000000..1fb35d54a36 --- /dev/null +++ b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Create_previous_and_first_but_not_next_or_last_if_at_the_end.json @@ -0,0 +1,5 @@ +[ + "\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u003e", + "rel=\"first\",\u003chttp://example.com?page_size=50\u0026page_token=eyJvZmZzZXQiOiI1MCIsInYiOjJ9\u003e", + "rel=\"prev\"" +] diff --git a/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Header_should_default_limit_to_1_no_limit_was_provided.json b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Header_should_default_limit_to_1_no_limit_was_provided.json new file mode 100644 index 00000000000..e5697f01328 --- /dev/null +++ b/oryx/pagination/tokenpagination/.snapshots/TestPaginationHeader-Header_should_default_limit_to_1_no_limit_was_provided.json @@ -0,0 +1,7 @@ +[ + "\u003chttp://example.com?page_size=1\u0026page_token=eyJvZmZzZXQiOiIwIiwidiI6Mn0\u003e", + "rel=\"first\",\u003chttp://example.com?page_size=1\u0026page_token=eyJvZmZzZXQiOiIyMSIsInYiOjJ9\u003e", + "rel=\"next\",\u003chttp://example.com?page_size=1\u0026page_token=eyJvZmZzZXQiOiIxOSIsInYiOjJ9\u003e", + "rel=\"prev\",\u003chttp://example.com?page_size=1\u0026page_token=eyJvZmZzZXQiOiI5OSIsInYiOjJ9\u003e", + "rel=\"last\"" +] diff --git a/oryx/pagination/tokenpagination/header.go b/oryx/pagination/tokenpagination/header.go new file mode 100644 index 00000000000..721afc26627 --- /dev/null +++ b/oryx/pagination/tokenpagination/header.go @@ -0,0 +1,67 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package tokenpagination + +// Pagination Request Parameters +// +// The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: +// `; rel="{page}"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model tokenPaginationRequestParameters +type RequestParameters struct { + // Items per Page + // + // This is the number of items per page to return. + // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). + // + // required: false + // in: query + // default: 250 + // min: 1 + // max: 500 + PageSize int `json:"page_size"` + + // Next Page Token + // + // The next page token. + // For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). 
+ // + // required: false + // in: query + // default: 1 + // min: 1 + PageToken string `json:"page_token"` +} + +// Pagination Response Header +// +// The `Link` HTTP header contains multiple links (`first`, `next`, `last`, `previous`) formatted as: +// `; rel="{page}"` +// +// For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination). +// +// swagger:model tokenPaginationResponseHeaders +type ResponseHeaders struct { + // The Link HTTP Header + // + // The `Link` header contains a comma-delimited list of links to the following pages: + // + // - first: The first page of results. + // - next: The next page of results. + // - prev: The previous page of results. + // - last: The last page of results. + // + // Pages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples: + // + // ; rel="first",; rel="next",; rel="prev",; rel="last" + // + Link string `json:"link"` + + // The X-Total-Count HTTP Header + // + // The `X-Total-Count` header contains the total number of items in the collection. + TotalCount int `json:"x-total-count"` +} diff --git a/oryx/pagination/tokenpagination/pagination.go b/oryx/pagination/tokenpagination/pagination.go new file mode 100644 index 00000000000..8ba569179ff --- /dev/null +++ b/oryx/pagination/tokenpagination/pagination.go @@ -0,0 +1,93 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package tokenpagination + +import ( + "encoding/base64" + "fmt" + "net/http" + "net/url" + "strconv" + + "github.com/pkg/errors" + "github.com/tidwall/gjson" + + "github.com/ory/x/pagination" + + "github.com/ory/herodot" +) + +func Encode(offset int64) string { + return base64.RawURLEncoding.EncodeToString([]byte(fmt.Sprintf(`{"offset":"%d","v":2}`, offset))) +} + +func decode(s string) (int, error) { + b, err := base64.RawURLEncoding.DecodeString(s) + if err != nil { + return 0, errors.WithStack(herodot.ErrBadRequest.WithWrap(err).WithReasonf("Unable to parse pagination token: %s", err)) + } + + return int(gjson.Get(string(b), "offset").Int()), nil +} + +type TokenPaginator struct { + MaxItems int + DefaultItems int +} + +func (p *TokenPaginator) defaults() { + if p.MaxItems == 0 { + p.MaxItems = 1000 + } + + if p.DefaultItems == 0 { + p.DefaultItems = 250 + } +} + +// ParsePagination parses limit and page from *http.Request with given limits and defaults. 
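+//
+// Illustrative example (editor's addition): for a request with
+// ?page_size=50 and a page_token encoding {"offset":"100","v":2}, this returns
+// page=2 and itemsPerPage=50, because the decoded offset is divided by the page size.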
+func (p *TokenPaginator) ParsePagination(r *http.Request) (page, itemsPerPage int) { + p.defaults() + + var offset int + if offsetParam := r.URL.Query().Get("page_token"); len(offsetParam) > 0 { + offset, _ = decode(offsetParam) + } + + if gotLimit, err := strconv.ParseInt(r.URL.Query().Get("page_size"), 10, 0); err == nil { + itemsPerPage = int(gotLimit) + } else { + itemsPerPage = p.DefaultItems + } + + if itemsPerPage > p.MaxItems { + itemsPerPage = p.MaxItems + } + + if itemsPerPage < 1 { + itemsPerPage = 1 + } + + if offset > 0 { + page = offset / itemsPerPage + } + + if page < 0 { + page = 0 + } + + return +} + +func header(u *url.URL, rel string, itemsPerPage, offset int64) string { + q := u.Query() + q.Set("page_size", fmt.Sprintf("%d", itemsPerPage)) + q.Set("page_token", Encode(offset)) + u.RawQuery = q.Encode() + return fmt.Sprintf("<%s>; rel=\"%s\"", u.String(), rel) +} + +func PaginationHeader(w http.ResponseWriter, u *url.URL, total int64, page, itemsPerPage int) { + pagination.HeaderWithFormatter(w, u, total, page, itemsPerPage, header) +} diff --git a/oryx/pointerx/pointerx.go b/oryx/pointerx/pointerx.go new file mode 100644 index 00000000000..b24494be6d4 --- /dev/null +++ b/oryx/pointerx/pointerx.go @@ -0,0 +1,123 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package pointerx + +// Ptr returns the input value's pointer. +func Ptr[T any](v T) *T { + return &v +} + +// Deref returns the input values de-referenced value, or zero value if nil. +func Deref[T any](p *T) T { + if p == nil { + var zero T + return zero + } + return *p +} + +// String returns the input value's pointer. +// Deprecated: use Ptr instead. +func String(s string) *string { + return &s +} + +// StringR is the reverse to String. +// Deprecated: use Deref instead. +func StringR(s *string) string { + if s == nil { + return "" + } + return *s +} + +// Int returns the input value's pointer. +// Deprecated: use Ptr instead. +func Int(s int) *int { + return &s +} + +// IntR is the reverse to Int. +// Deprecated: use Deref instead. +func IntR(s *int) int { + if s == nil { + return int(0) + } + return *s +} + +// Int32 returns the input value's pointer. +// Deprecated: use Ptr instead. +func Int32(s int32) *int32 { + return &s +} + +// Int32R is the reverse to Int32. +// Deprecated: use Deref instead. +func Int32R(s *int32) int32 { + if s == nil { + return int32(0) + } + return *s +} + +// Int64 returns the input value's pointer. +// Deprecated: use Ptr instead. +func Int64(s int64) *int64 { + return &s +} + +// Int64R is the reverse to Int64. +// Deprecated: use Deref instead. +func Int64R(s *int64) int64 { + if s == nil { + return int64(0) + } + return *s +} + +// Float32 returns the input value's pointer. +// Deprecated: use Ptr instead. +func Float32(s float32) *float32 { + return &s +} + +// Float32R is the reverse to Float32. +// Deprecated: use Deref instead. +func Float32R(s *float32) float32 { + if s == nil { + return float32(0) + } + return *s +} + +// Float64 returns the input value's pointer. +// Deprecated: use Ptr instead. +func Float64(s float64) *float64 { + return &s +} + +// Float64R is the reverse to Float64. +// Deprecated: use Deref instead. +func Float64R(s *float64) float64 { + if s == nil { + return float64(0) + } + return *s +} + +// Bool returns the input value's pointer. +// Deprecated: use Ptr instead. +func Bool(s bool) *bool { + return &s +} + +// BoolR is the reverse to Bool. +// Deprecated: use Deref instead. 
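+//
+// Illustrative equivalents (editor's addition): BoolR(p) behaves like Deref(p),
+// so Deref(Ptr(true)) == true and Deref[bool](nil) == false.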
+func BoolR(s *bool) bool { + if s == nil { + return false + } + return *s +} diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-final_status.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-final_status.txt new file mode 100644 index 00000000000..9ca270525aa --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-final_status.txt @@ -0,0 +1,212 @@ +stdout: Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied 
+20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied 
+20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Applied +20200831110752000019 identity_verifiable_address_remove_code Applied 
+20200831110752000020 identity_verifiable_address_remove_code Applied +20200831110752000021 identity_verifiable_address_remove_code Applied +20201201161451000000 credential_types_values Applied +20201201161451000001 credential_types_values Applied + +stderr: diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_but_do_not_confirm.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_but_do_not_confirm.txt new file mode 100644 index 00000000000..96811ff2e2d --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_but_do_not_confirm.txt @@ -0,0 +1,225 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied 
+20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied 
+20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied 
+20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Rollback +20200831110752000019 identity_verifiable_address_remove_code Rollback +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +The SQL statements to be executed from top to bottom are: + +------------ 20200831110752000019 - identity_verifiable_address_remove_code ------------ +UPDATE identity_verifiable_addresses SET code = substr(hex(randomblob(32)), 0, 32) WHERE code IS NULL + +------------ 20200831110752000018 - identity_verifiable_address_remove_code ------------ +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL + +Do you wish to execute this migration plan? [y/n]: ------------ WARNING ------------ +Migration aborted. + +stderr: To skip the next question use flag --yes (at your own risk). + diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_but_no_steps.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_but_no_steps.txt new file mode 100644 index 00000000000..fb2bb5e97ad --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_but_no_steps.txt @@ -0,0 +1,217 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms 
Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 
add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 
identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Applied +20200831110752000019 identity_verifiable_address_remove_code Applied +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +stderr: +There are apparently no migrations to roll back. +Please provide the --steps argument with a value larger than 0. + + diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_four_steps.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_four_steps.txt new file mode 100644 index 00000000000..112d130101a --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_four_steps.txt @@ -0,0 +1,230 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 
create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token 
Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 
recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Applied +20200831110752000019 identity_verifiable_address_remove_code Applied +20200831110752000020 identity_verifiable_address_remove_code Rollback +20200831110752000021 identity_verifiable_address_remove_code Rollback +20201201161451000000 credential_types_values Rollback +20201201161451000001 credential_types_values Rollback + +The SQL statements to be executed from top to bottom are: + +------------ 20201201161451000001 - credential_types_values ------------ + + +------------ 20201201161451000000 - credential_types_values ------------ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; + +------------ 20200831110752000021 - identity_verifiable_address_remove_code ------------ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" TEXT + +------------ 20200831110752000020 - identity_verifiable_address_remove_code ------------ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" DATETIME + +------------ SUCCESS ------------ +Successfully applied migrations! 
+ +stderr: diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_two_steps.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_two_steps.txt new file mode 100644 index 00000000000..dda2da5434a --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_down_two_steps.txt @@ -0,0 +1,225 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied 
+20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied 
+20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Rollback +20200831110752000019 identity_verifiable_address_remove_code Rollback 
+20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +The SQL statements to be executed from top to bottom are: + +------------ 20200831110752000019 - identity_verifiable_address_remove_code ------------ +UPDATE identity_verifiable_addresses SET code = substr(hex(randomblob(32)), 0, 32) WHERE code IS NULL + +------------ 20200831110752000018 - identity_verifiable_address_remove_code ------------ +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL + +Do you wish to execute this migration plan? [y/n]: ------------ SUCCESS ------------ +Successfully applied migrations! + +stderr: To skip the next question use flag --yes (at your own risk). + diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_rollbacks_up_again.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_rollbacks_up_again.txt new file mode 100644 index 00000000000..e066817d89a --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_rollbacks_up_again.txt @@ -0,0 +1,237 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied 
+20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied 
+20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 
identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Pending +20200831110752000019 identity_verifiable_address_remove_code Pending +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +The SQL statements to be executed from top to bottom are: + +------------ 20200831110752000018 - identity_verifiable_address_remove_code ------------ + + +------------ 20200831110752000019 - identity_verifiable_address_remove_code ------------ + + +------------ 20200831110752000020 - identity_verifiable_address_remove_code ------------ + + +------------ 20200831110752000021 - identity_verifiable_address_remove_code ------------ + + +------------ 20201201161451000000 - credential_types_values ------------ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password') + +------------ 20201201161451000001 - credential_types_values ------------ +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); + +Do you wish to execute this migration plan? [y/n]: ------------ SUCCESS ------------ +Successfully applied migrations! + +stderr: To skip the next question use flag --yes (at your own risk). 
+ diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_rollbacks_up_without_confirm.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_rollbacks_up_without_confirm.txt new file mode 100644 index 00000000000..6d297e396af --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_rollbacks_up_without_confirm.txt @@ -0,0 +1,219 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied 
+20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 
add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Applied +20200831110752000019 
identity_verifiable_address_remove_code Applied +20200831110752000020 identity_verifiable_address_remove_code Applied +20200831110752000021 identity_verifiable_address_remove_code Applied +20201201161451000000 credential_types_values Applied +20201201161451000001 credential_types_values Applied + +The SQL statements to be executed from top to bottom are: + +Do you wish to execute this migration plan? [y/n]: ------------ WARNING ------------ +Migration aborted. + +stderr: To skip the next question use flag --yes (at your own risk). + diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_up.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_up.txt new file mode 100644 index 00000000000..ad1f3de36e0 --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-migrate_up.txt @@ -0,0 +1,1172 @@ +stdout: The migration plan is as follows: +Version Name Status +20191100000001000000 identities Pending +20191100000001000001 identities Pending +20191100000001000002 identities Pending +20191100000001000003 identities Pending +20191100000001000004 identities Pending +20191100000001000005 identities Pending +20191100000002000000 requests Pending +20191100000002000001 requests Pending +20191100000002000002 requests Pending +20191100000002000003 requests Pending +20191100000002000004 requests Pending +20191100000003000000 sessions Pending +20191100000004000000 errors Pending +20191100000006000000 courier Pending +20191100000007000000 errors Pending +20191100000007000001 errors Pending +20191100000007000002 errors Pending +20191100000007000003 errors Pending +20191100000008000000 selfservice_verification Pending +20191100000008000001 selfservice_verification Pending +20191100000008000002 selfservice_verification Pending +20191100000008000003 selfservice_verification Pending +20191100000008000004 selfservice_verification Pending +20191100000008000005 selfservice_verification Pending +20191100000010000000 errors Pending +20191100000010000001 errors Pending +20191100000010000002 errors Pending +20191100000010000003 errors Pending +20191100000010000004 errors Pending +20191100000011000000 courier_body_type Pending +20191100000011000001 courier_body_type Pending +20191100000011000002 courier_body_type Pending +20191100000011000003 courier_body_type Pending +20191100000012000000 login_request_forced Pending +20191100000012000001 login_request_forced Pending +20191100000012000002 login_request_forced Pending +20191100000012000003 login_request_forced Pending +20200317160354000000 create_profile_request_forms Pending +20200317160354000001 create_profile_request_forms Pending +20200317160354000002 create_profile_request_forms Pending +20200317160354000003 create_profile_request_forms Pending +20200317160354000004 create_profile_request_forms Pending +20200317160354000005 create_profile_request_forms Pending +20200317160354000006 create_profile_request_forms Pending +20200401183443000000 continuity_containers Pending +20200402142539000000 rename_profile_flows Pending +20200402142539000001 rename_profile_flows Pending +20200402142539000002 rename_profile_flows Pending +20200519101057000000 create_recovery_addresses Pending +20200519101057000001 create_recovery_addresses Pending +20200519101057000002 create_recovery_addresses Pending +20200519101057000003 create_recovery_addresses Pending +20200519101057000004 create_recovery_addresses Pending +20200519101057000005 create_recovery_addresses Pending +20200519101057000006 create_recovery_addresses Pending +20200519101057000007 create_recovery_addresses Pending 
+20200601101000000000 create_messages Pending +20200601101000000001 create_messages Pending +20200601101000000002 create_messages Pending +20200601101000000003 create_messages Pending +20200605111551000000 messages Pending +20200605111551000001 messages Pending +20200605111551000002 messages Pending +20200605111551000003 messages Pending +20200605111551000004 messages Pending +20200605111551000005 messages Pending +20200605111551000006 messages Pending +20200605111551000007 messages Pending +20200605111551000008 messages Pending +20200605111551000009 messages Pending +20200605111551000010 messages Pending +20200605111551000011 messages Pending +20200607165100000000 settings Pending +20200607165100000001 settings Pending +20200607165100000002 settings Pending +20200607165100000003 settings Pending +20200607165100000004 settings Pending +20200705105359000000 rename_identities_schema Pending +20200810141652000000 flow_type Pending +20200810141652000001 flow_type Pending +20200810141652000002 flow_type Pending +20200810141652000003 flow_type Pending +20200810141652000004 flow_type Pending +20200810141652000005 flow_type Pending +20200810141652000006 flow_type Pending +20200810141652000007 flow_type Pending +20200810141652000008 flow_type Pending +20200810141652000009 flow_type Pending +20200810141652000010 flow_type Pending +20200810141652000011 flow_type Pending +20200810141652000012 flow_type Pending +20200810141652000013 flow_type Pending +20200810141652000014 flow_type Pending +20200810141652000015 flow_type Pending +20200810141652000016 flow_type Pending +20200810141652000017 flow_type Pending +20200810141652000018 flow_type Pending +20200810141652000019 flow_type Pending +20200810161022000000 flow_rename Pending +20200810161022000001 flow_rename Pending +20200810161022000002 flow_rename Pending +20200810161022000003 flow_rename Pending +20200810161022000004 flow_rename Pending +20200810161022000005 flow_rename Pending +20200810161022000006 flow_rename Pending +20200810161022000007 flow_rename Pending +20200810161022000008 flow_rename Pending +20200810162450000000 flow_fields_rename Pending +20200810162450000001 flow_fields_rename Pending +20200810162450000002 flow_fields_rename Pending +20200810162450000003 flow_fields_rename Pending +20200812124254000000 add_session_token Pending +20200812124254000001 add_session_token Pending +20200812124254000002 add_session_token Pending +20200812124254000003 add_session_token Pending +20200812124254000004 add_session_token Pending +20200812124254000005 add_session_token Pending +20200812124254000006 add_session_token Pending +20200812124254000007 add_session_token Pending +20200812160551000000 add_session_revoke Pending +20200812160551000001 add_session_revoke Pending +20200812160551000002 add_session_revoke Pending +20200812160551000003 add_session_revoke Pending +20200812160551000004 add_session_revoke Pending +20200812160551000005 add_session_revoke Pending +20200812160551000006 add_session_revoke Pending +20200812160551000007 add_session_revoke Pending +20200830121710000000 update_recovery_token Pending +20200830130642000000 add_verification_methods Pending +20200830130642000001 add_verification_methods Pending +20200830130642000002 add_verification_methods Pending +20200830130642000003 add_verification_methods Pending +20200830130642000004 add_verification_methods Pending +20200830130642000005 add_verification_methods Pending +20200830130642000006 add_verification_methods Pending +20200830130642000007 add_verification_methods Pending 
+20200830130642000008 add_verification_methods Pending +20200830130642000009 add_verification_methods Pending +20200830130642000010 add_verification_methods Pending +20200830130643000000 add_verification_methods Pending +20200830130644000000 add_verification_methods Pending +20200830130644000001 add_verification_methods Pending +20200830130645000000 add_verification_methods Pending +20200830130646000000 add_verification_methods Pending +20200830130646000001 add_verification_methods Pending +20200830130646000002 add_verification_methods Pending +20200830130646000003 add_verification_methods Pending +20200830130646000004 add_verification_methods Pending +20200830130646000005 add_verification_methods Pending +20200830130646000006 add_verification_methods Pending +20200830130646000007 add_verification_methods Pending +20200830130646000008 add_verification_methods Pending +20200830130646000009 add_verification_methods Pending +20200830130646000010 add_verification_methods Pending +20200830130646000011 add_verification_methods Pending +20200830154602000000 add_verification_token Pending +20200830154602000001 add_verification_token Pending +20200830154602000002 add_verification_token Pending +20200830154602000003 add_verification_token Pending +20200830154602000004 add_verification_token Pending +20200830172221000000 recovery_token_expires Pending +20200830172221000001 recovery_token_expires Pending +20200830172221000002 recovery_token_expires Pending +20200830172221000003 recovery_token_expires Pending +20200830172221000004 recovery_token_expires Pending +20200830172221000005 recovery_token_expires Pending +20200830172221000006 recovery_token_expires Pending +20200830172221000007 recovery_token_expires Pending +20200830172221000008 recovery_token_expires Pending +20200830172221000009 recovery_token_expires Pending +20200830172221000010 recovery_token_expires Pending +20200830172221000011 recovery_token_expires Pending +20200830172221000012 recovery_token_expires Pending +20200830172221000013 recovery_token_expires Pending +20200830172221000014 recovery_token_expires Pending +20200830172221000015 recovery_token_expires Pending +20200830172221000016 recovery_token_expires Pending +20200830172221000017 recovery_token_expires Pending +20200830172221000018 recovery_token_expires Pending +20200830172221000019 recovery_token_expires Pending +20200830172221000020 recovery_token_expires Pending +20200830172221000021 recovery_token_expires Pending +20200830172221000022 recovery_token_expires Pending +20200830172221000023 recovery_token_expires Pending +20200830172221000024 recovery_token_expires Pending +20200831110752000000 identity_verifiable_address_remove_code Pending +20200831110752000001 identity_verifiable_address_remove_code Pending +20200831110752000002 identity_verifiable_address_remove_code Pending +20200831110752000003 identity_verifiable_address_remove_code Pending +20200831110752000004 identity_verifiable_address_remove_code Pending +20200831110752000005 identity_verifiable_address_remove_code Pending +20200831110752000006 identity_verifiable_address_remove_code Pending +20200831110752000007 identity_verifiable_address_remove_code Pending +20200831110752000008 identity_verifiable_address_remove_code Pending +20200831110752000009 identity_verifiable_address_remove_code Pending +20200831110752000010 identity_verifiable_address_remove_code Pending +20200831110752000011 identity_verifiable_address_remove_code Pending +20200831110752000012 identity_verifiable_address_remove_code Pending 
+20200831110752000013 identity_verifiable_address_remove_code Pending +20200831110752000014 identity_verifiable_address_remove_code Pending +20200831110752000015 identity_verifiable_address_remove_code Pending +20200831110752000016 identity_verifiable_address_remove_code Pending +20200831110752000017 identity_verifiable_address_remove_code Pending +20200831110752000018 identity_verifiable_address_remove_code Pending +20200831110752000019 identity_verifiable_address_remove_code Pending +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +The SQL statements to be executed from top to bottom are: + +------------ 20191100000001000000 - identities ------------ +CREATE TABLE "identities" ( +"id" TEXT PRIMARY KEY, +"traits_schema_id" TEXT NOT NULL, +"traits" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) + +------------ 20191100000001000001 - identities ------------ +CREATE TABLE "identity_credential_types" ( +"id" TEXT PRIMARY KEY, +"name" TEXT NOT NULL +) + +------------ 20191100000001000002 - identities ------------ +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name) + +------------ 20191100000001000003 - identities ------------ +CREATE TABLE "identity_credentials" ( +"id" TEXT PRIMARY KEY, +"config" TEXT NOT NULL, +"identity_credential_type_id" char(36) NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade, +FOREIGN KEY (identity_credential_type_id) REFERENCES identity_credential_types (id) ON DELETE cascade +) + +------------ 20191100000001000004 - identities ------------ +CREATE TABLE "identity_credential_identifiers" ( +"id" TEXT PRIMARY KEY, +"identifier" TEXT NOT NULL, +"identity_credential_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_credential_id) REFERENCES identity_credentials (id) ON DELETE cascade +) + +------------ 20191100000001000005 - identities ------------ +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier); + +------------ 20191100000002000000 - requests ------------ +CREATE TABLE "selfservice_login_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) + +------------ 20191100000002000001 - requests ------------ +CREATE TABLE "selfservice_login_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_login_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_login_request_id) REFERENCES selfservice_login_requests (id) ON DELETE cascade +) + +------------ 20191100000002000002 - requests ------------ +CREATE TABLE "selfservice_registration_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" 
DATETIME NOT NULL +) + +------------ 20191100000002000003 - requests ------------ +CREATE TABLE "selfservice_registration_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_registration_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_registration_request_id) REFERENCES selfservice_registration_requests (id) ON DELETE cascade +) + +------------ 20191100000002000004 - requests ------------ +CREATE TABLE "selfservice_profile_management_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"form" TEXT NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); + +------------ 20191100000003000000 - sessions ------------ +CREATE TABLE "sessions" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); + +------------ 20191100000004000000 - errors ------------ +CREATE TABLE "selfservice_errors" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME NOT NULL, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); + +------------ 20191100000006000000 - courier ------------ +CREATE TABLE "courier_messages" ( +"id" TEXT PRIMARY KEY, +"type" INTEGER NOT NULL, +"status" INTEGER NOT NULL, +"body" TEXT NOT NULL, +"subject" TEXT NOT NULL, +"recipient" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); + +------------ 20191100000007000000 - errors ------------ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" TEXT NOT NULL DEFAULT ''; + +------------ 20191100000007000001 - errors ------------ + + +------------ 20191100000007000002 - errors ------------ + + +------------ 20191100000007000003 - errors ------------ + + +------------ 20191100000008000000 - selfservice_verification ------------ +CREATE TABLE "identity_verifiable_addresses" ( +"id" TEXT PRIMARY KEY, +"code" TEXT NOT NULL, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"expires_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +) + +------------ 20191100000008000001 - selfservice_verification ------------ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) + +------------ 20191100000008000002 - selfservice_verification ------------ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code) + +------------ 20191100000008000003 - selfservice_verification ------------ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value) + +------------ 20191100000008000004 - selfservice_verification ------------ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" 
(via, value) + +------------ 20191100000008000005 - selfservice_verification ------------ +CREATE TABLE "selfservice_verification_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"form" TEXT NOT NULL, +"via" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); + +------------ 20191100000010000000 - errors ------------ +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL DEFAULT '' +) + +------------ 20191100000010000001 - errors ------------ +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at, csrf_token) SELECT id, errors, seen_at, was_seen, created_at, updated_at, csrf_token FROM "selfservice_errors" + +------------ 20191100000010000002 - errors ------------ +DROP TABLE "selfservice_errors" + +------------ 20191100000010000003 - errors ------------ +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; + +------------ 20191100000010000004 - errors ------------ + + +------------ 20191100000011000000 - courier_body_type ------------ +CREATE TABLE "_courier_messages_tmp" ( +"id" TEXT PRIMARY KEY, +"type" INTEGER NOT NULL, +"status" INTEGER NOT NULL, +"body" TEXT NOT NULL, +"subject" TEXT NOT NULL, +"recipient" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) + +------------ 20191100000011000001 - courier_body_type ------------ +INSERT INTO "_courier_messages_tmp" (id, type, status, body, subject, recipient, created_at, updated_at) SELECT id, type, status, body, subject, recipient, created_at, updated_at FROM "courier_messages" + +------------ 20191100000011000002 - courier_body_type ------------ +DROP TABLE "courier_messages" + +------------ 20191100000011000003 - courier_body_type ------------ +ALTER TABLE "_courier_messages_tmp" RENAME TO "courier_messages"; + +------------ 20191100000012000000 - login_request_forced ------------ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false'; + +------------ 20191100000012000001 - login_request_forced ------------ + + +------------ 20191100000012000002 - login_request_forced ------------ + + +------------ 20191100000012000003 - login_request_forced ------------ + + +------------ 20200317160354000000 - create_profile_request_forms ------------ +CREATE TABLE "selfservice_profile_management_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_profile_management_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) + +------------ 20200317160354000001 - create_profile_request_forms ------------ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" TEXT + +------------ 20200317160354000002 - create_profile_request_forms ------------ +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests + +------------ 20200317160354000003 - create_profile_request_forms ------------ +CREATE TABLE "_selfservice_profile_management_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, 
+"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) + +------------ 20200317160354000004 - create_profile_request_forms ------------ +INSERT INTO "_selfservice_profile_management_requests_tmp" (id, request_url, issued_at, expires_at, update_successful, identity_id, created_at, updated_at, active_method) SELECT id, request_url, issued_at, expires_at, update_successful, identity_id, created_at, updated_at, active_method FROM "selfservice_profile_management_requests" + +------------ 20200317160354000005 - create_profile_request_forms ------------ + +DROP TABLE "selfservice_profile_management_requests" + +------------ 20200317160354000006 - create_profile_request_forms ------------ +ALTER TABLE "_selfservice_profile_management_requests_tmp" RENAME TO "selfservice_profile_management_requests"; + +------------ 20200401183443000000 - continuity_containers ------------ +CREATE TABLE "continuity_containers" ( +"id" TEXT PRIMARY KEY, +"identity_id" char(36), +"name" TEXT NOT NULL, +"payload" TEXT, +"expires_at" DATETIME NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); + +------------ 20200402142539000000 - rename_profile_flows ------------ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id" + +------------ 20200402142539000001 - rename_profile_flows ------------ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME TO "selfservice_settings_request_methods" + +------------ 20200402142539000002 - rename_profile_flows ------------ +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests"; + +------------ 20200519101057000000 - create_recovery_addresses ------------ +CREATE TABLE "identity_recovery_addresses" ( +"id" TEXT PRIMARY KEY, +"via" TEXT NOT NULL, +"value" TEXT NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +) + +------------ 20200519101057000001 - create_recovery_addresses ------------ +CREATE UNIQUE INDEX "identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value) + +------------ 20200519101057000002 - create_recovery_addresses ------------ +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value) + +------------ 20200519101057000003 - create_recovery_addresses ------------ +CREATE TABLE "selfservice_recovery_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"messages" TEXT, +"active_method" TEXT, +"csrf_token" TEXT NOT NULL, +"state" TEXT NOT NULL, +"recovered_identity_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (recovered_identity_id) REFERENCES identities (id) ON DELETE cascade +) + +------------ 20200519101057000004 - create_recovery_addresses ------------ +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"config" TEXT NOT 
NULL, +"selfservice_recovery_request_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_recovery_request_id) REFERENCES selfservice_recovery_requests (id) ON DELETE cascade +) + +------------ 20200519101057000005 - create_recovery_addresses ------------ +CREATE TABLE "identity_recovery_tokens" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_request_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON DELETE cascade, +FOREIGN KEY (selfservice_recovery_request_id) REFERENCES selfservice_recovery_requests (id) ON DELETE cascade +) + +------------ 20200519101057000006 - create_recovery_addresses ------------ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token) + +------------ 20200519101057000007 - create_recovery_addresses ------------ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token); + +------------ 20200601101000000000 - create_messages ------------ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" TEXT; + +------------ 20200601101000000001 - create_messages ------------ + + +------------ 20200601101000000002 - create_messages ------------ + + +------------ 20200601101000000003 - create_messages ------------ + + +------------ 20200605111551000000 - messages ------------ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" TEXT + +------------ 20200605111551000001 - messages ------------ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" TEXT + +------------ 20200605111551000002 - messages ------------ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" TEXT; + +------------ 20200605111551000003 - messages ------------ + + +------------ 20200605111551000004 - messages ------------ + + +------------ 20200605111551000005 - messages ------------ + + +------------ 20200605111551000006 - messages ------------ + + +------------ 20200605111551000007 - messages ------------ + + +------------ 20200605111551000008 - messages ------------ + + +------------ 20200605111551000009 - messages ------------ + + +------------ 20200605111551000010 - messages ------------ + + +------------ 20200605111551000011 - messages ------------ + + +------------ 20200607165100000000 - settings ------------ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "state" TEXT NOT NULL DEFAULT 'show_form' + +------------ 20200607165100000001 - settings ------------ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +"state" TEXT NOT NULL DEFAULT 'show_form', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) + +------------ 20200607165100000002 - settings ------------ +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, 
messages, state FROM "selfservice_settings_requests" + +------------ 20200607165100000003 - settings ------------ + +DROP TABLE "selfservice_settings_requests" + +------------ 20200607165100000004 - settings ------------ +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; + +------------ 20200705105359000000 - rename_identities_schema ------------ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id"; + +------------ 20200810141652000000 - flow_type ------------ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' + +------------ 20200810141652000001 - flow_type ------------ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' + +------------ 20200810141652000002 - flow_type ------------ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' + +------------ 20200810141652000003 - flow_type ------------ +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' + +------------ 20200810141652000004 - flow_type ------------ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; + +------------ 20200810141652000005 - flow_type ------------ + + +------------ 20200810141652000006 - flow_type ------------ + + +------------ 20200810141652000007 - flow_type ------------ + + +------------ 20200810141652000008 - flow_type ------------ + + +------------ 20200810141652000009 - flow_type ------------ + + +------------ 20200810141652000010 - flow_type ------------ + + +------------ 20200810141652000011 - flow_type ------------ + + +------------ 20200810141652000012 - flow_type ------------ + + +------------ 20200810141652000013 - flow_type ------------ + + +------------ 20200810141652000014 - flow_type ------------ + + +------------ 20200810141652000015 - flow_type ------------ + + +------------ 20200810141652000016 - flow_type ------------ + + +------------ 20200810141652000017 - flow_type ------------ + + +------------ 20200810141652000018 - flow_type ------------ + + +------------ 20200810141652000019 - flow_type ------------ + + +------------ 20200810161022000000 - flow_rename ------------ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods" + +------------ 20200810161022000001 - flow_rename ------------ +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows" + +------------ 20200810161022000002 - flow_rename ------------ +ALTER TABLE "selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods" + +------------ 20200810161022000003 - flow_rename ------------ +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows" + +------------ 20200810161022000004 - flow_rename ------------ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods" + +------------ 20200810161022000005 - flow_rename ------------ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows" + +------------ 20200810161022000006 - flow_rename ------------ +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods" + +------------ 20200810161022000007 - flow_rename ------------ +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows" + +------------ 20200810161022000008 - flow_rename ------------ +ALTER TABLE 
"selfservice_verification_requests" RENAME TO "selfservice_verification_flows"; + +------------ 20200810162450000000 - flow_fields_rename ------------ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id" + +------------ 20200810162450000001 - flow_fields_rename ------------ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id" + +------------ 20200810162450000002 - flow_fields_rename ------------ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id" + +------------ 20200810162450000003 - flow_fields_rename ------------ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_settings_flow_id"; + +------------ 20200812124254000000 - add_session_token ------------ +DELETE FROM sessions + +------------ 20200812124254000001 - add_session_token ------------ +ALTER TABLE "sessions" ADD COLUMN "token" TEXT + +------------ 20200812124254000002 - add_session_token ------------ +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"token" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) + +------------ 20200812124254000003 - add_session_token ------------ +INSERT INTO "_sessions_tmp" (id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token FROM "sessions" + +------------ 20200812124254000004 - add_session_token ------------ +DROP TABLE "sessions" + +------------ 20200812124254000005 - add_session_token ------------ +ALTER TABLE "_sessions_tmp" RENAME TO "sessions" + +------------ 20200812124254000006 - add_session_token ------------ +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token) + +------------ 20200812124254000007 - add_session_token ------------ +CREATE INDEX "sessions_token_idx" ON "sessions" (token); + +------------ 20200812160551000000 - add_session_revoke ------------ +ALTER TABLE "sessions" ADD COLUMN "active" NUMERIC DEFAULT 'false'; + +------------ 20200812160551000001 - add_session_revoke ------------ + + +------------ 20200812160551000002 - add_session_revoke ------------ + + +------------ 20200812160551000003 - add_session_revoke ------------ + + +------------ 20200812160551000004 - add_session_revoke ------------ + + +------------ 20200812160551000005 - add_session_revoke ------------ + + +------------ 20200812160551000006 - add_session_revoke ------------ + + +------------ 20200812160551000007 - add_session_revoke ------------ + + +------------ 20200830121710000000 - update_recovery_token ------------ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; + +------------ 20200830130642000000 - add_verification_methods ------------ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" TEXT NOT NULL DEFAULT 'show_form'; + +------------ 20200830130642000001 - add_verification_methods ------------ + + +------------ 20200830130642000002 - add_verification_methods ------------ + + +------------ 
20200830130642000003 - add_verification_methods ------------ + + +------------ 20200830130642000004 - add_verification_methods ------------ + + +------------ 20200830130642000005 - add_verification_methods ------------ + + +------------ 20200830130642000006 - add_verification_methods ------------ + + +------------ 20200830130642000007 - add_verification_methods ------------ + + +------------ 20200830130642000008 - add_verification_methods ------------ + + +------------ 20200830130642000009 - add_verification_methods ------------ + + +------------ 20200830130642000010 - add_verification_methods ------------ + + +------------ 20200830130643000000 - add_verification_methods ------------ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; + +------------ 20200830130644000000 - add_verification_methods ------------ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_verification_flow_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) + +------------ 20200830130644000001 - add_verification_methods ------------ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" TEXT; + +------------ 20200830130645000000 - add_verification_methods ------------ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; + +------------ 20200830130646000000 - add_verification_methods ------------ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"via" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +) + +------------ 20200830130646000001 - add_verification_methods ------------ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, via, csrf_token, success, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, via, csrf_token, success, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows" + +------------ 20200830130646000002 - add_verification_methods ------------ + +DROP TABLE "selfservice_verification_flows" + +------------ 20200830130646000003 - add_verification_methods ------------ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows" + +------------ 20200830130646000004 - add_verification_methods ------------ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +) + +------------ 20200830130646000005 - add_verification_methods ------------ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, 
success, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, csrf_token, success, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows" + +------------ 20200830130646000006 - add_verification_methods ------------ + +DROP TABLE "selfservice_verification_flows" + +------------ 20200830130646000007 - add_verification_methods ------------ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows" + +------------ 20200830130646000008 - add_verification_methods ------------ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +) + +------------ 20200830130646000009 - add_verification_methods ------------ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows" + +------------ 20200830130646000010 - add_verification_methods ------------ + +DROP TABLE "selfservice_verification_flows" + +------------ 20200830130646000011 - add_verification_methods ------------ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; + +------------ 20200830154602000000 - add_verification_token ------------ +CREATE TABLE "identity_verification_tokens" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"expires_at" DATETIME NOT NULL, +"issued_at" DATETIME NOT NULL, +"identity_verifiable_address_id" char(36) NOT NULL, +"selfservice_verification_flow_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_verifiable_address_id) REFERENCES identity_verifiable_addresses (id) ON DELETE cascade, +FOREIGN KEY (selfservice_verification_flow_id) REFERENCES selfservice_verification_flows (id) ON DELETE cascade +) + +------------ 20200830154602000001 - add_verification_token ------------ +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token) + +------------ 20200830154602000002 - add_verification_token ------------ +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token) + +------------ 20200830154602000003 - add_verification_token ------------ +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id) + +------------ 20200830154602000004 - add_verification_token ------------ +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON "identity_verification_tokens" (selfservice_verification_flow_id); + +------------ 20200830172221000000 - recovery_token_expires ------------ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00' + +------------ 20200830172221000001 - recovery_token_expires ------------ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" DATETIME NOT NULL DEFAULT 
'2000-01-01 00:00:00' + +------------ 20200830172221000002 - recovery_token_expires ------------ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx" + +------------ 20200830172221000003 - recovery_token_expires ------------ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx" + +------------ 20200830172221000004 - recovery_token_expires ------------ +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE +) + +------------ 20200830172221000005 - recovery_token_expires ------------ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token) + +------------ 20200830172221000006 - recovery_token_expires ------------ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token) + +------------ 20200830172221000007 - recovery_token_expires ------------ +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at FROM "identity_recovery_tokens" + +------------ 20200830172221000008 - recovery_token_expires ------------ +DROP TABLE "identity_recovery_tokens" + +------------ 20200830172221000009 - recovery_token_expires ------------ +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; + +------------ 20200830172221000010 - recovery_token_expires ------------ + + +------------ 20200830172221000011 - recovery_token_expires ------------ + + +------------ 20200830172221000012 - recovery_token_expires ------------ + + +------------ 20200830172221000013 - recovery_token_expires ------------ + + +------------ 20200830172221000014 - recovery_token_expires ------------ + + +------------ 20200830172221000015 - recovery_token_expires ------------ + + +------------ 20200830172221000016 - recovery_token_expires ------------ + + +------------ 20200830172221000017 - recovery_token_expires ------------ + + +------------ 20200830172221000018 - recovery_token_expires ------------ + + +------------ 20200830172221000019 - recovery_token_expires ------------ + + +------------ 20200830172221000020 - recovery_token_expires ------------ + + +------------ 20200830172221000021 - recovery_token_expires ------------ + + +------------ 20200830172221000022 - recovery_token_expires ------------ + + +------------ 20200830172221000023 - recovery_token_expires ------------ + + +------------ 20200830172221000024 - recovery_token_expires ------------ + + +------------ 20200831110752000000 - identity_verifiable_address_remove_code ------------ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_uq_idx" + +------------ 20200831110752000001 - identity_verifiable_address_remove_code ------------ +DROP INDEX IF EXISTS 
"identity_verifiable_addresses_code_idx" + +------------ 20200831110752000002 - identity_verifiable_address_remove_code ------------ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx" + +------------ 20200831110752000003 - identity_verifiable_address_remove_code ------------ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx" + +------------ 20200831110752000004 - identity_verifiable_address_remove_code ------------ +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"expires_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) + +------------ 20200831110752000005 - identity_verifiable_address_remove_code ------------ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value) + +------------ 20200831110752000006 - identity_verifiable_address_remove_code ------------ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value) + +------------ 20200831110752000007 - identity_verifiable_address_remove_code ------------ +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, expires_at, identity_id, created_at, updated_at) SELECT id, status, via, verified, value, verified_at, expires_at, identity_id, created_at, updated_at FROM "identity_verifiable_addresses" + +------------ 20200831110752000008 - identity_verifiable_address_remove_code ------------ + +DROP TABLE "identity_verifiable_addresses" + +------------ 20200831110752000009 - identity_verifiable_address_remove_code ------------ +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses" + +------------ 20200831110752000010 - identity_verifiable_address_remove_code ------------ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx" + +------------ 20200831110752000011 - identity_verifiable_address_remove_code ------------ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx" + +------------ 20200831110752000012 - identity_verifiable_address_remove_code ------------ +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) + +------------ 20200831110752000013 - identity_verifiable_address_remove_code ------------ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value) + +------------ 20200831110752000014 - identity_verifiable_address_remove_code ------------ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value) + +------------ 20200831110752000015 - identity_verifiable_address_remove_code ------------ +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at) SELECT id, status, via, verified, value, verified_at, 
identity_id, created_at, updated_at FROM "identity_verifiable_addresses" + +------------ 20200831110752000016 - identity_verifiable_address_remove_code ------------ + +DROP TABLE "identity_verifiable_addresses" + +------------ 20200831110752000017 - identity_verifiable_address_remove_code ------------ +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses"; + +------------ 20200831110752000018 - identity_verifiable_address_remove_code ------------ + + +------------ 20200831110752000019 - identity_verifiable_address_remove_code ------------ + + +------------ 20200831110752000020 - identity_verifiable_address_remove_code ------------ + + +------------ 20200831110752000021 - identity_verifiable_address_remove_code ------------ + + +------------ 20201201161451000000 - credential_types_values ------------ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password') + +------------ 20201201161451000001 - credential_types_values ------------ +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); + +------------ SUCCESS ------------ +Successfully applied migrations! + +stderr: diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-status_migrated.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-status_migrated.txt new file mode 100644 index 00000000000..9ca270525aa --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-status_migrated.txt @@ -0,0 +1,212 @@ +stdout: Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied 
+20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied 
+20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied 
+20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Applied +20200831110752000019 identity_verifiable_address_remove_code Applied +20200831110752000020 identity_verifiable_address_remove_code Applied +20200831110752000021 identity_verifiable_address_remove_code Applied +20201201161451000000 credential_types_values Applied +20201201161451000001 credential_types_values Applied + +stderr: diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-status_pre.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-status_pre.txt new file mode 100644 index 00000000000..1f2f2fc119b --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-status_pre.txt @@ -0,0 +1,212 @@ +stdout: Version Name Status +20191100000001000000 identities Pending +20191100000001000001 identities Pending +20191100000001000002 identities Pending +20191100000001000003 identities Pending +20191100000001000004 identities Pending +20191100000001000005 identities Pending +20191100000002000000 requests Pending +20191100000002000001 requests Pending +20191100000002000002 requests Pending +20191100000002000003 requests Pending +20191100000002000004 requests Pending +20191100000003000000 sessions Pending +20191100000004000000 errors Pending +20191100000006000000 courier Pending +20191100000007000000 errors Pending +20191100000007000001 errors Pending +20191100000007000002 errors Pending +20191100000007000003 errors Pending +20191100000008000000 selfservice_verification Pending +20191100000008000001 selfservice_verification Pending +20191100000008000002 selfservice_verification Pending +20191100000008000003 selfservice_verification Pending +20191100000008000004 selfservice_verification Pending +20191100000008000005 selfservice_verification Pending +20191100000010000000 errors Pending +20191100000010000001 errors Pending +20191100000010000002 errors Pending +20191100000010000003 errors Pending +20191100000010000004 errors Pending +20191100000011000000 courier_body_type Pending +20191100000011000001 courier_body_type Pending +20191100000011000002 courier_body_type Pending +20191100000011000003 courier_body_type Pending +20191100000012000000 login_request_forced Pending +20191100000012000001 login_request_forced Pending +20191100000012000002 login_request_forced Pending +20191100000012000003 login_request_forced Pending +20200317160354000000 create_profile_request_forms Pending 
+20200317160354000001 create_profile_request_forms Pending +20200317160354000002 create_profile_request_forms Pending +20200317160354000003 create_profile_request_forms Pending +20200317160354000004 create_profile_request_forms Pending +20200317160354000005 create_profile_request_forms Pending +20200317160354000006 create_profile_request_forms Pending +20200401183443000000 continuity_containers Pending +20200402142539000000 rename_profile_flows Pending +20200402142539000001 rename_profile_flows Pending +20200402142539000002 rename_profile_flows Pending +20200519101057000000 create_recovery_addresses Pending +20200519101057000001 create_recovery_addresses Pending +20200519101057000002 create_recovery_addresses Pending +20200519101057000003 create_recovery_addresses Pending +20200519101057000004 create_recovery_addresses Pending +20200519101057000005 create_recovery_addresses Pending +20200519101057000006 create_recovery_addresses Pending +20200519101057000007 create_recovery_addresses Pending +20200601101000000000 create_messages Pending +20200601101000000001 create_messages Pending +20200601101000000002 create_messages Pending +20200601101000000003 create_messages Pending +20200605111551000000 messages Pending +20200605111551000001 messages Pending +20200605111551000002 messages Pending +20200605111551000003 messages Pending +20200605111551000004 messages Pending +20200605111551000005 messages Pending +20200605111551000006 messages Pending +20200605111551000007 messages Pending +20200605111551000008 messages Pending +20200605111551000009 messages Pending +20200605111551000010 messages Pending +20200605111551000011 messages Pending +20200607165100000000 settings Pending +20200607165100000001 settings Pending +20200607165100000002 settings Pending +20200607165100000003 settings Pending +20200607165100000004 settings Pending +20200705105359000000 rename_identities_schema Pending +20200810141652000000 flow_type Pending +20200810141652000001 flow_type Pending +20200810141652000002 flow_type Pending +20200810141652000003 flow_type Pending +20200810141652000004 flow_type Pending +20200810141652000005 flow_type Pending +20200810141652000006 flow_type Pending +20200810141652000007 flow_type Pending +20200810141652000008 flow_type Pending +20200810141652000009 flow_type Pending +20200810141652000010 flow_type Pending +20200810141652000011 flow_type Pending +20200810141652000012 flow_type Pending +20200810141652000013 flow_type Pending +20200810141652000014 flow_type Pending +20200810141652000015 flow_type Pending +20200810141652000016 flow_type Pending +20200810141652000017 flow_type Pending +20200810141652000018 flow_type Pending +20200810141652000019 flow_type Pending +20200810161022000000 flow_rename Pending +20200810161022000001 flow_rename Pending +20200810161022000002 flow_rename Pending +20200810161022000003 flow_rename Pending +20200810161022000004 flow_rename Pending +20200810161022000005 flow_rename Pending +20200810161022000006 flow_rename Pending +20200810161022000007 flow_rename Pending +20200810161022000008 flow_rename Pending +20200810162450000000 flow_fields_rename Pending +20200810162450000001 flow_fields_rename Pending +20200810162450000002 flow_fields_rename Pending +20200810162450000003 flow_fields_rename Pending +20200812124254000000 add_session_token Pending +20200812124254000001 add_session_token Pending +20200812124254000002 add_session_token Pending +20200812124254000003 add_session_token Pending +20200812124254000004 add_session_token Pending +20200812124254000005 
add_session_token Pending +20200812124254000006 add_session_token Pending +20200812124254000007 add_session_token Pending +20200812160551000000 add_session_revoke Pending +20200812160551000001 add_session_revoke Pending +20200812160551000002 add_session_revoke Pending +20200812160551000003 add_session_revoke Pending +20200812160551000004 add_session_revoke Pending +20200812160551000005 add_session_revoke Pending +20200812160551000006 add_session_revoke Pending +20200812160551000007 add_session_revoke Pending +20200830121710000000 update_recovery_token Pending +20200830130642000000 add_verification_methods Pending +20200830130642000001 add_verification_methods Pending +20200830130642000002 add_verification_methods Pending +20200830130642000003 add_verification_methods Pending +20200830130642000004 add_verification_methods Pending +20200830130642000005 add_verification_methods Pending +20200830130642000006 add_verification_methods Pending +20200830130642000007 add_verification_methods Pending +20200830130642000008 add_verification_methods Pending +20200830130642000009 add_verification_methods Pending +20200830130642000010 add_verification_methods Pending +20200830130643000000 add_verification_methods Pending +20200830130644000000 add_verification_methods Pending +20200830130644000001 add_verification_methods Pending +20200830130645000000 add_verification_methods Pending +20200830130646000000 add_verification_methods Pending +20200830130646000001 add_verification_methods Pending +20200830130646000002 add_verification_methods Pending +20200830130646000003 add_verification_methods Pending +20200830130646000004 add_verification_methods Pending +20200830130646000005 add_verification_methods Pending +20200830130646000006 add_verification_methods Pending +20200830130646000007 add_verification_methods Pending +20200830130646000008 add_verification_methods Pending +20200830130646000009 add_verification_methods Pending +20200830130646000010 add_verification_methods Pending +20200830130646000011 add_verification_methods Pending +20200830154602000000 add_verification_token Pending +20200830154602000001 add_verification_token Pending +20200830154602000002 add_verification_token Pending +20200830154602000003 add_verification_token Pending +20200830154602000004 add_verification_token Pending +20200830172221000000 recovery_token_expires Pending +20200830172221000001 recovery_token_expires Pending +20200830172221000002 recovery_token_expires Pending +20200830172221000003 recovery_token_expires Pending +20200830172221000004 recovery_token_expires Pending +20200830172221000005 recovery_token_expires Pending +20200830172221000006 recovery_token_expires Pending +20200830172221000007 recovery_token_expires Pending +20200830172221000008 recovery_token_expires Pending +20200830172221000009 recovery_token_expires Pending +20200830172221000010 recovery_token_expires Pending +20200830172221000011 recovery_token_expires Pending +20200830172221000012 recovery_token_expires Pending +20200830172221000013 recovery_token_expires Pending +20200830172221000014 recovery_token_expires Pending +20200830172221000015 recovery_token_expires Pending +20200830172221000016 recovery_token_expires Pending +20200830172221000017 recovery_token_expires Pending +20200830172221000018 recovery_token_expires Pending +20200830172221000019 recovery_token_expires Pending +20200830172221000020 recovery_token_expires Pending +20200830172221000021 recovery_token_expires Pending +20200830172221000022 recovery_token_expires Pending 
+20200830172221000023 recovery_token_expires Pending +20200830172221000024 recovery_token_expires Pending +20200831110752000000 identity_verifiable_address_remove_code Pending +20200831110752000001 identity_verifiable_address_remove_code Pending +20200831110752000002 identity_verifiable_address_remove_code Pending +20200831110752000003 identity_verifiable_address_remove_code Pending +20200831110752000004 identity_verifiable_address_remove_code Pending +20200831110752000005 identity_verifiable_address_remove_code Pending +20200831110752000006 identity_verifiable_address_remove_code Pending +20200831110752000007 identity_verifiable_address_remove_code Pending +20200831110752000008 identity_verifiable_address_remove_code Pending +20200831110752000009 identity_verifiable_address_remove_code Pending +20200831110752000010 identity_verifiable_address_remove_code Pending +20200831110752000011 identity_verifiable_address_remove_code Pending +20200831110752000012 identity_verifiable_address_remove_code Pending +20200831110752000013 identity_verifiable_address_remove_code Pending +20200831110752000014 identity_verifiable_address_remove_code Pending +20200831110752000015 identity_verifiable_address_remove_code Pending +20200831110752000016 identity_verifiable_address_remove_code Pending +20200831110752000017 identity_verifiable_address_remove_code Pending +20200831110752000018 identity_verifiable_address_remove_code Pending +20200831110752000019 identity_verifiable_address_remove_code Pending +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +stderr: diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-status_two_steps_rolled_back.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-status_two_steps_rolled_back.txt new file mode 100644 index 00000000000..04309e14ac5 --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-status_two_steps_rolled_back.txt @@ -0,0 +1,212 @@ +stdout: Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 
login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied +20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 
add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires 
Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Applied +20200831110752000019 identity_verifiable_address_remove_code Applied +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +stderr: diff --git a/oryx/popx/.snapshots/TestMigrateSQLUp-status_two_versions_rolled_back.txt b/oryx/popx/.snapshots/TestMigrateSQLUp-status_two_versions_rolled_back.txt new file mode 100644 index 00000000000..6961f8d7283 --- /dev/null +++ b/oryx/popx/.snapshots/TestMigrateSQLUp-status_two_versions_rolled_back.txt @@ -0,0 +1,212 @@ +stdout: Version Name Status +20191100000001000000 identities Applied +20191100000001000001 identities Applied +20191100000001000002 identities Applied +20191100000001000003 identities Applied +20191100000001000004 identities Applied +20191100000001000005 identities Applied +20191100000002000000 requests Applied +20191100000002000001 requests Applied +20191100000002000002 requests Applied +20191100000002000003 requests Applied +20191100000002000004 requests Applied +20191100000003000000 sessions Applied +20191100000004000000 errors Applied +20191100000006000000 courier Applied +20191100000007000000 errors Applied +20191100000007000001 errors Applied +20191100000007000002 errors Applied +20191100000007000003 errors Applied +20191100000008000000 selfservice_verification Applied +20191100000008000001 selfservice_verification Applied +20191100000008000002 selfservice_verification Applied +20191100000008000003 selfservice_verification Applied +20191100000008000004 selfservice_verification Applied +20191100000008000005 selfservice_verification Applied +20191100000010000000 errors Applied +20191100000010000001 errors Applied +20191100000010000002 errors Applied +20191100000010000003 errors Applied +20191100000010000004 errors 
Applied +20191100000011000000 courier_body_type Applied +20191100000011000001 courier_body_type Applied +20191100000011000002 courier_body_type Applied +20191100000011000003 courier_body_type Applied +20191100000012000000 login_request_forced Applied +20191100000012000001 login_request_forced Applied +20191100000012000002 login_request_forced Applied +20191100000012000003 login_request_forced Applied +20200317160354000000 create_profile_request_forms Applied +20200317160354000001 create_profile_request_forms Applied +20200317160354000002 create_profile_request_forms Applied +20200317160354000003 create_profile_request_forms Applied +20200317160354000004 create_profile_request_forms Applied +20200317160354000005 create_profile_request_forms Applied +20200317160354000006 create_profile_request_forms Applied +20200401183443000000 continuity_containers Applied +20200402142539000000 rename_profile_flows Applied +20200402142539000001 rename_profile_flows Applied +20200402142539000002 rename_profile_flows Applied +20200519101057000000 create_recovery_addresses Applied +20200519101057000001 create_recovery_addresses Applied +20200519101057000002 create_recovery_addresses Applied +20200519101057000003 create_recovery_addresses Applied +20200519101057000004 create_recovery_addresses Applied +20200519101057000005 create_recovery_addresses Applied +20200519101057000006 create_recovery_addresses Applied +20200519101057000007 create_recovery_addresses Applied +20200601101000000000 create_messages Applied +20200601101000000001 create_messages Applied +20200601101000000002 create_messages Applied +20200601101000000003 create_messages Applied +20200605111551000000 messages Applied +20200605111551000001 messages Applied +20200605111551000002 messages Applied +20200605111551000003 messages Applied +20200605111551000004 messages Applied +20200605111551000005 messages Applied +20200605111551000006 messages Applied +20200605111551000007 messages Applied +20200605111551000008 messages Applied +20200605111551000009 messages Applied +20200605111551000010 messages Applied +20200605111551000011 messages Applied +20200607165100000000 settings Applied +20200607165100000001 settings Applied +20200607165100000002 settings Applied +20200607165100000003 settings Applied +20200607165100000004 settings Applied +20200705105359000000 rename_identities_schema Applied +20200810141652000000 flow_type Applied +20200810141652000001 flow_type Applied +20200810141652000002 flow_type Applied +20200810141652000003 flow_type Applied +20200810141652000004 flow_type Applied +20200810141652000005 flow_type Applied +20200810141652000006 flow_type Applied +20200810141652000007 flow_type Applied +20200810141652000008 flow_type Applied +20200810141652000009 flow_type Applied +20200810141652000010 flow_type Applied +20200810141652000011 flow_type Applied +20200810141652000012 flow_type Applied +20200810141652000013 flow_type Applied +20200810141652000014 flow_type Applied +20200810141652000015 flow_type Applied +20200810141652000016 flow_type Applied +20200810141652000017 flow_type Applied +20200810141652000018 flow_type Applied +20200810141652000019 flow_type Applied +20200810161022000000 flow_rename Applied +20200810161022000001 flow_rename Applied +20200810161022000002 flow_rename Applied +20200810161022000003 flow_rename Applied +20200810161022000004 flow_rename Applied +20200810161022000005 flow_rename Applied +20200810161022000006 flow_rename Applied +20200810161022000007 flow_rename Applied +20200810161022000008 flow_rename Applied 
+20200810162450000000 flow_fields_rename Applied +20200810162450000001 flow_fields_rename Applied +20200810162450000002 flow_fields_rename Applied +20200810162450000003 flow_fields_rename Applied +20200812124254000000 add_session_token Applied +20200812124254000001 add_session_token Applied +20200812124254000002 add_session_token Applied +20200812124254000003 add_session_token Applied +20200812124254000004 add_session_token Applied +20200812124254000005 add_session_token Applied +20200812124254000006 add_session_token Applied +20200812124254000007 add_session_token Applied +20200812160551000000 add_session_revoke Applied +20200812160551000001 add_session_revoke Applied +20200812160551000002 add_session_revoke Applied +20200812160551000003 add_session_revoke Applied +20200812160551000004 add_session_revoke Applied +20200812160551000005 add_session_revoke Applied +20200812160551000006 add_session_revoke Applied +20200812160551000007 add_session_revoke Applied +20200830121710000000 update_recovery_token Applied +20200830130642000000 add_verification_methods Applied +20200830130642000001 add_verification_methods Applied +20200830130642000002 add_verification_methods Applied +20200830130642000003 add_verification_methods Applied +20200830130642000004 add_verification_methods Applied +20200830130642000005 add_verification_methods Applied +20200830130642000006 add_verification_methods Applied +20200830130642000007 add_verification_methods Applied +20200830130642000008 add_verification_methods Applied +20200830130642000009 add_verification_methods Applied +20200830130642000010 add_verification_methods Applied +20200830130643000000 add_verification_methods Applied +20200830130644000000 add_verification_methods Applied +20200830130644000001 add_verification_methods Applied +20200830130645000000 add_verification_methods Applied +20200830130646000000 add_verification_methods Applied +20200830130646000001 add_verification_methods Applied +20200830130646000002 add_verification_methods Applied +20200830130646000003 add_verification_methods Applied +20200830130646000004 add_verification_methods Applied +20200830130646000005 add_verification_methods Applied +20200830130646000006 add_verification_methods Applied +20200830130646000007 add_verification_methods Applied +20200830130646000008 add_verification_methods Applied +20200830130646000009 add_verification_methods Applied +20200830130646000010 add_verification_methods Applied +20200830130646000011 add_verification_methods Applied +20200830154602000000 add_verification_token Applied +20200830154602000001 add_verification_token Applied +20200830154602000002 add_verification_token Applied +20200830154602000003 add_verification_token Applied +20200830154602000004 add_verification_token Applied +20200830172221000000 recovery_token_expires Applied +20200830172221000001 recovery_token_expires Applied +20200830172221000002 recovery_token_expires Applied +20200830172221000003 recovery_token_expires Applied +20200830172221000004 recovery_token_expires Applied +20200830172221000005 recovery_token_expires Applied +20200830172221000006 recovery_token_expires Applied +20200830172221000007 recovery_token_expires Applied +20200830172221000008 recovery_token_expires Applied +20200830172221000009 recovery_token_expires Applied +20200830172221000010 recovery_token_expires Applied +20200830172221000011 recovery_token_expires Applied +20200830172221000012 recovery_token_expires Applied +20200830172221000013 recovery_token_expires Applied +20200830172221000014 
recovery_token_expires Applied +20200830172221000015 recovery_token_expires Applied +20200830172221000016 recovery_token_expires Applied +20200830172221000017 recovery_token_expires Applied +20200830172221000018 recovery_token_expires Applied +20200830172221000019 recovery_token_expires Applied +20200830172221000020 recovery_token_expires Applied +20200830172221000021 recovery_token_expires Applied +20200830172221000022 recovery_token_expires Applied +20200830172221000023 recovery_token_expires Applied +20200830172221000024 recovery_token_expires Applied +20200831110752000000 identity_verifiable_address_remove_code Applied +20200831110752000001 identity_verifiable_address_remove_code Applied +20200831110752000002 identity_verifiable_address_remove_code Applied +20200831110752000003 identity_verifiable_address_remove_code Applied +20200831110752000004 identity_verifiable_address_remove_code Applied +20200831110752000005 identity_verifiable_address_remove_code Applied +20200831110752000006 identity_verifiable_address_remove_code Applied +20200831110752000007 identity_verifiable_address_remove_code Applied +20200831110752000008 identity_verifiable_address_remove_code Applied +20200831110752000009 identity_verifiable_address_remove_code Applied +20200831110752000010 identity_verifiable_address_remove_code Applied +20200831110752000011 identity_verifiable_address_remove_code Applied +20200831110752000012 identity_verifiable_address_remove_code Applied +20200831110752000013 identity_verifiable_address_remove_code Applied +20200831110752000014 identity_verifiable_address_remove_code Applied +20200831110752000015 identity_verifiable_address_remove_code Applied +20200831110752000016 identity_verifiable_address_remove_code Applied +20200831110752000017 identity_verifiable_address_remove_code Applied +20200831110752000018 identity_verifiable_address_remove_code Pending +20200831110752000019 identity_verifiable_address_remove_code Pending +20200831110752000020 identity_verifiable_address_remove_code Pending +20200831110752000021 identity_verifiable_address_remove_code Pending +20201201161451000000 credential_types_values Pending +20201201161451000001 credential_types_values Pending + +stderr: diff --git a/oryx/popx/cmd.go b/oryx/popx/cmd.go new file mode 100644 index 00000000000..321e876c1aa --- /dev/null +++ b/oryx/popx/cmd.go @@ -0,0 +1,270 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "context" + "fmt" + "time" + + "github.com/spf13/cobra" + + "github.com/ory/x/cmdx" + "github.com/ory/x/errorsx" + "github.com/ory/x/flagx" +) + +type MigrationProvider interface { + MigrationStatus(context.Context) (MigrationStatuses, error) + MigrateUp(context.Context) error + MigrateDown(context.Context, int) error +} + +type MigrationPreparer interface { + PrepareMigration(context.Context) error +} + +func RegisterMigrateSQLUpFlags(cmd *cobra.Command) *cobra.Command { + cmd.Flags().BoolP("yes", "y", false, "If set all confirmation requests are accepted without user interaction.") + return cmd +} + +func NewMigrateSQLUpCmd(runE func(cmd *cobra.Command, args []string) error) *cobra.Command { + return RegisterMigrateSQLUpFlags(&cobra.Command{ + Use: "up [database_url]", + Args: cobra.RangeArgs(0, 1), + Short: "Apply all pending SQL migrations", + Long: `This command applies all pending SQL migrations for Ory {{ title .Root.Name }}. 
+ +:::warning + +Before running this command, create a backup of your database. This command can be destructive as it may apply changes that cannot be easily reverted. Run this command close to the SQL instance (same VPC / same machine). + +::: + +It is recommended to review the migrations before running them. You can do this by running the command without the --yes flag: + + DSN=... {{ .CommandPath }} -e`, + Example: `Apply all pending migrations: + DSN=... {{ .CommandPath }} -e + +Apply all pending migrations: + DSN=... {{ .CommandPath }} -e --yes`, + RunE: runE, + }) +} + +func MigrateSQLUp(cmd *cobra.Command, p MigrationProvider) (err error) { + // convert migration tables + if prep, ok := p.(MigrationPreparer); ok { + if err := prep.PrepareMigration(cmd.Context()); err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not convert the migration table:\n%+v\n", err) + return cmdx.FailSilently(cmd) + } + } + + // print migration status + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "The migration plan is as follows:") + + // print migration status + status, err := p.MigrationStatus(cmd.Context()) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not get the migration status:\n%+v\n", errorsx.WithStack(err)) + return cmdx.FailSilently(cmd) + } + cmdx.PrintTable(cmd, status) + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "\nThe SQL statements to be executed from top to bottom are:\n\n") + for i := range status { + if status[i].State == Pending { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ %s - %s ------------\n", status[i].Version, status[i].Name) + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s\n\n", status[i].ContentUp) + } + } + + if !flagx.MustGetBool(cmd, "yes") { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "To skip the next question use flag --yes (at your own risk).") + if !cmdx.AskForConfirmation("Do you wish to execute this migration plan?", cmd.InOrStdin(), cmd.OutOrStdout()) { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ WARNING ------------\n") + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Migration aborted.") + return nil + } + } + + // apply migrations + if err := p.MigrateUp(cmd.Context()); err != nil { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ ERROR ------------\n") + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not apply migrations:\n%+v\n", errorsx.WithStack(err)) + return cmdx.FailSilently(cmd) + } + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ SUCCESS ------------\n") + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Successfully applied migrations!") + return nil +} + +func RegisterMigrateSQLDownFlags(cmd *cobra.Command) *cobra.Command { + cmd.Flags().BoolP("yes", "y", false, "If set all confirmation requests are accepted without user interaction.") + cmd.Flags().Int("steps", 0, "The number of migrations to roll back.") + return cmd +} + +func NewMigrateSQLDownCmd(runE func(cmd *cobra.Command, args []string) error) *cobra.Command { + return RegisterMigrateSQLDownFlags(&cobra.Command{ + Use: "down [database_url]", + Args: cobra.RangeArgs(0, 1), + Short: "Rollback the last applied SQL migrations", + Long: `This command rolls back the last applied SQL migrations for Ory {{ title .Root.Name }}. + +:::warning + +Before running this command, create a backup of your database. This command can be destructive as it may revert changes made by previous migrations. Run this command close to the SQL instance (same VPC / same machine). + +::: + +It is recommended to review the migrations before running them. 
You can do this by running the command without the --yes flag: + + DSN=... {{ .CommandPath }} -e`, + Example: `See the current migration status: + DSN=... {{ .CommandPath }} -e + +Rollback the last 10 migrations: + {{ .CommandPath }} $DSN --steps 10 + +Rollback the last 10 migrations without confirmation: + DSN=... {{ .CommandPath }} -e --yes --steps 10`, + RunE: runE, + }) +} + +func MigrateSQLDown(cmd *cobra.Command, p MigrationProvider) (err error) { + steps := flagx.MustGetInt(cmd, "steps") + if steps < 0 { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Flag --steps must be larger than 0.") + return cmdx.FailSilently(cmd) + } + + // convert migration tables + if prep, ok := p.(MigrationPreparer); ok { + if err := prep.PrepareMigration(cmd.Context()); err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not convert the migration table:\n%+v\n", err) + return cmdx.FailSilently(cmd) + } + } + + status, err := p.MigrationStatus(cmd.Context()) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not get the migration status:\n%+v\n", errorsx.WithStack(err)) + return cmdx.FailSilently(cmd) + } + + // Now we need to rollback the last `steps` migrations that have a status of "Applied": + var count int + var rollingBack int + for i := len(status) - 1; i >= 0; i-- { + if status[i].State == Applied { + count++ + if steps > 0 && count <= steps { + status[i].State = "Rollback" + rollingBack++ + } + } + } + + // print migration status + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "The migration plan is as follows:") + cmdx.PrintTable(cmd, status) + + if rollingBack < 1 { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "") + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "There are apparently no migrations to roll back.") + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Please provide the --steps argument with a value larger than 0.") + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "") + return cmdx.FailSilently(cmd) + } + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "\nThe SQL statements to be executed from top to bottom are:\n\n") + + for i := len(status) - 1; i >= 0; i-- { + if status[i].State == "Rollback" { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ %s - %s ------------\n", status[i].Version, status[i].Name) + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s\n\n", status[i].ContentDown) + } + } + + if !flagx.MustGetBool(cmd, "yes") { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "To skip the next question use flag --yes (at your own risk).") + if !cmdx.AskForConfirmation("Do you wish to execute this migration plan?", cmd.InOrStdin(), cmd.OutOrStdout()) { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ WARNING ------------\n") + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Migration aborted.") + return nil + } + } + + // apply migrations + if err := p.MigrateDown(cmd.Context(), rollingBack); err != nil { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ ERROR ------------\n") + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not apply migrations:\n%+v\n", errorsx.WithStack(err)) + return cmdx.FailSilently(cmd) + } + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "------------ SUCCESS ------------\n") + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Successfully applied migrations!") + return nil +} + +func RegisterMigrateStatusFlags(cmd *cobra.Command) *cobra.Command { + cmdx.RegisterFormatFlags(cmd.PersistentFlags()) + cmd.Flags().BoolP("read-from-env", "e", false, "If set, reads the database connection string from the environment variable DSN or config file key dsn.") + cmd.Flags().Bool("block", false, "Block until all migrations 
have been applied") + return cmd +} + +func NewMigrateSQLStatusCmd(runE func(cmd *cobra.Command, args []string) error) *cobra.Command { + return RegisterMigrateStatusFlags(&cobra.Command{ + Use: "status [database_url]", + Short: "Display the current migration status", + Long: `This command shows the current migration status for Ory {{ title .Root.Name }}. + +You can use this command to check which migrations have been applied and which are pending. + +To block until all migrations are applied, use the --block flag: + + DSN=... {{ .CommandPath }} -e --block`, + Example: `See the current migration status: + DSN=... {{ .CommandPath }} -e + +Block until all migrations are applied: + DSN=... {{ .CommandPath }} -e --block`, + RunE: runE, + }) +} + +func MigrateStatus(cmd *cobra.Command, p MigrationProvider) (err error) { + block := flagx.MustGetBool(cmd, "block") + ctx := cmd.Context() + s, err := p.MigrationStatus(ctx) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not get migration status: %+v\n", err) + return cmdx.FailSilently(cmd) + } + + for block && s.HasPending() { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "Waiting for migrations to finish...\n") + for _, m := range s { + if m.State == Pending { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " - %s\n", m.Name) + } + } + time.Sleep(time.Second) + s, err = p.MigrationStatus(ctx) + if err != nil { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Could not get migration status: %+v\n", err) + return cmdx.FailSilently(cmd) + } + } + + cmdx.PrintTable(cmd, s) + return nil +} diff --git a/oryx/popx/db_columns.go b/oryx/popx/db_columns.go new file mode 100644 index 00000000000..65f17d8ad9d --- /dev/null +++ b/oryx/popx/db_columns.go @@ -0,0 +1,45 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "fmt" + + "github.com/ory/pop/v6" +) + +func DBColumns[T any](quoter Quoter) string { + return (&pop.Model{Value: new(T)}).Columns().QuotedString(quoter) +} + +// IndexHint returns the table name including the index hint, if the database +// supports it. 
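+// For example (illustrative table and index names), IndexHint(c, "identities", "identities_id_idx")
+// returns "identities@identities_id_idx" when the connection dialect is CockroachDB, and the plain
+// table name "identities" on every other dialect.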
+func IndexHint(conn *pop.Connection, table string, index string) string { + if conn.Dialect.Name() == "cockroach" { + return table + "@" + index + } + return table +} + +func DBColumnsExcluding[T any](quoter Quoter, exclude ...string) string { + cols := (&pop.Model{Value: new(T)}).Columns() + for _, e := range exclude { + cols.Remove(e) + } + return cols.QuotedString(quoter) +} + +type ( + AliasQuoter struct { + Alias string + Quoter Quoter + } + Quoter interface { + Quote(key string) string + } +) + +func (pq *AliasQuoter) Quote(key string) string { + return fmt.Sprintf("%s.%s", pq.Quoter.Quote(pq.Alias), pq.Quoter.Quote(key)) +} diff --git a/oryx/popx/loggers.go b/oryx/popx/loggers.go new file mode 100644 index 00000000000..d9921905672 --- /dev/null +++ b/oryx/popx/loggers.go @@ -0,0 +1,53 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "fmt" + "testing" + + "github.com/ory/pop/v6" + "github.com/ory/pop/v6/logging" +) + +func formatter(lvl logging.Level, s string, args ...interface{}) string { + if pop.Debug == false { + return "" + } + + if lvl == logging.SQL { + if len(args) > 0 { + xargs := make([]string, len(args)) + for i, a := range args { + switch a.(type) { + case string: + xargs[i] = fmt.Sprintf("%q", a) + default: + xargs[i] = fmt.Sprintf("%v", a) + } + } + s = fmt.Sprintf("%s - %s | %s", lvl, s, xargs) + } else { + s = fmt.Sprintf("%s - %s", lvl, s) + } + } else { + s = fmt.Sprintf(s, args...) + s = fmt.Sprintf("%s - %s", lvl, s) + } + return s +} + +func TestingLogger(t testing.TB) func(lvl logging.Level, s string, args ...interface{}) { + return func(lvl logging.Level, s string, args ...interface{}) { + if line := formatter(lvl, s, args...); len(line) > 0 { + t.Log(line) + } + } +} + +func NullLogger() func(lvl logging.Level, s string, args ...interface{}) { + return func(lvl logging.Level, s string, args ...interface{}) { + // do nothing + } +} diff --git a/oryx/popx/match.go b/oryx/popx/match.go new file mode 100644 index 00000000000..080e7222be5 --- /dev/null +++ b/oryx/popx/match.go @@ -0,0 +1,69 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "regexp" + + "github.com/pkg/errors" + + "github.com/ory/pop/v6" +) + +var MigrationFileRegexp = regexp.MustCompile( + `^(\d+)_([^.]+)(\.[a-z0-9]+)?(\.autocommit)?\.(up|down)\.(sql)$`, +) + +// match holds the information parsed from a migration filename. +type match struct { + Version string + Name string + DBType string + Direction string + Type string + Autocommit bool +} + +// parseMigrationFilename parses a migration filename. +func parseMigrationFilename(filename string) (*match, error) { + matches := MigrationFileRegexp.FindAllStringSubmatch(filename, -1) + if len(matches) == 0 { + return nil, nil + } + m := matches[0] + + var autocommit bool + var dbType string + if m[3] == ".autocommit" { + // A special case where autocommit group moves forward to the 3rd index. 
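+ // Illustrative example: for a file named "20240101000000_foo.autocommit.up.sql" the optional
+ // dialect group (\.[a-z0-9]+)? greedily captures ".autocommit", leaving group 4 empty, so the
+ // migration is treated as dialect-independent ("all") with autocommit enabled. A dialect-specific
+ // name such as "20240101000000_foo.sqlite3.autocommit.up.sql" is instead handled by the m[4] check below.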
+ autocommit = true + dbType = "all" + } else if m[3] == "" { + dbType = "all" + } else { + dbType = pop.CanonicalDialect(m[3][1:]) + if !pop.DialectSupported(dbType) { + return nil, errors.Errorf("unsupported dialect %s", dbType) + } + } + + if m[6] == "fizz" && dbType != "all" { + return nil, errors.Errorf("invalid database type %q, expected \"all\" because fizz is database type independent", dbType) + } + + if m[4] == ".autocommit" { + autocommit = true + } else if m[4] != "" { + return nil, errors.Errorf("invalid autocommit flag %q", m[4]) + } + + return &match{ + Version: m[1], + Name: m[2], + DBType: dbType, + Autocommit: autocommit, + Direction: m[5], + Type: m[6], + }, nil +} diff --git a/oryx/popx/migration_box.go b/oryx/popx/migration_box.go new file mode 100644 index 00000000000..5fde4ef61e9 --- /dev/null +++ b/oryx/popx/migration_box.go @@ -0,0 +1,305 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "database/sql" + "fmt" + "io/fs" + "path" + "regexp" + "slices" + "sort" + "strings" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + + "github.com/ory/pop/v6" + + "github.com/ory/x/logrusx" +) + +type ( + // MigrationBox is a embed migration box. + MigrationBox struct { + c *pop.Connection + migrationsUp Migrations + migrationsDown Migrations + perMigrationTimeout time.Duration + dumpMigrations bool + l *logrusx.Logger + migrationContent MigrationContent + } + MigrationContent func(mf Migration, c *pop.Connection, r []byte, usingTemplate bool) (string, error) + MigrationBoxOption func(*MigrationBox) +) + +func WithTemplateValues(v map[string]interface{}) MigrationBoxOption { + return func(m *MigrationBox) { + m.migrationContent = ParameterizedMigrationContent(v) + } +} + +func WithMigrationContentMiddleware(middleware func(content string, err error) (string, error)) MigrationBoxOption { + return func(m *MigrationBox) { + prev := m.migrationContent + m.migrationContent = func(mf Migration, c *pop.Connection, r []byte, usingTemplate bool) (string, error) { + return middleware(prev(mf, c, r, usingTemplate)) + } + } +} + +// WithGoMigrations adds migrations that have a custom migration runner. +// TEST THEM THOROUGHLY! +// It will be very hard to fix a buggy migration. +func WithGoMigrations(migrations Migrations) MigrationBoxOption { + return func(mb *MigrationBox) { + for _, m := range migrations { + switch m.Direction { + case "up": + mb.migrationsUp = append(mb.migrationsUp, m) + case "down": + mb.migrationsDown = append(mb.migrationsDown, m) + default: + panic(fmt.Sprintf("unknown migration direction %q for %q", m.Direction, m.Version)) + } + } + } +} + +func WithPerMigrationTimeout(timeout time.Duration) MigrationBoxOption { + return func(m *MigrationBox) { + m.perMigrationTimeout = timeout + } +} + +func WithDumpMigrations() MigrationBoxOption { + return func(m *MigrationBox) { + m.dumpMigrations = true + } +} + +// WithTestdata adds testdata to the migration box. 
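+// Test data files are matched against testdataPattern below; illustrative names would be
+// "20200101000000_testdata.sql" (all dialects) or "20200101000000_testdata.cockroach.sql"
+// (dialect specific). Their version gets a "9" appended so they sort after the migrations
+// that share the same version prefix.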
+func WithTestdata(t *testing.T, testdata fs.FS) MigrationBoxOption { + testdataPattern := regexp.MustCompile(`^(\d+)_testdata(|\.[a-zA-Z0-9]+).sql$`) + return func(m *MigrationBox) { + require.NoError(t, fs.WalkDir(testdata, ".", func(path string, info fs.DirEntry, err error) error { + if err != nil { + return errors.WithStack(err) + } + if !info.Type().IsRegular() { + t.Logf("skipping testdata entry that is not a file: %s", path) + return nil + } + + match := testdataPattern.FindStringSubmatch(info.Name()) + if len(match) != 2 && len(match) != 3 { + t.Logf(`WARNING! Found a test migration which does not match the test data pattern: %s`, info.Name()) + return nil + } + + version := match[1] + flavor := "all" + if len(match) == 3 && len(match[2]) > 0 { + flavor = pop.CanonicalDialect(strings.TrimPrefix(match[2], ".")) + } + + //t.Logf("Found test migration \"%s\" (%s, %+v): %s", flavor, match, err, info.Name()) + + m.migrationsUp = append(m.migrationsUp, Migration{ + Version: version + "9", // run testdata after version + Path: path, + Name: info.Name(), + DBType: flavor, + Direction: "up", + Type: "sql", + Runner: func(m Migration, c *pop.Connection) error { + b, err := fs.ReadFile(testdata, m.Path) + if err != nil { + return errors.WithStack(err) + } + if isMigrationEmpty(string(b)) { + return nil + } + _, err = c.Store.SQLDB().Exec(string(b)) + return errors.WithStack(err) + }, + }) + + m.migrationsDown = append(m.migrationsDown, Migration{ + Version: version + "9", // run testdata after version + Path: path, + Name: info.Name(), + DBType: flavor, + Direction: "down", + Type: "sql", + Runner: func(m Migration, _ *pop.Connection) error { return nil }, + }) + + return nil + })) + } +} + +var emptySQLReplace = regexp.MustCompile(`(?m)^(\s*--.*|\s*)$`) + +func isMigrationEmpty(content string) bool { + return len(strings.ReplaceAll(emptySQLReplace.ReplaceAllString(content, ""), "\n", "")) == 0 +} + +type queryExecutor interface { + Exec(query string, args ...any) (sql.Result, error) +} + +// NewMigrationBox creates a new migration box. +func NewMigrationBox(dir fs.FS, c *pop.Connection, l *logrusx.Logger, opts ...MigrationBoxOption) (*MigrationBox, error) { + mb := &MigrationBox{ + c: c, + l: l, + migrationContent: ParameterizedMigrationContent(nil), + } + + for _, o := range opts { + o(mb) + } + + txRunner := func(b []byte) func(Migration, *pop.Connection) error { + return func(mf Migration, c *pop.Connection) error { + content, err := mb.migrationContent(mf, c, b, true) + if err != nil { + return errors.Wrapf(err, "error processing %s", mf.Path) + } + if isMigrationEmpty(content) { + l.WithField("migration", mf.Path).Trace("This is usually ok - ignoring migration because content is empty. 
This is ok!") + return nil + } + + var q queryExecutor = c.Store.SQLDB() + if c.TX != nil { + q = c.TX + } + + if _, err = q.Exec(content); err != nil { + return errors.Wrapf(err, "error executing %s, sql: %s", mf.Path, content) + } + return nil + } + } + + err := mb.findMigrations(dir, txRunner) + if err != nil { + return mb, err + } + + if err := mb.check(); err != nil { + return nil, err + } + return mb, nil +} + +func (mb *MigrationBox) findMigrations( + dir fs.FS, + runner func([]byte) func(m Migration, c *pop.Connection) error, +) error { + err := fs.WalkDir(dir, ".", func(p string, info fs.DirEntry, err error) error { + if err != nil { + return errors.WithStack(err) + } + + if !info.Type().IsRegular() { + mb.l.Tracef("ignoring non file: %s", info.Name()) + return nil + } + + if path.Ext(info.Name()) != ".sql" { + mb.l.Tracef("ignoring non SQL file: %s", info.Name()) + return nil + } + + details, err := parseMigrationFilename(info.Name()) + if err != nil { + if strings.HasPrefix(err.Error(), "unsupported dialect") { + mb.l.Tracef("This is usually ok - ignoring migration file %s because dialect is not supported: %s", info.Name(), err.Error()) + return nil + } + return errors.WithStack(err) + } + + if details == nil { + return errors.Errorf("Found a migration file that does not match the file pattern: filename=%s pattern=%s", info.Name(), MigrationFileRegexp) + } + + content, err := fs.ReadFile(dir, p) + if err != nil { + return errors.WithStack(err) + } + + mf := Migration{ + Path: p, + Version: details.Version, + Name: details.Name, + DBType: details.DBType, + Direction: details.Direction, + Type: details.Type, + Content: string(content), + Autocommit: details.Autocommit, + } + + mf.Runner = runner(content) + + switch details.Direction { + case "up": + mb.migrationsUp = append(mb.migrationsUp, mf) + case "down": + mb.migrationsDown = append(mb.migrationsDown, mf) + default: + return errors.Errorf("unknown migration direction %q for %q", details.Direction, info.Name()) + } + return nil + }) + + // Sort descending. + sort.Sort(mb.migrationsDown) + slices.Reverse(mb.migrationsDown) + + // Sort ascending. + sort.Sort(mb.migrationsUp) + + return errors.WithStack(err) +} + +// hasDownMigrationWithVersion checks if there is a migration with the given +// version. +func (mb *MigrationBox) hasDownMigrationWithVersion(version string) bool { + for _, down := range mb.migrationsDown { + if version == down.Version { + return true + } + } + return false +} + +// check checks that every "up" migration has a corresponding "down" migration. 
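+// It also validates, via Migration.Valid, that every up and down migration has a Runner.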
+func (mb *MigrationBox) check() error { + for _, up := range mb.migrationsUp { + if !mb.hasDownMigrationWithVersion(up.Version) { + return errors.Errorf("migration %s has no corresponding down migration", up.Version) + } + } + + for _, n := range mb.migrationsUp { + if err := n.Valid(); err != nil { + return errors.WithStack(err) + } + } + for _, n := range mb.migrationsDown { + if err := n.Valid(); err != nil { + return errors.WithStack(err) + } + } + return nil +} diff --git a/oryx/popx/migration_content.go b/oryx/popx/migration_content.go new file mode 100644 index 00000000000..9fc47be7e9c --- /dev/null +++ b/oryx/popx/migration_content.go @@ -0,0 +1,53 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "bytes" + "text/template" + + "github.com/pkg/errors" + + "github.com/ory/pop/v6" +) + +func ParameterizedMigrationContent(params map[string]interface{}) func(mf Migration, c *pop.Connection, r []byte, usingTemplate bool) (string, error) { + return func(mf Migration, c *pop.Connection, b []byte, usingTemplate bool) (string, error) { + content := "" + if usingTemplate { + t := template.New("migration") + t.Funcs(SQLTemplateFuncs) + t, err := t.Parse(string(b)) + if err != nil { + return "", errors.Wrapf(err, "could not parse template %s", mf.Path) + } + var bb bytes.Buffer + err = t.Execute(&bb, struct { + IsSQLite bool + IsCockroach bool + IsMySQL bool + IsMariaDB bool + IsPostgreSQL bool + DialectDetails *pop.ConnectionDetails + Parameters map[string]interface{} + }{ + IsSQLite: c.Dialect.Name() == "sqlite3", + IsCockroach: c.Dialect.Name() == "cockroach", + IsMySQL: c.Dialect.Name() == "mysql", + IsMariaDB: c.Dialect.Name() == "mariadb", + IsPostgreSQL: c.Dialect.Name() == "postgres", + DialectDetails: c.Dialect.Details(), + Parameters: params, + }) + if err != nil { + return "", errors.Wrapf(err, "could not execute migration template %s", mf.Path) + } + content = bb.String() + } else { + content = string(b) + } + + return content, nil + } +} diff --git a/oryx/popx/migration_info.go b/oryx/popx/migration_info.go new file mode 100644 index 00000000000..ab8a8c09e1b --- /dev/null +++ b/oryx/popx/migration_info.go @@ -0,0 +1,102 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "sort" + "strings" + + "github.com/pkg/errors" + + "github.com/ory/pop/v6" +) + +// Migration handles the data for a given database migration +type Migration struct { + // Path to the migration (./migrations/123_create_widgets.up.sql) + Path string + // Version of the migration (123) + Version string + // Name of the migration (create_widgets) + Name string + // Direction of the migration (up|down) + Direction string + // Type of migration (sql|go) + Type string + // DB type (all|postgres|mysql...) + DBType string + // Runner function to run/execute the migration. Will be wrapped in a + // database transaction. 
Mutually exclusive with RunnerNoTx + Runner func(Migration, *pop.Connection) error + // Content is the raw content of the migration file + Content string + // Autocommit indicates whether the migration should be run in autocommit mode + Autocommit bool +} + +func (m Migration) Valid() error { + if m.Runner == nil { + return errors.Errorf("no runner defined for %s", m.Path) + } + + return nil +} + +// Migrations is a collection of Migration +type Migrations []Migration + +func (mfs Migrations) Len() int { return len(mfs) } +func (mfs Migrations) Less(i, j int) bool { return compareMigration(mfs[i], mfs[j]) < 0 } +func (mfs Migrations) Swap(i, j int) { mfs[i], mfs[j] = mfs[j], mfs[i] } + +func compareMigration(a, b Migration) int { + if a.Version != b.Version { + return strings.Compare(a.Version, b.Version) + } + // Force "all" to be greater. + if a.DBType == "all" && b.DBType != "all" { + return 1 + } else if a.DBType != "all" && b.DBType == "all" { + return -1 + } + return strings.Compare(a.DBType, b.DBType) +} + +func (mfs Migrations) sortAndFilter(dialect string) Migrations { + usable := make(map[string]Migration, len(mfs)) + for _, v := range mfs { + if v.DBType == dialect { + usable[v.Version] = v + } else if v.DBType == "all" { + // Add "all" only if we do not have a more specific migration for the dialect. + // If a more specific migration is found later, it will override this one. + if _, ok := usable[v.Version]; !ok { + usable[v.Version] = v + } + } + } + + filtered := make(Migrations, 0, len(usable)) + for k := range usable { + filtered = append(filtered, usable[k]) + } + sort.Sort(filtered) + return filtered +} + +func (mfs Migrations) find(version, dbType string) *Migration { + var candidate *Migration + for _, m := range mfs { + if m.Version == version { + switch m.DBType { + case "all": + // there might still be a more specific migration for the dbType + candidate = &m + case dbType: + return &m + } + } + } + return candidate +} diff --git a/oryx/popx/migrator.go b/oryx/popx/migrator.go new file mode 100644 index 00000000000..1360234342b --- /dev/null +++ b/oryx/popx/migrator.go @@ -0,0 +1,537 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "context" + "fmt" + "math" + "os" + "regexp" + "slices" + "strings" + "time" + + "github.com/cockroachdb/cockroach-go/v2/crdb" + "github.com/pkg/errors" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/pop/v6" + "github.com/ory/x/cmdx" + "github.com/ory/x/logrusx" + "github.com/ory/x/otelx" + "github.com/ory/x/sqlcon" +) + +const ( + Pending = "Pending" + Applied = "Applied" + tracingComponent = "github.com/ory/x/popx" +) + +func (mb *MigrationBox) shouldNotUseTransaction(m Migration) bool { + return m.Autocommit || mb.c.Dialect.Name() == "cockroach" || mb.c.Dialect.Name() == "mysql" +} + +// Up runs pending "up" migrations and applies them to the database. +func (mb *MigrationBox) Up(ctx context.Context) error { + _, err := mb.UpTo(ctx, 0) + return errors.WithStack(err) +} + +// UpTo runs up to step "up" migrations and applies them to the database. +// If step <= 0 all pending migrations are run. 
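+// For example, UpTo(ctx, 1) applies at most the next pending migration, while UpTo(ctx, 0)
+// behaves like Up and applies every migration that is still pending.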
+func (mb *MigrationBox) UpTo(ctx context.Context, step int) (applied int, err error) { + ctx, span := startSpan(ctx, MigrationUpOpName, trace.WithAttributes(attribute.Int("step", step))) + defer otelx.End(span, &err) + + c := mb.c.WithContext(ctx) + err = mb.exec(ctx, func() error { + mtn := sanitizedMigrationTableName(c) + mfs := mb.migrationsUp.sortAndFilter(c.Dialect.Name()) + for _, mi := range mfs { + l := mb.l.WithField("version", mi.Version).WithField("migration_name", mi.Name).WithField("migration_file", mi.Path) + + appliedMigrations := make([]string, 0, 2) + legacyVersion := mi.Version + if len(legacyVersion) > 14 { + legacyVersion = legacyVersion[:14] + } + err := c.RawQuery(fmt.Sprintf("SELECT version FROM %s WHERE version IN (?, ?)", mtn), mi.Version, legacyVersion).All(&appliedMigrations) + if err != nil { + return errors.Wrapf(err, "problem checking for migration version %s", mi.Version) + } + + if slices.Contains(appliedMigrations, mi.Version) { + l.Debug("Migration has already been applied, skipping.") + continue + } + + if slices.Contains(appliedMigrations, legacyVersion) { + l.WithField("legacy_version", legacyVersion).WithField("migration_table", mtn).Debug("Migration has already been applied in a legacy migration run. Updating version in migration table.") + if err := mb.isolatedTransaction(ctx, "init-migrate", func(conn *pop.Connection) error { + // We do not want to remove the legacy migration version or subsequent migrations might be applied twice. + // + // Do not activate the following - it is just for reference. + // + // if _, err := tx.Store.Exec(fmt.Sprintf("DELETE FROM %s WHERE version = ?", mtn), legacyVersion); err != nil { + // return errors.Wrapf(err, "problem removing legacy version %s", mi.Version) + // } + + // #nosec G201 - mtn is a system-wide const + err := conn.RawQuery(fmt.Sprintf("INSERT INTO %s (version) VALUES (?)", mtn), mi.Version).Exec() + return errors.Wrapf(err, "problem inserting migration version %s", mi.Version) + }); err != nil { + return errors.WithStack(err) + } + continue + } + + l.Info("Migration has not yet been applied, running migration.") + + if err := mi.Valid(); err != nil { + return errors.WithStack(err) + } + + noTx := mb.shouldNotUseTransaction(mi) + if noTx { + l.Info("NOT running migrations inside a transaction") + if err := mi.Runner(mi, c); err != nil { + return errors.WithStack(err) + } + + // #nosec G201 - mtn is a system-wide const + if err := c.RawQuery(fmt.Sprintf("INSERT INTO %s (version) VALUES (?)", mtn), mi.Version).Exec(); err != nil { + return errors.Wrapf(err, "problem inserting migration version %s. YOUR DATABASE MAY BE IN AN INCONSISTENT STATE! 
MANUAL INTERVENTION REQUIRED!", mi.Version) + } + } else { + if err := mb.isolatedTransaction(ctx, "up", func(conn *pop.Connection) error { + if err := mi.Runner(mi, conn); err != nil { + return errors.WithStack(err) + } + + // #nosec G201 - mtn is a system-wide const + if err := conn.RawQuery(fmt.Sprintf("INSERT INTO %s (version) VALUES (?)", mtn), mi.Version).Exec(); err != nil { + return errors.Wrapf(err, "problem inserting migration version %s", mi.Version) + } + return nil + }); err != nil { + return errors.WithStack(err) + } + } + + l.WithField("autocommit", noTx).Infof("> %s applied successfully", mi.Name) + applied++ + if step > 0 && applied >= step { + break + } + } + if applied == 0 { + mb.l.Infof("Migrations already up to date, nothing to apply") + } else { + mb.l.Infof("Successfully applied %d migrations.", applied) + } + return nil + }) + return applied, errors.WithStack(err) +} + +// Down runs pending "down" migrations and rolls back the +// database by the specified number of steps. +// If step <= 0, all down migrations are run. +func (mb *MigrationBox) Down(ctx context.Context, steps int) (err error) { + ctx, span := startSpan(ctx, MigrationDownOpName, trace.WithAttributes(attribute.Int("steps", steps))) + defer otelx.End(span, &err) + + if steps <= 0 { + steps = math.MaxInt + } + + c := mb.c.WithContext(ctx) + return errors.WithStack(mb.exec(ctx, func() (err error) { + mtn := sanitizedMigrationTableName(c) + count, err := c.Count(mtn) + if err != nil { + return errors.Wrap(err, "migration down: unable count existing migration") + } + steps = min(steps, count) + + mfs := mb.migrationsDown.sortAndFilter(c.Dialect.Name()) + slices.Reverse(mfs) + if len(mfs) > count { + // skip all migrations that were not yet applied + mfs = mfs[len(mfs)-count:] + } + + reverted := 0 + defer func() { + migrationsToRevertCount := min(steps, len(mfs)) + mb.l.Debugf("Successfully reverted %d/%d migrations.", reverted, migrationsToRevertCount) + if err != nil { + mb.l.WithError(err).Error("Problem reverting migrations.") + } + }() + for i, mi := range mfs { + if i >= steps { + break + } + l := mb.l.WithField("version", mi.Version).WithField("migration_name", mi.Name).WithField("migration_file", mi.Path) + l.Debugf("handling migration %s", mi.Name) + exists, err := c.Where("version = ?", mi.Version).Exists(mtn) + if err != nil { + return errors.Wrapf(err, "problem checking for migration version %s", mi.Version) + } + + if !exists && len(mi.Version) > 14 { + legacyVersion := mi.Version[:14] + legacyVersionExists, err := c.Where("version = ?", legacyVersion).Exists(mtn) + if err != nil { + return errors.Wrapf(err, "problem checking for legacy migration version %s", legacyVersion) + } + + if !legacyVersionExists { + return errors.Errorf("neither normal (%s) nor legacy migration (%s) exist", mi.Version, legacyVersion) + } + } else if !exists { + return errors.Errorf("migration version %s does not exist", mi.Version) + } + + if err := mi.Valid(); err != nil { + return errors.WithStack(err) + } + + if mb.shouldNotUseTransaction(mi) { + err := mi.Runner(mi, c) + if err != nil { + return errors.WithStack(err) + } + + // #nosec G201 - mtn is a system-wide const + if err := c.RawQuery(fmt.Sprintf("DELETE FROM %s WHERE version = ?", mtn), mi.Version).Exec(); err != nil { + return errors.Wrapf(err, "problem deleting migration version %s. YOUR DATABASE MAY BE IN AN INCONSISTENT STATE! 
MANUAL INTERVENTION REQUIRED!", mi.Version) + } + } else { + if err := mb.isolatedTransaction(ctx, "down", func(conn *pop.Connection) error { + err := mi.Runner(mi, conn) + if err != nil { + return errors.WithStack(err) + } + + // #nosec G201 - mtn is a system-wide const + if err := conn.RawQuery(fmt.Sprintf("DELETE FROM %s WHERE version = ?", mtn), mi.Version).Exec(); err != nil { + return errors.Wrapf(err, "problem deleting migration version %s", mi.Version) + } + + return nil + }); err != nil { + return errors.WithStack(err) + } + } + + l.Infof("%s applied successfully", mi.Name) + reverted++ + } + return nil + })) +} + +func (mb *MigrationBox) createTransactionalMigrationTable(ctx context.Context, c *pop.Connection, l *logrusx.Logger) error { + mtn := sanitizedMigrationTableName(c) + + if err := mb.createMigrationStatusTableTransaction(ctx, []string{ + fmt.Sprintf(`CREATE TABLE %s (version VARCHAR (48) NOT NULL, version_self INT NOT NULL DEFAULT 0)`, mtn), + fmt.Sprintf(`CREATE UNIQUE INDEX %s_version_idx ON %s (version)`, mtn, mtn), + fmt.Sprintf(`CREATE INDEX %s_version_self_idx ON %s (version_self)`, mtn, mtn), + }); err != nil { + return errors.WithStack(err) + } + + l.WithField("migration_table", mtn).Debug("Transactional migration table created successfully.") + + return nil +} + +func (mb *MigrationBox) migrateToTransactionalMigrationTable(ctx context.Context, c *pop.Connection, l *logrusx.Logger) error { + // This means the new pop migrator has also not yet been applied, do that now. + mtn := sanitizedMigrationTableName(c) + + withOn := fmt.Sprintf(" ON %s", mtn) + if c.Dialect.Name() != "mysql" { + withOn = "" + } + + interimTable := fmt.Sprintf("%s_transactional", mtn) + workload := [][]string{ + { + fmt.Sprintf(`DROP INDEX %s_version_idx%s`, mtn, withOn), + fmt.Sprintf(`CREATE TABLE %s (version VARCHAR (48) NOT NULL, version_self INT NOT NULL DEFAULT 0)`, interimTable), + fmt.Sprintf(`CREATE UNIQUE INDEX %s_version_idx ON %s (version)`, mtn, interimTable), + fmt.Sprintf(`CREATE INDEX %s_version_self_idx ON %s (version_self)`, mtn, interimTable), + // #nosec G201 - mtn is a system-wide const + fmt.Sprintf(`INSERT INTO %s (version) SELECT version FROM %s`, interimTable, mtn), + fmt.Sprintf(`ALTER TABLE %s RENAME TO %s_pop_legacy`, mtn, mtn), + }, + { + fmt.Sprintf(`ALTER TABLE %s RENAME TO %s`, interimTable, mtn), + }, + } + + if err := mb.createMigrationStatusTableTransaction(ctx, workload...); err != nil { + return errors.WithStack(err) + } + + l.WithField("migration_table", mtn).Debug("Successfully migrated legacy schema_migration to new transactional schema_migration table.") + + return nil +} + +func (mb *MigrationBox) isolatedTransaction(ctx context.Context, direction string, fn func(c *pop.Connection) error) (err error) { + ctx, span := startSpan(ctx, MigrationRunTransactionOpName, trace.WithAttributes(attribute.String("migration_direction", direction))) + defer otelx.End(span, &err) + + if mb.perMigrationTimeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, mb.perMigrationTimeout) + defer cancel() + } + + return Transaction(ctx, mb.c.WithContext(ctx), func(ctx context.Context, connection *pop.Connection) error { + return errors.WithStack(fn(connection)) + }) +} + +func (mb *MigrationBox) createMigrationStatusTableTransaction(ctx context.Context, transactions ...[]string) error { + for _, statements := range transactions { + // CockroachDB does not support transactional schema changes, so we have to run + // the statements outside of a 
transaction. + if mb.c.Dialect.Name() == "cockroach" || mb.c.Dialect.Name() == "mysql" { + for _, statement := range statements { + if err := mb.c.WithContext(ctx).RawQuery(statement).Exec(); err != nil { + return errors.Wrapf(err, "unable to execute statement: %s", statement) + } + } + } else { + if err := mb.isolatedTransaction(ctx, "init", func(conn *pop.Connection) error { + for _, statement := range statements { + if err := conn.WithContext(ctx).RawQuery(statement).Exec(); err != nil { + return errors.Wrapf(err, "unable to execute statement: %s", statement) + } + } + return nil + }); err != nil { + return errors.WithStack(err) + } + } + } + + return nil +} + +// CreateSchemaMigrations sets up a table to track migrations. This is an idempotent +// operation. +func (mb *MigrationBox) CreateSchemaMigrations(ctx context.Context) error { + ctx, span := startSpan(ctx, MigrationInitOpName) + defer span.End() + + c := mb.c.WithContext(ctx) + + mtn := sanitizedMigrationTableName(c) + mb.l.WithField("migration_table", mtn).Debug("Checking if legacy migration table exists.") + _, err := c.Store.Exec(fmt.Sprintf("select version from %s", mtn)) + if err != nil { + mb.l.WithError(err).WithField("migration_table", mtn).Debug("An error occurred while checking for the legacy migration table, maybe it does not exist yet? Trying to create.") + // This means that the legacy pop migrator has not yet been applied + return errors.WithStack(mb.createTransactionalMigrationTable(ctx, c, mb.l)) + } + + mb.l.WithField("migration_table", mtn).Debug("A migration table exists, checking if it is a transactional migration table.") + _, err = c.Store.Exec(fmt.Sprintf("select version, version_self from %s", mtn)) + if err != nil { + mb.l.WithError(err).WithField("migration_table", mtn).Debug("An error occurred while checking for the transactional migration table, maybe it does not exist yet? 
Trying to create.") + return errors.WithStack(mb.migrateToTransactionalMigrationTable(ctx, c, mb.l)) + } + + mb.l.WithField("migration_table", mtn).Debug("Migration tables exist and are up to date.") + return nil +} + +type MigrationStatus struct { + State string `json:"state"` + Version string `json:"version"` + Name string `json:"name"` + ContentUp string `json:"content"` + ContentDown string `json:"content_down"` +} + +type MigrationStatuses []MigrationStatus + +var _ cmdx.Table = (MigrationStatuses)(nil) + +func (m MigrationStatuses) Header() []string { + return []string{"Version", "Name", "Status"} +} + +func (m MigrationStatuses) Table() [][]string { + t := make([][]string, len(m)) + for i, s := range m { + t[i] = []string{s.Version, s.Name, s.State} + } + return t +} + +func (m MigrationStatuses) Interface() interface{} { + return m +} + +func (m MigrationStatuses) Len() int { + return len(m) +} + +func (m MigrationStatuses) IDs() []string { + ids := make([]string, len(m)) + for i, s := range m { + ids[i] = s.Version + } + return ids +} + +func (m MigrationStatuses) HasPending() bool { + for _, mm := range m { + if mm.State == Pending { + return true + } + } + return false +} + +func sanitizedMigrationTableName(con *pop.Connection) string { + return regexp.MustCompile(`\W`).ReplaceAllString(con.MigrationTableName(), "") +} + +func errIsTableNotFound(err error) bool { + return strings.Contains(err.Error(), "no such table:") || // sqlite + strings.Contains(err.Error(), "Error 1146") || // MySQL + strings.Contains(err.Error(), "SQLSTATE 42P01") // PostgreSQL / CockroachDB +} + +// Status prints out the status of applied/pending migrations. +func (mb *MigrationBox) Status(ctx context.Context) (MigrationStatuses, error) { + ctx, span := startSpan(ctx, MigrationStatusOpName) + defer span.End() + + con := mb.c.WithContext(ctx) + + migrationsUp := mb.migrationsUp.sortAndFilter(con.Dialect.Name()) + + if len(migrationsUp) == 0 { + return nil, errors.Errorf("unable to find any migrations for dialect: %s", con.Dialect.Name()) + } + + alreadyApplied := make([]string, 0, len(migrationsUp)) + err := con.RawQuery(fmt.Sprintf("SELECT version FROM %s", sanitizedMigrationTableName(con))).All(&alreadyApplied) + if err != nil { + if errIsTableNotFound(err) { + // This means that no migrations have been applied and we need to apply all of them first! + // + // It also means that we can ignore this state and act as if no migrations have been applied yet. + } else { + // On any other error, we fail. 
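// Illustrative sketch, not part of the patch above: a hypothetical test (in package popx,
// name and values invented) spelling out two conventions the surrounding code relies on.
// First, the migration table name is sanitized by stripping every non-word character,
// mirroring sanitizedMigrationTableName. Second, a "legacy" applied version is the first
// 14 characters (YYYYMMDDhhmmss) of a longer version string, which is why UpTo and Status
// also check version[:14].
package popx

import (
    "regexp"
    "testing"
)

func TestVersionAndTableNameConventionsSketch(t *testing.T) {
    // Same pattern as sanitizedMigrationTableName: anything outside [A-Za-z0-9_] is removed.
    sanitize := regexp.MustCompile(`\W`).ReplaceAllString
    if got := sanitize("schema_migration;--", ""); got != "schema_migration" {
        t.Fatalf("unexpected sanitized table name: %q", got)
    }

    // A post-legacy version carries extra digits after the 14-character timestamp prefix.
    version := "20240101000000000001"
    legacy := version
    if len(legacy) > 14 {
        legacy = legacy[:14]
    }
    if legacy != "20240101000000" {
        t.Fatalf("unexpected legacy version: %q", legacy)
    }
}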
+ return nil, errors.Wrap(err, "problem with migration") + } + } + + statuses := make(MigrationStatuses, len(migrationsUp)) + for k, mf := range migrationsUp { + downContent := "-- error: no down migration defined for this migration" + if mDown := mb.migrationsDown.find(mf.Version, con.Dialect.Name()); mDown != nil { + downContent = mDown.Content + } + statuses[k] = MigrationStatus{ + State: Pending, + Version: mf.Version, + Name: mf.Name, + ContentUp: mf.Content, + ContentDown: downContent, + } + + if slices.ContainsFunc(alreadyApplied, func(applied string) bool { + return applied == mf.Version || (len(mf.Version) > 14 && applied == mf.Version[:14]) + }) { + statuses[k].State = Applied + continue + } + } + + return statuses, nil +} + +// DumpMigrationSchema will generate a file of the current database schema +func (mb *MigrationBox) DumpMigrationSchema(ctx context.Context) error { + c := mb.c.WithContext(ctx) + schema := "schema.sql" + f, err := os.Create(schema) //#nosec:G304) //#nosec:G304 + if err != nil { + return errors.WithStack(err) + } + err = c.Dialect.DumpSchema(f) + if err != nil { + _ = os.RemoveAll(schema) + return errors.WithStack(err) + } + return nil +} + +func (mb *MigrationBox) exec(ctx context.Context, fn func() error) error { + now := time.Now() + defer func() { + if !mb.dumpMigrations { + return + } + err := mb.DumpMigrationSchema(ctx) + if err != nil { + mb.l.WithError(err).Error("Migrator: unable to dump schema") + } + }() + defer mb.printTimer(now) + + err := mb.CreateSchemaMigrations(ctx) + if err != nil { + return errors.Wrap(err, "migrator: problem creating schema migrations") + } + + if mb.c.Dialect.Name() == "sqlite3" { + if err := mb.c.RawQuery("PRAGMA foreign_keys=OFF").Exec(); err != nil { + return sqlcon.HandleError(err) + } + } + + if mb.c.Dialect.Name() == "cockroach" { + outer := fn + fn = func() error { + return errors.WithStack(crdb.Execute(outer)) + } + } + + if err := fn(); err != nil { + return errors.WithStack(err) + } + + if mb.c.Dialect.Name() == "sqlite3" { + if err := mb.c.RawQuery("PRAGMA foreign_keys=ON").Exec(); err != nil { + return sqlcon.HandleError(err) + } + } + + return nil +} + +func (mb *MigrationBox) printTimer(timerStart time.Time) { + diff := time.Since(timerStart).Seconds() + if diff > 60 { + mb.l.Debugf("%.4f minutes", diff/60) + } else { + mb.l.Debugf("%.4f seconds", diff) + } +} diff --git a/oryx/popx/span.go b/oryx/popx/span.go new file mode 100644 index 00000000000..9aef3c60293 --- /dev/null +++ b/oryx/popx/span.go @@ -0,0 +1,22 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "context" + + "go.opentelemetry.io/otel/trace" +) + +const ( + MigrationStatusOpName = "migration-status" + MigrationInitOpName = "migration-init" + MigrationUpOpName = "migration-up" + MigrationRunTransactionOpName = "migration-run-transaction" + MigrationDownOpName = "migration-down" +) + +func startSpan(ctx context.Context, opName string, opts ...trace.SpanStartOption) (context.Context, trace.Span) { + return trace.SpanFromContext(ctx).TracerProvider().Tracer(tracingComponent).Start(ctx, opName, opts...) 
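// Illustrative sketch, not part of the patch above: a hypothetical in-package caller
// (function name invented) showing how the startSpan/otelx.End pair defined here is meant
// to be used, assuming an OpenTelemetry tracer provider is attached to the incoming context.
package popx

import (
    "context"

    "github.com/ory/x/otelx"
)

func exampleTracedMigrationRun(ctx context.Context, mb *MigrationBox) (err error) {
    ctx, span := startSpan(ctx, MigrationUpOpName)
    defer otelx.End(span, &err)

    // Apply all pending migrations; UpTo(ctx, 0) is what Up(ctx) does internally.
    _, err = mb.UpTo(ctx, 0)
    return err
}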
+} diff --git a/oryx/popx/sql_template_funcs.go b/oryx/popx/sql_template_funcs.go new file mode 100644 index 00000000000..32aa24fffb6 --- /dev/null +++ b/oryx/popx/sql_template_funcs.go @@ -0,0 +1,23 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "regexp" + + "github.com/pkg/errors" +) + +var SQLTemplateFuncs = map[string]interface{}{ + "identifier": Identifier, +} + +var identifierPattern = regexp.MustCompile("^[a-zA-Z][a-zA-Z0-9_]*$") + +func Identifier(i string) (string, error) { + if !identifierPattern.MatchString(i) { + return "", errors.Errorf("invalid SQL identifier '%s'", i) + } + return i, nil +} diff --git a/oryx/popx/stub/migrations/check/valid/123_a.down.sql b/oryx/popx/stub/migrations/check/valid/123_a.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/check/valid/123_a.mysql.up.sql b/oryx/popx/stub/migrations/check/valid/123_a.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/check/valid/123_a.postgres.up.sql b/oryx/popx/stub/migrations/check/valid/123_a.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.cockroach.down.sql new file mode 100644 index 00000000000..30e62aa561d --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.cockroach.down.sql @@ -0,0 +1,4 @@ +DROP TABLE "identity_credential_identifiers";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "identity_credentials";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "identity_credential_types";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "identities";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.cockroach.up.sql new file mode 100644 index 00000000000..638b1b7b167 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.cockroach.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE "identities" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"traits_schema_id" VARCHAR (2048) NOT NULL, +"traits" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "identity_credential_types" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"name" VARCHAR (32) NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "identity_credentials" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"config" json NOT NULL, +"identity_credential_type_id" UUID NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_credentials_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade, +CONSTRAINT "identity_credentials_identity_credential_types_id_fk" FOREIGN KEY ("identity_credential_type_id") REFERENCES "identity_credential_types" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "identity_credential_identifiers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identifier" VARCHAR (255) NOT NULL, +"identity_credential_id" UUID NOT NULL, +"created_at" timestamp NOT 
NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_credential_identifiers_identity_credentials_id_fk" FOREIGN KEY ("identity_credential_id") REFERENCES "identity_credentials" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.mysql.down.sql new file mode 100644 index 00000000000..fcf24325535 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.mysql.down.sql @@ -0,0 +1,4 @@ +DROP TABLE `identity_credential_identifiers`; +DROP TABLE `identity_credentials`; +DROP TABLE `identity_credential_types`; +DROP TABLE `identities`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.mysql.up.sql new file mode 100644 index 00000000000..35d1c2aef9e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.mysql.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE `identities` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`traits_schema_id` VARCHAR (2048) NOT NULL, +`traits` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; +CREATE TABLE `identity_credential_types` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`name` VARCHAR (32) NOT NULL +) ENGINE=InnoDB; +CREATE UNIQUE INDEX `identity_credential_types_name_idx` ON `identity_credential_types` (`name`); +CREATE TABLE `identity_credentials` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`config` JSON NOT NULL, +`identity_credential_type_id` char(36) NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade, +FOREIGN KEY (`identity_credential_type_id`) REFERENCES `identity_credential_types` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE TABLE `identity_credential_identifiers` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`identifier` VARCHAR (255) NOT NULL, +`identity_credential_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_credential_id`) REFERENCES `identity_credentials` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE UNIQUE INDEX `identity_credential_identifiers_identifier_idx` ON `identity_credential_identifiers` (`identifier`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.postgres.down.sql new file mode 100644 index 00000000000..923da2045d5 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.postgres.down.sql @@ -0,0 +1,4 @@ +DROP TABLE "identity_credential_identifiers"; +DROP TABLE "identity_credentials"; +DROP TABLE "identity_credential_types"; +DROP TABLE "identities"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.postgres.up.sql new file mode 100644 index 00000000000..fec4915d6a6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.postgres.up.sql @@ -0,0 +1,35 @@ +CREATE 
TABLE "identities" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"traits_schema_id" VARCHAR (2048) NOT NULL, +"traits" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); +CREATE TABLE "identity_credential_types" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"name" VARCHAR (32) NOT NULL +); +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name); +CREATE TABLE "identity_credentials" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"config" jsonb NOT NULL, +"identity_credential_type_id" UUID NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade, +FOREIGN KEY ("identity_credential_type_id") REFERENCES "identity_credential_types" ("id") ON DELETE cascade +); +CREATE TABLE "identity_credential_identifiers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identifier" VARCHAR (255) NOT NULL, +"identity_credential_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_credential_id") REFERENCES "identity_credentials" ("id") ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.sqlite3.down.sql new file mode 100644 index 00000000000..923da2045d5 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.sqlite3.down.sql @@ -0,0 +1,4 @@ +DROP TABLE "identity_credential_identifiers"; +DROP TABLE "identity_credentials"; +DROP TABLE "identity_credential_types"; +DROP TABLE "identities"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000001_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000001_identities.sqlite3.up.sql new file mode 100644 index 00000000000..bf912cb1097 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000001_identities.sqlite3.up.sql @@ -0,0 +1,31 @@ +CREATE TABLE "identities" ( +"id" TEXT PRIMARY KEY, +"traits_schema_id" TEXT NOT NULL, +"traits" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +CREATE TABLE "identity_credential_types" ( +"id" TEXT PRIMARY KEY, +"name" TEXT NOT NULL +); +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name); +CREATE TABLE "identity_credentials" ( +"id" TEXT PRIMARY KEY, +"config" TEXT NOT NULL, +"identity_credential_type_id" char(36) NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade, +FOREIGN KEY (identity_credential_type_id) REFERENCES identity_credential_types (id) ON DELETE cascade +); +CREATE TABLE "identity_credential_identifiers" ( +"id" TEXT PRIMARY KEY, +"identifier" TEXT NOT NULL, +"identity_credential_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_credential_id) REFERENCES identity_credentials (id) ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.cockroach.down.sql 
b/oryx/popx/stub/migrations/legacy/20191100000002_requests.cockroach.down.sql new file mode 100644 index 00000000000..14ba1503ec4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.cockroach.down.sql @@ -0,0 +1,5 @@ +DROP TABLE "selfservice_login_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_login_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_registration_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_registration_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_profile_management_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.cockroach.up.sql new file mode 100644 index 00000000000..1bca145e00a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.cockroach.up.sql @@ -0,0 +1,55 @@ +CREATE TABLE "selfservice_login_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_login_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_login_request_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_login_request_methods_selfservice_login_requests_id_fk" FOREIGN KEY ("selfservice_login_request_id") REFERENCES "selfservice_login_requests" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_registration_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_registration_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_registration_request_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_registration_request_methods_selfservice_registration_requests_id_fk" FOREIGN KEY ("selfservice_registration_request_id") REFERENCES "selfservice_registration_requests" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_profile_management_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" json NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_profile_management_requests_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/legacy/20191100000002_requests.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.mysql.down.sql new file mode 100644 index 00000000000..8aac48a3c58 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.mysql.down.sql @@ -0,0 +1,5 @@ +DROP TABLE `selfservice_login_request_methods`; +DROP TABLE `selfservice_login_requests`; +DROP TABLE `selfservice_registration_request_methods`; +DROP TABLE `selfservice_registration_requests`; +DROP TABLE `selfservice_profile_management_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.mysql.up.sql new file mode 100644 index 00000000000..9894e3993b8 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.mysql.up.sql @@ -0,0 +1,55 @@ +CREATE TABLE `selfservice_login_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`active_method` VARCHAR (32) NOT NULL, +`csrf_token` VARCHAR (255) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; +CREATE TABLE `selfservice_login_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_login_request_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`selfservice_login_request_id`) REFERENCES `selfservice_login_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE TABLE `selfservice_registration_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`active_method` VARCHAR (32) NOT NULL, +`csrf_token` VARCHAR (255) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; +CREATE TABLE `selfservice_registration_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_registration_request_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`selfservice_registration_request_id`) REFERENCES `selfservice_registration_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE TABLE `selfservice_profile_management_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`form` JSON NOT NULL, +`update_successful` bool NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.postgres.down.sql new file mode 100644 index 00000000000..356a2e69cec --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.postgres.down.sql @@ -0,0 +1,5 @@ +DROP TABLE "selfservice_login_request_methods"; +DROP TABLE "selfservice_login_requests"; +DROP TABLE "selfservice_registration_request_methods"; +DROP TABLE "selfservice_registration_requests"; +DROP TABLE 
"selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.postgres.up.sql new file mode 100644 index 00000000000..d24b8b0669f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.postgres.up.sql @@ -0,0 +1,55 @@ +CREATE TABLE "selfservice_login_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); +CREATE TABLE "selfservice_login_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_login_request_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("selfservice_login_request_id") REFERENCES "selfservice_login_requests" ("id") ON DELETE cascade +); +CREATE TABLE "selfservice_registration_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); +CREATE TABLE "selfservice_registration_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_registration_request_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("selfservice_registration_request_id") REFERENCES "selfservice_registration_requests" ("id") ON DELETE cascade +); +CREATE TABLE "selfservice_profile_management_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" jsonb NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.sqlite3.down.sql new file mode 100644 index 00000000000..356a2e69cec --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.sqlite3.down.sql @@ -0,0 +1,5 @@ +DROP TABLE "selfservice_login_request_methods"; +DROP TABLE "selfservice_login_requests"; +DROP TABLE "selfservice_registration_request_methods"; +DROP TABLE "selfservice_registration_requests"; +DROP TABLE "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000002_requests.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000002_requests.sqlite3.up.sql new file mode 100644 index 00000000000..d2655273524 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000002_requests.sqlite3.up.sql @@ -0,0 +1,50 @@ +CREATE TABLE "selfservice_login_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, 
+"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +CREATE TABLE "selfservice_login_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_login_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_login_request_id) REFERENCES selfservice_login_requests (id) ON DELETE cascade +); +CREATE TABLE "selfservice_registration_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +CREATE TABLE "selfservice_registration_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_registration_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_registration_request_id) REFERENCES selfservice_registration_requests (id) ON DELETE cascade +); +CREATE TABLE "selfservice_profile_management_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"form" TEXT NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.cockroach.down.sql new file mode 100644 index 00000000000..b7ffdf06966 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "sessions";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.cockroach.up.sql new file mode 100644 index 00000000000..c2b8ea0191c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "sessions" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"authenticated_at" timestamp NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "sessions_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.mysql.down.sql new file mode 100644 index 00000000000..b37f476a3ae --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `sessions`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.mysql.up.sql new file mode 100644 index 00000000000..ae325f9c3f6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.mysql.up.sql @@ 
-0,0 +1,11 @@ +CREATE TABLE `sessions` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`authenticated_at` DATETIME NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.postgres.down.sql new file mode 100644 index 00000000000..d49b7aec9a9 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.postgres.up.sql new file mode 100644 index 00000000000..fab43234ebb --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "sessions" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"authenticated_at" timestamp NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.sqlite3.down.sql new file mode 100644 index 00000000000..d49b7aec9a9 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000003_sessions.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.sqlite3.up.sql new file mode 100644 index 00000000000..c1226647bed --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000003_sessions.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "sessions" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.cockroach.down.sql new file mode 100644 index 00000000000..3081431724e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.cockroach.up.sql new file mode 100644 index 00000000000..4e6d1a9ce49 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.cockroach.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_errors" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"errors" json NOT NULL, +"seen_at" timestamp NOT NULL, +"was_seen" bool NOT NULL, +"created_at" 
timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.mysql.down.sql new file mode 100644 index 00000000000..dcf8246d0f4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_errors`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.mysql.up.sql new file mode 100644 index 00000000000..b2afc3c4cf1 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.mysql.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `selfservice_errors` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`errors` JSON NOT NULL, +`seen_at` DATETIME NOT NULL, +`was_seen` bool NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.postgres.down.sql new file mode 100644 index 00000000000..b6a3306190f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.postgres.up.sql new file mode 100644 index 00000000000..e0a5c9e5ccc --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.postgres.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_errors" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"errors" jsonb NOT NULL, +"seen_at" timestamp NOT NULL, +"was_seen" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.sqlite3.down.sql new file mode 100644 index 00000000000..b6a3306190f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000004_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000004_errors.sqlite3.up.sql new file mode 100644 index 00000000000..1eb73f632c9 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000004_errors.sqlite3.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "selfservice_errors" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME NOT NULL, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000005_identities.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000005_identities.mysql.down.sql new file mode 100644 index 00000000000..139e50a971e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000005_identities.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255); diff --git a/oryx/popx/stub/migrations/legacy/20191100000005_identities.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000005_identities.mysql.up.sql new file mode 100644 
index 00000000000..8069ee98f31 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000005_identities.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255) BINARY; diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.cockroach.down.sql new file mode 100644 index 00000000000..efa6f4e6046 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.cockroach.up.sql new file mode 100644 index 00000000000..5b10f1914eb --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "courier_messages" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"type" int NOT NULL, +"status" int NOT NULL, +"body" VARCHAR (255) NOT NULL, +"subject" VARCHAR (255) NOT NULL, +"recipient" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.mysql.down.sql new file mode 100644 index 00000000000..1c69440c879 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `courier_messages`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.mysql.up.sql new file mode 100644 index 00000000000..24e0ac93ee0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `courier_messages` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`type` INTEGER NOT NULL, +`status` INTEGER NOT NULL, +`body` VARCHAR (255) NOT NULL, +`subject` VARCHAR (255) NOT NULL, +`recipient` VARCHAR (255) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.postgres.down.sql new file mode 100644 index 00000000000..0d9747b1828 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.postgres.up.sql new file mode 100644 index 00000000000..70af9f07e03 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "courier_messages" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"type" int NOT NULL, +"status" int NOT NULL, +"body" VARCHAR (255) NOT NULL, +"subject" VARCHAR (255) NOT NULL, +"recipient" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.sqlite3.down.sql 
b/oryx/popx/stub/migrations/legacy/20191100000006_courier.sqlite3.down.sql new file mode 100644 index 00000000000..0d9747b1828 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000006_courier.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000006_courier.sqlite3.up.sql new file mode 100644 index 00000000000..e718e319311 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000006_courier.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "courier_messages" ( +"id" TEXT PRIMARY KEY, +"type" INTEGER NOT NULL, +"status" INTEGER NOT NULL, +"body" TEXT NOT NULL, +"subject" TEXT NOT NULL, +"recipient" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.cockroach.down.sql new file mode 100644 index 00000000000..3b38079f2ca --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" DROP COLUMN "csrf_token";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.cockroach.up.sql new file mode 100644 index 00000000000..434d2ef18e6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" VARCHAR (255) NOT NULL DEFAULT '';COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.mysql.down.sql new file mode 100644 index 00000000000..9fbb33cd8d4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` DROP COLUMN `csrf_token`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.mysql.up.sql new file mode 100644 index 00000000000..f54bdc2b46c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` ADD COLUMN `csrf_token` VARCHAR (255) NOT NULL DEFAULT ""; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.postgres.down.sql new file mode 100644 index 00000000000..6f93d740a4f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" DROP COLUMN "csrf_token"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.postgres.up.sql new file mode 100644 index 00000000000..4e04c0f2669 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" VARCHAR (255) NOT NULL DEFAULT ''; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/legacy/20191100000007_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.sqlite3.down.sql new file mode 100644 index 00000000000..af1b23469ef --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at) SELECT id, errors, seen_at, was_seen, created_at, updated_at FROM "selfservice_errors"; + +DROP TABLE "selfservice_errors"; +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000007_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000007_errors.sqlite3.up.sql new file mode 100644 index 00000000000..f55e6a91a06 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000007_errors.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" TEXT NOT NULL DEFAULT ''; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..ba30655e6ca --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.cockroach.down.sql @@ -0,0 +1,2 @@ +DROP TABLE "selfservice_verification_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "identity_verifiable_addresses";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.cockroach.up.sql new file mode 100644 index 00000000000..5e030a26e2d --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.cockroach.up.sql @@ -0,0 +1,32 @@ +CREATE TABLE "identity_verifiable_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"code" VARCHAR (32) NOT NULL, +"status" VARCHAR (16) NOT NULL, +"via" VARCHAR (16) NOT NULL, +"verified" bool NOT NULL, +"value" VARCHAR (400) NOT NULL, +"verified_at" timestamp, +"expires_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_verifiable_addresses_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" (via, value);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_verification_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp 
NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" json NOT NULL, +"via" VARCHAR (16) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"success" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..2e080c84a17 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.mysql.down.sql @@ -0,0 +1,2 @@ +DROP TABLE `selfservice_verification_requests`; +DROP TABLE `identity_verifiable_addresses`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..5951c67421a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.mysql.up.sql @@ -0,0 +1,32 @@ +CREATE TABLE `identity_verifiable_addresses` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`code` VARCHAR (32) NOT NULL, +`status` VARCHAR (16) NOT NULL, +`via` VARCHAR (16) NOT NULL, +`verified` bool NOT NULL, +`value` VARCHAR (400) NOT NULL, +`verified_at` DATETIME, +`expires_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE UNIQUE INDEX `identity_verifiable_addresses_code_uq_idx` ON `identity_verifiable_addresses` (`code`); +CREATE INDEX `identity_verifiable_addresses_code_idx` ON `identity_verifiable_addresses` (`code`); +CREATE UNIQUE INDEX `identity_verifiable_addresses_status_via_uq_idx` ON `identity_verifiable_addresses` (`via`, `value`); +CREATE INDEX `identity_verifiable_addresses_status_via_idx` ON `identity_verifiable_addresses` (`via`, `value`); +CREATE TABLE `selfservice_verification_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`form` JSON NOT NULL, +`via` VARCHAR (16) NOT NULL, +`csrf_token` VARCHAR (255) NOT NULL, +`success` bool NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..593423b3047 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.postgres.down.sql @@ -0,0 +1,2 @@ +DROP TABLE "selfservice_verification_requests"; +DROP TABLE "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..419ec91128a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.postgres.up.sql @@ -0,0 +1,32 @@ +CREATE TABLE "identity_verifiable_addresses" ( +"id" UUID NOT NULL, +PRIMARY 
KEY("id"), +"code" VARCHAR (32) NOT NULL, +"status" VARCHAR (16) NOT NULL, +"via" VARCHAR (16) NOT NULL, +"verified" bool NOT NULL, +"value" VARCHAR (400) NOT NULL, +"verified_at" timestamp, +"expires_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code); +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value); +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" (via, value); +CREATE TABLE "selfservice_verification_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" jsonb NOT NULL, +"via" VARCHAR (16) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"success" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..593423b3047 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.sqlite3.down.sql @@ -0,0 +1,2 @@ +DROP TABLE "selfservice_verification_requests"; +DROP TABLE "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..a12f20aed92 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000008_selfservice_verification.sqlite3.up.sql @@ -0,0 +1,30 @@ +CREATE TABLE "identity_verifiable_addresses" ( +"id" TEXT PRIMARY KEY, +"code" TEXT NOT NULL, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"expires_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code); +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value); +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" (via, value); +CREATE TABLE "selfservice_verification_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"form" TEXT NOT NULL, +"via" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000009_verification.mysql.down.sql 
b/oryx/popx/stub/migrations/legacy/20191100000009_verification.mysql.down.sql new file mode 100644 index 00000000000..f8a7e0f3c3a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000009_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255); diff --git a/oryx/popx/stub/migrations/legacy/20191100000009_verification.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000009_verification.mysql.up.sql new file mode 100644 index 00000000000..d16bc788e88 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000009_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY; diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.cockroach.down.sql new file mode 100644 index 00000000000..c42e3bd3ae6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.cockroach.down.sql @@ -0,0 +1,5 @@ +UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL; +ALTER TABLE "selfservice_errors" RENAME COLUMN "seen_at" TO "_seen_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_errors" ADD COLUMN "seen_at" timestamp;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "selfservice_errors" SET "seen_at" = "_seen_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_errors" DROP COLUMN "_seen_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.cockroach.up.sql new file mode 100644 index 00000000000..e9b8fc9acd1 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.cockroach.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_errors" RENAME COLUMN "seen_at" TO "_seen_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_errors" ADD COLUMN "seen_at" timestamp;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "selfservice_errors" SET "seen_at" = "_seen_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_errors" DROP COLUMN "_seen_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.mysql.down.sql new file mode 100644 index 00000000000..525b179f257 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.mysql.down.sql @@ -0,0 +1,2 @@ +UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL; +ALTER TABLE `selfservice_errors` MODIFY `seen_at` DATETIME; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.mysql.up.sql new file mode 100644 index 00000000000..6e0978925c3 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` MODIFY `seen_at` DATETIME; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.postgres.down.sql new file mode 100644 index 00000000000..ccf29d7cbc0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.postgres.down.sql @@ -0,0 +1,2 @@ +UPDATE 
selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL; +ALTER TABLE "selfservice_errors" ALTER COLUMN "seen_at" TYPE timestamp, ALTER COLUMN "seen_at" DROP NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.postgres.up.sql new file mode 100644 index 00000000000..57ee0241abb --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ALTER COLUMN "seen_at" TYPE timestamp, ALTER COLUMN "seen_at" DROP NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.sqlite3.down.sql new file mode 100644 index 00000000000..52ef93800e3 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.sqlite3.down.sql @@ -0,0 +1,13 @@ +UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL; +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL DEFAULT '' +); +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at, csrf_token) SELECT id, errors, seen_at, was_seen, created_at, updated_at, csrf_token FROM "selfservice_errors"; +DROP TABLE "selfservice_errors"; +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000010_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000010_errors.sqlite3.up.sql new file mode 100644 index 00000000000..fc59202d6f7 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000010_errors.sqlite3.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL DEFAULT '' +); +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at, csrf_token) SELECT id, errors, seen_at, was_seen, created_at, updated_at, csrf_token FROM "selfservice_errors"; +DROP TABLE "selfservice_errors"; +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.cockroach.up.sql new file mode 100644 index 00000000000..53dd25f0d64 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.cockroach.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE "courier_messages" RENAME COLUMN "body" TO "_body_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "courier_messages" ADD COLUMN "body" text;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "courier_messages" SET "body" = "_body_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "courier_messages" ALTER COLUMN "body" SET NOT NULL;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "courier_messages" DROP COLUMN "_body_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.mysql.up.sql 
b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.mysql.up.sql new file mode 100644 index 00000000000..28235616136 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `courier_messages` MODIFY `body` text NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.postgres.up.sql new file mode 100644 index 00000000000..55a3ecf38c5 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "courier_messages" ALTER COLUMN "body" TYPE text, ALTER COLUMN "body" SET NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.sqlite3.up.sql new file mode 100644 index 00000000000..abd09ecca36 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000011_courier_body_type.sqlite3.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "_courier_messages_tmp" ( +"id" TEXT PRIMARY KEY, +"type" INTEGER NOT NULL, +"status" INTEGER NOT NULL, +"body" TEXT NOT NULL, +"subject" TEXT NOT NULL, +"recipient" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +INSERT INTO "_courier_messages_tmp" (id, type, status, body, subject, recipient, created_at, updated_at) SELECT id, type, status, body, subject, recipient, created_at, updated_at FROM "courier_messages"; +DROP TABLE "courier_messages"; +ALTER TABLE "_courier_messages_tmp" RENAME TO "courier_messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.cockroach.down.sql new file mode 100644 index 00000000000..f6ee1d9082b --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "forced";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.cockroach.up.sql new file mode 100644 index 00000000000..56056e912fe --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false';COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.mysql.down.sql new file mode 100644 index 00000000000..acdb077bc3d --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` DROP COLUMN `forced`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.mysql.up.sql new file mode 100644 index 00000000000..d1a0dceac4a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.mysql.up.sql @@ -0,0 +1 @@ 
+ALTER TABLE `selfservice_login_requests` ADD COLUMN `forced` bool NOT NULL DEFAULT false; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.postgres.down.sql new file mode 100644 index 00000000000..8dbb74664fc --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "forced"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.postgres.up.sql new file mode 100644 index 00000000000..b84202f2384 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.sqlite3.down.sql new file mode 100644 index 00000000000..a4db2cc1d0a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.sqlite3.down.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_selfservice_login_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +INSERT INTO "_selfservice_login_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at FROM "selfservice_login_requests"; + +DROP TABLE "selfservice_login_requests"; +ALTER TABLE "_selfservice_login_requests_tmp" RENAME TO "selfservice_login_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.sqlite3.up.sql new file mode 100644 index 00000000000..b84202f2384 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20191100000012_login_request_forced.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.cockroach.down.sql new file mode 100644 index 00000000000..aa1ddb27178 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.cockroach.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "form" json NOT NULL DEFAULT '{}';COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_profile_management_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "active_method";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.cockroach.up.sql 
b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.cockroach.up.sql new file mode 100644 index 00000000000..560179321e5 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.cockroach.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "selfservice_profile_management_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_profile_management_request_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" VARCHAR (32);COMMIT TRANSACTION;BEGIN TRANSACTION; +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests; +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "form";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.mysql.down.sql new file mode 100644 index 00000000000..d2e92cd5001 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.mysql.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE `selfservice_profile_management_requests` ADD COLUMN `form` JSON; +UPDATE selfservice_profile_management_requests SET form=(SELECT * FROM (SELECT m.config FROM selfservice_profile_management_requests AS r INNER JOIN selfservice_profile_management_request_methods AS m ON r.id=m.selfservice_profile_management_request_id) as t); +ALTER TABLE `selfservice_profile_management_requests` MODIFY `form` JSON; +DROP TABLE `selfservice_profile_management_request_methods`; +ALTER TABLE `selfservice_profile_management_requests` DROP COLUMN `active_method`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.mysql.up.sql new file mode 100644 index 00000000000..1c15ee716ba --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.mysql.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE `selfservice_profile_management_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_profile_management_request_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; +ALTER TABLE `selfservice_profile_management_requests` ADD COLUMN `active_method` VARCHAR (32); +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests; +ALTER TABLE `selfservice_profile_management_requests` DROP COLUMN `form`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.postgres.down.sql new file mode 100644 index 00000000000..a800a7bf02f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.postgres.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE 
"selfservice_profile_management_requests" ADD COLUMN "form" jsonb; +UPDATE selfservice_profile_management_requests SET form=(SELECT * FROM (SELECT m.config FROM selfservice_profile_management_requests AS r INNER JOIN selfservice_profile_management_request_methods AS m ON r.id=m.selfservice_profile_management_request_id) as t); +ALTER TABLE "selfservice_profile_management_requests" ALTER COLUMN "form" TYPE jsonb, ALTER COLUMN "form" DROP NOT NULL; +DROP TABLE "selfservice_profile_management_request_methods"; +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "active_method"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.postgres.up.sql new file mode 100644 index 00000000000..e5b92972101 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.postgres.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "selfservice_profile_management_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_profile_management_request_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" VARCHAR (32); +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests; +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "form"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..c1e707ff1ff --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.sqlite3.down.sql @@ -0,0 +1,16 @@ +DROP TABLE "selfservice_profile_management_request_methods"; +CREATE TABLE "_selfservice_profile_management_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"update_successful" bool NOT NULL DEFAULT 'false', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_profile_management_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, update_successful) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, update_successful FROM "selfservice_profile_management_requests"; + +DROP TABLE "selfservice_profile_management_requests"; +ALTER TABLE "_selfservice_profile_management_requests_tmp" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..f8c1bd87b2d --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200317160354_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE 
"selfservice_profile_management_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_profile_management_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" TEXT; +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests; +CREATE TABLE "_selfservice_profile_management_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_profile_management_requests_tmp" (id, request_url, issued_at, expires_at, update_successful, identity_id, created_at, updated_at, active_method) SELECT id, request_url, issued_at, expires_at, update_successful, identity_id, created_at, updated_at, active_method FROM "selfservice_profile_management_requests"; + +DROP TABLE "selfservice_profile_management_requests"; +ALTER TABLE "_selfservice_profile_management_requests_tmp" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.cockroach.down.sql new file mode 100644 index 00000000000..11b99bc1909 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "continuity_containers";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.cockroach.up.sql new file mode 100644 index 00000000000..36a86ae5fb4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "continuity_containers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identity_id" UUID, +"name" VARCHAR (255) NOT NULL, +"payload" json, +"expires_at" timestamp NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "continuity_containers_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.mysql.down.sql new file mode 100644 index 00000000000..17396f6a130 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `continuity_containers`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.mysql.up.sql new file mode 100644 index 00000000000..42b55315051 --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `continuity_containers` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`identity_id` char(36), +`name` VARCHAR (255) NOT NULL, +`payload` JSON, +`expires_at` DATETIME NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.postgres.down.sql new file mode 100644 index 00000000000..3aef4256500 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "continuity_containers"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.postgres.up.sql new file mode 100644 index 00000000000..ab8cfd55263 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "continuity_containers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identity_id" UUID, +"name" VARCHAR (255) NOT NULL, +"payload" jsonb, +"expires_at" timestamp NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.sqlite3.down.sql new file mode 100644 index 00000000000..3aef4256500 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "continuity_containers"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.sqlite3.up.sql new file mode 100644 index 00000000000..b0e018249ad --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200401183443_continuity_containers.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "continuity_containers" ( +"id" TEXT PRIMARY KEY, +"identity_id" char(36), +"name" TEXT NOT NULL, +"payload" TEXT, +"expires_at" DATETIME NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.cockroach.down.sql new file mode 100644 index 00000000000..2d7a810d930 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.cockroach.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_profile_management_request_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_profile_management_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_requests" RENAME TO 
"selfservice_profile_management_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.cockroach.up.sql new file mode 100644 index 00000000000..90b3c60dfe6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.cockroach.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_profile_management_request_methods" RENAME TO "selfservice_settings_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.mysql.down.sql new file mode 100644 index 00000000000..dba65df340d --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.mysql.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE `selfservice_settings_request_methods` CHANGE `selfservice_settings_request_id` `selfservice_profile_management_request_id` char(36) NOT NULL; +ALTER TABLE `selfservice_settings_request_methods` RENAME TO `selfservice_profile_management_request_methods`; +ALTER TABLE `selfservice_settings_requests` RENAME TO `selfservice_profile_management_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.mysql.up.sql new file mode 100644 index 00000000000..7d66d480235 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.mysql.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE `selfservice_profile_management_request_methods` CHANGE `selfservice_profile_management_request_id` `selfservice_settings_request_id` char(36) NOT NULL; +ALTER TABLE `selfservice_profile_management_request_methods` RENAME TO `selfservice_settings_request_methods`; +ALTER TABLE `selfservice_profile_management_requests` RENAME TO `selfservice_settings_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.postgres.down.sql new file mode 100644 index 00000000000..d37a192c0a2 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.postgres.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_profile_management_request_id"; +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_profile_management_request_methods"; +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.postgres.up.sql new file mode 100644 index 00000000000..7d75607bc81 --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.postgres.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id"; +ALTER TABLE "selfservice_profile_management_request_methods" RENAME TO "selfservice_settings_request_methods"; +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.sqlite3.down.sql new file mode 100644 index 00000000000..d37a192c0a2 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.sqlite3.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_profile_management_request_id"; +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_profile_management_request_methods"; +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.sqlite3.up.sql new file mode 100644 index 00000000000..7d75607bc81 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200402142539_rename_profile_flows.sqlite3.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id"; +ALTER TABLE "selfservice_profile_management_request_methods" RENAME TO "selfservice_settings_request_methods"; +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..32088d95b9b --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.cockroach.down.sql @@ -0,0 +1,4 @@ +DROP TABLE "identity_recovery_tokens";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_recovery_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_recovery_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "identity_recovery_addresses";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..f6c8b50670c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1,52 @@ +CREATE TABLE "identity_recovery_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"via" VARCHAR (16) NOT NULL, +"value" VARCHAR (400) NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_recovery_addresses_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE 
INDEX "identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_recovery_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"messages" json, +"active_method" VARCHAR (32), +"csrf_token" VARCHAR (255) NOT NULL, +"state" VARCHAR (32) NOT NULL, +"recovered_identity_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_recovery_requests_identities_id_fk" FOREIGN KEY ("recovered_identity_id") REFERENCES "identities" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"config" json NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_recovery_request_methods_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE TABLE "identity_recovery_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"identity_recovery_address_id" UUID NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_recovery_tokens_identity_recovery_addresses_id_fk" FOREIGN KEY ("identity_recovery_address_id") REFERENCES "identity_recovery_addresses" ("id") ON DELETE cascade, +CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..888e5040b37 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.mysql.down.sql @@ -0,0 +1,4 @@ +DROP TABLE `identity_recovery_tokens`; +DROP TABLE `selfservice_recovery_request_methods`; +DROP TABLE `selfservice_recovery_requests`; +DROP TABLE `identity_recovery_addresses`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..a9353817358 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.mysql.up.sql @@ -0,0 +1,52 @@ +CREATE TABLE `identity_recovery_addresses` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`via` VARCHAR (16) NOT NULL, +`value` 
VARCHAR (400) NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE UNIQUE INDEX `identity_recovery_addresses_status_via_uq_idx` ON `identity_recovery_addresses` (`via`, `value`); +CREATE INDEX `identity_recovery_addresses_status_via_idx` ON `identity_recovery_addresses` (`via`, `value`); +CREATE TABLE `selfservice_recovery_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`messages` JSON, +`active_method` VARCHAR (32), +`csrf_token` VARCHAR (255) NOT NULL, +`state` VARCHAR (32) NOT NULL, +`recovered_identity_id` char(36), +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`recovered_identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE TABLE `selfservice_recovery_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`config` JSON NOT NULL, +`selfservice_recovery_request_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`selfservice_recovery_request_id`) REFERENCES `selfservice_recovery_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE TABLE `identity_recovery_tokens` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`token` VARCHAR (64) NOT NULL, +`used` bool NOT NULL DEFAULT false, +`used_at` DATETIME, +`identity_recovery_address_id` char(36) NOT NULL, +`selfservice_recovery_request_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_recovery_address_id`) REFERENCES `identity_recovery_addresses` (`id`) ON DELETE cascade, +FOREIGN KEY (`selfservice_recovery_request_id`) REFERENCES `selfservice_recovery_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE UNIQUE INDEX `identity_recovery_addresses_code_uq_idx` ON `identity_recovery_tokens` (`token`); +CREATE INDEX `identity_recovery_addresses_code_idx` ON `identity_recovery_tokens` (`token`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..b4747249234 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.postgres.down.sql @@ -0,0 +1,4 @@ +DROP TABLE "identity_recovery_tokens"; +DROP TABLE "selfservice_recovery_request_methods"; +DROP TABLE "selfservice_recovery_requests"; +DROP TABLE "identity_recovery_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..5b3fa1c7563 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.postgres.up.sql @@ -0,0 +1,52 @@ +CREATE TABLE "identity_recovery_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"via" VARCHAR (16) NOT NULL, +"value" VARCHAR (400) NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); +CREATE UNIQUE INDEX 
"identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value); +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value); +CREATE TABLE "selfservice_recovery_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"messages" jsonb, +"active_method" VARCHAR (32), +"csrf_token" VARCHAR (255) NOT NULL, +"state" VARCHAR (32) NOT NULL, +"recovered_identity_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("recovered_identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"config" jsonb NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +); +CREATE TABLE "identity_recovery_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"identity_recovery_address_id" UUID NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_recovery_address_id") REFERENCES "identity_recovery_addresses" ("id") ON DELETE cascade, +FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token); +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..b4747249234 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.sqlite3.down.sql @@ -0,0 +1,4 @@ +DROP TABLE "identity_recovery_tokens"; +DROP TABLE "selfservice_recovery_request_methods"; +DROP TABLE "selfservice_recovery_requests"; +DROP TABLE "identity_recovery_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..29016190206 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101057_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1,48 @@ +CREATE TABLE "identity_recovery_addresses" ( +"id" TEXT PRIMARY KEY, +"via" TEXT NOT NULL, +"value" TEXT NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value); +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value); +CREATE TABLE "selfservice_recovery_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, 
+"expires_at" DATETIME NOT NULL, +"messages" TEXT, +"active_method" TEXT, +"csrf_token" TEXT NOT NULL, +"state" TEXT NOT NULL, +"recovered_identity_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (recovered_identity_id) REFERENCES identities (id) ON DELETE cascade +); +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"config" TEXT NOT NULL, +"selfservice_recovery_request_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_recovery_request_id) REFERENCES selfservice_recovery_requests (id) ON DELETE cascade +); +CREATE TABLE "identity_recovery_tokens" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_request_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON DELETE cascade, +FOREIGN KEY (selfservice_recovery_request_id) REFERENCES selfservice_recovery_requests (id) ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token); +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200519101058_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200519101058_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..54c99e1acb3 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101058_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64); diff --git a/oryx/popx/stub/migrations/legacy/20200519101058_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200519101058_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..7972b3405fb --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200519101058_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64) BINARY; diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.cockroach.down.sql new file mode 100644 index 00000000000..35028f91cfa --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "messages";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.cockroach.up.sql new file mode 100644 index 00000000000..127f682e820 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" json;COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.mysql.down.sql new file mode 100644 index 00000000000..d80e1cae215 --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `messages`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.mysql.up.sql new file mode 100644 index 00000000000..2c843fda0d9 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `messages` JSON; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.postgres.down.sql new file mode 100644 index 00000000000..a9ca7f9c0d2 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.postgres.up.sql new file mode 100644 index 00000000000..e5b5661b1ab --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" jsonb; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.sqlite3.down.sql new file mode 100644 index 00000000000..346124abafc --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.sqlite3.down.sql @@ -0,0 +1,16 @@ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"update_successful" bool NOT NULL DEFAULT 'false', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, update_successful) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, update_successful FROM "selfservice_settings_requests"; + +DROP TABLE "selfservice_settings_requests"; +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.sqlite3.up.sql new file mode 100644 index 00000000000..587ca18d7b6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101000_create_messages.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" TEXT; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200601101001_verification.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200601101001_verification.mysql.down.sql new file mode 100644 index 00000000000..d16bc788e88 --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200601101001_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY; diff --git a/oryx/popx/stub/migrations/legacy/20200601101001_verification.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200601101001_verification.mysql.up.sql new file mode 100644 index 00000000000..3bf20defb8c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200601101001_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(32) BINARY; diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.cockroach.down.sql new file mode 100644 index 00000000000..80601b5c781 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.cockroach.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "messages";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_login_requests" DROP COLUMN "messages";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "messages";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.cockroach.up.sql new file mode 100644 index 00000000000..a9cfe755d60 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.cockroach.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" json;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" json;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" json;COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.mysql.down.sql new file mode 100644 index 00000000000..427a3870d26 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.mysql.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE `selfservice_verification_requests` DROP COLUMN `messages`; +ALTER TABLE `selfservice_login_requests` DROP COLUMN `messages`; +ALTER TABLE `selfservice_registration_requests` DROP COLUMN `messages`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.mysql.up.sql new file mode 100644 index 00000000000..21085a211b0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.mysql.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE `selfservice_verification_requests` ADD COLUMN `messages` JSON; +ALTER TABLE `selfservice_login_requests` ADD COLUMN `messages` JSON; +ALTER TABLE `selfservice_registration_requests` ADD COLUMN `messages` JSON; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.postgres.down.sql new file mode 100644 index 00000000000..7c739150549 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.postgres.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "messages"; +ALTER TABLE "selfservice_login_requests" DROP COLUMN "messages"; +ALTER TABLE 
"selfservice_registration_requests" DROP COLUMN "messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.postgres.up.sql new file mode 100644 index 00000000000..4ac597fdcdf --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.postgres.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" jsonb; +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" jsonb; +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" jsonb; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.sqlite3.down.sql new file mode 100644 index 00000000000..e1d0c117d7a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.sqlite3.down.sql @@ -0,0 +1,44 @@ +CREATE TABLE "_selfservice_verification_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"via" TEXT NOT NULL DEFAULT 'email', +"success" bool NOT NULL DEFAULT 'FALSE' +); +INSERT INTO "_selfservice_verification_requests_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, via, success) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, via, success FROM "selfservice_verification_requests"; + +DROP TABLE "selfservice_verification_requests"; +ALTER TABLE "_selfservice_verification_requests_tmp" RENAME TO "selfservice_verification_requests"; +CREATE TABLE "_selfservice_login_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"forced" bool NOT NULL DEFAULT 'false' +); +INSERT INTO "_selfservice_login_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced FROM "selfservice_login_requests"; + +DROP TABLE "selfservice_login_requests"; +ALTER TABLE "_selfservice_login_requests_tmp" RENAME TO "selfservice_login_requests"; +CREATE TABLE "_selfservice_registration_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +INSERT INTO "_selfservice_registration_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at FROM "selfservice_registration_requests"; + +DROP TABLE "selfservice_registration_requests"; +ALTER TABLE "_selfservice_registration_requests_tmp" RENAME TO "selfservice_registration_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200605111551_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200605111551_messages.sqlite3.up.sql 
new file mode 100644 index 00000000000..cc2a6060a28 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200605111551_messages.sqlite3.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" TEXT; +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" TEXT; +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" TEXT; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.cockroach.down.sql new file mode 100644 index 00000000000..5877dc96a68 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.cockroach.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "state";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "update_successful" bool NOT NULL DEFAULT 'false';COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.cockroach.up.sql new file mode 100644 index 00000000000..5848a58d04a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.cockroach.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "update_successful";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.mysql.down.sql new file mode 100644 index 00000000000..c43477e1f08 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.mysql.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `state`; +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `update_successful` bool NOT NULL DEFAULT false; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.mysql.up.sql new file mode 100644 index 00000000000..00b29fa27fd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.mysql.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `state` VARCHAR (255) NOT NULL DEFAULT 'show_form'; +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `update_successful`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.postgres.down.sql new file mode 100644 index 00000000000..eaee8998b19 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.postgres.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "state"; +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "update_successful" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.postgres.up.sql new file mode 100644 index 00000000000..4c2ac98bb13 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.postgres.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE 
"selfservice_settings_requests" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form'; +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "update_successful"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.sqlite3.down.sql new file mode 100644 index 00000000000..7366ef25cfd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.sqlite3.down.sql @@ -0,0 +1,17 @@ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages FROM "selfservice_settings_requests"; + +DROP TABLE "selfservice_settings_requests"; +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "update_successful" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200607165100_settings.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200607165100_settings.sqlite3.up.sql new file mode 100644 index 00000000000..3892e6271bf --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200607165100_settings.sqlite3.up.sql @@ -0,0 +1,18 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "state" TEXT NOT NULL DEFAULT 'show_form'; +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +"state" TEXT NOT NULL DEFAULT 'show_form', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state FROM "selfservice_settings_requests"; + +DROP TABLE "selfservice_settings_requests"; +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.cockroach.down.sql new file mode 100644 index 00000000000..07a1c56fadd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "schema_id" TO "traits_schema_id";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.cockroach.up.sql 
b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.cockroach.up.sql new file mode 100644 index 00000000000..fc35d52520a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.mysql.down.sql new file mode 100644 index 00000000000..7e3303f9622 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identities` CHANGE `schema_id` `traits_schema_id` varchar(2048) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.mysql.up.sql new file mode 100644 index 00000000000..92a92fa94fe --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identities` CHANGE `traits_schema_id` `schema_id` varchar(2048) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.postgres.down.sql new file mode 100644 index 00000000000..d2dee7d0fd0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "schema_id" TO "traits_schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.postgres.up.sql new file mode 100644 index 00000000000..ce7cd59733a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.sqlite3.down.sql new file mode 100644 index 00000000000..d2dee7d0fd0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "schema_id" TO "traits_schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.sqlite3.up.sql new file mode 100644 index 00000000000..ce7cd59733a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200705105359_rename_identities_schema.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.cockroach.down.sql new file mode 100644 index 00000000000..10fcdad03c8 --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.cockroach.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "type";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "type";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "type";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_requests" DROP COLUMN "type";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "type";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.cockroach.up.sql new file mode 100644 index 00000000000..b1edbc9b025 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.cockroach.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser';COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.mysql.down.sql new file mode 100644 index 00000000000..6d503a70dee --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.mysql.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE `selfservice_login_requests` DROP COLUMN `type`; +ALTER TABLE `selfservice_registration_requests` DROP COLUMN `type`; +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `type`; +ALTER TABLE `selfservice_recovery_requests` DROP COLUMN `type`; +ALTER TABLE `selfservice_verification_requests` DROP COLUMN `type`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.mysql.up.sql new file mode 100644 index 00000000000..2953d0009a0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.mysql.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE `selfservice_login_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE `selfservice_registration_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE `selfservice_recovery_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE `selfservice_verification_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.postgres.down.sql new file mode 100644 index 00000000000..e36b97a57ca --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.postgres.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "type"; +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "type"; +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "type"; +ALTER TABLE "selfservice_recovery_requests" DROP COLUMN "type"; +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "type"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.postgres.up.sql new file mode 100644 index 00000000000..74d85bce764 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.postgres.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..238eea647bb --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.sqlite3.down.sql @@ -0,0 +1,82 @@ +CREATE TABLE "_selfservice_login_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"forced" bool NOT NULL DEFAULT 'false', +"messages" TEXT +); +INSERT INTO "_selfservice_login_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced, messages) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced, messages FROM "selfservice_login_requests"; + +DROP TABLE "selfservice_login_requests"; +ALTER TABLE "_selfservice_login_requests_tmp" RENAME TO "selfservice_login_requests"; +CREATE TABLE "_selfservice_registration_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT +); +INSERT INTO "_selfservice_registration_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, messages) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, messages FROM "selfservice_registration_requests"; + +DROP TABLE "selfservice_registration_requests"; +ALTER TABLE "_selfservice_registration_requests_tmp" RENAME TO "selfservice_registration_requests"; +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT 
NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +"state" TEXT NOT NULL DEFAULT 'show_form', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state FROM "selfservice_settings_requests"; + +DROP TABLE "selfservice_settings_requests"; +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; +CREATE TABLE "_selfservice_recovery_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"messages" TEXT, +"active_method" TEXT, +"csrf_token" TEXT NOT NULL, +"state" TEXT NOT NULL, +"recovered_identity_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (recovered_identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_selfservice_recovery_requests_tmp" (id, request_url, issued_at, expires_at, messages, active_method, csrf_token, state, recovered_identity_id, created_at, updated_at) SELECT id, request_url, issued_at, expires_at, messages, active_method, csrf_token, state, recovered_identity_id, created_at, updated_at FROM "selfservice_recovery_requests"; + +DROP TABLE "selfservice_recovery_requests"; +ALTER TABLE "_selfservice_recovery_requests_tmp" RENAME TO "selfservice_recovery_requests"; +CREATE TABLE "_selfservice_verification_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"via" TEXT NOT NULL DEFAULT 'email', +"success" bool NOT NULL DEFAULT 'FALSE' +); +INSERT INTO "_selfservice_verification_requests_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, via, success) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, via, success FROM "selfservice_verification_requests"; + +DROP TABLE "selfservice_verification_requests"; +ALTER TABLE "_selfservice_verification_requests_tmp" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..85a72ea667d --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810141652_flow_type.sqlite3.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.cockroach.down.sql 
b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..7b665f0e166 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.cockroach.down.sql @@ -0,0 +1,9 @@ +ALTER TABLE "selfservice_login_flows" RENAME TO "selfservice_login_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_login_flow_methods" RENAME TO "selfservice_login_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_flow_methods" RENAME TO "selfservice_registration_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_flows" RENAME TO "selfservice_registration_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_flow_methods" RENAME TO "selfservice_settings_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_flows" RENAME TO "selfservice_settings_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME TO "selfservice_recovery_request_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_flows" RENAME TO "selfservice_recovery_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" RENAME TO "selfservice_verification_requests";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..9b606779796 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.cockroach.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_requests" RENAME TO "selfservice_verification_flows";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..6ba6c693318 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.mysql.down.sql @@ -0,0 +1,9 @@ +ALTER TABLE `selfservice_login_flows` RENAME TO `selfservice_login_requests`; +ALTER TABLE `selfservice_login_flow_methods` RENAME TO `selfservice_login_request_methods`; +ALTER TABLE `selfservice_registration_flow_methods` RENAME TO 
`selfservice_registration_request_methods`; +ALTER TABLE `selfservice_registration_flows` RENAME TO `selfservice_registration_requests`; +ALTER TABLE `selfservice_settings_flow_methods` RENAME TO `selfservice_settings_request_methods`; +ALTER TABLE `selfservice_settings_flows` RENAME TO `selfservice_settings_requests`; +ALTER TABLE `selfservice_recovery_flow_methods` RENAME TO `selfservice_recovery_request_methods`; +ALTER TABLE `selfservice_recovery_flows` RENAME TO `selfservice_recovery_requests`; +ALTER TABLE `selfservice_verification_flows` RENAME TO `selfservice_verification_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..28508f8a06e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.mysql.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE `selfservice_login_request_methods` RENAME TO `selfservice_login_flow_methods`; +ALTER TABLE `selfservice_login_requests` RENAME TO `selfservice_login_flows`; +ALTER TABLE `selfservice_registration_request_methods` RENAME TO `selfservice_registration_flow_methods`; +ALTER TABLE `selfservice_registration_requests` RENAME TO `selfservice_registration_flows`; +ALTER TABLE `selfservice_settings_request_methods` RENAME TO `selfservice_settings_flow_methods`; +ALTER TABLE `selfservice_settings_requests` RENAME TO `selfservice_settings_flows`; +ALTER TABLE `selfservice_recovery_request_methods` RENAME TO `selfservice_recovery_flow_methods`; +ALTER TABLE `selfservice_recovery_requests` RENAME TO `selfservice_recovery_flows`; +ALTER TABLE `selfservice_verification_requests` RENAME TO `selfservice_verification_flows`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..60d6d0dd119 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.postgres.down.sql @@ -0,0 +1,9 @@ +ALTER TABLE "selfservice_login_flows" RENAME TO "selfservice_login_requests"; +ALTER TABLE "selfservice_login_flow_methods" RENAME TO "selfservice_login_request_methods"; +ALTER TABLE "selfservice_registration_flow_methods" RENAME TO "selfservice_registration_request_methods"; +ALTER TABLE "selfservice_registration_flows" RENAME TO "selfservice_registration_requests"; +ALTER TABLE "selfservice_settings_flow_methods" RENAME TO "selfservice_settings_request_methods"; +ALTER TABLE "selfservice_settings_flows" RENAME TO "selfservice_settings_requests"; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME TO "selfservice_recovery_request_methods"; +ALTER TABLE "selfservice_recovery_flows" RENAME TO "selfservice_recovery_requests"; +ALTER TABLE "selfservice_verification_flows" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..be3be9e5ed4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.postgres.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods"; +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows"; +ALTER TABLE 
"selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods"; +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows"; +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods"; +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows"; +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods"; +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows"; +ALTER TABLE "selfservice_verification_requests" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..60d6d0dd119 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.sqlite3.down.sql @@ -0,0 +1,9 @@ +ALTER TABLE "selfservice_login_flows" RENAME TO "selfservice_login_requests"; +ALTER TABLE "selfservice_login_flow_methods" RENAME TO "selfservice_login_request_methods"; +ALTER TABLE "selfservice_registration_flow_methods" RENAME TO "selfservice_registration_request_methods"; +ALTER TABLE "selfservice_registration_flows" RENAME TO "selfservice_registration_requests"; +ALTER TABLE "selfservice_settings_flow_methods" RENAME TO "selfservice_settings_request_methods"; +ALTER TABLE "selfservice_settings_flows" RENAME TO "selfservice_settings_requests"; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME TO "selfservice_recovery_request_methods"; +ALTER TABLE "selfservice_recovery_flows" RENAME TO "selfservice_recovery_requests"; +ALTER TABLE "selfservice_verification_flows" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..be3be9e5ed4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810161022_flow_rename.sqlite3.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods"; +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows"; +ALTER TABLE "selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods"; +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows"; +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods"; +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows"; +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods"; +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows"; +ALTER TABLE "selfservice_verification_requests" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.cockroach.down.sql new file mode 100644 index 00000000000..0ccaced0ddf --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.cockroach.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_flow_id" TO "selfservice_login_request_id";COMMIT 
TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_flow_id" TO "selfservice_registration_request_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_flow_id" TO "selfservice_settings_request_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.cockroach.up.sql new file mode 100644 index 00000000000..9f7efff7e16 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.cockroach.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_settings_flow_id";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.mysql.down.sql new file mode 100644 index 00000000000..9a5473340c7 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.mysql.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE `selfservice_login_flow_methods` CHANGE `selfservice_login_flow_id` `selfservice_login_request_id` char(36) NOT NULL; +ALTER TABLE `selfservice_registration_flow_methods` CHANGE `selfservice_registration_flow_id` `selfservice_registration_request_id` char(36) NOT NULL; +ALTER TABLE `selfservice_settings_flow_methods` CHANGE `selfservice_settings_flow_id` `selfservice_settings_request_id` char(36) NOT NULL; +ALTER TABLE `selfservice_recovery_flow_methods` CHANGE `selfservice_recovery_flow_id` `selfservice_recovery_request_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.mysql.up.sql new file mode 100644 index 00000000000..93844b82e58 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.mysql.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE `selfservice_login_flow_methods` CHANGE `selfservice_login_request_id` `selfservice_login_flow_id` char(36) NOT NULL; +ALTER TABLE `selfservice_registration_flow_methods` CHANGE `selfservice_registration_request_id` `selfservice_registration_flow_id` char(36) NOT NULL; +ALTER TABLE `selfservice_recovery_flow_methods` CHANGE `selfservice_recovery_request_id` `selfservice_recovery_flow_id` char(36) NOT NULL; +ALTER TABLE `selfservice_settings_flow_methods` CHANGE `selfservice_settings_request_id` `selfservice_settings_flow_id` char(36) NOT NULL; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.postgres.down.sql new file mode 100644 index 00000000000..931cf718659 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.postgres.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_flow_id" TO "selfservice_login_request_id"; +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_flow_id" TO "selfservice_registration_request_id"; +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_flow_id" TO "selfservice_settings_request_id"; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.postgres.up.sql new file mode 100644 index 00000000000..9f351a5b96e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.postgres.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id"; +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id"; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_settings_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.sqlite3.down.sql new file mode 100644 index 00000000000..931cf718659 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.sqlite3.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_flow_id" TO "selfservice_login_request_id"; +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_flow_id" TO "selfservice_registration_request_id"; +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_flow_id" TO "selfservice_settings_request_id"; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.sqlite3.up.sql new file mode 100644 index 00000000000..9f351a5b96e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200810162450_flow_fields_rename.sqlite3.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id"; +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id"; +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN 
"selfservice_settings_request_id" TO "selfservice_settings_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..f9e4897d955 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "token";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..2549800558e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.cockroach.up.sql @@ -0,0 +1,8 @@ +DELETE FROM sessions;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "sessions" ADD COLUMN "token" VARCHAR (32);COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "sessions" RENAME COLUMN "token" TO "_token_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "sessions" ADD COLUMN "token" VARCHAR (32);COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "sessions" SET "token" = "_token_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "sessions" DROP COLUMN "_token_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "sessions_token_idx" ON "sessions" (token);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.mysql.down.sql new file mode 100644 index 00000000000..3ee676ed8f3 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` DROP COLUMN `token`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.mysql.up.sql new file mode 100644 index 00000000000..fed24fb5670 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.mysql.up.sql @@ -0,0 +1,5 @@ +DELETE FROM sessions; +ALTER TABLE `sessions` ADD COLUMN `token` VARCHAR (32); +ALTER TABLE `sessions` MODIFY `token` VARCHAR (32); +CREATE UNIQUE INDEX `sessions_token_uq_idx` ON `sessions` (`token`); +CREATE INDEX `sessions_token_idx` ON `sessions` (`token`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.postgres.down.sql new file mode 100644 index 00000000000..9cab681fc1e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "token"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.postgres.up.sql new file mode 100644 index 00000000000..77edf55152f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.postgres.up.sql @@ -0,0 +1,5 @@ +DELETE FROM sessions; +ALTER TABLE "sessions" ADD COLUMN "token" 
VARCHAR (32); +ALTER TABLE "sessions" ALTER COLUMN "token" TYPE VARCHAR (32), ALTER COLUMN "token" DROP NOT NULL; +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token); +CREATE INDEX "sessions_token_idx" ON "sessions" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..75b3650950f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.sqlite3.down.sql @@ -0,0 +1,16 @@ +DROP INDEX IF EXISTS "sessions_token_uq_idx"; +DROP INDEX IF EXISTS "sessions_token_idx"; +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_sessions_tmp" (id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at FROM "sessions"; + +DROP TABLE "sessions"; +ALTER TABLE "_sessions_tmp" RENAME TO "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..a9f847a378e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812124254_add_session_token.sqlite3.up.sql @@ -0,0 +1,18 @@ +DELETE FROM sessions; +ALTER TABLE "sessions" ADD COLUMN "token" TEXT; +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"token" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +INSERT INTO "_sessions_tmp" (id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token FROM "sessions"; +DROP TABLE "sessions"; +ALTER TABLE "_sessions_tmp" RENAME TO "sessions"; +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token); +CREATE INDEX "sessions_token_idx" ON "sessions" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.cockroach.down.sql new file mode 100644 index 00000000000..f51c1999428 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "active";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.cockroach.up.sql new file mode 100644 index 00000000000..cc90f31fc16 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN 
"active" boolean DEFAULT 'false';COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.mysql.down.sql new file mode 100644 index 00000000000..fd675bf09cf --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` DROP COLUMN `active`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.mysql.up.sql new file mode 100644 index 00000000000..80f88e214c7 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` ADD COLUMN `active` boolean DEFAULT false; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.postgres.down.sql new file mode 100644 index 00000000000..4e81ca50803 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "active"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.postgres.up.sql new file mode 100644 index 00000000000..d0f23849f23 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "active" boolean DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..9e7bfcb613c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.sqlite3.down.sql @@ -0,0 +1,19 @@ +DROP INDEX IF EXISTS "sessions_token_idx"; +DROP INDEX IF EXISTS "sessions_token_uq_idx"; +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"token" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE INDEX "sessions_token_idx" ON "_sessions_tmp" (token); +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "_sessions_tmp" (token); +INSERT INTO "_sessions_tmp" (id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token FROM "sessions"; + +DROP TABLE "sessions"; +ALTER TABLE "_sessions_tmp" RENAME TO "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..77302570222 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200812160551_add_session_revoke.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "active" NUMERIC 
DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.cockroach.down.sql new file mode 100644 index 00000000000..180633b1914 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.cockroach.up.sql new file mode 100644 index 00000000000..b254b6eaef6 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.mysql.down.sql new file mode 100644 index 00000000000..b1096fe505a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` CHANGE `selfservice_recovery_flow_id` `selfservice_recovery_request_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.mysql.up.sql new file mode 100644 index 00000000000..26017ff6451 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` CHANGE `selfservice_recovery_request_id` `selfservice_recovery_flow_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.postgres.down.sql new file mode 100644 index 00000000000..5dad6b6d7ae --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.postgres.up.sql new file mode 100644 index 00000000000..3f0a2da51fc --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.sqlite3.down.sql new file mode 100644 index 00000000000..5dad6b6d7ae --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.sqlite3.down.sql @@ 
-0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.sqlite3.up.sql new file mode 100644 index 00000000000..3f0a2da51fc --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830121710_update_recovery_token.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..25bc8494b71 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.cockroach.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "form" json NOT NULL DEFAULT '{}';COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP TABLE "selfservice_verification_flow_methods";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "active_method";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "state";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "via" VARCHAR (16) NOT NULL DEFAULT 'email';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "success" bool NOT NULL DEFAULT FALSE;COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..c325dcbb92a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form';COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..fd97b4cc64a --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.mysql.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `form` JSON; +UPDATE selfservice_verification_flows SET form=(SELECT * FROM (SELECT m.config FROM selfservice_verification_flows AS r INNER JOIN selfservice_verification_flow_methods AS m ON r.id=m.selfservice_verification_flow_id) as t); +ALTER TABLE `selfservice_verification_flows` MODIFY `form` JSON; +DROP TABLE `selfservice_verification_flow_methods`; +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `active_method`; +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `state`; +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `via` VARCHAR (16) NOT NULL DEFAULT 'email'; +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `success` bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git 
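Aside on the MySQL down-migration just shown: it restores the dropped form column and backfills it from selfservice_verification_flow_methods before that table is removed. The backfill wraps the join in a derived table (SELECT * FROM (...) AS t), which is the usual workaround for MySQL's refusal to update a table that is also read by a subquery in the same statement (error 1093); the Postgres variant a little further down mirrors the same shape. A minimal sketch of the pattern, using hypothetical tables flows(id, form) and flow_methods(flow_id, config):

-- Hypothetical tables, not part of the diff above.
-- Note: the scalar subquery is uncorrelated, so it must yield a single row.
UPDATE flows
SET form = (
    SELECT * FROM (
        SELECT m.config
        FROM flows AS f
        INNER JOIN flow_methods AS m ON f.id = m.flow_id
    ) AS t
);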
a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..ee5b748a4d9 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `state` VARCHAR (255) NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..adbf65d9802 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.postgres.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "form" jsonb; +UPDATE selfservice_verification_flows SET form=(SELECT * FROM (SELECT m.config FROM selfservice_verification_flows AS r INNER JOIN selfservice_verification_flow_methods AS m ON r.id=m.selfservice_verification_flow_id) as t); +ALTER TABLE "selfservice_verification_flows" ALTER COLUMN "form" TYPE jsonb, ALTER COLUMN "form" DROP NOT NULL; +DROP TABLE "selfservice_verification_flow_methods"; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "active_method"; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "state"; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "via" VARCHAR (16) NOT NULL DEFAULT 'email'; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "success" bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..5792ef9ebbb --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..27a7bfa3bc0 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.sqlite3.down.sql @@ -0,0 +1,34 @@ +DROP TABLE "selfservice_verification_flow_methods"; +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form' +); +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state FROM "selfservice_verification_flows"; + +DROP TABLE "selfservice_verification_flows"; +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, 
+"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser' +); +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type FROM "selfservice_verification_flows"; + +DROP TABLE "selfservice_verification_flows"; +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "via" TEXT NOT NULL DEFAULT 'email'; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "success" bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..af3d919d03e --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130642_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" TEXT NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130643_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.cockroach.up.sql new 
file mode 100644 index 00000000000..34053ed9266 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.cockroach.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_verification_flow_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +);COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" VARCHAR (32);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..2dfbfde2dee --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.mysql.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE `selfservice_verification_flow_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_verification_flow_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `active_method` VARCHAR (32); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e8761c6cdef --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.postgres.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_verification_flow_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" VARCHAR (32); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..6677fbd678c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130644_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_verification_flow_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" TEXT; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git 
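Aside on the CockroachDB variants in this batch: each DDL statement is terminated with ;COMMIT TRANSACTION;BEGIN TRANSACTION;. This appears to be a deliberate workaround, assuming the migration runner wraps every script in its own transaction: older CockroachDB releases handled multiple schema changes inside one explicit transaction poorly, so the script commits after each statement and immediately opens a fresh transaction for the runner's closing COMMIT to complete. A sketch of what effectively executes, with a hypothetical table t:

BEGIN TRANSACTION;                -- presumably opened by the migration runner
ALTER TABLE t ADD COLUMN c INT;   -- statement from the migration file
COMMIT TRANSACTION;               -- injected by the file so the DDL commits on its own
BEGIN TRANSACTION;                -- reopened so the runner's final COMMIT still succeeds
COMMIT;                           -- issued by the runner at the end of the script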
a/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130645_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..fdccf043a2b --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.cockroach.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "form";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "via";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "success";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..496f1eb3058 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.mysql.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `form`; +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `via`; +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `success`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..3aa5b8e80ec --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.postgres.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN 
"form"; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "via"; +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "success"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..db1391dccbf --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830130646_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,54 @@ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"via" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +); +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, via, csrf_token, success, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, via, csrf_token, success, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows"; + +DROP TABLE "selfservice_verification_flows"; +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +); +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, success, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, csrf_token, success, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows"; + +DROP TABLE "selfservice_verification_flows"; +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +); +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows"; + +DROP TABLE "selfservice_verification_flows"; +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.cockroach.down.sql 
b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.cockroach.down.sql new file mode 100644 index 00000000000..374a2cf8746 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verification_tokens";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.cockroach.up.sql new file mode 100644 index 00000000000..12d59e26e93 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.cockroach.up.sql @@ -0,0 +1,19 @@ +CREATE TABLE "identity_verification_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"expires_at" timestamp NOT NULL, +"issued_at" timestamp NOT NULL, +"identity_verifiable_address_id" UUID NOT NULL, +"selfservice_verification_flow_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_verification_tokens_identity_verifiable_addresses_id_fk" FOREIGN KEY ("identity_verifiable_address_id") REFERENCES "identity_verifiable_addresses" ("id") ON DELETE cascade, +CONSTRAINT "identity_verification_tokens_selfservice_verification_flows_id_fk" FOREIGN KEY ("selfservice_verification_flow_id") REFERENCES "selfservice_verification_flows" ("id") ON DELETE cascade +);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON "identity_verification_tokens" (selfservice_verification_flow_id);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.mysql.down.sql new file mode 100644 index 00000000000..5696963717f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_verification_tokens`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.mysql.up.sql new file mode 100644 index 00000000000..6050119cf43 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.mysql.up.sql @@ -0,0 +1,19 @@ +CREATE TABLE `identity_verification_tokens` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`token` VARCHAR (64) NOT NULL, +`used` bool NOT NULL DEFAULT false, +`used_at` DATETIME, +`expires_at` DATETIME NOT NULL, +`issued_at` DATETIME NOT NULL, +`identity_verifiable_address_id` char(36) NOT NULL, +`selfservice_verification_flow_id` char(36), +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_verifiable_address_id`) REFERENCES `identity_verifiable_addresses` (`id`) ON DELETE 
cascade, +FOREIGN KEY (`selfservice_verification_flow_id`) REFERENCES `selfservice_verification_flows` (`id`) ON DELETE cascade +) ENGINE=InnoDB; +CREATE UNIQUE INDEX `identity_verification_tokens_token_uq_idx` ON `identity_verification_tokens` (`token`); +CREATE INDEX `identity_verification_tokens_token_idx` ON `identity_verification_tokens` (`token`); +CREATE INDEX `identity_verification_tokens_verifiable_address_id_idx` ON `identity_verification_tokens` (`identity_verifiable_address_id`); +CREATE INDEX `identity_verification_tokens_verification_flow_id_idx` ON `identity_verification_tokens` (`selfservice_verification_flow_id`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.postgres.down.sql new file mode 100644 index 00000000000..8b455721a90 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verification_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.postgres.up.sql new file mode 100644 index 00000000000..a17183955b4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.postgres.up.sql @@ -0,0 +1,19 @@ +CREATE TABLE "identity_verification_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"expires_at" timestamp NOT NULL, +"issued_at" timestamp NOT NULL, +"identity_verifiable_address_id" UUID NOT NULL, +"selfservice_verification_flow_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_verifiable_address_id") REFERENCES "identity_verifiable_addresses" ("id") ON DELETE cascade, +FOREIGN KEY ("selfservice_verification_flow_id") REFERENCES "selfservice_verification_flows" ("id") ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token); +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token); +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id); +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON "identity_verification_tokens" (selfservice_verification_flow_id); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.sqlite3.down.sql new file mode 100644 index 00000000000..8b455721a90 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verification_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.sqlite3.up.sql new file mode 100644 index 00000000000..9250c460471 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830154602_add_verification_token.sqlite3.up.sql @@ -0,0 +1,18 @@ +CREATE TABLE "identity_verification_tokens" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL 
DEFAULT 'false', +"used_at" DATETIME, +"expires_at" DATETIME NOT NULL, +"issued_at" DATETIME NOT NULL, +"identity_verifiable_address_id" char(36) NOT NULL, +"selfservice_verification_flow_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_verifiable_address_id) REFERENCES identity_verifiable_addresses (id) ON DELETE cascade, +FOREIGN KEY (selfservice_verification_flow_id) REFERENCES selfservice_verification_flows (id) ON DELETE cascade +); +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token); +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token); +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id); +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON "identity_verification_tokens" (selfservice_verification_flow_id); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..9a992a2f817 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.cockroach.down.sql @@ -0,0 +1,10 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL; +ALTER TABLE "identity_recovery_tokens" DROP CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "_selfservice_recovery_flow_id_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "selfservice_recovery_flow_id" UUID;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "identity_recovery_tokens" SET "selfservice_recovery_flow_id" = "_selfservice_recovery_flow_id_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" ALTER COLUMN "selfservice_recovery_flow_id" SET NOT NULL;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "_selfservice_recovery_flow_id_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" ADD CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_flow_id") REFERENCES "selfservice_recovery_flows" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "expires_at";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "issued_at";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..adb3e1df19c --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.cockroach.up.sql @@ -0,0 +1,8 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "expires_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00';COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" DROP 
CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "_selfservice_recovery_flow_id_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "selfservice_recovery_flow_id" UUID;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "identity_recovery_tokens" SET "selfservice_recovery_flow_id" = "_selfservice_recovery_flow_id_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "_selfservice_recovery_flow_id_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_recovery_tokens" ADD CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_flow_id") REFERENCES "selfservice_recovery_flows" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.mysql.down.sql new file mode 100644 index 00000000000..696bde31672 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.mysql.down.sql @@ -0,0 +1,4 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL; +ALTER TABLE `identity_recovery_tokens` MODIFY `selfservice_recovery_flow_id` char(36) NOT NULL; +ALTER TABLE `identity_recovery_tokens` DROP COLUMN `expires_at`; +ALTER TABLE `identity_recovery_tokens` DROP COLUMN `issued_at`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.mysql.up.sql new file mode 100644 index 00000000000..e5f6e9dca6f --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.mysql.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE `identity_recovery_tokens` ADD COLUMN `expires_at` DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00'; +ALTER TABLE `identity_recovery_tokens` ADD COLUMN `issued_at` DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00'; +ALTER TABLE `identity_recovery_tokens` MODIFY `selfservice_recovery_flow_id` char(36); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.postgres.down.sql new file mode 100644 index 00000000000..38e7b8b80be --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.postgres.down.sql @@ -0,0 +1,4 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL; +ALTER TABLE "identity_recovery_tokens" ALTER COLUMN "selfservice_recovery_flow_id" TYPE UUID, ALTER COLUMN "selfservice_recovery_flow_id" SET NOT NULL; +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "expires_at"; +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "issued_at"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.postgres.up.sql new file mode 100644 index 00000000000..79c2e87dcb1 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.postgres.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE "identity_recovery_tokens" ADD 
COLUMN "expires_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00'; +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00'; +ALTER TABLE "identity_recovery_tokens" ALTER COLUMN "selfservice_recovery_flow_id" TYPE UUID, ALTER COLUMN "selfservice_recovery_flow_id" DROP NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..ea864bb2f14 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1,63 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx"; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx"; +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token); +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token); +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at FROM "identity_recovery_tokens"; +DROP TABLE "identity_recovery_tokens"; +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx"; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx"; +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token); +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token); +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, 
selfservice_recovery_flow_id, created_at, updated_at, issued_at FROM "identity_recovery_tokens"; + +DROP TABLE "identity_recovery_tokens"; +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx"; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx"; +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token); +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token); +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at FROM "identity_recovery_tokens"; + +DROP TABLE "identity_recovery_tokens"; +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..7ad4a619cd7 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200830172221_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1,23 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00'; +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00'; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx"; +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx"; +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token); +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token); +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at FROM 
"identity_recovery_tokens"; +DROP TABLE "identity_recovery_tokens"; +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..932fd7175a4 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1,15 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" VARCHAR (32);COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" timestamp;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE identity_verifiable_addresses SET code = substr(md5(uuid_v4()), 0, 32) WHERE code IS NULL; +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL; +ALTER TABLE "identity_verifiable_addresses" RENAME COLUMN "code" TO "_code_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" VARCHAR (32);COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "identity_verifiable_addresses" SET "code" = "_code_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" ALTER COLUMN "code" SET NOT NULL;COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "_code_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" RENAME COLUMN "expires_at" TO "_expires_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" timestamp;COMMIT TRANSACTION;BEGIN TRANSACTION; +UPDATE "identity_verifiable_addresses" SET "expires_at" = "_expires_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "_expires_at_tmp";COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code);COMMIT TRANSACTION;BEGIN TRANSACTION; +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code);COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..24a61dfc341 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.cockroach.up.sql @@ -0,0 +1,4 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_uq_idx";COMMIT TRANSACTION;BEGIN TRANSACTION; +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_idx";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "code";COMMIT TRANSACTION;BEGIN TRANSACTION; +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "expires_at";COMMIT TRANSACTION;BEGIN TRANSACTION; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..be3f6007465 --- /dev/null +++ 
b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE `identity_verifiable_addresses` ADD COLUMN `code` VARCHAR (32); +ALTER TABLE `identity_verifiable_addresses` ADD COLUMN `expires_at` DATETIME; +UPDATE identity_verifiable_addresses SET code = LEFT(MD5(RAND()), 32) WHERE code IS NULL; +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL; +ALTER TABLE `identity_verifiable_addresses` MODIFY `code` VARCHAR (32) NOT NULL; +ALTER TABLE `identity_verifiable_addresses` MODIFY `expires_at` DATETIME; +CREATE UNIQUE INDEX `identity_verifiable_addresses_code_uq_idx` ON `identity_verifiable_addresses` (`code`); +CREATE INDEX `identity_verifiable_addresses_code_idx` ON `identity_verifiable_addresses` (`code`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..91dafe3bfff --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.mysql.up.sql @@ -0,0 +1,4 @@ +DROP INDEX `identity_verifiable_addresses_code_uq_idx` ON `identity_verifiable_addresses`; +DROP INDEX `identity_verifiable_addresses_code_idx` ON `identity_verifiable_addresses`; +ALTER TABLE `identity_verifiable_addresses` DROP COLUMN `code`; +ALTER TABLE `identity_verifiable_addresses` DROP COLUMN `expires_at`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..6425e54a221 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" VARCHAR (32); +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" timestamp; +UPDATE identity_verifiable_addresses SET code = substr(md5(random()::text), 0, 32) WHERE code IS NULL; +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL; +ALTER TABLE "identity_verifiable_addresses" ALTER COLUMN "code" TYPE VARCHAR (32), ALTER COLUMN "code" SET NOT NULL; +ALTER TABLE "identity_verifiable_addresses" ALTER COLUMN "expires_at" TYPE timestamp, ALTER COLUMN "expires_at" DROP NOT NULL; +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code); +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..840985ef322 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.postgres.up.sql @@ -0,0 +1,4 @@ +DROP INDEX "identity_verifiable_addresses_code_uq_idx"; +DROP INDEX "identity_verifiable_addresses_code_idx"; +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "code"; +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "expires_at"; \ No newline at 
end of file diff --git a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..f79f141b1ea --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1,48 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" TEXT; +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" DATETIME; +UPDATE identity_verifiable_addresses SET code = substr(hex(randomblob(32)), 0, 32) WHERE code IS NULL; +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx"; +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"code" TEXT NOT NULL, +"expires_at" DATETIME, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at) SELECT id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at FROM "identity_verifiable_addresses"; +DROP TABLE "identity_verifiable_addresses"; +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx"; +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"code" TEXT NOT NULL, +"expires_at" DATETIME, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at) SELECT id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at FROM "identity_verifiable_addresses"; +DROP TABLE "identity_verifiable_addresses"; +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses"; +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code); +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); \ No newline at end of file diff --git 
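Aside on the sqlite3 down-migration just shown: it re-adds code and expires_at with plain ADD COLUMN and backfills them, but then rebuilds the table, because SQLite has no ALTER COLUMN. A column cannot be tightened to NOT NULL (or have its type or default changed) in place, and ALTER TABLE ... DROP COLUMN only arrived in SQLite 3.35. Hence the recurring pattern in these sqlite3 scripts: drop dependent indexes, create a _*_tmp table with the desired shape, copy the rows across, drop the original, rename the copy back, and recreate the indexes. A minimal sketch of the same recipe, dropping column b from a hypothetical table t(a, b):

-- Hypothetical table t(a, b), not part of the diff above; removes b by rebuilding.
DROP INDEX IF EXISTS "t_a_idx";
CREATE TABLE "_t_tmp" ("a" TEXT PRIMARY KEY);
INSERT INTO "_t_tmp" (a) SELECT a FROM "t";
DROP TABLE "t";
ALTER TABLE "_t_tmp" RENAME TO "t";
CREATE INDEX "t_a_idx" ON "t" (a);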
a/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..1279dc64fd5 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20200831110752_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1,43 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_uq_idx"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_idx"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx"; +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"expires_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, expires_at, identity_id, created_at, updated_at) SELECT id, status, via, verified, value, verified_at, expires_at, identity_id, created_at, updated_at FROM "identity_verifiable_addresses"; + +DROP TABLE "identity_verifiable_addresses"; +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx"; +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx"; +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +); +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value); +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at) SELECT id, status, via, verified, value, verified_at, identity_id, created_at, updated_at FROM "identity_verifiable_addresses"; + +DROP TABLE "identity_verifiable_addresses"; +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.cockroach.down.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.cockroach.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.cockroach.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.cockroach.up.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.cockroach.up.sql new file mode 100644 index 00000000000..ec08e32a9bd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.cockroach.up.sql @@ -0,0 +1,2 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password'); +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.mysql.down.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.mysql.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.mysql.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.mysql.up.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.mysql.up.sql new file mode 100644 index 00000000000..ec08e32a9bd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.mysql.up.sql @@ -0,0 +1,2 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password'); +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.postgres.down.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.postgres.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.postgres.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.postgres.up.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.postgres.up.sql new file mode 100644 index 00000000000..ec08e32a9bd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.postgres.up.sql @@ -0,0 +1,2 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password'); +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.sqlite3.down.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.sqlite3.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ 
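Aside on the credential-type seed migrations above: they use INSERT ... SELECT ... WHERE NOT EXISTS rather than a plain INSERT, so the statement is idempotent. If a row named 'password' or 'oidc' already exists, nothing is inserted and the unique index on identity_credential_types.name (declared in the fizz source migration further down) is not violated; the matching down-migrations simply delete by name. The same shape with a hypothetical credential type:

-- Hypothetical id and name, not part of the diff above;
-- inserts only if no row with that name exists yet.
INSERT INTO identity_credential_types (id, name)
SELECT '00000000-0000-0000-0000-000000000001', 'totp'
WHERE NOT EXISTS (SELECT 1 FROM identity_credential_types WHERE name = 'totp');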
b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.sqlite3.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.sqlite3.up.sql b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.sqlite3.up.sql new file mode 100644 index 00000000000..ec08e32a9bd --- /dev/null +++ b/oryx/popx/stub/migrations/legacy/20201201161451_credential_types_values.sqlite3.up.sql @@ -0,0 +1,2 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password'); +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/notx/20241031_notx.autocommit.down.sql b/oryx/popx/stub/migrations/notx/20241031_notx.autocommit.down.sql new file mode 100644 index 00000000000..8ea7e8fb1bf --- /dev/null +++ b/oryx/popx/stub/migrations/notx/20241031_notx.autocommit.down.sql @@ -0,0 +1 @@ +BEGIN;ROLLBACK; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/notx/20241031_notx.autocommit.up.sql b/oryx/popx/stub/migrations/notx/20241031_notx.autocommit.up.sql new file mode 100644 index 00000000000..8ea7e8fb1bf --- /dev/null +++ b/oryx/popx/stub/migrations/notx/20241031_notx.autocommit.up.sql @@ -0,0 +1 @@ +BEGIN;ROLLBACK; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/source/20191100000001_identities.down.fizz b/oryx/popx/stub/migrations/source/20191100000001_identities.down.fizz new file mode 100644 index 00000000000..149f0fd1a34 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000001_identities.down.fizz @@ -0,0 +1,4 @@ +drop_table("identity_credential_identifiers") +drop_table("identity_credentials") +drop_table("identity_credential_types") +drop_table("identities") diff --git a/oryx/popx/stub/migrations/source/20191100000001_identities.up.fizz b/oryx/popx/stub/migrations/source/20191100000001_identities.up.fizz new file mode 100644 index 00000000000..ee115259649 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000001_identities.up.fizz @@ -0,0 +1,34 @@ +create_table("identities") { + t.Column("id", "uuid", {primary: true}) + t.Column("traits_schema_id", "string", {"size": 2048}) + t.Column("traits", "json") +} + +create_table("identity_credential_types") { + t.Column("id", "uuid", {primary: true}) + t.Column("name", "string", { "size": 32 }) + + t.DisableTimestamps() +} + +add_index("identity_credential_types", "name", {"unique": true}) + +create_table("identity_credentials") { + t.Column("id", "uuid", {primary: true}) + t.Column("config", "json") + + t.Column("identity_credential_type_id", "uuid") + t.Column("identity_id", "uuid") + + t.ForeignKey("identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) + t.ForeignKey("identity_credential_type_id", {"identity_credential_types": ["id"]}, {"on_delete": "cascade"}) +} + +create_table("identity_credential_identifiers") { + t.Column("id", "uuid", {primary: true}) + t.Column("identifier", "string", {"size": 255}) + t.Column("identity_credential_id", "uuid") + t.ForeignKey("identity_credential_id", {"identity_credentials": ["id"]}, {"on_delete": "cascade"}) +} + +add_index("identity_credential_identifiers", 
"identifier", {"unique": true}) diff --git a/oryx/popx/stub/migrations/source/20191100000002_requests.down.fizz b/oryx/popx/stub/migrations/source/20191100000002_requests.down.fizz new file mode 100644 index 00000000000..d8a2fe23d25 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000002_requests.down.fizz @@ -0,0 +1,7 @@ +drop_table("selfservice_login_request_methods") +drop_table("selfservice_login_requests") + +drop_table("selfservice_registration_request_methods") +drop_table("selfservice_registration_requests") + +drop_table("selfservice_profile_management_requests") diff --git a/oryx/popx/stub/migrations/source/20191100000002_requests.up.fizz b/oryx/popx/stub/migrations/source/20191100000002_requests.up.fizz new file mode 100644 index 00000000000..2823b612f48 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000002_requests.up.fizz @@ -0,0 +1,47 @@ +create_table("selfservice_login_requests") { + t.Column("id", "uuid", {primary: true}) + t.Column("request_url", "string", {"size": 2048}) + t.Column("issued_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + t.Column("expires_at", "timestamp") + t.Column("active_method", "string", {"size": 32}) + t.Column("csrf_token", "string") +} + +create_table("selfservice_login_request_methods") { + t.Column("id", "uuid", {primary: true}) + t.Column("method", "string", {"size": 32}) + t.Column("selfservice_login_request_id", "uuid") + t.Column("config", "json") + + t.ForeignKey("selfservice_login_request_id", {"selfservice_login_requests": ["id"]}, {"on_delete": "cascade"}) +} + +create_table("selfservice_registration_requests") { + t.Column("id", "uuid", {primary: true}) + t.Column("request_url", "string", {"size": 2048}) + t.Column("issued_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + t.Column("expires_at", "timestamp") + t.Column("active_method", "string", {"size": 32}) + t.Column("csrf_token", "string") +} + +create_table("selfservice_registration_request_methods") { + t.Column("id", "uuid", {primary: true}) + t.Column("method", "string", {"size": 32}) + t.Column("selfservice_registration_request_id", "uuid") + t.Column("config", "json") + + t.ForeignKey("selfservice_registration_request_id", {"selfservice_registration_requests": ["id"]}, {"on_delete": "cascade"}) +} + +create_table("selfservice_profile_management_requests") { + t.Column("id", "uuid", {primary: true}) + t.Column("request_url", "string", {"size": 2048}) + t.Column("issued_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + t.Column("expires_at", "timestamp") + t.Column("form", "json") + t.Column("update_successful", "bool") + t.Column("identity_id", "uuid") + + t.ForeignKey("identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) +} diff --git a/oryx/popx/stub/migrations/source/20191100000003_sessions.down.fizz b/oryx/popx/stub/migrations/source/20191100000003_sessions.down.fizz new file mode 100644 index 00000000000..dc5c982c81f --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000003_sessions.down.fizz @@ -0,0 +1 @@ +drop_table("sessions") diff --git a/oryx/popx/stub/migrations/source/20191100000003_sessions.up.fizz b/oryx/popx/stub/migrations/source/20191100000003_sessions.up.fizz new file mode 100644 index 00000000000..f0eb2f3f136 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000003_sessions.up.fizz @@ -0,0 +1,9 @@ +create_table("sessions") { + t.Column("id", "uuid", {primary: true}) + t.Column("issued_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + t.Column("expires_at", 
"timestamp") + t.Column("authenticated_at", "timestamp") + t.Column("identity_id", "uuid") + + t.ForeignKey("identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) +} diff --git a/oryx/popx/stub/migrations/source/20191100000004_errors.down.fizz b/oryx/popx/stub/migrations/source/20191100000004_errors.down.fizz new file mode 100644 index 00000000000..9ada90a727b --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000004_errors.down.fizz @@ -0,0 +1 @@ +drop_table("selfservice_errors") diff --git a/oryx/popx/stub/migrations/source/20191100000004_errors.up.fizz b/oryx/popx/stub/migrations/source/20191100000004_errors.up.fizz new file mode 100644 index 00000000000..2911b5e73ba --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000004_errors.up.fizz @@ -0,0 +1,6 @@ +create_table("selfservice_errors") { + t.Column("id", "uuid", {primary: true}) + t.Column("errors", "json") + t.Column("seen_at", "timestamp") + t.Column("was_seen", "bool") +} diff --git a/oryx/popx/stub/migrations/source/20191100000005_identities.mysql.down.sql b/oryx/popx/stub/migrations/source/20191100000005_identities.mysql.down.sql new file mode 100644 index 00000000000..139e50a971e --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000005_identities.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255); diff --git a/oryx/popx/stub/migrations/source/20191100000005_identities.mysql.up.sql b/oryx/popx/stub/migrations/source/20191100000005_identities.mysql.up.sql new file mode 100644 index 00000000000..8069ee98f31 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000005_identities.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255) BINARY; diff --git a/oryx/popx/stub/migrations/source/20191100000006_courier.down.fizz b/oryx/popx/stub/migrations/source/20191100000006_courier.down.fizz new file mode 100644 index 00000000000..2da9c63dfc1 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000006_courier.down.fizz @@ -0,0 +1 @@ +drop_table("courier_messages") diff --git a/oryx/popx/stub/migrations/source/20191100000006_courier.up.fizz b/oryx/popx/stub/migrations/source/20191100000006_courier.up.fizz new file mode 100644 index 00000000000..5f6fda1012e --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000006_courier.up.fizz @@ -0,0 +1,10 @@ +create_table("courier_messages") { + t.Column("id", "uuid", {primary: true}) + + t.Column("type", "int") + t.Column("status", "int") + + t.Column("body", "string") + t.Column("subject", "string") + t.Column("recipient", "string") +} diff --git a/oryx/popx/stub/migrations/source/20191100000007_errors.down.fizz b/oryx/popx/stub/migrations/source/20191100000007_errors.down.fizz new file mode 100644 index 00000000000..6f093e7baa7 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000007_errors.down.fizz @@ -0,0 +1 @@ +drop_column("selfservice_errors", "csrf_token") diff --git a/oryx/popx/stub/migrations/source/20191100000007_errors.up.fizz b/oryx/popx/stub/migrations/source/20191100000007_errors.up.fizz new file mode 100644 index 00000000000..b5aa72a831c --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000007_errors.up.fizz @@ -0,0 +1 @@ +add_column("selfservice_errors", "csrf_token", "string", {"default": ""}) diff --git a/oryx/popx/stub/migrations/source/20191100000008_selfservice_verification.down.fizz b/oryx/popx/stub/migrations/source/20191100000008_selfservice_verification.down.fizz new file 
mode 100644 index 00000000000..48fd423d896 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000008_selfservice_verification.down.fizz @@ -0,0 +1,2 @@ +drop_table("selfservice_verification_requests") +drop_table("identity_verifiable_addresses") diff --git a/oryx/popx/stub/migrations/source/20191100000008_selfservice_verification.up.fizz b/oryx/popx/stub/migrations/source/20191100000008_selfservice_verification.up.fizz new file mode 100644 index 00000000000..3f58ba99620 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000008_selfservice_verification.up.fizz @@ -0,0 +1,35 @@ +create_table("identity_verifiable_addresses") { + t.Column("id", "uuid", {primary: true}) + + t.Column("code", "string", {"size": 32}) + t.Column("status", "string", {"size": 16}) + t.Column("via", "string", {"size": 16}) + t.Column("verified", "bool") + + t.Column("value", "string", {"size": 400}) + + t.Column("verified_at", "timestamp", {"null": true}) + t.Column("expires_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + + t.Column("identity_id", "uuid") + t.ForeignKey("identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) +} + +add_index("identity_verifiable_addresses", ["code"], { "unique": true, "name": "identity_verifiable_addresses_code_uq_idx" }) +add_index("identity_verifiable_addresses", ["code"], { "name": "identity_verifiable_addresses_code_idx" }) + +add_index("identity_verifiable_addresses", ["via", "value"], { "unique": true, "name": "identity_verifiable_addresses_status_via_uq_idx" }) +add_index("identity_verifiable_addresses", ["via", "value"], { "name": "identity_verifiable_addresses_status_via_idx" }) + +create_table("selfservice_verification_requests") { + t.Column("id", "uuid", {primary: true}) + + t.Column("request_url", "string", {"size": 2048}) + t.Column("issued_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + t.Column("expires_at", "timestamp") + + t.Column("form", "json") + t.Column("via", "string", {"size": 16}) + t.Column("csrf_token", "string") + t.Column("success", "bool") +} diff --git a/oryx/popx/stub/migrations/source/20191100000009_verification.mysql.down.sql b/oryx/popx/stub/migrations/source/20191100000009_verification.mysql.down.sql new file mode 100644 index 00000000000..f8a7e0f3c3a --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000009_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255); diff --git a/oryx/popx/stub/migrations/source/20191100000009_verification.mysql.up.sql b/oryx/popx/stub/migrations/source/20191100000009_verification.mysql.up.sql new file mode 100644 index 00000000000..d16bc788e88 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000009_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY; diff --git a/oryx/popx/stub/migrations/source/20191100000010_errors.down.fizz b/oryx/popx/stub/migrations/source/20191100000010_errors.down.fizz new file mode 100644 index 00000000000..daaf4c0ed82 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000010_errors.down.fizz @@ -0,0 +1,2 @@ +sql("UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL;") +change_column("selfservice_errors", "seen_at", "timestamp", { null: false }) diff --git a/oryx/popx/stub/migrations/source/20191100000010_errors.up.fizz b/oryx/popx/stub/migrations/source/20191100000010_errors.up.fizz new file mode 100644 index 00000000000..542c123e909 --- /dev/null +++ 
b/oryx/popx/stub/migrations/source/20191100000010_errors.up.fizz @@ -0,0 +1 @@ +change_column("selfservice_errors", "seen_at", "timestamp", { "null": true }) diff --git a/oryx/popx/stub/migrations/source/20191100000011_courier_body_type.down.fizz b/oryx/popx/stub/migrations/source/20191100000011_courier_body_type.down.fizz new file mode 100644 index 00000000000..178c60cf04a --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000011_courier_body_type.down.fizz @@ -0,0 +1,7 @@ +<%# + +Do nothing because the change will not be able to preserve data and the change is insignificant as it's compatible +with both code bases (prior and after this change). + +WARNING: https://github.com/gobuffalo/fizz/issues/45#issuecomment-586833728 +%> diff --git a/oryx/popx/stub/migrations/source/20191100000011_courier_body_type.up.fizz b/oryx/popx/stub/migrations/source/20191100000011_courier_body_type.up.fizz new file mode 100644 index 00000000000..3ca90e2d282 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000011_courier_body_type.up.fizz @@ -0,0 +1 @@ +change_column("courier_messages", "body", "text", {}) diff --git a/oryx/popx/stub/migrations/source/20191100000012_login_request_forced.down.fizz b/oryx/popx/stub/migrations/source/20191100000012_login_request_forced.down.fizz new file mode 100644 index 00000000000..43e866fe01d --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000012_login_request_forced.down.fizz @@ -0,0 +1 @@ +drop_column("selfservice_login_requests", "forced") diff --git a/oryx/popx/stub/migrations/source/20191100000012_login_request_forced.up.fizz b/oryx/popx/stub/migrations/source/20191100000012_login_request_forced.up.fizz new file mode 100644 index 00000000000..66fcd59166a --- /dev/null +++ b/oryx/popx/stub/migrations/source/20191100000012_login_request_forced.up.fizz @@ -0,0 +1 @@ +add_column("selfservice_login_requests", "forced", "bool", {"default": false}) diff --git a/oryx/popx/stub/migrations/source/20200317160354_create_profile_request_forms.down.fizz b/oryx/popx/stub/migrations/source/20200317160354_create_profile_request_forms.down.fizz new file mode 100644 index 00000000000..946a0d6f7ab --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200317160354_create_profile_request_forms.down.fizz @@ -0,0 +1,12 @@ +{{ if or .IsPostgreSQL .IsMySQL .IsMariaDB }} + add_column("selfservice_profile_management_requests", "form", "json", { "null": true }) + sql("UPDATE selfservice_profile_management_requests SET form=(SELECT * FROM (SELECT m.config FROM selfservice_profile_management_requests AS r INNER JOIN selfservice_profile_management_request_methods AS m ON r.id=m.selfservice_profile_management_request_id) as t);") + change_column("selfservice_profile_management_requests", "form", "json", { "null": false }) +{{ end }} + +{{ if .IsCockroach }} + add_column("selfservice_profile_management_requests", "form", "json", { "default": "{}" }) +{{ end }} + +drop_table("selfservice_profile_management_request_methods") +drop_column("selfservice_profile_management_requests", "active_method") diff --git a/oryx/popx/stub/migrations/source/20200317160354_create_profile_request_forms.up.fizz b/oryx/popx/stub/migrations/source/20200317160354_create_profile_request_forms.up.fizz new file mode 100644 index 00000000000..276dca672f7 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200317160354_create_profile_request_forms.up.fizz @@ -0,0 +1,12 @@ +create_table("selfservice_profile_management_request_methods") { + t.Column("id", "uuid", {primary: true}) + 
t.Column("method", "string", {"size": 32}) + t.Column("selfservice_profile_management_request_id", "uuid") + t.Column("config", "json") +} + +add_column("selfservice_profile_management_requests", "active_method", "string", {"size": 32, null: true}) + +sql("INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests;") + +drop_column("selfservice_profile_management_requests", "form") diff --git a/oryx/popx/stub/migrations/source/20200401183443_continuity_containers.down.fizz b/oryx/popx/stub/migrations/source/20200401183443_continuity_containers.down.fizz new file mode 100644 index 00000000000..956151d3f41 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200401183443_continuity_containers.down.fizz @@ -0,0 +1 @@ +drop_table("continuity_containers") diff --git a/oryx/popx/stub/migrations/source/20200401183443_continuity_containers.up.fizz b/oryx/popx/stub/migrations/source/20200401183443_continuity_containers.up.fizz new file mode 100644 index 00000000000..efaff422144 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200401183443_continuity_containers.up.fizz @@ -0,0 +1,11 @@ +create_table("continuity_containers") { + t.Column("id", "uuid", {primary: true}) + + t.Column("identity_id", "uuid", {null: true}) + + t.Column("name", "string") + t.Column("payload", "json", {null: true}) + t.Column("expires_at", "timestamp") + + t.ForeignKey("identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) +} diff --git a/oryx/popx/stub/migrations/source/20200402142539_rename_profile_flows.down.fizz b/oryx/popx/stub/migrations/source/20200402142539_rename_profile_flows.down.fizz new file mode 100644 index 00000000000..cbf6f9842c3 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200402142539_rename_profile_flows.down.fizz @@ -0,0 +1,5 @@ +rename_column("selfservice_settings_request_methods", "selfservice_settings_request_id", "selfservice_profile_management_request_id") + +rename_table("selfservice_settings_request_methods", "selfservice_profile_management_request_methods") +rename_table("selfservice_settings_requests", "selfservice_profile_management_requests") + diff --git a/oryx/popx/stub/migrations/source/20200402142539_rename_profile_flows.up.fizz b/oryx/popx/stub/migrations/source/20200402142539_rename_profile_flows.up.fizz new file mode 100644 index 00000000000..4b0132be7fc --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200402142539_rename_profile_flows.up.fizz @@ -0,0 +1,4 @@ +rename_column("selfservice_profile_management_request_methods", "selfservice_profile_management_request_id", "selfservice_settings_request_id") + +rename_table("selfservice_profile_management_request_methods", "selfservice_settings_request_methods") +rename_table("selfservice_profile_management_requests", "selfservice_settings_requests") diff --git a/oryx/popx/stub/migrations/source/20200519101057_create_recovery_addresses.down.fizz b/oryx/popx/stub/migrations/source/20200519101057_create_recovery_addresses.down.fizz new file mode 100644 index 00000000000..04b5fe66293 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200519101057_create_recovery_addresses.down.fizz @@ -0,0 +1,4 @@ +drop_table("identity_recovery_tokens") +drop_table("selfservice_recovery_request_methods") +drop_table("selfservice_recovery_requests") +drop_table("identity_recovery_addresses") diff --git a/oryx/popx/stub/migrations/source/20200519101057_create_recovery_addresses.up.fizz 
b/oryx/popx/stub/migrations/source/20200519101057_create_recovery_addresses.up.fizz new file mode 100644 index 00000000000..b0371141b01 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200519101057_create_recovery_addresses.up.fizz @@ -0,0 +1,52 @@ +create_table("identity_recovery_addresses") { + t.Column("id", "uuid", {primary: true}) + + t.Column("via", "string", {"size": 16}) + t.Column("value", "string", {"size": 400}) + + t.Column("identity_id", "uuid") + t.ForeignKey("identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) +} + +add_index("identity_recovery_addresses", ["via", "value"], { "unique": true, "name": "identity_recovery_addresses_status_via_uq_idx" }) +add_index("identity_recovery_addresses", ["via", "value"], { "name": "identity_recovery_addresses_status_via_idx" }) + +create_table("selfservice_recovery_requests") { + t.Column("id", "uuid", {primary: true}) + t.Column("request_url", "string", {"size": 2048}) + t.Column("issued_at", "timestamp", { "default_raw": "CURRENT_TIMESTAMP" }) + t.Column("expires_at", "timestamp") + t.Column("messages", "json", {"null": true}) + t.Column("active_method", "string", {"size": 32, "null": true}) + t.Column("csrf_token", "string") + t.Column("state", "string", {"size": 32}) + + t.Column("recovered_identity_id", "uuid", { "null": true }) + t.ForeignKey("recovered_identity_id", {"identities": ["id"]}, {"on_delete": "cascade"}) +} + +create_table("selfservice_recovery_request_methods") { + t.Column("id", "uuid", {primary: true}) + t.Column("method", "string", {"size": 32}) + t.Column("config", "json") + + t.Column("selfservice_recovery_request_id", "uuid") + t.ForeignKey("selfservice_recovery_request_id", {"selfservice_recovery_requests": ["id"]}, {"on_delete": "cascade"}) +} + +create_table("identity_recovery_tokens") { + t.Column("id", "uuid", {primary: true}) + + t.Column("token", "string", {"size": 64}) + t.Column("used", "bool", {"default": false}) + t.Column("used_at", "timestamp", {"null": true}) + + t.Column("identity_recovery_address_id", "uuid") + t.ForeignKey("identity_recovery_address_id", {"identity_recovery_addresses": ["id"]}, {"on_delete": "cascade"}) + + t.Column("selfservice_recovery_request_id", "uuid") + t.ForeignKey("selfservice_recovery_request_id", {"selfservice_recovery_requests": ["id"]}, {"on_delete": "cascade"}) +} + +add_index("identity_recovery_tokens", ["token"], { "unique": true, "name": "identity_recovery_addresses_code_uq_idx" }) +add_index("identity_recovery_tokens", ["token"], { "name": "identity_recovery_addresses_code_idx" }) diff --git a/oryx/popx/stub/migrations/source/20200519101058_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/source/20200519101058_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..54c99e1acb3 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200519101058_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64); diff --git a/oryx/popx/stub/migrations/source/20200519101058_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/source/20200519101058_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..7972b3405fb --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200519101058_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64) BINARY; diff --git a/oryx/popx/stub/migrations/source/20200601101000_create_messages.down.fizz 
b/oryx/popx/stub/migrations/source/20200601101000_create_messages.down.fizz new file mode 100644 index 00000000000..602d6ec6aeb --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200601101000_create_messages.down.fizz @@ -0,0 +1 @@ +drop_column("selfservice_settings_requests", "messages") diff --git a/oryx/popx/stub/migrations/source/20200601101000_create_messages.up.fizz b/oryx/popx/stub/migrations/source/20200601101000_create_messages.up.fizz new file mode 100644 index 00000000000..a4e0d5f3c1d --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200601101000_create_messages.up.fizz @@ -0,0 +1 @@ +add_column("selfservice_settings_requests", "messages", "json", {"null": true}) diff --git a/oryx/popx/stub/migrations/source/20200601101001_verification.mysql.down.sql b/oryx/popx/stub/migrations/source/20200601101001_verification.mysql.down.sql new file mode 100644 index 00000000000..d16bc788e88 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200601101001_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY; diff --git a/oryx/popx/stub/migrations/source/20200601101001_verification.mysql.up.sql b/oryx/popx/stub/migrations/source/20200601101001_verification.mysql.up.sql new file mode 100644 index 00000000000..3bf20defb8c --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200601101001_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(32) BINARY; diff --git a/oryx/popx/stub/migrations/source/20200605111551_messages.down.fizz b/oryx/popx/stub/migrations/source/20200605111551_messages.down.fizz new file mode 100644 index 00000000000..81d91dba9a1 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200605111551_messages.down.fizz @@ -0,0 +1,3 @@ +drop_column("selfservice_verification_requests", "messages") +drop_column("selfservice_login_requests", "messages") +drop_column("selfservice_registration_requests", "messages") diff --git a/oryx/popx/stub/migrations/source/20200605111551_messages.up.fizz b/oryx/popx/stub/migrations/source/20200605111551_messages.up.fizz new file mode 100644 index 00000000000..23704c0d5f8 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200605111551_messages.up.fizz @@ -0,0 +1,3 @@ +add_column("selfservice_verification_requests", "messages", "json", {"null": true}) +add_column("selfservice_login_requests", "messages", "json", {"null": true}) +add_column("selfservice_registration_requests", "messages", "json", {"null": true}) diff --git a/oryx/popx/stub/migrations/source/20200607165100_settings.down.fizz b/oryx/popx/stub/migrations/source/20200607165100_settings.down.fizz new file mode 100644 index 00000000000..89b26ed5ab7 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200607165100_settings.down.fizz @@ -0,0 +1,2 @@ +drop_column("selfservice_settings_requests", "state") +add_column("selfservice_settings_requests", "update_successful", "bool", {"default": false}) diff --git a/oryx/popx/stub/migrations/source/20200607165100_settings.up.fizz b/oryx/popx/stub/migrations/source/20200607165100_settings.up.fizz new file mode 100644 index 00000000000..c7f36073590 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200607165100_settings.up.fizz @@ -0,0 +1,2 @@ +add_column("selfservice_settings_requests", "state", "string", {"default": "show_form"}) +drop_column("selfservice_settings_requests", "update_successful") diff --git a/oryx/popx/stub/migrations/source/20200705105359_rename_identities_schema.down.fizz 
b/oryx/popx/stub/migrations/source/20200705105359_rename_identities_schema.down.fizz new file mode 100644 index 00000000000..ed0715fca89 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200705105359_rename_identities_schema.down.fizz @@ -0,0 +1 @@ +rename_column("identities", "schema_id", "traits_schema_id") diff --git a/oryx/popx/stub/migrations/source/20200705105359_rename_identities_schema.up.fizz b/oryx/popx/stub/migrations/source/20200705105359_rename_identities_schema.up.fizz new file mode 100644 index 00000000000..5a9159b835a --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200705105359_rename_identities_schema.up.fizz @@ -0,0 +1 @@ +rename_column("identities", "traits_schema_id", "schema_id") diff --git a/oryx/popx/stub/migrations/source/20200810141652_flow_type.down.fizz b/oryx/popx/stub/migrations/source/20200810141652_flow_type.down.fizz new file mode 100644 index 00000000000..eee4e4e2333 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200810141652_flow_type.down.fizz @@ -0,0 +1,5 @@ +drop_column("selfservice_login_requests", "type") +drop_column("selfservice_registration_requests", "type") +drop_column("selfservice_settings_requests", "type") +drop_column("selfservice_recovery_requests", "type") +drop_column("selfservice_verification_requests", "type") diff --git a/oryx/popx/stub/migrations/source/20200810141652_flow_type.up.fizz b/oryx/popx/stub/migrations/source/20200810141652_flow_type.up.fizz new file mode 100644 index 00000000000..90c2a763e70 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200810141652_flow_type.up.fizz @@ -0,0 +1,5 @@ +add_column("selfservice_login_requests", "type", "string", {"default": "browser", "size": 16}) +add_column("selfservice_registration_requests", "type", "string", {"default": "browser", "size": 16}) +add_column("selfservice_settings_requests", "type", "string", {"default": "browser", "size": 16}) +add_column("selfservice_recovery_requests", "type", "string", {"default": "browser", "size": 16}) +add_column("selfservice_verification_requests", "type", "string", {"default": "browser", "size": 16}) diff --git a/oryx/popx/stub/migrations/source/20200810161022_flow_rename.down.fizz b/oryx/popx/stub/migrations/source/20200810161022_flow_rename.down.fizz new file mode 100644 index 00000000000..3ddf846d554 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200810161022_flow_rename.down.fizz @@ -0,0 +1,13 @@ +rename_table("selfservice_login_flows", "selfservice_login_requests") +rename_table("selfservice_login_flow_methods", "selfservice_login_request_methods") + +rename_table("selfservice_registration_flow_methods", "selfservice_registration_request_methods") +rename_table("selfservice_registration_flows", "selfservice_registration_requests") + +rename_table("selfservice_settings_flow_methods", "selfservice_settings_request_methods") +rename_table("selfservice_settings_flows", "selfservice_settings_requests") + +rename_table("selfservice_recovery_flow_methods", "selfservice_recovery_request_methods") +rename_table("selfservice_recovery_flows", "selfservice_recovery_requests") + +rename_table("selfservice_verification_flows", "selfservice_verification_requests") diff --git a/oryx/popx/stub/migrations/source/20200810161022_flow_rename.up.fizz b/oryx/popx/stub/migrations/source/20200810161022_flow_rename.up.fizz new file mode 100644 index 00000000000..469afdd3bab --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200810161022_flow_rename.up.fizz @@ -0,0 +1,13 @@ +rename_table("selfservice_login_request_methods", 
"selfservice_login_flow_methods") +rename_table("selfservice_login_requests", "selfservice_login_flows") + +rename_table("selfservice_registration_request_methods", "selfservice_registration_flow_methods") +rename_table("selfservice_registration_requests", "selfservice_registration_flows") + +rename_table("selfservice_settings_request_methods", "selfservice_settings_flow_methods") +rename_table("selfservice_settings_requests", "selfservice_settings_flows") + +rename_table("selfservice_recovery_request_methods", "selfservice_recovery_flow_methods") +rename_table("selfservice_recovery_requests", "selfservice_recovery_flows") + +rename_table("selfservice_verification_requests", "selfservice_verification_flows") diff --git a/oryx/popx/stub/migrations/source/20200810162450_flow_fields_rename.down.fizz b/oryx/popx/stub/migrations/source/20200810162450_flow_fields_rename.down.fizz new file mode 100644 index 00000000000..86a600ceadc --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200810162450_flow_fields_rename.down.fizz @@ -0,0 +1,7 @@ +rename_column("selfservice_login_flow_methods", "selfservice_login_flow_id", "selfservice_login_request_id") + +rename_column("selfservice_registration_flow_methods", "selfservice_registration_flow_id", "selfservice_registration_request_id") + +rename_column("selfservice_settings_flow_methods", "selfservice_settings_flow_id", "selfservice_settings_request_id") + +rename_column("selfservice_recovery_flow_methods", "selfservice_recovery_flow_id", "selfservice_recovery_request_id") diff --git a/oryx/popx/stub/migrations/source/20200810162450_flow_fields_rename.up.fizz b/oryx/popx/stub/migrations/source/20200810162450_flow_fields_rename.up.fizz new file mode 100644 index 00000000000..bc24ef316b8 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200810162450_flow_fields_rename.up.fizz @@ -0,0 +1,7 @@ +rename_column("selfservice_login_flow_methods", "selfservice_login_request_id", "selfservice_login_flow_id") + +rename_column("selfservice_registration_flow_methods", "selfservice_registration_request_id", "selfservice_registration_flow_id") + +rename_column("selfservice_recovery_flow_methods", "selfservice_recovery_request_id", "selfservice_recovery_flow_id") + +rename_column("selfservice_settings_flow_methods", "selfservice_settings_request_id", "selfservice_settings_flow_id") diff --git a/oryx/popx/stub/migrations/source/20200812124254_add_session_token.down.fizz b/oryx/popx/stub/migrations/source/20200812124254_add_session_token.down.fizz new file mode 100644 index 00000000000..a25137adf41 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200812124254_add_session_token.down.fizz @@ -0,0 +1 @@ +drop_column("sessions", "token") diff --git a/oryx/popx/stub/migrations/source/20200812124254_add_session_token.up.fizz b/oryx/popx/stub/migrations/source/20200812124254_add_session_token.up.fizz new file mode 100644 index 00000000000..3a9141a3e6f --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200812124254_add_session_token.up.fizz @@ -0,0 +1,7 @@ +sql("DELETE FROM sessions") + +add_column("sessions", "token", "string", {"size": 32, "null": true}) +change_column("sessions", "token", "string", {"size": 32, "null": false}) + +add_index("sessions", "token", {"unique": true, "name": "sessions_token_uq_idx"}) +add_index("sessions", "token", {"name": "sessions_token_idx" }) diff --git a/oryx/popx/stub/migrations/source/20200812160551_add_session_revoke.down.fizz b/oryx/popx/stub/migrations/source/20200812160551_add_session_revoke.down.fizz new file mode 100644 
index 00000000000..23e604ca0e5 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200812160551_add_session_revoke.down.fizz @@ -0,0 +1 @@ +drop_column("sessions", "active") diff --git a/oryx/popx/stub/migrations/source/20200812160551_add_session_revoke.up.fizz b/oryx/popx/stub/migrations/source/20200812160551_add_session_revoke.up.fizz new file mode 100644 index 00000000000..f85888274af --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200812160551_add_session_revoke.up.fizz @@ -0,0 +1 @@ +add_column("sessions", "active", "boolean", {"null": false, "default": false}) diff --git a/oryx/popx/stub/migrations/source/20200830121710_update_recovery_token.down.fizz b/oryx/popx/stub/migrations/source/20200830121710_update_recovery_token.down.fizz new file mode 100644 index 00000000000..a05f0d57969 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830121710_update_recovery_token.down.fizz @@ -0,0 +1 @@ +rename_column("identity_recovery_tokens", "selfservice_recovery_flow_id", "selfservice_recovery_request_id") diff --git a/oryx/popx/stub/migrations/source/20200830121710_update_recovery_token.up.fizz b/oryx/popx/stub/migrations/source/20200830121710_update_recovery_token.up.fizz new file mode 100644 index 00000000000..8601646ef41 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830121710_update_recovery_token.up.fizz @@ -0,0 +1 @@ +rename_column("identity_recovery_tokens", "selfservice_recovery_request_id", "selfservice_recovery_flow_id") diff --git a/oryx/popx/stub/migrations/source/20200830130642_add_verification_methods.down.fizz b/oryx/popx/stub/migrations/source/20200830130642_add_verification_methods.down.fizz new file mode 100644 index 00000000000..2bb6500176d --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830130642_add_verification_methods.down.fizz @@ -0,0 +1,16 @@ +{{ if or .IsPostgreSQL .IsMySQL .IsMariaDB }} + add_column("selfservice_verification_flows", "form", "json", { "null": true }) + sql("UPDATE selfservice_verification_flows SET form=(SELECT * FROM (SELECT m.config FROM selfservice_verification_flows AS r INNER JOIN selfservice_verification_flow_methods AS m ON r.id=m.selfservice_verification_flow_id) as t);") + change_column("selfservice_verification_flows", "form", "json", { "null": false }) +{{ end }} + +{{ if .IsCockroach }} + add_column("selfservice_verification_flows", "form", "json", { "default": "{}" }) +{{ end }} + +drop_table("selfservice_verification_flow_methods") +drop_column("selfservice_verification_flows", "active_method") +drop_column("selfservice_verification_flows", "state") + +add_column("selfservice_verification_flows", "via", "string", {"size": 16, "default": "email"}) +add_column("selfservice_verification_flows", "success", "bool", {"default_raw": "FALSE"}) diff --git a/oryx/popx/stub/migrations/source/20200830130642_add_verification_methods.up.fizz b/oryx/popx/stub/migrations/source/20200830130642_add_verification_methods.up.fizz new file mode 100644 index 00000000000..2819d93380e --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830130642_add_verification_methods.up.fizz @@ -0,0 +1 @@ +add_column("selfservice_verification_flows", "state", "string", {"default": "show_form"}) diff --git a/oryx/popx/stub/migrations/source/20200830130643_add_verification_methods.down.fizz b/oryx/popx/stub/migrations/source/20200830130643_add_verification_methods.down.fizz new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/source/20200830130643_add_verification_methods.up.fizz 
b/oryx/popx/stub/migrations/source/20200830130643_add_verification_methods.up.fizz new file mode 100644 index 00000000000..376ec633957 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830130643_add_verification_methods.up.fizz @@ -0,0 +1 @@ +sql("UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE") diff --git a/oryx/popx/stub/migrations/source/20200830130644_add_verification_methods.down.fizz b/oryx/popx/stub/migrations/source/20200830130644_add_verification_methods.down.fizz new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/source/20200830130644_add_verification_methods.up.fizz b/oryx/popx/stub/migrations/source/20200830130644_add_verification_methods.up.fizz new file mode 100644 index 00000000000..250846bfc18 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830130644_add_verification_methods.up.fizz @@ -0,0 +1,8 @@ +create_table("selfservice_verification_flow_methods") { + t.Column("id", "uuid", {primary: true}) + t.Column("method", "string", {"size": 32}) + t.Column("selfservice_verification_flow_id", "uuid") + t.Column("config", "json") +} + +add_column("selfservice_verification_flows", "active_method", "string", {"size": 32, null: true}) diff --git a/oryx/popx/stub/migrations/source/20200830130645_add_verification_methods.down.fizz b/oryx/popx/stub/migrations/source/20200830130645_add_verification_methods.down.fizz new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/source/20200830130645_add_verification_methods.up.fizz b/oryx/popx/stub/migrations/source/20200830130645_add_verification_methods.up.fizz new file mode 100644 index 00000000000..acad208ccfd --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830130645_add_verification_methods.up.fizz @@ -0,0 +1 @@ +sql("INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows;") diff --git a/oryx/popx/stub/migrations/source/20200830130646_add_verification_methods.down.fizz b/oryx/popx/stub/migrations/source/20200830130646_add_verification_methods.down.fizz new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/source/20200830130646_add_verification_methods.up.fizz b/oryx/popx/stub/migrations/source/20200830130646_add_verification_methods.up.fizz new file mode 100644 index 00000000000..ee0a577b9e4 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830130646_add_verification_methods.up.fizz @@ -0,0 +1,3 @@ +drop_column("selfservice_verification_flows", "form") +drop_column("selfservice_verification_flows", "via") +drop_column("selfservice_verification_flows", "success") diff --git a/oryx/popx/stub/migrations/source/20200830154602_add_verification_token.down.fizz b/oryx/popx/stub/migrations/source/20200830154602_add_verification_token.down.fizz new file mode 100644 index 00000000000..beb5a421ca3 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830154602_add_verification_token.down.fizz @@ -0,0 +1 @@ +drop_table("identity_verification_tokens") diff --git a/oryx/popx/stub/migrations/source/20200830154602_add_verification_token.up.fizz b/oryx/popx/stub/migrations/source/20200830154602_add_verification_token.up.fizz new file mode 100644 index 00000000000..c6182dc7747 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830154602_add_verification_token.up.fizz @@ -0,0 +1,21 @@ 
+create_table("identity_verification_tokens") { + t.Column("id", "uuid", {primary: true}) + + t.Column("token", "string", {"size": 64}) + t.Column("used", "bool", {"default": false}) + t.Column("used_at", "timestamp", {"null": true}) + t.Column("expires_at", "timestamp") + t.Column("issued_at", "timestamp") + + t.Column("identity_verifiable_address_id", "uuid") + t.ForeignKey("identity_verifiable_address_id", {"identity_verifiable_addresses": ["id"]}, {"on_delete": "cascade"}) + + t.Column("selfservice_verification_flow_id", "uuid", {"null": true}) + t.ForeignKey("selfservice_verification_flow_id", {"selfservice_verification_flows": ["id"]}, {"on_delete": "cascade"}) +} + +add_index("identity_verification_tokens", ["token"], { "unique": true, "name": "identity_verification_tokens_token_uq_idx" }) +add_index("identity_verification_tokens", ["token"], { "name": "identity_verification_tokens_token_idx" }) + +add_index("identity_verification_tokens", ["identity_verifiable_address_id"], { "name": "identity_verification_tokens_verifiable_address_id_idx" }) +add_index("identity_verification_tokens", ["selfservice_verification_flow_id"], { "name": "identity_verification_tokens_verification_flow_id_idx" }) diff --git a/oryx/popx/stub/migrations/source/20200830172221_recovery_token_expires.down.fizz b/oryx/popx/stub/migrations/source/20200830172221_recovery_token_expires.down.fizz new file mode 100644 index 00000000000..ef694c93d7e --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830172221_recovery_token_expires.down.fizz @@ -0,0 +1,4 @@ +sql("DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL") +change_column("identity_recovery_tokens", "selfservice_recovery_flow_id", "uuid") +drop_column("identity_recovery_tokens", "expires_at") +drop_column("identity_recovery_tokens", "issued_at") diff --git a/oryx/popx/stub/migrations/source/20200830172221_recovery_token_expires.up.fizz b/oryx/popx/stub/migrations/source/20200830172221_recovery_token_expires.up.fizz new file mode 100644 index 00000000000..aa8546e359a --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200830172221_recovery_token_expires.up.fizz @@ -0,0 +1,3 @@ +add_column("identity_recovery_tokens", "expires_at", "timestamp", { "default": "2000-01-01 00:00:00" }) +add_column("identity_recovery_tokens", "issued_at", "timestamp", { "default": "2000-01-01 00:00:00" }) +change_column("identity_recovery_tokens", "selfservice_recovery_flow_id", "uuid", {"null": true}) diff --git a/oryx/popx/stub/migrations/source/20200831110752_identity_verifiable_address_remove_code.down.fizz b/oryx/popx/stub/migrations/source/20200831110752_identity_verifiable_address_remove_code.down.fizz new file mode 100755 index 00000000000..fde97135e42 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200831110752_identity_verifiable_address_remove_code.down.fizz @@ -0,0 +1,28 @@ +add_column("identity_verifiable_addresses", "code", "string", {"size": 32, "null": true}) +add_column("identity_verifiable_addresses", "expires_at", "timestamp", { "null": true }) + +{{ if .IsSQLite }} + sql("UPDATE identity_verifiable_addresses SET code = substr(hex(randomblob(32)), 0, 32) WHERE code IS NULL") + sql("UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL") +{{ end }} + +{{ if or .IsMySQL .IsMariaDB }} + sql("UPDATE identity_verifiable_addresses SET code = LEFT(MD5(RAND()), 32) WHERE code IS NULL") + sql("UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS 
NULL") +{{ end }} + +{{ if .IsPostgreSQL }} + sql("UPDATE identity_verifiable_addresses SET code = substr(md5(random()::text), 0, 32) WHERE code IS NULL") + sql("UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL") +{{ end }} + +{{ if .IsCockroach }} + sql("UPDATE identity_verifiable_addresses SET code = substr(md5(uuid_v4()), 0, 32) WHERE code IS NULL") + sql("UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL") +{{ end }} + +change_column("identity_verifiable_addresses", "code", "string", {"size": 32}) +change_column("identity_verifiable_addresses", "expires_at", "timestamp", { "null": false }) + +add_index("identity_verifiable_addresses", ["code"], { "unique": true, "name": "identity_verifiable_addresses_code_uq_idx" }) +add_index("identity_verifiable_addresses", ["code"], { "name": "identity_verifiable_addresses_code_idx" }) diff --git a/oryx/popx/stub/migrations/source/20200831110752_identity_verifiable_address_remove_code.up.fizz b/oryx/popx/stub/migrations/source/20200831110752_identity_verifiable_address_remove_code.up.fizz new file mode 100755 index 00000000000..4a1d7795603 --- /dev/null +++ b/oryx/popx/stub/migrations/source/20200831110752_identity_verifiable_address_remove_code.up.fizz @@ -0,0 +1,5 @@ +drop_index("identity_verifiable_addresses", "identity_verifiable_addresses_code_uq_idx") +drop_index("identity_verifiable_addresses", "identity_verifiable_addresses_code_idx") + +drop_column("identity_verifiable_addresses", "code") +drop_column("identity_verifiable_addresses", "expires_at") diff --git a/oryx/popx/stub/migrations/source/20201201161451_credential_types_values.down.fizz b/oryx/popx/stub/migrations/source/20201201161451_credential_types_values.down.fizz new file mode 100644 index 00000000000..ba680935b3a --- /dev/null +++ b/oryx/popx/stub/migrations/source/20201201161451_credential_types_values.down.fizz @@ -0,0 +1 @@ +sql("DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'") diff --git a/oryx/popx/stub/migrations/source/20201201161451_credential_types_values.up.fizz b/oryx/popx/stub/migrations/source/20201201161451_credential_types_values.up.fizz new file mode 100644 index 00000000000..66512ade86f --- /dev/null +++ b/oryx/popx/stub/migrations/source/20201201161451_credential_types_values.up.fizz @@ -0,0 +1,3 @@ +sql("INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password')") +sql("INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc')") + diff --git a/oryx/popx/stub/migrations/templating/0_sql_create_tablename_template.down.sql b/oryx/popx/stub/migrations/templating/0_sql_create_tablename_template.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/templating/0_sql_create_tablename_template.up.sql b/oryx/popx/stub/migrations/templating/0_sql_create_tablename_template.up.sql new file mode 100644 index 00000000000..f73626d35f0 --- /dev/null +++ b/oryx/popx/stub/migrations/templating/0_sql_create_tablename_template.up.sql @@ -0,0 +1 @@ +CREATE TABLE {{ identifier .Parameters.tableName }} ( "id" UUID NOT NULL, PRIMARY KEY ("id")); diff --git a/oryx/popx/stub/migrations/testdata/20220513_testdata.invalid 
b/oryx/popx/stub/migrations/testdata/20220513_testdata.invalid new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/testdata/20220513_testdata.sql b/oryx/popx/stub/migrations/testdata/20220513_testdata.sql new file mode 100644 index 00000000000..6687fec614c --- /dev/null +++ b/oryx/popx/stub/migrations/testdata/20220513_testdata.sql @@ -0,0 +1 @@ +INSERT INTO testdata (Data) VALUES ('testdata'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/testdata/20220514_testdata.sql b/oryx/popx/stub/migrations/testdata/20220514_testdata.sql new file mode 100644 index 00000000000..56d7b981ba2 --- /dev/null +++ b/oryx/popx/stub/migrations/testdata/20220514_testdata.sql @@ -0,0 +1 @@ +-- empty migrations should not error \ No newline at end of file diff --git a/oryx/popx/stub/migrations/testdata/invalid b/oryx/popx/stub/migrations/testdata/invalid new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/testdata/invalid_testdata.sql b/oryx/popx/stub/migrations/testdata/invalid_testdata.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/testdata_migrations/20220513_create_table.down.sql b/oryx/popx/stub/migrations/testdata_migrations/20220513_create_table.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/testdata_migrations/20220513_create_table.up.sql b/oryx/popx/stub/migrations/testdata_migrations/20220513_create_table.up.sql new file mode 100644 index 00000000000..59c85da3365 --- /dev/null +++ b/oryx/popx/stub/migrations/testdata_migrations/20220513_create_table.up.sql @@ -0,0 +1,3 @@ +CREATE TABLE "testdata" ( + "data" character varying(255) NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.cockroach.down.sql new file mode 100644 index 00000000000..bf3e56ce3ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identities"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.cockroach.up.sql new file mode 100644 index 00000000000..ae0df019f5e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.cockroach.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "identities" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"traits_schema_id" VARCHAR (2048) NOT NULL, +"traits" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.mysql.down.sql new file mode 100644 index 00000000000..ae2d9ecc296 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identities`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.mysql.up.sql new file mode 100644 index 00000000000..f257ad023da --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.mysql.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE `identities` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`traits_schema_id` VARCHAR (2048) NOT NULL, +`traits` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.postgres.down.sql new file mode 100644 index 00000000000..bf3e56ce3ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identities"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.postgres.up.sql new file mode 100644 index 00000000000..4fab90da064 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.postgres.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "identities" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"traits_schema_id" VARCHAR (2048) NOT NULL, +"traits" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.sqlite3.down.sql new file mode 100644 index 00000000000..bf3e56ce3ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identities"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.sqlite3.up.sql new file mode 100644 index 00000000000..7448d6962bc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000000_identities.sqlite3.up.sql @@ -0,0 +1,7 @@ +CREATE TABLE "identities" ( +"id" TEXT PRIMARY KEY, +"traits_schema_id" TEXT NOT NULL, +"traits" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.cockroach.down.sql new file mode 100644 index 00000000000..f533e0fc728 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credential_types" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.cockroach.up.sql new file mode 100644 index 00000000000..a5245e7353e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.cockroach.up.sql @@ -0,0 +1,5 @@ +CREATE TABLE "identity_credential_types" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"name" VARCHAR (32) NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.mysql.down.sql new file mode 100644 
index 00000000000..0440ced8772 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_credential_types` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.mysql.up.sql new file mode 100644 index 00000000000..52f95d6105c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.mysql.up.sql @@ -0,0 +1,5 @@ +CREATE TABLE `identity_credential_types` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`name` VARCHAR (32) NOT NULL +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.postgres.down.sql new file mode 100644 index 00000000000..f533e0fc728 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credential_types" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.postgres.up.sql new file mode 100644 index 00000000000..a5245e7353e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.postgres.up.sql @@ -0,0 +1,5 @@ +CREATE TABLE "identity_credential_types" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"name" VARCHAR (32) NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.sqlite3.down.sql new file mode 100644 index 00000000000..f533e0fc728 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credential_types" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.sqlite3.up.sql new file mode 100644 index 00000000000..a48493a3e6c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000001_identities.sqlite3.up.sql @@ -0,0 +1,4 @@ +CREATE TABLE "identity_credential_types" ( +"id" TEXT PRIMARY KEY, +"name" TEXT NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.cockroach.down.sql new file mode 100644 index 00000000000..6b34364e837 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credentials" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.cockroach.up.sql new file mode 100644 index 00000000000..a881431b36b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.mysql.down.sql new file mode 100644 index 00000000000..6884f067806 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_credentials` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.mysql.up.sql new file mode 100644 index 00000000000..4770736a3d0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_credential_types_name_idx` ON `identity_credential_types` (`name`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.postgres.down.sql new file mode 100644 index 00000000000..6b34364e837 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credentials" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.postgres.up.sql new file mode 100644 index 00000000000..a881431b36b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.sqlite3.down.sql new file mode 100644 index 00000000000..6b34364e837 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credentials" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.sqlite3.up.sql new file mode 100644 index 00000000000..a881431b36b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000002_identities.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_credential_types_name_idx" ON "identity_credential_types" (name) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.cockroach.down.sql new file mode 100644 index 00000000000..a56ae36a1d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credential_identifiers" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.cockroach.up.sql new file mode 100644 index 00000000000..25bb5e5ea0e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.cockroach.up.sql @@ -0,0 +1,11 @@ 
+CREATE TABLE "identity_credentials" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"config" json NOT NULL, +"identity_credential_type_id" UUID NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_credentials_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade, +CONSTRAINT "identity_credentials_identity_credential_types_id_fk" FOREIGN KEY ("identity_credential_type_id") REFERENCES "identity_credential_types" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.mysql.down.sql new file mode 100644 index 00000000000..b96b95d4661 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_credential_identifiers` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.mysql.up.sql new file mode 100644 index 00000000000..ad1aa8a07ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `identity_credentials` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`config` JSON NOT NULL, +`identity_credential_type_id` char(36) NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade, +FOREIGN KEY (`identity_credential_type_id`) REFERENCES `identity_credential_types` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.postgres.down.sql new file mode 100644 index 00000000000..a56ae36a1d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_credential_identifiers" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.postgres.up.sql new file mode 100644 index 00000000000..3bdf1c1171b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "identity_credentials" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"config" jsonb NOT NULL, +"identity_credential_type_id" UUID NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade, +FOREIGN KEY ("identity_credential_type_id") REFERENCES "identity_credential_types" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.sqlite3.down.sql new file mode 100644 index 00000000000..a56ae36a1d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE 
"identity_credential_identifiers" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.sqlite3.up.sql new file mode 100644 index 00000000000..190bcc008be --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000003_identities.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "identity_credentials" ( +"id" TEXT PRIMARY KEY, +"config" TEXT NOT NULL, +"identity_credential_type_id" char(36) NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade, +FOREIGN KEY (identity_credential_type_id) REFERENCES identity_credential_types (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.cockroach.up.sql new file mode 100644 index 00000000000..5ebd7afb0dc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.cockroach.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "identity_credential_identifiers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identifier" VARCHAR (255) NOT NULL, +"identity_credential_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_credential_identifiers_identity_credentials_id_fk" FOREIGN KEY ("identity_credential_id") REFERENCES "identity_credentials" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.mysql.up.sql new file mode 100644 index 00000000000..a32c5874146 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.mysql.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `identity_credential_identifiers` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`identifier` VARCHAR (255) NOT NULL, +`identity_credential_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_credential_id`) REFERENCES `identity_credentials` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.postgres.up.sql new file mode 100644 index 00000000000..2b20a63e76a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.postgres.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE 
"identity_credential_identifiers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identifier" VARCHAR (255) NOT NULL, +"identity_credential_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_credential_id") REFERENCES "identity_credentials" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.sqlite3.up.sql new file mode 100644 index 00000000000..e7ba7d77806 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000004_identities.sqlite3.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "identity_credential_identifiers" ( +"id" TEXT PRIMARY KEY, +"identifier" TEXT NOT NULL, +"identity_credential_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_credential_id) REFERENCES identity_credentials (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.cockroach.up.sql new file mode 100644 index 00000000000..fb24576e671 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.mysql.up.sql new file mode 100644 index 00000000000..759def91dc0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_credential_identifiers_identifier_idx` ON `identity_credential_identifiers` (`identifier`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.postgres.up.sql new file mode 100644 index 00000000000..fb24576e671 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier); \ No 
newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.sqlite3.up.sql new file mode 100644 index 00000000000..fb24576e671 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000001000005_identities.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_credential_identifiers_identifier_idx" ON "identity_credential_identifiers" (identifier); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.cockroach.down.sql new file mode 100644 index 00000000000..f5d0e0a1b95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.cockroach.up.sql new file mode 100644 index 00000000000..2627dfedbd3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "selfservice_login_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.mysql.down.sql new file mode 100644 index 00000000000..77db2f72461 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_profile_management_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.mysql.up.sql new file mode 100644 index 00000000000..5e7e5528160 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `selfservice_login_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`active_method` VARCHAR (32) NOT NULL, +`csrf_token` VARCHAR (255) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.postgres.down.sql new file mode 100644 index 00000000000..f5d0e0a1b95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.postgres.down.sql @@ -0,0 +1 
@@ +DROP TABLE "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.postgres.up.sql new file mode 100644 index 00000000000..2627dfedbd3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "selfservice_login_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.sqlite3.down.sql new file mode 100644 index 00000000000..f5d0e0a1b95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.sqlite3.up.sql new file mode 100644 index 00000000000..f7b5788afe0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000000_requests.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_login_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.cockroach.down.sql new file mode 100644 index 00000000000..9bb6db3d852 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.cockroach.up.sql new file mode 100644 index 00000000000..14c44bb4c58 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.cockroach.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_login_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_login_request_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_login_request_methods_selfservice_login_requests_id_fk" FOREIGN KEY ("selfservice_login_request_id") REFERENCES "selfservice_login_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.mysql.down.sql new file mode 100644 index 00000000000..0cc2d408d81 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_registration_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.mysql.up.sql new file mode 100644 index 00000000000..fbbcafbe27e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.mysql.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE `selfservice_login_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_login_request_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`selfservice_login_request_id`) REFERENCES `selfservice_login_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.postgres.down.sql new file mode 100644 index 00000000000..9bb6db3d852 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.postgres.up.sql new file mode 100644 index 00000000000..fb69b6b21d7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.postgres.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_login_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_login_request_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("selfservice_login_request_id") REFERENCES "selfservice_login_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.sqlite3.down.sql new file mode 100644 index 00000000000..9bb6db3d852 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.sqlite3.up.sql new file mode 100644 index 00000000000..48c9c7c3a36 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000001_requests.sqlite3.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_login_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_login_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_login_request_id) REFERENCES selfservice_login_requests (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.cockroach.down.sql new file mode 
100644 index 00000000000..d48f97cfc88 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_registration_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.cockroach.up.sql new file mode 100644 index 00000000000..4c08df7d777 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "selfservice_registration_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.mysql.down.sql new file mode 100644 index 00000000000..eb5929d3843 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_registration_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.mysql.up.sql new file mode 100644 index 00000000000..596c6f47f41 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `selfservice_registration_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`active_method` VARCHAR (32) NOT NULL, +`csrf_token` VARCHAR (255) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.postgres.down.sql new file mode 100644 index 00000000000..d48f97cfc88 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_registration_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.postgres.up.sql new file mode 100644 index 00000000000..4c08df7d777 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "selfservice_registration_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"active_method" VARCHAR (32) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.sqlite3.down.sql 
b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.sqlite3.down.sql new file mode 100644 index 00000000000..d48f97cfc88 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_registration_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.sqlite3.up.sql new file mode 100644 index 00000000000..0342d263c00 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000002_requests.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_registration_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.cockroach.down.sql new file mode 100644 index 00000000000..8078deb0fb8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.cockroach.up.sql new file mode 100644 index 00000000000..6adfd9f72d2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.cockroach.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_registration_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_registration_request_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_registration_request_methods_selfservice_registration_requests_id_fk" FOREIGN KEY ("selfservice_registration_request_id") REFERENCES "selfservice_registration_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.mysql.down.sql new file mode 100644 index 00000000000..481215ffcbb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_login_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.mysql.up.sql new file mode 100644 index 00000000000..da52b0c1512 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.mysql.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE `selfservice_registration_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_registration_request_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`selfservice_registration_request_id`) REFERENCES `selfservice_registration_requests` 
(`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.postgres.down.sql new file mode 100644 index 00000000000..8078deb0fb8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.postgres.up.sql new file mode 100644 index 00000000000..d293ae9784b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.postgres.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_registration_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_registration_request_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("selfservice_registration_request_id") REFERENCES "selfservice_registration_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.sqlite3.down.sql new file mode 100644 index 00000000000..8078deb0fb8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.sqlite3.up.sql new file mode 100644 index 00000000000..68d1d3858a0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000003_requests.sqlite3.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_registration_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_registration_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_registration_request_id) REFERENCES selfservice_registration_requests (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.cockroach.down.sql new file mode 100644 index 00000000000..f2c0d1e5097 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_login_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.cockroach.up.sql new file mode 100644 index 00000000000..d0730807990 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.cockroach.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "selfservice_profile_management_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" json 
NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_profile_management_requests_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.mysql.down.sql new file mode 100644 index 00000000000..2f7a60a6cd9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_login_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.mysql.up.sql new file mode 100644 index 00000000000..1fc559ae27f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.mysql.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE `selfservice_profile_management_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`form` JSON NOT NULL, +`update_successful` bool NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.postgres.down.sql new file mode 100644 index 00000000000..f2c0d1e5097 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_login_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.postgres.up.sql new file mode 100644 index 00000000000..753046512bf --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.postgres.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "selfservice_profile_management_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" jsonb NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.sqlite3.down.sql new file mode 100644 index 00000000000..f2c0d1e5097 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_login_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.sqlite3.up.sql 
new file mode 100644 index 00000000000..48fa320714e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000002000004_requests.sqlite3.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "selfservice_profile_management_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"form" TEXT NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.cockroach.down.sql new file mode 100644 index 00000000000..d49b7aec9a9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.cockroach.up.sql new file mode 100644 index 00000000000..9cf7fc24132 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "sessions" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"authenticated_at" timestamp NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "sessions_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.mysql.down.sql new file mode 100644 index 00000000000..b37f476a3ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `sessions`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.mysql.up.sql new file mode 100644 index 00000000000..ae325f9c3f6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `sessions` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`authenticated_at` DATETIME NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.postgres.down.sql new file mode 100644 index 00000000000..d49b7aec9a9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "sessions"; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.postgres.up.sql new file mode 100644 index 00000000000..fab43234ebb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "sessions" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"authenticated_at" timestamp NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.sqlite3.down.sql new file mode 100644 index 00000000000..d49b7aec9a9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.sqlite3.up.sql new file mode 100644 index 00000000000..c1226647bed --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000003000000_sessions.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "sessions" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.cockroach.down.sql new file mode 100644 index 00000000000..b6a3306190f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.cockroach.up.sql new file mode 100644 index 00000000000..a920e94febd --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.cockroach.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_errors" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"errors" json NOT NULL, +"seen_at" timestamp NOT NULL, +"was_seen" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.mysql.down.sql new file mode 100644 index 00000000000..dcf8246d0f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_errors`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.mysql.up.sql new 
file mode 100644 index 00000000000..b2afc3c4cf1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.mysql.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `selfservice_errors` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`errors` JSON NOT NULL, +`seen_at` DATETIME NOT NULL, +`was_seen` bool NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.postgres.down.sql new file mode 100644 index 00000000000..b6a3306190f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.postgres.up.sql new file mode 100644 index 00000000000..e0a5c9e5ccc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.postgres.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_errors" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"errors" jsonb NOT NULL, +"seen_at" timestamp NOT NULL, +"was_seen" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.sqlite3.down.sql new file mode 100644 index 00000000000..b6a3306190f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.sqlite3.up.sql new file mode 100644 index 00000000000..1eb73f632c9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000004000000_errors.sqlite3.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "selfservice_errors" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME NOT NULL, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000005000000_identities.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000005000000_identities.mysql.down.sql new file mode 100644 index 00000000000..13fd1fee0ff --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000005000000_identities.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000005000000_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000005000000_identities.mysql.up.sql new file mode 100644 index 00000000000..0dc4431e240 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000005000000_identities.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000005000001_identities.mysql.down.sql 
b/oryx/popx/stub/migrations/transactional/20191100000005000001_identities.mysql.down.sql new file mode 100644 index 00000000000..13fd1fee0ff --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000005000001_identities.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000005000001_identities.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000005000001_identities.mysql.up.sql new file mode 100644 index 00000000000..0dc4431e240 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000005000001_identities.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_credential_identifiers MODIFY COLUMN identifier VARCHAR(255) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.cockroach.down.sql new file mode 100644 index 00000000000..0d9747b1828 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.cockroach.up.sql new file mode 100644 index 00000000000..70af9f07e03 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "courier_messages" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"type" int NOT NULL, +"status" int NOT NULL, +"body" VARCHAR (255) NOT NULL, +"subject" VARCHAR (255) NOT NULL, +"recipient" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.mysql.down.sql new file mode 100644 index 00000000000..1c69440c879 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `courier_messages`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.mysql.up.sql new file mode 100644 index 00000000000..24e0ac93ee0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `courier_messages` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`type` INTEGER NOT NULL, +`status` INTEGER NOT NULL, +`body` VARCHAR (255) NOT NULL, +`subject` VARCHAR (255) NOT NULL, +`recipient` VARCHAR (255) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.postgres.down.sql new file mode 100644 index 00000000000..0d9747b1828 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages"; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.postgres.up.sql new file mode 100644 index 00000000000..70af9f07e03 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "courier_messages" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"type" int NOT NULL, +"status" int NOT NULL, +"body" VARCHAR (255) NOT NULL, +"subject" VARCHAR (255) NOT NULL, +"recipient" VARCHAR (255) NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.sqlite3.down.sql new file mode 100644 index 00000000000..0d9747b1828 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.sqlite3.up.sql new file mode 100644 index 00000000000..e718e319311 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000006000000_courier.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "courier_messages" ( +"id" TEXT PRIMARY KEY, +"type" INTEGER NOT NULL, +"status" INTEGER NOT NULL, +"body" TEXT NOT NULL, +"subject" TEXT NOT NULL, +"recipient" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.cockroach.down.sql new file mode 100644 index 00000000000..6f93d740a4f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" DROP COLUMN "csrf_token"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.cockroach.up.sql new file mode 100644 index 00000000000..4e04c0f2669 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" VARCHAR (255) NOT NULL DEFAULT ''; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.mysql.down.sql new file mode 100644 index 00000000000..9fbb33cd8d4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` DROP COLUMN `csrf_token`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.mysql.up.sql new file mode 100644 index 00000000000..f54bdc2b46c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` ADD COLUMN `csrf_token` VARCHAR (255) NOT NULL DEFAULT ""; \ No newline at 
end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.postgres.down.sql new file mode 100644 index 00000000000..6f93d740a4f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" DROP COLUMN "csrf_token"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.postgres.up.sql new file mode 100644 index 00000000000..4e04c0f2669 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" VARCHAR (255) NOT NULL DEFAULT ''; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.sqlite3.down.sql new file mode 100644 index 00000000000..95b13b65cc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.sqlite3.up.sql new file mode 100644 index 00000000000..f55e6a91a06 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000000_errors.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "csrf_token" TEXT NOT NULL DEFAULT ''; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000001_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000001_errors.sqlite3.down.sql new file mode 100644 index 00000000000..c1af035b153 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000001_errors.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_errors" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000001_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000001_errors.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000002_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000002_errors.sqlite3.down.sql new file mode 100644 index 00000000000..1a2d145512a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000007000002_errors.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at) SELECT id, errors, seen_at, was_seen, created_at, updated_at FROM "selfservice_errors" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000002_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000002_errors.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000003_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000007000003_errors.sqlite3.down.sql new file mode 100644 index 00000000000..11afafe3bad --- /dev/null 
+++ b/oryx/popx/stub/migrations/transactional/20191100000007000003_errors.sqlite3.down.sql @@ -0,0 +1,8 @@ +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000007000003_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000007000003_errors.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..2c3a752803d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.cockroach.up.sql new file mode 100644 index 00000000000..83c7bd35db3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.cockroach.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "identity_verifiable_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"code" VARCHAR (32) NOT NULL, +"status" VARCHAR (16) NOT NULL, +"via" VARCHAR (16) NOT NULL, +"verified" bool NOT NULL, +"value" VARCHAR (400) NOT NULL, +"verified_at" timestamp, +"expires_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_verifiable_addresses_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..10d36392268 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_verifiable_addresses`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..207fc038277 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.mysql.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE `identity_verifiable_addresses` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`code` VARCHAR (32) NOT NULL, +`status` VARCHAR (16) NOT NULL, +`via` VARCHAR (16) NOT NULL, +`verified` bool NOT NULL, +`value` VARCHAR (400) NOT NULL, +`verified_at` DATETIME, +`expires_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..2c3a752803d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..300e5348d73 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.postgres.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "identity_verifiable_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"code" VARCHAR (32) NOT NULL, +"status" VARCHAR (16) NOT NULL, +"via" VARCHAR (16) NOT NULL, +"verified" bool NOT NULL, +"value" VARCHAR (400) NOT NULL, +"verified_at" timestamp, +"expires_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..2c3a752803d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..e920a7a56a1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000000_selfservice_verification.sqlite3.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE "identity_verifiable_addresses" ( +"id" TEXT PRIMARY KEY, +"code" TEXT NOT NULL, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"expires_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..79aff96a0f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.cockroach.up.sql new file mode 100644 index 
00000000000..ecf8ba9c94e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..da1b36ad33a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_verification_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..3f787b6f05e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_verifiable_addresses_code_uq_idx` ON `identity_verifiable_addresses` (`code`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..79aff96a0f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..ecf8ba9c94e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..79aff96a0f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..ecf8ba9c94e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000001_selfservice_verification.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.cockroach.down.sql 
b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.cockroach.up.sql new file mode 100644 index 00000000000..cad7d49180e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..3df061044b4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_verifiable_addresses_code_idx` ON `identity_verifiable_addresses` (`code`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..cad7d49180e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..cad7d49180e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000002_selfservice_verification.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.cockroach.up.sql new file mode 100644 index 00000000000..703f37c97b3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..3deaf4ef075 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_verifiable_addresses_status_via_uq_idx` ON `identity_verifiable_addresses` (`via`, `value`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..703f37c97b3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..703f37c97b3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000003_selfservice_verification.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "identity_verifiable_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.cockroach.up.sql new file mode 100644 index 00000000000..918ff3f9b97 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..5380dd4bea4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_verifiable_addresses_status_via_idx` ON `identity_verifiable_addresses` (`via`, `value`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..918ff3f9b97 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..918ff3f9b97 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000004_selfservice_verification.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "identity_verifiable_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.cockroach.up.sql new file mode 100644 index 00000000000..e843d513012 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.cockroach.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "selfservice_verification_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL 
DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" json NOT NULL, +"via" VARCHAR (16) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"success" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.mysql.up.sql new file mode 100644 index 00000000000..a3dcda2ac48 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.mysql.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE `selfservice_verification_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`form` JSON NOT NULL, +`via` VARCHAR (16) NOT NULL, +`csrf_token` VARCHAR (255) NOT NULL, +`success` bool NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.postgres.up.sql new file mode 100644 index 00000000000..86d22fbdd51 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.postgres.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "selfservice_verification_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"form" jsonb NOT NULL, +"via" VARCHAR (16) NOT NULL, +"csrf_token" VARCHAR (255) NOT NULL, +"success" bool NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.sqlite3.up.sql new file mode 100644 index 00000000000..c4c063f6dc9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000008000005_selfservice_verification.sqlite3.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "selfservice_verification_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"form" TEXT NOT NULL, +"via" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" 
DATETIME NOT NULL +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000009000000_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000009000000_verification.mysql.down.sql new file mode 100644 index 00000000000..45202338cd6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000009000000_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000009000000_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000009000000_verification.mysql.up.sql new file mode 100644 index 00000000000..4c7ee79729a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000009000000_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000009000001_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000009000001_verification.mysql.down.sql new file mode 100644 index 00000000000..45202338cd6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000009000001_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000009000001_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000009000001_verification.mysql.up.sql new file mode 100644 index 00000000000..4c7ee79729a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000009000001_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.cockroach.down.sql new file mode 100644 index 00000000000..ebf18f8d727 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" DROP COLUMN "_seen_at_tmp"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.cockroach.up.sql new file mode 100644 index 00000000000..96875363fbc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" RENAME COLUMN "seen_at" TO "_seen_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.mysql.down.sql new file mode 100644 index 00000000000..6e0978925c3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` MODIFY `seen_at` DATETIME; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.mysql.up.sql new file mode 100644 index 00000000000..6e0978925c3 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_errors` MODIFY `seen_at` DATETIME; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.postgres.down.sql new file mode 100644 index 00000000000..57ee0241abb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ALTER COLUMN "seen_at" TYPE timestamp, ALTER COLUMN "seen_at" DROP NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.postgres.up.sql new file mode 100644 index 00000000000..57ee0241abb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ALTER COLUMN "seen_at" TYPE timestamp, ALTER COLUMN "seen_at" DROP NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.sqlite3.down.sql new file mode 100644 index 00000000000..95b13b65cc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.sqlite3.up.sql new file mode 100644 index 00000000000..1ab82fba76b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000000_errors.sqlite3.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL DEFAULT '' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.cockroach.down.sql new file mode 100644 index 00000000000..3fd6e5e56e4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE "selfservice_errors" SET "seen_at" = "_seen_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.cockroach.up.sql new file mode 100644 index 00000000000..670dfb966cb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "seen_at" timestamp \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.mysql.down.sql new file mode 100644 index 00000000000..a0b197307e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.mysql.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_errors SET seen_at = 
'1980-01-01 00:00:00' WHERE seen_at = NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.postgres.down.sql new file mode 100644 index 00000000000..a0b197307e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.postgres.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.sqlite3.down.sql new file mode 100644 index 00000000000..ffaad717b9c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.sqlite3.up.sql new file mode 100644 index 00000000000..f8924c507ac --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000001_errors.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at, csrf_token) SELECT id, errors, seen_at, was_seen, created_at, updated_at, csrf_token FROM "selfservice_errors" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.cockroach.down.sql new file mode 100644 index 00000000000..670dfb966cb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" ADD COLUMN "seen_at" timestamp \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.cockroach.up.sql new file mode 100644 index 00000000000..3fd6e5e56e4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.cockroach.up.sql @@ -0,0 +1 @@ +UPDATE "selfservice_errors" SET "seen_at" = "_seen_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.sqlite3.down.sql new file mode 100644 index 00000000000..f8924c507ac --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_errors_tmp" (id, errors, seen_at, was_seen, created_at, updated_at, csrf_token) SELECT id, errors, seen_at, was_seen, created_at, updated_at, csrf_token FROM "selfservice_errors" \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.sqlite3.up.sql new file mode 100644 index 00000000000..ffaad717b9c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000002_errors.sqlite3.up.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_errors" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.cockroach.down.sql new file mode 100644 index 00000000000..96875363fbc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" RENAME COLUMN "seen_at" TO "_seen_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.cockroach.up.sql new file mode 100644 index 00000000000..ebf18f8d727 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_errors" DROP COLUMN "_seen_at_tmp"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.sqlite3.down.sql new file mode 100644 index 00000000000..1ab82fba76b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.sqlite3.down.sql @@ -0,0 +1,9 @@ +CREATE TABLE "_selfservice_errors_tmp" ( +"id" TEXT PRIMARY KEY, +"errors" TEXT NOT NULL, +"seen_at" DATETIME, +"was_seen" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL DEFAULT '' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.sqlite3.up.sql new file mode 100644 index 00000000000..95b13b65cc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000003_errors.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_errors_tmp" RENAME TO "selfservice_errors"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.cockroach.down.sql new file mode 100644 index 00000000000..a0b197307e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.sqlite3.down.sql new file mode 100644 index 00000000000..a0b197307e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.sqlite3.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_errors SET seen_at = '1980-01-01 00:00:00' WHERE seen_at = NULL \ No 
newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000010000004_errors.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.cockroach.up.sql new file mode 100644 index 00000000000..046714aa8b3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "courier_messages" RENAME COLUMN "body" TO "_body_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.mysql.up.sql new file mode 100644 index 00000000000..28235616136 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `courier_messages` MODIFY `body` text NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.postgres.up.sql new file mode 100644 index 00000000000..55a3ecf38c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "courier_messages" ALTER COLUMN "body" TYPE text, ALTER COLUMN "body" SET NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.sqlite3.up.sql new file mode 100644 index 00000000000..fe38552492f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000000_courier_body_type.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "_courier_messages_tmp" ( +"id" TEXT PRIMARY KEY, +"type" INTEGER NOT NULL, +"status" INTEGER NOT NULL, +"body" TEXT NOT NULL, +"subject" TEXT NOT NULL, +"recipient" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.cockroach.down.sql 
b/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.cockroach.up.sql new file mode 100644 index 00000000000..88ff829d4c4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "courier_messages" ADD COLUMN "body" text \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.sqlite3.up.sql new file mode 100644 index 00000000000..ca6ae216219 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000001_courier_body_type.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_courier_messages_tmp" (id, type, status, body, subject, recipient, created_at, updated_at) SELECT id, type, status, body, subject, recipient, created_at, updated_at FROM "courier_messages" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.cockroach.up.sql new file mode 100644 index 00000000000..0c6a5687469 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.cockroach.up.sql @@ -0,0 +1 @@ +UPDATE "courier_messages" SET "body" = "_body_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.sqlite3.up.sql new file mode 100644 index 00000000000..0623a36829d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000002_courier_body_type.sqlite3.up.sql @@ -0,0 +1 @@ +DROP TABLE "courier_messages" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.cockroach.up.sql new file mode 100644 index 00000000000..fe0b4caf232 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "courier_messages" ALTER COLUMN "body" SET NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.sqlite3.up.sql new file mode 100644 index 00000000000..a91ff045090 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000003_courier_body_type.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_courier_messages_tmp" RENAME TO "courier_messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000004_courier_body_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000011000004_courier_body_type.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000011000004_courier_body_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000011000004_courier_body_type.cockroach.up.sql new file mode 100644 index 00000000000..228eae402b6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000011000004_courier_body_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "courier_messages" DROP COLUMN "_body_tmp"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.cockroach.down.sql new file mode 100644 index 00000000000..8dbb74664fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "forced"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.cockroach.up.sql new file mode 100644 index 00000000000..b84202f2384 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.mysql.down.sql new file mode 100644 index 00000000000..acdb077bc3d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` DROP COLUMN `forced`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.mysql.up.sql new file mode 100644 index 00000000000..d1a0dceac4a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.mysql.up.sql @@ 
-0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` ADD COLUMN `forced` bool NOT NULL DEFAULT false; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.postgres.down.sql new file mode 100644 index 00000000000..8dbb74664fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "forced"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.postgres.up.sql new file mode 100644 index 00000000000..b84202f2384 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.sqlite3.down.sql new file mode 100644 index 00000000000..fd575c606e7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_login_requests_tmp" RENAME TO "selfservice_login_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.sqlite3.up.sql new file mode 100644 index 00000000000..b84202f2384 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000000_login_request_forced.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "forced" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000001_login_request_forced.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000001_login_request_forced.sqlite3.down.sql new file mode 100644 index 00000000000..47b51d51bcb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000001_login_request_forced.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000001_login_request_forced.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000001_login_request_forced.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000002_login_request_forced.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000002_login_request_forced.sqlite3.down.sql new file mode 100644 index 00000000000..7fd3f621cee --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000002_login_request_forced.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_login_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at FROM 
"selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000002_login_request_forced.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000002_login_request_forced.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000003_login_request_forced.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20191100000012000003_login_request_forced.sqlite3.down.sql new file mode 100644 index 00000000000..9d051c42cbe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20191100000012000003_login_request_forced.sqlite3.down.sql @@ -0,0 +1,10 @@ +CREATE TABLE "_selfservice_login_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20191100000012000003_login_request_forced.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20191100000012000003_login_request_forced.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.cockroach.down.sql new file mode 100644 index 00000000000..3b57e83a779 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "active_method"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.cockroach.up.sql new file mode 100644 index 00000000000..04ae97438ff --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.cockroach.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_profile_management_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_profile_management_request_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.mysql.down.sql new file mode 100644 index 00000000000..0e22af6e3c2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_requests` DROP COLUMN `active_method`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.mysql.up.sql new file mode 100644 index 00000000000..9cc49fa2a0a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.mysql.up.sql @@ -0,0 
+1,9 @@ +CREATE TABLE `selfservice_profile_management_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_profile_management_request_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.postgres.down.sql new file mode 100644 index 00000000000..3b57e83a779 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "active_method"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.postgres.up.sql new file mode 100644 index 00000000000..bc0b322527c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.postgres.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_profile_management_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_profile_management_request_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..62711a04d26 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_profile_management_requests_tmp" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..566fcf64cdf --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000000_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "selfservice_profile_management_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_profile_management_request_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.cockroach.down.sql new file mode 100644 index 00000000000..e263b28ef68 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_profile_management_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.cockroach.up.sql new file mode 100644 index 00000000000..b10ec6552e6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.mysql.down.sql new file mode 100644 index 00000000000..9867e642c9f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_profile_management_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.mysql.up.sql new file mode 100644 index 00000000000..e392ad82d19 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_requests` ADD COLUMN `active_method` VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.postgres.down.sql new file mode 100644 index 00000000000..e263b28ef68 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_profile_management_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.postgres.up.sql new file mode 100644 index 00000000000..b10ec6552e6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..f61bf8d0357 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_profile_management_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..5366d7dc8e3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000001_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "active_method" TEXT \ No newline at end of 
file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.cockroach.down.sql new file mode 100644 index 00000000000..edfcfd4af77 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "form" json NOT NULL DEFAULT '{}' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.cockroach.up.sql new file mode 100644 index 00000000000..50a0e95df3f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.cockroach.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.mysql.down.sql new file mode 100644 index 00000000000..e4dbeac48d7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_requests` MODIFY `form` JSON \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.mysql.up.sql new file mode 100644 index 00000000000..50a0e95df3f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.mysql.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.postgres.down.sql new file mode 100644 index 00000000000..f7e14b93863 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" ALTER COLUMN "form" TYPE jsonb, ALTER COLUMN "form" DROP NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.postgres.up.sql new file mode 100644 index 00000000000..50a0e95df3f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.postgres.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM 
selfservice_profile_management_requests \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..f071ec252e1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_profile_management_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, update_successful) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, update_successful FROM "selfservice_profile_management_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..50a0e95df3f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000002_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_profile_management_request_methods (id, method, selfservice_profile_management_request_id, config) SELECT id, 'traits', id, form FROM selfservice_profile_management_requests \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.cockroach.up.sql new file mode 100644 index 00000000000..ea653446548 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "form"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.mysql.down.sql new file mode 100644 index 00000000000..7793d13c3ea --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.mysql.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_profile_management_requests SET form=(SELECT * FROM (SELECT m.config FROM selfservice_profile_management_requests AS r INNER JOIN selfservice_profile_management_request_methods AS m ON r.id=m.selfservice_profile_management_request_id) as t) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.mysql.up.sql new file mode 100644 index 00000000000..adbcdb4fc9f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_requests` DROP COLUMN `form`; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.postgres.down.sql new file mode 100644 index 00000000000..7793d13c3ea --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.postgres.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_profile_management_requests SET form=(SELECT * FROM (SELECT m.config FROM selfservice_profile_management_requests AS r INNER JOIN selfservice_profile_management_request_methods AS m ON r.id=m.selfservice_profile_management_request_id) as t) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.postgres.up.sql new file mode 100644 index 00000000000..ea653446548 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" DROP COLUMN "form"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..669e51a804d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.sqlite3.down.sql @@ -0,0 +1,11 @@ +CREATE TABLE "_selfservice_profile_management_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"update_successful" bool NOT NULL DEFAULT 'false', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..c82eda82bdc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000003_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_profile_management_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"update_successful" bool NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.mysql.down.sql new file mode 100644 index 00000000000..f4861a4fd7b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_requests` ADD COLUMN `form` JSON \ No newline 
at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.postgres.down.sql new file mode 100644 index 00000000000..0c541b83aa9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" ADD COLUMN "form" jsonb \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..e263b28ef68 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_profile_management_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..ecd6b613271 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000004_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_profile_management_requests_tmp" (id, request_url, issued_at, expires_at, update_successful, identity_id, created_at, updated_at, active_method) SELECT id, request_url, issued_at, expires_at, update_successful, identity_id, created_at, updated_at, active_method FROM "selfservice_profile_management_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000005_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000005_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000005_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000005_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..f61bf8d0357 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000005_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_profile_management_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200317160354000006_create_profile_request_forms.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200317160354000006_create_profile_request_forms.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git 
a/oryx/popx/stub/migrations/transactional/20200317160354000006_create_profile_request_forms.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200317160354000006_create_profile_request_forms.sqlite3.up.sql new file mode 100644 index 00000000000..62711a04d26 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200317160354000006_create_profile_request_forms.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_profile_management_requests_tmp" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.cockroach.down.sql new file mode 100644 index 00000000000..3aef4256500 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "continuity_containers"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.cockroach.up.sql new file mode 100644 index 00000000000..cf9678dff74 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.cockroach.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "continuity_containers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identity_id" UUID, +"name" VARCHAR (255) NOT NULL, +"payload" json, +"expires_at" timestamp NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "continuity_containers_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.mysql.down.sql new file mode 100644 index 00000000000..17396f6a130 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `continuity_containers`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.mysql.up.sql new file mode 100644 index 00000000000..42b55315051 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.mysql.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE `continuity_containers` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`identity_id` char(36), +`name` VARCHAR (255) NOT NULL, +`payload` JSON, +`expires_at` DATETIME NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.postgres.down.sql new file mode 100644 index 00000000000..3aef4256500 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "continuity_containers"; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.postgres.up.sql new file mode 100644 index 00000000000..ab8cfd55263 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.postgres.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "continuity_containers" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"identity_id" UUID, +"name" VARCHAR (255) NOT NULL, +"payload" jsonb, +"expires_at" timestamp NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.sqlite3.down.sql new file mode 100644 index 00000000000..3aef4256500 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "continuity_containers"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.sqlite3.up.sql new file mode 100644 index 00000000000..b0e018249ad --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200401183443000000_continuity_containers.sqlite3.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "continuity_containers" ( +"id" TEXT PRIMARY KEY, +"identity_id" char(36), +"name" TEXT NOT NULL, +"payload" TEXT, +"expires_at" DATETIME NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.cockroach.down.sql new file mode 100644 index 00000000000..52a8b095e6f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.cockroach.up.sql new file mode 100644 index 00000000000..ca1a50c39ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.mysql.down.sql new file mode 100644 index 00000000000..c0c0acee422 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` RENAME TO `selfservice_profile_management_requests`; \ 
No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.mysql.up.sql new file mode 100644 index 00000000000..81040d76000 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_request_methods` CHANGE `selfservice_profile_management_request_id` `selfservice_settings_request_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.postgres.down.sql new file mode 100644 index 00000000000..52a8b095e6f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.postgres.up.sql new file mode 100644 index 00000000000..ca1a50c39ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.sqlite3.down.sql new file mode 100644 index 00000000000..52a8b095e6f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_profile_management_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.sqlite3.up.sql new file mode 100644 index 00000000000..ca1a50c39ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000000_rename_profile_flows.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME COLUMN "selfservice_profile_management_request_id" TO "selfservice_settings_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.cockroach.down.sql new file mode 100644 index 00000000000..1873ddea8dd --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_profile_management_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.cockroach.up.sql new file mode 100644 index 00000000000..2c4303f444b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME TO "selfservice_settings_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.mysql.down.sql new file mode 100644 index 00000000000..39fefdb59ab --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_request_methods` RENAME TO `selfservice_profile_management_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.mysql.up.sql new file mode 100644 index 00000000000..cf512503264 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_request_methods` RENAME TO `selfservice_settings_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.postgres.down.sql new file mode 100644 index 00000000000..1873ddea8dd --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_profile_management_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.postgres.up.sql new file mode 100644 index 00000000000..2c4303f444b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME TO "selfservice_settings_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.sqlite3.down.sql new file mode 100644 index 00000000000..1873ddea8dd --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_profile_management_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.sqlite3.up.sql new file mode 100644 index 00000000000..2c4303f444b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000001_rename_profile_flows.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_request_methods" RENAME 
TO "selfservice_settings_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.cockroach.down.sql new file mode 100644 index 00000000000..26a15a4e2a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_profile_management_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.cockroach.up.sql new file mode 100644 index 00000000000..e62d4ea6edb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.mysql.down.sql new file mode 100644 index 00000000000..978cf2258d3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_request_methods` CHANGE `selfservice_settings_request_id` `selfservice_profile_management_request_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.mysql.up.sql new file mode 100644 index 00000000000..29362ddad0a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_profile_management_requests` RENAME TO `selfservice_settings_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.postgres.down.sql new file mode 100644 index 00000000000..26a15a4e2a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_profile_management_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.postgres.up.sql new file mode 100644 index 00000000000..e62d4ea6edb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.sqlite3.down.sql 
b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.sqlite3.down.sql new file mode 100644 index 00000000000..26a15a4e2a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_profile_management_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.sqlite3.up.sql new file mode 100644 index 00000000000..e62d4ea6edb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200402142539000002_rename_profile_flows.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_profile_management_requests" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..51596f16d46 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..0d1895343a2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "identity_recovery_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"via" VARCHAR (16) NOT NULL, +"value" VARCHAR (400) NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_recovery_addresses_identities_id_fk" FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..d79504e28e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_recovery_addresses`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..432c9846ea2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.mysql.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE `identity_recovery_addresses` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`via` VARCHAR (16) NOT NULL, +`value` VARCHAR (400) NOT NULL, +`identity_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of 
file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..51596f16d46 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..b6ba272ecaf --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.postgres.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "identity_recovery_addresses" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"via" VARCHAR (16) NOT NULL, +"value" VARCHAR (400) NOT NULL, +"identity_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_id") REFERENCES "identities" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..51596f16d46 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..7663c75910c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000000_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "identity_recovery_addresses" ( +"id" TEXT PRIMARY KEY, +"via" TEXT NOT NULL, +"value" TEXT NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..3e68e811807 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..02e829b6d8f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..9372cdd6758 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_recovery_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..665e86ff528 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_recovery_addresses_status_via_uq_idx` ON `identity_recovery_addresses` (`via`, `value`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..3e68e811807 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..02e829b6d8f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..3e68e811807 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..02e829b6d8f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000001_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_status_via_uq_idx" ON "identity_recovery_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..f9da8be61ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE 
"selfservice_recovery_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..1c34d393d7b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..9843693e645 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_recovery_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..9235aca2c95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_recovery_addresses_status_via_idx` ON `identity_recovery_addresses` (`via`, `value`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..f9da8be61ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_recovery_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..1c34d393d7b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..f9da8be61ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_recovery_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..1c34d393d7b --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200519101057000002_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_status_via_idx" ON "identity_recovery_addresses" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..ddb21ed1c3d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..91fe16ed10d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "selfservice_recovery_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"messages" json, +"active_method" VARCHAR (32), +"csrf_token" VARCHAR (255) NOT NULL, +"state" VARCHAR (32) NOT NULL, +"recovered_identity_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "selfservice_recovery_requests_identities_id_fk" FOREIGN KEY ("recovered_identity_id") REFERENCES "identities" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..34a95c91ce7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_recovery_tokens` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..ecd5ac3655f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.mysql.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE `selfservice_recovery_requests` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`request_url` VARCHAR (2048) NOT NULL, +`issued_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +`expires_at` DATETIME NOT NULL, +`messages` JSON, +`active_method` VARCHAR (32), +`csrf_token` VARCHAR (255) NOT NULL, +`state` VARCHAR (32) NOT NULL, +`recovered_identity_id` char(36), +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`recovered_identity_id`) REFERENCES `identities` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..ddb21ed1c3d --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..ff29b1e1b7b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.postgres.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "selfservice_recovery_requests" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"request_url" VARCHAR (2048) NOT NULL, +"issued_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" timestamp NOT NULL, +"messages" jsonb, +"active_method" VARCHAR (32), +"csrf_token" VARCHAR (255) NOT NULL, +"state" VARCHAR (32) NOT NULL, +"recovered_identity_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("recovered_identity_id") REFERENCES "identities" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..ddb21ed1c3d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..4f5b77bf73b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000003_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE "selfservice_recovery_requests" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, +"expires_at" DATETIME NOT NULL, +"messages" TEXT, +"active_method" TEXT, +"csrf_token" TEXT NOT NULL, +"state" TEXT NOT NULL, +"recovered_identity_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (recovered_identity_id) REFERENCES identities (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..ae150ce7313 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"config" json NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT 
"selfservice_recovery_request_methods_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..0b84cbc2647 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.mysql.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE `selfservice_recovery_request_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`config` JSON NOT NULL, +`selfservice_recovery_request_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`selfservice_recovery_request_id`) REFERENCES `selfservice_recovery_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..bb577b37c5d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.postgres.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"config" jsonb NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..832fb0412e4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000004_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_recovery_request_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"config" TEXT NOT NULL, +"selfservice_recovery_request_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (selfservice_recovery_request_id) REFERENCES selfservice_recovery_requests (id) ON DELETE cascade +) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..8ce83beb7b9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "identity_recovery_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"identity_recovery_address_id" UUID NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_recovery_tokens_identity_recovery_addresses_id_fk" FOREIGN KEY ("identity_recovery_address_id") REFERENCES "identity_recovery_addresses" ("id") ON DELETE cascade, +CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..346f257f907 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.mysql.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE `identity_recovery_tokens` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`token` VARCHAR (64) NOT NULL, +`used` bool NOT NULL DEFAULT false, +`used_at` DATETIME, +`identity_recovery_address_id` char(36) NOT NULL, +`selfservice_recovery_request_id` char(36) NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_recovery_address_id`) REFERENCES `identity_recovery_addresses` (`id`) ON DELETE cascade, +FOREIGN KEY (`selfservice_recovery_request_id`) REFERENCES `selfservice_recovery_requests` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..d00c2ba3e4e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.postgres.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "identity_recovery_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT 
NULL DEFAULT 'false', +"used_at" timestamp, +"identity_recovery_address_id" UUID NOT NULL, +"selfservice_recovery_request_id" UUID NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_recovery_address_id") REFERENCES "identity_recovery_addresses" ("id") ON DELETE cascade, +FOREIGN KEY ("selfservice_recovery_request_id") REFERENCES "selfservice_recovery_requests" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..0b57fdcead2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000005_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "identity_recovery_tokens" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_request_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON DELETE cascade, +FOREIGN KEY (selfservice_recovery_request_id) REFERENCES selfservice_recovery_requests (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..b8444bebf82 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..a04f5b7fa5d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_recovery_addresses_code_uq_idx` ON `identity_recovery_tokens` (`token`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.postgres.down.sql 
b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..b8444bebf82 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..b8444bebf82 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000006_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "identity_recovery_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.cockroach.up.sql new file mode 100644 index 00000000000..38bc780729b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..0fb0af8d62a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_recovery_addresses_code_idx` ON `identity_recovery_tokens` (`token`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.postgres.up.sql 
b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.postgres.up.sql new file mode 100644 index 00000000000..38bc780729b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.sqlite3.up.sql new file mode 100644 index 00000000000..38bc780729b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101057000007_create_recovery_addresses.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "identity_recovery_tokens" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101058000000_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101058000000_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..e620ccb937f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101058000000_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101058000000_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101058000000_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..893940ce30e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101058000000_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101058000001_create_recovery_addresses.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200519101058000001_create_recovery_addresses.mysql.down.sql new file mode 100644 index 00000000000..e620ccb937f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101058000001_create_recovery_addresses.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200519101058000001_create_recovery_addresses.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200519101058000001_create_recovery_addresses.mysql.up.sql new file mode 100644 index 00000000000..893940ce30e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200519101058000001_create_recovery_addresses.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_recovery_tokens MODIFY COLUMN token VARCHAR(64) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.cockroach.down.sql new file mode 100644 index 00000000000..a9ca7f9c0d2 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.cockroach.up.sql new file mode 100644 index 00000000000..d1e8b79eb06 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" json; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.mysql.down.sql new file mode 100644 index 00000000000..d80e1cae215 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `messages`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.mysql.up.sql new file mode 100644 index 00000000000..2c843fda0d9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `messages` JSON; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.postgres.down.sql new file mode 100644 index 00000000000..a9ca7f9c0d2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.postgres.up.sql new file mode 100644 index 00000000000..e5b5661b1ab --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" jsonb; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.sqlite3.down.sql new file mode 100644 index 00000000000..002764d9150 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.sqlite3.up.sql new file mode 100644 index 00000000000..587ca18d7b6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000000_create_messages.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "messages" 
TEXT; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000001_create_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000001_create_messages.sqlite3.down.sql new file mode 100644 index 00000000000..d93ea646061 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000001_create_messages.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000001_create_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000001_create_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000002_create_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000002_create_messages.sqlite3.down.sql new file mode 100644 index 00000000000..02da0e4111a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000002_create_messages.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, update_successful) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, update_successful FROM "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000002_create_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000002_create_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000003_create_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200601101000000003_create_messages.sqlite3.down.sql new file mode 100644 index 00000000000..ba66094fa20 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101000000003_create_messages.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"update_successful" bool NOT NULL DEFAULT 'false', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101000000003_create_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200601101000000003_create_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200601101001000000_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200601101001000000_verification.mysql.down.sql new file mode 100644 index 00000000000..4c7ee79729a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101001000000_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101001000000_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200601101001000000_verification.mysql.up.sql new file mode 100644 index 
00000000000..745721812f0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101001000000_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(32) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101001000001_verification.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200601101001000001_verification.mysql.down.sql new file mode 100644 index 00000000000..4c7ee79729a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101001000001_verification.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(255) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200601101001000001_verification.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200601101001000001_verification.mysql.up.sql new file mode 100644 index 00000000000..745721812f0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200601101001000001_verification.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE identity_verifiable_addresses MODIFY COLUMN code VARCHAR(32) BINARY \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.cockroach.down.sql new file mode 100644 index 00000000000..012ee653f08 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "messages"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.cockroach.up.sql new file mode 100644 index 00000000000..49925a83236 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" json \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.mysql.down.sql new file mode 100644 index 00000000000..81f1b3b0c41 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_requests` DROP COLUMN `messages`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.mysql.up.sql new file mode 100644 index 00000000000..6ebd845f634 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_requests` ADD COLUMN `messages` JSON \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.postgres.down.sql new file mode 100644 index 00000000000..012ee653f08 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "messages"; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.postgres.up.sql new file mode 100644 index 00000000000..afd33ce1a1b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" jsonb \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.sqlite3.down.sql new file mode 100644 index 00000000000..0ff2399a8f5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_registration_requests_tmp" RENAME TO "selfservice_registration_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.sqlite3.up.sql new file mode 100644 index 00000000000..f012c22fafe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000000_messages.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "messages" TEXT \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.cockroach.down.sql new file mode 100644 index 00000000000..8d44e297d08 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "messages" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.cockroach.up.sql new file mode 100644 index 00000000000..d70352b614d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" json \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.mysql.down.sql new file mode 100644 index 00000000000..06ca9eae2a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` DROP COLUMN `messages` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.mysql.up.sql new file mode 100644 index 00000000000..4e73ab7f48f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` ADD COLUMN `messages` JSON \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.postgres.down.sql new file mode 100644 index 00000000000..8d44e297d08 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "messages" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.postgres.up.sql new file mode 100644 index 00000000000..e5a441702af --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" jsonb \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.sqlite3.down.sql new file mode 100644 index 00000000000..52b78bb8816 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.sqlite3.up.sql new file mode 100644 index 00000000000..33542426138 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000001_messages.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "messages" TEXT \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.cockroach.down.sql new file mode 100644 index 00000000000..352e5f96a01 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "messages" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.cockroach.up.sql new file mode 100644 index 00000000000..3d9176a7413 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" json; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.mysql.down.sql new file mode 100644 index 00000000000..5363bf65bf9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_requests` DROP COLUMN `messages` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.mysql.up.sql new file mode 100644 index 00000000000..d67d0f38168 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_requests` ADD COLUMN `messages` JSON; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.postgres.down.sql 
b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.postgres.down.sql new file mode 100644 index 00000000000..352e5f96a01 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "messages" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.postgres.up.sql new file mode 100644 index 00000000000..41236ec96b6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" jsonb; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.sqlite3.down.sql new file mode 100644 index 00000000000..87ef40bc75a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_registration_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at FROM "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.sqlite3.up.sql new file mode 100644 index 00000000000..a99388b60b2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000002_messages.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "messages" TEXT; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000003_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000003_messages.sqlite3.down.sql new file mode 100644 index 00000000000..42808ebd85e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000003_messages.sqlite3.down.sql @@ -0,0 +1,10 @@ +CREATE TABLE "_selfservice_registration_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000003_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000003_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000004_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000004_messages.sqlite3.down.sql new file mode 100644 index 00000000000..d5fb51fbfa9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000004_messages.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_login_requests_tmp" RENAME TO "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000004_messages.sqlite3.up.sql 
b/oryx/popx/stub/migrations/transactional/20200605111551000004_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000005_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000005_messages.sqlite3.down.sql new file mode 100644 index 00000000000..47b51d51bcb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000005_messages.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000005_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000005_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000006_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000006_messages.sqlite3.down.sql new file mode 100644 index 00000000000..c1f05c1396f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000006_messages.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_login_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced FROM "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000006_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000006_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000007_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000007_messages.sqlite3.down.sql new file mode 100644 index 00000000000..b458c0b5dd0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000007_messages.sqlite3.down.sql @@ -0,0 +1,11 @@ +CREATE TABLE "_selfservice_login_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"forced" bool NOT NULL DEFAULT 'false' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000007_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000007_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000008_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000008_messages.sqlite3.down.sql new file mode 100644 index 00000000000..ee747b8e1ca --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000008_messages.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_requests_tmp" RENAME TO "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000008_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000008_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000009_messages.sqlite3.down.sql 
b/oryx/popx/stub/migrations/transactional/20200605111551000009_messages.sqlite3.down.sql new file mode 100644 index 00000000000..96225c485f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000009_messages.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000009_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000009_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000010_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000010_messages.sqlite3.down.sql new file mode 100644 index 00000000000..2203ba56c58 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000010_messages.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_requests_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, via, success) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, via, success FROM "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000010_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000010_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000011_messages.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200605111551000011_messages.sqlite3.down.sql new file mode 100644 index 00000000000..d9ad85e3f1e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200605111551000011_messages.sqlite3.down.sql @@ -0,0 +1,11 @@ +CREATE TABLE "_selfservice_verification_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"via" TEXT NOT NULL DEFAULT 'email', +"success" bool NOT NULL DEFAULT 'FALSE' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200605111551000011_messages.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200605111551000011_messages.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.cockroach.down.sql new file mode 100644 index 00000000000..51c468b8b09 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "update_successful" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.cockroach.up.sql new file mode 100644 index 00000000000..c387b8fdd41 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form' \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.mysql.down.sql new file mode 100644 index 00000000000..8455290d450 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `update_successful` bool NOT NULL DEFAULT false; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.mysql.up.sql new file mode 100644 index 00000000000..fcf5408a3a1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `state` VARCHAR (255) NOT NULL DEFAULT 'show_form' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.postgres.down.sql new file mode 100644 index 00000000000..51c468b8b09 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "update_successful" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.postgres.up.sql new file mode 100644 index 00000000000..c387b8fdd41 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.sqlite3.down.sql new file mode 100644 index 00000000000..51c468b8b09 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "update_successful" bool NOT NULL DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.sqlite3.up.sql new file mode 100644 index 00000000000..97a3f7adc15 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000000_settings.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "state" TEXT NOT NULL DEFAULT 'show_form' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.cockroach.down.sql new file mode 100644 index 00000000000..1300963afb3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "state" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.cockroach.up.sql new file mode 100644 index 00000000000..601e8d6a146 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "update_successful"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.mysql.down.sql new file mode 100644 index 00000000000..bec242d9aa5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `state` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.mysql.up.sql new file mode 100644 index 00000000000..b9224395376 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `update_successful`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.postgres.down.sql new file mode 100644 index 00000000000..1300963afb3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "state" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.postgres.up.sql new file mode 100644 index 00000000000..601e8d6a146 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "update_successful"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.sqlite3.down.sql new file mode 100644 index 00000000000..3e882363f43 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.sqlite3.up.sql new file mode 100644 index 00000000000..b246a8b9cc8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000001_settings.sqlite3.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +"state" TEXT NOT NULL DEFAULT 'show_form', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff 
--git a/oryx/popx/stub/migrations/transactional/20200607165100000002_settings.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000002_settings.sqlite3.down.sql new file mode 100644 index 00000000000..d93ea646061 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000002_settings.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000002_settings.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000002_settings.sqlite3.up.sql new file mode 100644 index 00000000000..cb697e25037 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000002_settings.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state FROM "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000003_settings.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000003_settings.sqlite3.down.sql new file mode 100644 index 00000000000..56700ff6b9c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000003_settings.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages FROM "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000003_settings.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000003_settings.sqlite3.up.sql new file mode 100644 index 00000000000..d93ea646061 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000003_settings.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000004_settings.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200607165100000004_settings.sqlite3.down.sql new file mode 100644 index 00000000000..37abfb02506 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000004_settings.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200607165100000004_settings.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200607165100000004_settings.sqlite3.up.sql new file mode 100644 index 00000000000..002764d9150 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200607165100000004_settings.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests"; \ No newline at end of file 
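The sqlite3 variants of the 20200607165100 settings migrations above split one schema change across several single-statement files because older SQLite versions have no ALTER TABLE ... DROP COLUMN: adding the new "state" column is a plain ADD COLUMN, but removing "update_successful" requires rebuilding the table (create a _tmp table with the target schema, copy the rows, drop the original, rename the _tmp table), and the down migrations replay the same dance in reverse. The following is a minimal consolidated sketch of that rebuild, assuming a hypothetical example_requests table with a legacy_flag column to drop; the table and column names are illustrative only and are not part of the patch:

-- Hypothetical illustration of the rebuild pattern used by the sqlite3 stubs above.
-- 1. Create a temporary table with the target schema (legacy_flag omitted).
CREATE TABLE "_example_requests_tmp" (
  "id" TEXT PRIMARY KEY,
  "request_url" TEXT NOT NULL,
  "created_at" DATETIME NOT NULL,
  "updated_at" DATETIME NOT NULL
);
-- 2. Copy every surviving column across.
INSERT INTO "_example_requests_tmp" (id, request_url, created_at, updated_at)
  SELECT id, request_url, created_at, updated_at FROM "example_requests";
-- 3. Drop the old table and move the rebuilt one into place.
DROP TABLE "example_requests";
ALTER TABLE "_example_requests_tmp" RENAME TO "example_requests";
-- The corresponding down migrations repeat these four steps with legacy_flag restored.
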
diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.cockroach.down.sql new file mode 100644 index 00000000000..d2dee7d0fd0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "schema_id" TO "traits_schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.cockroach.up.sql new file mode 100644 index 00000000000..ce7cd59733a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.mysql.down.sql new file mode 100644 index 00000000000..7e3303f9622 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identities` CHANGE `schema_id` `traits_schema_id` varchar(2048) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.mysql.up.sql new file mode 100644 index 00000000000..92a92fa94fe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identities` CHANGE `traits_schema_id` `schema_id` varchar(2048) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.postgres.down.sql new file mode 100644 index 00000000000..d2dee7d0fd0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "schema_id" TO "traits_schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.postgres.up.sql new file mode 100644 index 00000000000..ce7cd59733a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.sqlite3.down.sql new file mode 100644 index 00000000000..d2dee7d0fd0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "schema_id" 
TO "traits_schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.sqlite3.up.sql new file mode 100644 index 00000000000..ce7cd59733a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200705105359000000_rename_identities_schema.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identities" RENAME COLUMN "traits_schema_id" TO "schema_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.cockroach.down.sql new file mode 100644 index 00000000000..3ca09c1e7a1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "type"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.cockroach.up.sql new file mode 100644 index 00000000000..8e010743359 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.mysql.down.sql new file mode 100644 index 00000000000..5fae17d5603 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_requests` DROP COLUMN `type`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.mysql.up.sql new file mode 100644 index 00000000000..b2a3fd7b522 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.postgres.down.sql new file mode 100644 index 00000000000..3ca09c1e7a1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" DROP COLUMN "type"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.postgres.up.sql new file mode 100644 index 00000000000..8e010743359 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.sqlite3.down.sql 
b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..f8c6563dc3a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_requests_tmp" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e98be86d51f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000000_flow_type.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.cockroach.down.sql new file mode 100644 index 00000000000..e843d1ecea4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.cockroach.up.sql new file mode 100644 index 00000000000..18cff2262e8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.mysql.down.sql new file mode 100644 index 00000000000..18aefc67a64 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_requests` DROP COLUMN `type` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.mysql.up.sql new file mode 100644 index 00000000000..272b917d5ba --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.postgres.down.sql new file mode 100644 index 00000000000..e843d1ecea4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.postgres.up.sql new file mode 100644 index 00000000000..18cff2262e8 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..96225c485f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..5a7f5229a78 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000001_flow_type.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.cockroach.down.sql new file mode 100644 index 00000000000..b178dfb556d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.cockroach.up.sql new file mode 100644 index 00000000000..b8909ab2073 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.mysql.down.sql new file mode 100644 index 00000000000..15060f44efe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` DROP COLUMN `type` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.mysql.up.sql new file mode 100644 index 00000000000..13b7c0ca80a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.postgres.down.sql new file mode 100644 index 00000000000..b178dfb556d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" DROP COLUMN "type" \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.postgres.up.sql new file mode 100644 index 00000000000..b8909ab2073 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..da7567fcd37 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_requests_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, via, success) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, via, success FROM "selfservice_verification_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..2e42a66592d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000002_flow_type.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.cockroach.down.sql new file mode 100644 index 00000000000..57ed47b6929 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.cockroach.up.sql new file mode 100644 index 00000000000..5f675f3bc2b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.mysql.down.sql new file mode 100644 index 00000000000..c97f5c55f96 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_requests` DROP COLUMN `type` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.mysql.up.sql new file mode 100644 index 00000000000..57e979d7645 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end 
of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.postgres.down.sql new file mode 100644 index 00000000000..57ed47b6929 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.postgres.up.sql new file mode 100644 index 00000000000..5f675f3bc2b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..ae4ffd58c36 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_verification_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"via" TEXT NOT NULL DEFAULT 'email', +"success" bool NOT NULL DEFAULT 'FALSE' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..8e5b854f1ba --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000003_flow_type.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.cockroach.down.sql new file mode 100644 index 00000000000..f0fd55fae9f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.cockroach.up.sql new file mode 100644 index 00000000000..ac97bcf5677 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.mysql.down.sql new file mode 100644 index 00000000000..c6195cd6956 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.mysql.down.sql @@ 
-0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` DROP COLUMN `type` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.mysql.up.sql new file mode 100644 index 00000000000..c1282d888b4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_requests` ADD COLUMN `type` VARCHAR (16) NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.postgres.down.sql new file mode 100644 index 00000000000..f0fd55fae9f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" DROP COLUMN "type" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.postgres.up.sql new file mode 100644 index 00000000000..ac97bcf5677 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" VARCHAR (16) NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..e5c12a7e682 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_recovery_requests_tmp" RENAME TO "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..3bf96ef7f38 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000004_flow_type.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" ADD COLUMN "type" TEXT NOT NULL DEFAULT 'browser'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000005_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000005_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..90bee92a353 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000005_flow_type.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000005_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000005_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000006_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000006_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..ed36b70aaa0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000006_flow_type.sqlite3.down.sql @@ -0,0 
+1 @@ +INSERT INTO "_selfservice_recovery_requests_tmp" (id, request_url, issued_at, expires_at, messages, active_method, csrf_token, state, recovered_identity_id, created_at, updated_at) SELECT id, request_url, issued_at, expires_at, messages, active_method, csrf_token, state, recovered_identity_id, created_at, updated_at FROM "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000006_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000006_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000007_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000007_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..ede1b133a80 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000007_flow_type.sqlite3.down.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_selfservice_recovery_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"messages" TEXT, +"active_method" TEXT, +"csrf_token" TEXT NOT NULL, +"state" TEXT NOT NULL, +"recovered_identity_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (recovered_identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000007_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000007_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000008_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000008_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..3e882363f43 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000008_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_settings_requests_tmp" RENAME TO "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000008_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000008_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000009_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000009_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..d93ea646061 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000009_flow_type.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000009_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000009_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000010_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000010_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..cb697e25037 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000010_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO 
"_selfservice_settings_requests_tmp" (id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state) SELECT id, request_url, issued_at, expires_at, identity_id, created_at, updated_at, active_method, messages, state FROM "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000010_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000010_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000011_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000011_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..b246a8b9cc8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000011_flow_type.sqlite3.down.sql @@ -0,0 +1,13 @@ +CREATE TABLE "_selfservice_settings_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"active_method" TEXT, +"messages" TEXT, +"state" TEXT NOT NULL DEFAULT 'show_form', +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000011_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000011_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000012_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000012_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..6ff64072b33 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000012_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_registration_requests_tmp" RENAME TO "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000012_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000012_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000013_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000013_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..52b78bb8816 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000013_flow_type.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000013_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000013_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000014_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000014_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..0aa91e55000 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000014_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_registration_requests_tmp" (id, request_url, issued_at, 
expires_at, active_method, csrf_token, created_at, updated_at, messages) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, messages FROM "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000014_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000014_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000015_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000015_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..7a7e430ebb3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000015_flow_type.sqlite3.down.sql @@ -0,0 +1,11 @@ +CREATE TABLE "_selfservice_registration_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000015_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000015_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000016_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000016_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..d5fb51fbfa9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000016_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_login_requests_tmp" RENAME TO "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000016_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000016_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000017_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000017_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..47b51d51bcb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000017_flow_type.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000017_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000017_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000018_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000018_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..7ea2d9cc313 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000018_flow_type.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_login_requests_tmp" (id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced, messages) SELECT id, request_url, issued_at, expires_at, active_method, csrf_token, created_at, updated_at, forced, messages FROM "selfservice_login_requests" \ No newline 
at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000018_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000018_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000019_flow_type.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810141652000019_flow_type.sqlite3.down.sql new file mode 100644 index 00000000000..2de40184164 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810141652000019_flow_type.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_login_requests_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"active_method" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"forced" bool NOT NULL DEFAULT 'false', +"messages" TEXT +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810141652000019_flow_type.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810141652000019_flow_type.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..c0218244f6b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..bb17cb83a3c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..dd9aedc2ac1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` RENAME TO `selfservice_verification_requests`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..137df74aaa5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_request_methods` RENAME TO `selfservice_login_flow_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..c0218244f6b --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..bb17cb83a3c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..c0218244f6b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" RENAME TO "selfservice_verification_requests"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..bb17cb83a3c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000000_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_request_methods" RENAME TO "selfservice_login_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..ef8a5e378ca --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flows" RENAME TO "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..d308739d129 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..694cabe2bff --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_flows` RENAME TO `selfservice_recovery_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..ce602c6a335 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_requests` RENAME TO 
`selfservice_login_flows` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..ef8a5e378ca --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flows" RENAME TO "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..d308739d129 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..ef8a5e378ca --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flows" RENAME TO "selfservice_recovery_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..d308739d129 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000001_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_requests" RENAME TO "selfservice_login_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..1d4e3e3e0c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME TO "selfservice_recovery_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..c4e26c558f6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..322ca14a5cf --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_flow_methods` RENAME TO `selfservice_recovery_request_methods` \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..970fd35e0f4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_request_methods` RENAME TO `selfservice_registration_flow_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..1d4e3e3e0c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME TO "selfservice_recovery_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..c4e26c558f6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..1d4e3e3e0c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME TO "selfservice_recovery_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..c4e26c558f6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000002_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_request_methods" RENAME TO "selfservice_registration_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..be2f9fd22a5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flows" RENAME TO "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..282f65f7b13 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows" \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..763c5b963ca --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_flows` RENAME TO `selfservice_settings_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..8a04ed3b3a5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_requests` RENAME TO `selfservice_registration_flows` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..be2f9fd22a5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flows" RENAME TO "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..282f65f7b13 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..be2f9fd22a5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flows" RENAME TO "selfservice_settings_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..282f65f7b13 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000003_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_requests" RENAME TO "selfservice_registration_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..446ce6ffb0e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME TO "selfservice_settings_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..6c1dba99d77 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..ebb598e7217 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_flow_methods` RENAME TO `selfservice_settings_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..2215df580e1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_request_methods` RENAME TO `selfservice_settings_flow_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..446ce6ffb0e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME TO "selfservice_settings_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..6c1dba99d77 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..446ce6ffb0e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME TO "selfservice_settings_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..6c1dba99d77 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000004_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_request_methods" RENAME TO "selfservice_settings_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.cockroach.down.sql new 
file mode 100644 index 00000000000..dfc5acb691e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flows" RENAME TO "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..c0d5ed2e2ef --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..54935f6358b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_flows` RENAME TO `selfservice_registration_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..bf9ed12fde7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_requests` RENAME TO `selfservice_settings_flows` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..dfc5acb691e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flows" RENAME TO "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..c0d5ed2e2ef --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..dfc5acb691e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flows" RENAME TO "selfservice_registration_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..c0d5ed2e2ef --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000005_flow_rename.sqlite3.up.sql @@ 
-0,0 +1 @@ +ALTER TABLE "selfservice_settings_requests" RENAME TO "selfservice_settings_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..b91fa01ee30 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME TO "selfservice_registration_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..bf5aed782d4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..932e4153b14 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_flow_methods` RENAME TO `selfservice_registration_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..264b7e8481c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_request_methods` RENAME TO `selfservice_recovery_flow_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..b91fa01ee30 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME TO "selfservice_registration_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..bf5aed782d4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..b91fa01ee30 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME TO 
"selfservice_registration_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..bf5aed782d4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000006_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_request_methods" RENAME TO "selfservice_recovery_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..0d0dd5ba5ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME TO "selfservice_login_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..01ebd67d19a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..630e4709f9b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_flow_methods` RENAME TO `selfservice_login_request_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..406ca1d5a3b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_requests` RENAME TO `selfservice_recovery_flows` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..0d0dd5ba5ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME TO "selfservice_login_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..01ebd67d19a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows" \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..0d0dd5ba5ec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME TO "selfservice_login_request_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..01ebd67d19a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000007_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_requests" RENAME TO "selfservice_recovery_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.cockroach.down.sql new file mode 100644 index 00000000000..9a761d9c4c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flows" RENAME TO "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.cockroach.up.sql new file mode 100644 index 00000000000..8a8a244504a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.mysql.down.sql new file mode 100644 index 00000000000..0175446841c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_flows` RENAME TO `selfservice_login_requests` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.mysql.up.sql new file mode 100644 index 00000000000..dc3fdb9cb8e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_requests` RENAME TO `selfservice_verification_flows`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.postgres.down.sql new file mode 100644 index 00000000000..9a761d9c4c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flows" RENAME TO "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.postgres.up.sql 
b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.postgres.up.sql new file mode 100644 index 00000000000..8a8a244504a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.sqlite3.down.sql new file mode 100644 index 00000000000..9a761d9c4c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flows" RENAME TO "selfservice_login_requests" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.sqlite3.up.sql new file mode 100644 index 00000000000..8a8a244504a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810161022000008_flow_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_requests" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.cockroach.down.sql new file mode 100644 index 00000000000..77c2a0c06b1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.cockroach.up.sql new file mode 100644 index 00000000000..44454b98b7e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.mysql.down.sql new file mode 100644 index 00000000000..dcc2ab20b15 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_flow_methods` CHANGE `selfservice_recovery_flow_id` `selfservice_recovery_request_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.mysql.up.sql new file mode 100644 index 00000000000..3455d87f480 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_flow_methods` CHANGE `selfservice_login_request_id` `selfservice_login_flow_id` char(36) NOT NULL \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.postgres.down.sql new file mode 100644 index 00000000000..77c2a0c06b1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.postgres.up.sql new file mode 100644 index 00000000000..44454b98b7e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.sqlite3.down.sql new file mode 100644 index 00000000000..77c2a0c06b1 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.sqlite3.up.sql new file mode 100644 index 00000000000..44454b98b7e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000000_flow_fields_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_request_id" TO "selfservice_login_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.cockroach.down.sql new file mode 100644 index 00000000000..47d4a55e02d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_flow_id" TO "selfservice_settings_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.cockroach.up.sql new file mode 100644 index 00000000000..f57de3649b6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.mysql.down.sql new file mode 100644 index 00000000000..e1cf4cc5f3d --- 
/dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_flow_methods` CHANGE `selfservice_settings_flow_id` `selfservice_settings_request_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.mysql.up.sql new file mode 100644 index 00000000000..712063cfa9a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_flow_methods` CHANGE `selfservice_registration_request_id` `selfservice_registration_flow_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.postgres.down.sql new file mode 100644 index 00000000000..47d4a55e02d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_flow_id" TO "selfservice_settings_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.postgres.up.sql new file mode 100644 index 00000000000..f57de3649b6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.sqlite3.down.sql new file mode 100644 index 00000000000..47d4a55e02d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_flow_id" TO "selfservice_settings_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.sqlite3.up.sql new file mode 100644 index 00000000000..f57de3649b6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000001_flow_fields_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_request_id" TO "selfservice_registration_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.cockroach.down.sql new file mode 100644 index 00000000000..9475fea22bb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN 
"selfservice_registration_flow_id" TO "selfservice_registration_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.cockroach.up.sql new file mode 100644 index 00000000000..9725045ca1e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.mysql.down.sql new file mode 100644 index 00000000000..18f0622c55d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_registration_flow_methods` CHANGE `selfservice_registration_flow_id` `selfservice_registration_request_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.mysql.up.sql new file mode 100644 index 00000000000..3084591a14b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_recovery_flow_methods` CHANGE `selfservice_recovery_request_id` `selfservice_recovery_flow_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.postgres.down.sql new file mode 100644 index 00000000000..9475fea22bb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_flow_id" TO "selfservice_registration_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.postgres.up.sql new file mode 100644 index 00000000000..9725045ca1e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.sqlite3.down.sql new file mode 100644 index 00000000000..9475fea22bb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_registration_flow_methods" RENAME COLUMN "selfservice_registration_flow_id" TO "selfservice_registration_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.sqlite3.up.sql 
b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.sqlite3.up.sql new file mode 100644 index 00000000000..9725045ca1e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000002_flow_fields_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_recovery_flow_methods" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.cockroach.down.sql new file mode 100644 index 00000000000..e9fc8f8bb01 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_flow_id" TO "selfservice_login_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.cockroach.up.sql new file mode 100644 index 00000000000..85846372452 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_settings_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.mysql.down.sql new file mode 100644 index 00000000000..b63049947d3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_login_flow_methods` CHANGE `selfservice_login_flow_id` `selfservice_login_request_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.mysql.up.sql new file mode 100644 index 00000000000..78ea09e9125 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_settings_flow_methods` CHANGE `selfservice_settings_request_id` `selfservice_settings_flow_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.postgres.down.sql new file mode 100644 index 00000000000..e9fc8f8bb01 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_flow_id" TO "selfservice_login_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.postgres.up.sql new file mode 100644 index 00000000000..85846372452 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.postgres.up.sql @@ 
-0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_settings_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.sqlite3.down.sql new file mode 100644 index 00000000000..e9fc8f8bb01 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_login_flow_methods" RENAME COLUMN "selfservice_login_flow_id" TO "selfservice_login_request_id" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.sqlite3.up.sql new file mode 100644 index 00000000000..85846372452 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200810162450000003_flow_fields_rename.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_settings_flow_methods" RENAME COLUMN "selfservice_settings_request_id" TO "selfservice_settings_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..9cab681fc1e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "token"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..377599cca37 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +DELETE FROM sessions \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.mysql.down.sql new file mode 100644 index 00000000000..3ee676ed8f3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` DROP COLUMN `token`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.mysql.up.sql new file mode 100644 index 00000000000..377599cca37 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.mysql.up.sql @@ -0,0 +1 @@ +DELETE FROM sessions \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.postgres.down.sql new file mode 100644 index 00000000000..9cab681fc1e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "token"; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.postgres.up.sql new file mode 100644 index 00000000000..377599cca37 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.postgres.up.sql @@ -0,0 +1 @@ +DELETE FROM sessions \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..4fccd03c0e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_sessions_tmp" RENAME TO "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..377599cca37 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000000_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +DELETE FROM sessions \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..572e06ea389 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "token" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.mysql.up.sql new file mode 100644 index 00000000000..9581d45faf6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` ADD COLUMN `token` VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.postgres.up.sql new file mode 100644 index 00000000000..572e06ea389 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "token" VARCHAR (32) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..4822fe6ae92 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..2472546128a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000001_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "token" TEXT \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..bdf13df186a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" RENAME COLUMN "token" TO "_token_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.mysql.up.sql new file mode 100644 index 00000000000..f8ee5fdfde3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` MODIFY `token` VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.postgres.up.sql new file mode 100644 index 00000000000..86c4207e539 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ALTER COLUMN "token" TYPE VARCHAR (32), ALTER COLUMN "token" DROP NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..c633c750aad --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_sessions_tmp" (id, issued_at, 
expires_at, authenticated_at, identity_id, created_at, updated_at) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at FROM "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..84df02832db --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000002_add_session_token.sqlite3.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"token" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..572e06ea389 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "token" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.mysql.up.sql new file mode 100644 index 00000000000..c8e8a19e2a9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `sessions_token_uq_idx` ON `sessions` (`token`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.postgres.up.sql new file mode 100644 index 00000000000..efe335d91d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..0fef07db05b --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.sqlite3.down.sql @@ -0,0 +1,10 @@ +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..38d1dfae492 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000003_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_sessions_tmp" (id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token FROM "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..516cec3cec0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +UPDATE "sessions" SET "token" = "_token_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.mysql.up.sql new file mode 100644 index 00000000000..b4c20a11f85 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `sessions_token_idx` ON `sessions` (`token`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.postgres.up.sql new file mode 100644 index 00000000000..cf8e9db4f98 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "sessions_token_idx" ON "sessions" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.sqlite3.down.sql 
new file mode 100644 index 00000000000..9db98fdccd9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "sessions_token_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..9d2a3fc748c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000004_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +DROP TABLE "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..14772cc9378 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "_token_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..b83a7e29c95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "sessions_token_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..961bbb63471 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000005_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_sessions_tmp" RENAME TO "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..efe335d91d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.sqlite3.up.sql new 
file mode 100644 index 00000000000..efe335d91d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000006_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "sessions" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.cockroach.up.sql new file mode 100644 index 00000000000..cf8e9db4f98 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "sessions_token_idx" ON "sessions" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.sqlite3.up.sql new file mode 100644 index 00000000000..cf8e9db4f98 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812124254000007_add_session_token.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "sessions_token_idx" ON "sessions" (token); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.cockroach.down.sql new file mode 100644 index 00000000000..4e81ca50803 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "active"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.cockroach.up.sql new file mode 100644 index 00000000000..d0f23849f23 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "active" boolean DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.mysql.down.sql new file mode 100644 index 00000000000..fd675bf09cf --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `sessions` DROP COLUMN `active`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.mysql.up.sql new file mode 100644 index 00000000000..80f88e214c7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE 
`sessions` ADD COLUMN `active` boolean DEFAULT false; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.postgres.down.sql new file mode 100644 index 00000000000..4e81ca50803 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" DROP COLUMN "active"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.postgres.up.sql new file mode 100644 index 00000000000..d0f23849f23 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "active" boolean DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..4fccd03c0e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_sessions_tmp" RENAME TO "sessions"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..77302570222 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000000_add_session_revoke.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "sessions" ADD COLUMN "active" NUMERIC DEFAULT 'false'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000001_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000001_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..4822fe6ae92 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000001_add_session_revoke.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000001_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000001_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000002_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000002_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..38d1dfae492 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000002_add_session_revoke.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_sessions_tmp" (id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token) SELECT id, issued_at, expires_at, authenticated_at, identity_id, created_at, updated_at, token FROM "sessions" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000002_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000002_add_session_revoke.sqlite3.up.sql new 
file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000003_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000003_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..1905c70bce9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000003_add_session_revoke.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "sessions_token_uq_idx" ON "_sessions_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000003_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000003_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000004_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000004_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..37fa47b293c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000004_add_session_revoke.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "sessions_token_idx" ON "_sessions_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000004_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000004_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000005_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000005_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..84df02832db --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000005_add_session_revoke.sqlite3.down.sql @@ -0,0 +1,11 @@ +CREATE TABLE "_sessions_tmp" ( +"id" TEXT PRIMARY KEY, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"authenticated_at" DATETIME NOT NULL, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"token" TEXT, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000005_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000005_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000006_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000006_add_session_revoke.sqlite3.down.sql new file mode 100644 index 00000000000..b83a7e29c95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000006_add_session_revoke.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "sessions_token_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000006_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000006_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000007_add_session_revoke.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200812160551000007_add_session_revoke.sqlite3.down.sql new file 
mode 100644 index 00000000000..9db98fdccd9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200812160551000007_add_session_revoke.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "sessions_token_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200812160551000007_add_session_revoke.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200812160551000007_add_session_revoke.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.cockroach.down.sql new file mode 100644 index 00000000000..5dad6b6d7ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.cockroach.up.sql new file mode 100644 index 00000000000..3f0a2da51fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.mysql.down.sql new file mode 100644 index 00000000000..b1096fe505a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` CHANGE `selfservice_recovery_flow_id` `selfservice_recovery_request_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.mysql.up.sql new file mode 100644 index 00000000000..26017ff6451 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` CHANGE `selfservice_recovery_request_id` `selfservice_recovery_flow_id` char(36) NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.postgres.down.sql new file mode 100644 index 00000000000..5dad6b6d7ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.postgres.up.sql new file mode 100644 index 00000000000..3f0a2da51fc --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.sqlite3.down.sql new file mode 100644 index 00000000000..5dad6b6d7ae --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "selfservice_recovery_request_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.sqlite3.up.sql new file mode 100644 index 00000000000..3f0a2da51fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830121710000000_update_recovery_token.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_request_id" TO "selfservice_recovery_flow_id"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..42b1738c1b4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "success" bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..5792ef9ebbb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..970590afeb0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `success` bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..ee5b748a4d9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `state` VARCHAR (255) NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..42b1738c1b4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "success" bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..5792ef9ebbb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" VARCHAR (255) NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..42b1738c1b4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "success" bool NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..af3d919d03e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000000_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "state" TEXT NOT NULL DEFAULT 'show_form'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..4ea5af8f7fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "via" VARCHAR (16) NOT NULL DEFAULT 'email' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..76acb84328e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `via` VARCHAR (16) NOT NULL DEFAULT 'email' \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..4ea5af8f7fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "via" VARCHAR (16) NOT NULL DEFAULT 'email' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e340455c673 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "via" TEXT NOT NULL DEFAULT 'email' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000001_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..9cb268bf4a4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "state" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..cb69380780c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `state` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.postgres.down.sql 
b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..9cb268bf4a4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "state" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..1444ec963a6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000002_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..554453ef12d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "active_method" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..18701c66b9a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `active_method` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..554453ef12d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "active_method" \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..fa7f9297169 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000003_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e66885f32dc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_verification_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..0bcec61c529 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `selfservice_verification_flow_methods` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e66885f32dc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_verification_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.sqlite3.down.sql new file mode 100644 index 
00000000000..cb5cafcc10b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type FROM "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000004_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..6636766cd67 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "form" json NOT NULL DEFAULT '{}' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..5a062707fb0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` MODIFY `form` JSON \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..649bea39e2a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ALTER COLUMN "form" TYPE jsonb, ALTER COLUMN "form" DROP NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..d401b5b5b73 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.sqlite3.down.sql @@ -0,0 +1,11 @@ +CREATE 
TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000005_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..9a097ce0c3b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET form=(SELECT * FROM (SELECT m.config FROM selfservice_verification_flows AS r INNER JOIN selfservice_verification_flow_methods AS m ON r.id=m.selfservice_verification_flow_id) as t) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..9a097ce0c3b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET form=(SELECT * FROM (SELECT m.config FROM selfservice_verification_flows AS r INNER JOIN selfservice_verification_flow_methods AS m ON r.id=m.selfservice_verification_flow_id) as t) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..1444ec963a6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000006_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.mysql.down.sql new file mode 100644 index 
00000000000..a27a7771d3d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `form` JSON \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..8ac44ed36ce --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "form" jsonb \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..fa7f9297169 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000007_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000008_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000008_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..99f1a46925b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000008_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state FROM "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000008_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000008_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000009_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000009_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..9f9be3b6bc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000009_add_verification_methods.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, 
+"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form' +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000009_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000009_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000010_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130642000010_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e66885f32dc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130642000010_add_verification_methods.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "selfservice_verification_flow_methods" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130642000010_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130642000010_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..ea4615e685f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130643000000_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +UPDATE selfservice_verification_flows SET state='passed_challenge' WHERE success IS TRUE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..a1a559682cb --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.cockroach.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_verification_flow_id" UUID NOT NULL, +"config" json NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..5a36baea2e2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.mysql.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `selfservice_verification_flow_methods` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`method` VARCHAR (32) NOT NULL, +`selfservice_verification_flow_id` char(36) NOT NULL, +`config` JSON NOT NULL, +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.postgres.up.sql new file mode 100644 index 
00000000000..c4234d0cd3a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.postgres.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"method" VARCHAR (32) NOT NULL, +"selfservice_verification_flow_id" UUID NOT NULL, +"config" jsonb NOT NULL, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..ca15b0a433c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000000_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE "selfservice_verification_flow_methods" ( +"id" TEXT PRIMARY KEY, +"method" TEXT NOT NULL, +"selfservice_verification_flow_id" char(36) NOT NULL, +"config" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..85087a80472 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" VARCHAR (32); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..621021cf4c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` ADD COLUMN `active_method` VARCHAR (32); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.postgres.up.sql new file mode 100644 index 
00000000000..85087a80472 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" VARCHAR (32); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..2568649311e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130644000001_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" ADD COLUMN "active_method" TEXT; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..6b5e1fc22ee --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130645000000_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO selfservice_verification_flow_methods (id, method, selfservice_verification_flow_id, config, created_at, updated_at) SELECT id, 'link', id, form, created_at, updated_at FROM selfservice_verification_flows; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..690a58cfbc6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "form" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..6cb7200415f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `form` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..690a58cfbc6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE 
"selfservice_verification_flows" DROP COLUMN "form" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..90f5503530e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000000_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"via" TEXT NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..00117a63f7d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "via" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..e5375ff4c43 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `via` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..00117a63f7d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "via" \ No 
newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..02c3c0c68d6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000001_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, via, csrf_token, success, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, via, csrf_token, success, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.cockroach.up.sql new file mode 100644 index 00000000000..b775d31b6d9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "success"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.mysql.up.sql new file mode 100644 index 00000000000..ae680245b48 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `selfservice_verification_flows` DROP COLUMN `success`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.postgres.up.sql new file mode 100644 index 00000000000..b775d31b6d9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "selfservice_verification_flows" DROP COLUMN "success"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.sqlite3.down.sql 
b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..fa7f9297169 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000002_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000003_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000003_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000003_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000003_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..1444ec963a6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000003_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000004_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000004_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000004_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000004_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..5289902adee --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000004_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"success" bool NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000005_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000005_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000005_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000005_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..7fe65c32f57 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000005_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, success, created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, csrf_token, success, created_at, updated_at, messages, type, 
state, active_method FROM "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000006_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000006_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000006_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000006_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..fa7f9297169 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000006_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000007_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000007_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000007_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000007_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..1444ec963a6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000007_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000008_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000008_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000008_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000008_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..e7c586c8a9a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000008_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "_selfservice_verification_flows_tmp" ( +"id" TEXT PRIMARY KEY, +"request_url" TEXT NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"expires_at" DATETIME NOT NULL, +"csrf_token" TEXT NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"messages" TEXT, +"type" TEXT NOT NULL DEFAULT 'browser', +"state" TEXT NOT NULL DEFAULT 'show_form', +"active_method" TEXT +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000009_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000009_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000009_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000009_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..1e7cac1004e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000009_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_selfservice_verification_flows_tmp" (id, request_url, issued_at, expires_at, csrf_token, 
created_at, updated_at, messages, type, state, active_method) SELECT id, request_url, issued_at, expires_at, csrf_token, created_at, updated_at, messages, type, state, active_method FROM "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000010_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000010_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000010_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000010_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..fa7f9297169 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000010_add_verification_methods.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "selfservice_verification_flows" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000011_add_verification_methods.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830130646000011_add_verification_methods.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830130646000011_add_verification_methods.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830130646000011_add_verification_methods.sqlite3.up.sql new file mode 100644 index 00000000000..030cc33097a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830130646000011_add_verification_methods.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_selfservice_verification_flows_tmp" RENAME TO "selfservice_verification_flows"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.cockroach.down.sql new file mode 100644 index 00000000000..8b455721a90 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.cockroach.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verification_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.cockroach.up.sql new file mode 100644 index 00000000000..4ea42da87a7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.cockroach.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "identity_verification_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"expires_at" timestamp NOT NULL, +"issued_at" timestamp NOT NULL, +"identity_verifiable_address_id" UUID NOT NULL, +"selfservice_verification_flow_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +CONSTRAINT "identity_verification_tokens_identity_verifiable_addresses_id_fk" FOREIGN KEY ("identity_verifiable_address_id") REFERENCES "identity_verifiable_addresses" ("id") ON DELETE cascade, +CONSTRAINT "identity_verification_tokens_selfservice_verification_flows_id_fk" FOREIGN KEY ("selfservice_verification_flow_id") REFERENCES "selfservice_verification_flows" ("id") ON DELETE cascade +) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.mysql.down.sql new file mode 100644 index 00000000000..5696963717f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.mysql.down.sql @@ -0,0 +1 @@ +DROP TABLE `identity_verification_tokens`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.mysql.up.sql new file mode 100644 index 00000000000..b58209500c0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.mysql.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE `identity_verification_tokens` ( +`id` char(36) NOT NULL, +PRIMARY KEY(`id`), +`token` VARCHAR (64) NOT NULL, +`used` bool NOT NULL DEFAULT false, +`used_at` DATETIME, +`expires_at` DATETIME NOT NULL, +`issued_at` DATETIME NOT NULL, +`identity_verifiable_address_id` char(36) NOT NULL, +`selfservice_verification_flow_id` char(36), +`created_at` DATETIME NOT NULL, +`updated_at` DATETIME NOT NULL, +FOREIGN KEY (`identity_verifiable_address_id`) REFERENCES `identity_verifiable_addresses` (`id`) ON DELETE cascade, +FOREIGN KEY (`selfservice_verification_flow_id`) REFERENCES `selfservice_verification_flows` (`id`) ON DELETE cascade +) ENGINE=InnoDB \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.postgres.down.sql new file mode 100644 index 00000000000..8b455721a90 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.postgres.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verification_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.postgres.up.sql new file mode 100644 index 00000000000..4a5077842b7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.postgres.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE "identity_verification_tokens" ( +"id" UUID NOT NULL, +PRIMARY KEY("id"), +"token" VARCHAR (64) NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" timestamp, +"expires_at" timestamp NOT NULL, +"issued_at" timestamp NOT NULL, +"identity_verifiable_address_id" UUID NOT NULL, +"selfservice_verification_flow_id" UUID, +"created_at" timestamp NOT NULL, +"updated_at" timestamp NOT NULL, +FOREIGN KEY ("identity_verifiable_address_id") REFERENCES "identity_verifiable_addresses" ("id") ON DELETE cascade, +FOREIGN KEY ("selfservice_verification_flow_id") REFERENCES "selfservice_verification_flows" ("id") ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.sqlite3.down.sql new file mode 100644 index 00000000000..8b455721a90 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verification_tokens"; \ 
No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.sqlite3.up.sql new file mode 100644 index 00000000000..cab945e116e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000000_add_verification_token.sqlite3.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE "identity_verification_tokens" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"expires_at" DATETIME NOT NULL, +"issued_at" DATETIME NOT NULL, +"identity_verifiable_address_id" char(36) NOT NULL, +"selfservice_verification_flow_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_verifiable_address_id) REFERENCES identity_verifiable_addresses (id) ON DELETE cascade, +FOREIGN KEY (selfservice_verification_flow_id) REFERENCES selfservice_verification_flows (id) ON DELETE cascade +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.cockroach.up.sql new file mode 100644 index 00000000000..0eb6954acc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.cockroach.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.mysql.up.sql new file mode 100644 index 00000000000..1227a1c95b0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.mysql.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_verification_tokens_token_uq_idx` ON `identity_verification_tokens` (`token`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.postgres.up.sql new file mode 100644 index 00000000000..0eb6954acc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.postgres.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.sqlite3.up.sql new file mode 100644 index 00000000000..0eb6954acc5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000001_add_verification_token.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verification_tokens_token_uq_idx" ON "identity_verification_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.cockroach.up.sql new file mode 100644 index 00000000000..2b817e78a3b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.mysql.up.sql new file mode 100644 index 00000000000..d0650f35045 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_verification_tokens_token_idx` ON `identity_verification_tokens` (`token`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.postgres.up.sql new file mode 100644 index 00000000000..2b817e78a3b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.sqlite3.up.sql 
b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.sqlite3.up.sql new file mode 100644 index 00000000000..2b817e78a3b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000002_add_verification_token.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_token_idx" ON "identity_verification_tokens" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.cockroach.up.sql new file mode 100644 index 00000000000..d43604b7325 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.mysql.up.sql new file mode 100644 index 00000000000..c3a3c47e30d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_verification_tokens_verifiable_address_id_idx` ON `identity_verification_tokens` (`identity_verifiable_address_id`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.postgres.up.sql new file mode 100644 index 00000000000..d43604b7325 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.sqlite3.up.sql new file mode 100644 index 00000000000..d43604b7325 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830154602000003_add_verification_token.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_verifiable_address_id_idx" ON "identity_verification_tokens" (identity_verifiable_address_id) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.cockroach.up.sql new file mode 100644 index 00000000000..c0eac257d65 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.cockroach.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON "identity_verification_tokens" (selfservice_verification_flow_id); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.mysql.up.sql new file mode 100644 index 00000000000..e134442030c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.mysql.up.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_verification_tokens_verification_flow_id_idx` ON `identity_verification_tokens` (`selfservice_verification_flow_id`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.postgres.up.sql new file mode 100644 index 00000000000..c0eac257d65 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.postgres.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON "identity_verification_tokens" (selfservice_verification_flow_id); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.sqlite3.up.sql new file mode 100644 index 00000000000..c0eac257d65 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830154602000004_add_verification_token.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verification_tokens_verification_flow_id_idx" ON 
"identity_verification_tokens" (selfservice_verification_flow_id); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..5865f64a374 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "issued_at"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..ed9ee13dc95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "expires_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.mysql.down.sql new file mode 100644 index 00000000000..80ec02489dd --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` DROP COLUMN `issued_at`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.mysql.up.sql new file mode 100644 index 00000000000..0da7c73d716 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` ADD COLUMN `expires_at` DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.postgres.down.sql new file mode 100644 index 00000000000..5865f64a374 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "issued_at"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.postgres.up.sql new file mode 100644 index 00000000000..ed9ee13dc95 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "expires_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..1ebb1c9fdfc --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..b227755e50c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000000_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..425c69491f2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "expires_at" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..74e835ac12e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.mysql.down.sql new file mode 100644 index 00000000000..91dd9b778ef --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` DROP COLUMN `expires_at` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.mysql.up.sql new file mode 100644 index 00000000000..5e69d7aa85f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` ADD COLUMN `issued_at` DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.postgres.down.sql new file mode 100644 index 00000000000..425c69491f2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "expires_at" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.postgres.up.sql new 
file mode 100644 index 00000000000..74e835ac12e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" timestamp NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..d5b864c85df --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..cdb8f9c1d44 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000001_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00' \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..0ada1920e68 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_flow_id") REFERENCES "selfservice_recovery_flows" ("id") ON UPDATE NO ACTION ON DELETE CASCADE \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..c8953a2eabe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.mysql.down.sql new file mode 100644 index 00000000000..712d8b2b13e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` MODIFY `selfservice_recovery_flow_id` char(36) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.mysql.up.sql new file mode 100644 index 00000000000..0fe490dd776 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_recovery_tokens` MODIFY 
`selfservice_recovery_flow_id` char(36); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.postgres.down.sql new file mode 100644 index 00000000000..10e628ac810 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ALTER COLUMN "selfservice_recovery_flow_id" TYPE UUID, ALTER COLUMN "selfservice_recovery_flow_id" SET NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.postgres.up.sql new file mode 100644 index 00000000000..a4605d0dd71 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ALTER COLUMN "selfservice_recovery_flow_id" TYPE UUID, ALTER COLUMN "selfservice_recovery_flow_id" DROP NOT NULL; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..6670ad27eb5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at FROM "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..ddbeebc096b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000002_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..7ddc5ce9fde --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "_selfservice_recovery_flow_id_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..d3ad85b1cc7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "_selfservice_recovery_flow_id_tmp" \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.mysql.down.sql new file mode 100644 index 00000000000..af8197e0df0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.mysql.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.postgres.down.sql new file mode 100644 index 00000000000..af8197e0df0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.postgres.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..f6b447164c3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..f3e151a509c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000003_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..e09ac77812f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ALTER COLUMN "selfservice_recovery_flow_id" SET NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..479627d592a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN 
"selfservice_recovery_flow_id" UUID \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..d278d9c0823 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..55082d444c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000004_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36), +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..ab73f48c908 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE "identity_recovery_tokens" SET "selfservice_recovery_flow_id" = "_selfservice_recovery_flow_id_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..ab73f48c908 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +UPDATE "identity_recovery_tokens" SET "selfservice_recovery_flow_id" = "_selfservice_recovery_flow_id_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..70591642b03 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN 
KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..f6b447164c3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000005_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..479627d592a --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD COLUMN "selfservice_recovery_flow_id" UUID \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..7ddc5ce9fde --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP COLUMN "_selfservice_recovery_flow_id_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..ddbeebc096b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..d278d9c0823 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000006_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..d3ad85b1cc7 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" RENAME COLUMN "selfservice_recovery_flow_id" TO "_selfservice_recovery_flow_id_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..c15f38009bf --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" ADD CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" FOREIGN KEY ("selfservice_recovery_flow_id") REFERENCES "selfservice_recovery_flows" ("id") ON UPDATE NO ACTION ON DELETE CASCADE; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..f3e151a509c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..6557e4c1b7c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000007_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at FROM "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..c8953a2eabe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_recovery_tokens" DROP CONSTRAINT "identity_recovery_tokens_selfservice_recovery_requests_id_fk" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..c4ea5d5ac65 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..ddb21ed1c3d --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830172221000008_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.cockroach.down.sql new file mode 100644 index 00000000000..af8197e0df0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.cockroach.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..d5b864c85df --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1,2 @@ + +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..1ebb1c9fdfc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000009_recovery_token_expires.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000010_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000010_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..ab19747d7ad --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000010_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, issued_at FROM "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000010_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000010_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000011_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000011_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..f6b447164c3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000011_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830172221000011_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000011_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000012_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000012_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..d278d9c0823 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000012_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000012_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000012_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000013_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000013_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..92460b7d983 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000013_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1,13 @@ +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000013_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000013_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000014_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000014_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..ddbeebc096b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000014_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000014_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000014_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000015_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000015_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..f3e151a509c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000015_recovery_token_expires.sqlite3.down.sql @@ -0,0 
+1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000015_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000015_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000016_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000016_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..c4ea5d5ac65 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000016_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_recovery_tokens_tmp" RENAME TO "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000016_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000016_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000017_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000017_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..ddb21ed1c3d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000017_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000017_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000017_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000018_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000018_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..6557e4c1b7c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000018_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_recovery_tokens_tmp" (id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at) SELECT id, token, used, used_at, identity_recovery_address_id, selfservice_recovery_flow_id, created_at, updated_at, expires_at, issued_at FROM "identity_recovery_tokens" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000018_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000018_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000019_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000019_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..f6b447164c3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000019_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_recovery_addresses_code_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200830172221000019_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000019_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000020_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000020_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..d278d9c0823 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000020_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_recovery_addresses_code_uq_idx" ON "_identity_recovery_tokens_tmp" (token) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000020_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000020_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000021_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000021_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..4c6931a15f3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000021_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_identity_recovery_tokens_tmp" ( +"id" TEXT PRIMARY KEY, +"token" TEXT NOT NULL, +"used" bool NOT NULL DEFAULT 'false', +"used_at" DATETIME, +"identity_recovery_address_id" char(36) NOT NULL, +"selfservice_recovery_flow_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"expires_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +"issued_at" DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00', +FOREIGN KEY (identity_recovery_address_id) REFERENCES identity_recovery_addresses (id) ON UPDATE NO ACTION ON DELETE CASCADE, +FOREIGN KEY (selfservice_recovery_flow_id) REFERENCES selfservice_recovery_flows (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000021_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000021_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000022_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000022_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..ddbeebc096b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000022_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000022_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000022_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000023_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000023_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..f3e151a509c --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20200830172221000023_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_recovery_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000023_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000023_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000024_recovery_token_expires.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200830172221000024_recovery_token_expires.sqlite3.down.sql new file mode 100644 index 00000000000..af8197e0df0 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200830172221000024_recovery_token_expires.sqlite3.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_recovery_tokens WHERE selfservice_recovery_flow_id IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200830172221000024_recovery_token_expires.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200830172221000024_recovery_token_expires.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..4adfcd1e288 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..037a82260ce --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.cockroach.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..788e1524164 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +CREATE INDEX `identity_verifiable_addresses_code_idx` ON `identity_verifiable_addresses` (`code`); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..9ac004da477 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.mysql.up.sql @@ -0,0 +1 @@ +DROP INDEX `identity_verifiable_addresses_code_uq_idx` ON `identity_verifiable_addresses` \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..4adfcd1e288 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..fcf23d676a2 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.postgres.up.sql @@ -0,0 +1 @@ +DROP INDEX "identity_verifiable_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..4adfcd1e288 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_code_idx" ON "identity_verifiable_addresses" (code); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..037a82260ce --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000000_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..ecf8ba9c94e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..ab06acfa14e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.cockroach.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.mysql.down.sql 
b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..3f787b6f05e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX `identity_verifiable_addresses_code_uq_idx` ON `identity_verifiable_addresses` (`code`) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..6f6853c4f06 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.mysql.up.sql @@ -0,0 +1 @@ +DROP INDEX `identity_verifiable_addresses_code_idx` ON `identity_verifiable_addresses` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..ecf8ba9c94e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..16f55052082 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.postgres.up.sql @@ -0,0 +1 @@ +DROP INDEX "identity_verifiable_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..ecf8ba9c94e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_code_uq_idx" ON "identity_verifiable_addresses" (code) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..ab06acfa14e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000001_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_code_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 
00000000000..a9426a4063e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "_expires_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..b1d2ffca40e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "code" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..1ca3bf925ee --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_verifiable_addresses` MODIFY `expires_at` DATETIME \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..4cde4455447 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_verifiable_addresses` DROP COLUMN `code` \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..2042f8ff3db --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ALTER COLUMN "expires_at" TYPE timestamp, ALTER COLUMN "expires_at" DROP NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..b1d2ffca40e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "code" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..6d810eec510 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE 
"_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..57acd91f27c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000002_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..86dce13a119 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE "identity_verifiable_addresses" SET "expires_at" = "_expires_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..6c491ea396f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "expires_at"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..76be8ed844b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_verifiable_addresses` MODIFY `code` VARCHAR (32) NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..e58903e54ed --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_verifiable_addresses` DROP COLUMN `expires_at`; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..47f8bb6c39d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ALTER COLUMN "code" TYPE VARCHAR (32), ALTER COLUMN "code" SET NOT NULL \ No newline at end of file diff --git 
a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..6c491ea396f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "expires_at"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..f093d329919 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..9fb21b7f912 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000003_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..fc85347e150 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" timestamp \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..d86919b31fe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.postgres.down.sql 
b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..d86919b31fe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..07227f5fa97 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at) SELECT id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at FROM "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..e9f0577506e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000004_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"expires_at" DATETIME NOT NULL DEFAULT 'CURRENT_TIMESTAMP', +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..98f6fd81eec --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" RENAME COLUMN "expires_at" TO "_expires_at_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.mysql.down.sql 
b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..ecd32793759 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET code = LEFT(MD5(RAND()), 32) WHERE code IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..999d350916b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET code = substr(md5(random()::text), 0, 32) WHERE code IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..6165df6c2e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..6165df6c2e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000005_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..2e16a0222c5 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" DROP COLUMN "_code_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.cockroach.up.sql 
b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..a005a8106fc --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_verifiable_addresses` ADD COLUMN `expires_at` DATETIME \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..fc85347e150 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" timestamp \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..fca4711f433 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..fca4711f433 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000006_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..b96c559fc8b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 
@@ +ALTER TABLE "identity_verifiable_addresses" ALTER COLUMN "code" SET NOT NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.mysql.down.sql new file mode 100644 index 00000000000..8c367bb3205 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE `identity_verifiable_addresses` ADD COLUMN `code` VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.mysql.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.postgres.down.sql new file mode 100644 index 00000000000..8e366ec226f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.postgres.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..baf88132b7e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"code" TEXT NOT NULL, +"expires_at" DATETIME, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..289889f8213 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000007_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_verifiable_addresses_tmp" (id, 
status, via, verified, value, verified_at, expires_at, identity_id, created_at, updated_at) SELECT id, status, via, verified, value, verified_at, expires_at, identity_id, created_at, updated_at FROM "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..ab970e1f5f8 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE "identity_verifiable_addresses" SET "code" = "_code_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..57acd91f27c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..b9252d56e57 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000008_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..8e366ec226f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..9fb21b7f912 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS 
"identity_verifiable_addresses_status_via_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..6d810eec510 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000009_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..6fb58021277 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" RENAME COLUMN "code" TO "_code_tmp" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..6d810eec510 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..57acd91f27c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000010_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..d86919b31fe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git 
a/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..f093d329919 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +DROP TABLE "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..9fb21b7f912 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000011_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..d496bf4186d --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET code = substr(md5(uuid_v4()), 0, 32) WHERE code IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..07227f5fa97 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at) SELECT id, status, via, verified, value, verified_at, identity_id, created_at, updated_at, code, expires_at FROM "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..e5b27b43a8c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000012_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO 
ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..fc85347e150 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" timestamp \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..6165df6c2e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..6165df6c2e9 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000013_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE INDEX "identity_verifiable_addresses_status_via_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.cockroach.down.sql new file mode 100644 index 00000000000..8e366ec226f --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" VARCHAR (32) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.cockroach.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..fca4711f433 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, 
value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..fca4711f433 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000014_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "identity_verifiable_addresses_status_via_uq_idx" ON "_identity_verifiable_addresses_tmp" (via, value) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000015_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000015_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..baf88132b7e --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000015_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1,14 @@ +CREATE TABLE "_identity_verifiable_addresses_tmp" ( +"id" TEXT PRIMARY KEY, +"status" TEXT NOT NULL, +"via" TEXT NOT NULL, +"verified" bool NOT NULL, +"value" TEXT NOT NULL, +"verified_at" DATETIME, +"identity_id" char(36) NOT NULL, +"created_at" DATETIME NOT NULL, +"updated_at" DATETIME NOT NULL, +"code" TEXT NOT NULL, +"expires_at" DATETIME, +FOREIGN KEY (identity_id) REFERENCES identities (id) ON UPDATE NO ACTION ON DELETE CASCADE +) \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000015_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000015_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..759f78274f6 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000015_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO "_identity_verifiable_addresses_tmp" (id, status, via, verified, value, verified_at, identity_id, created_at, updated_at) SELECT id, status, via, verified, value, verified_at, identity_id, created_at, updated_at FROM "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000016_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000016_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..57acd91f27c --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000016_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000016_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000016_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..b9252d56e57 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000016_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1,2 @@ + +DROP TABLE "identity_verifiable_addresses" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000017_identity_verifiable_address_remove_code.sqlite3.down.sql 
b/oryx/popx/stub/migrations/transactional/20200831110752000017_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..9fb21b7f912 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000017_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS "identity_verifiable_addresses_status_via_uq_idx" \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000017_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000017_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..3ae0041cdf3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000017_identity_verifiable_address_remove_code.sqlite3.up.sql @@ -0,0 +1 @@ +ALTER TABLE "_identity_verifiable_addresses_tmp" RENAME TO "identity_verifiable_addresses"; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000018_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000018_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..d86919b31fe --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000018_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET expires_at = CURRENT_TIMESTAMP WHERE expires_at IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000018_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000018_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000019_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000019_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..a8693e65f18 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000019_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +UPDATE identity_verifiable_addresses SET code = substr(hex(randomblob(32)), 0, 32) WHERE code IS NULL \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000019_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000019_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000020_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000020_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..21462d659c4 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000020_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "expires_at" DATETIME \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000020_identity_verifiable_address_remove_code.sqlite3.up.sql 
b/oryx/popx/stub/migrations/transactional/20200831110752000020_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000021_identity_verifiable_address_remove_code.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20200831110752000021_identity_verifiable_address_remove_code.sqlite3.down.sql new file mode 100644 index 00000000000..b73f215069b --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20200831110752000021_identity_verifiable_address_remove_code.sqlite3.down.sql @@ -0,0 +1 @@ +ALTER TABLE "identity_verifiable_addresses" ADD COLUMN "code" TEXT \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20200831110752000021_identity_verifiable_address_remove_code.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20200831110752000021_identity_verifiable_address_remove_code.sqlite3.up.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.cockroach.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.cockroach.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.cockroach.up.sql new file mode 100644 index 00000000000..d94b0a922a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.cockroach.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password') \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.mysql.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.mysql.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.mysql.up.sql new file mode 100644 index 00000000000..d94b0a922a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.mysql.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password') \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.postgres.down.sql new file mode 100644 index 
00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.postgres.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.postgres.up.sql new file mode 100644 index 00000000000..d94b0a922a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.postgres.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password') \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.sqlite3.down.sql new file mode 100644 index 00000000000..a2e136ce537 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.sqlite3.down.sql @@ -0,0 +1 @@ +DELETE FROM identity_credential_types WHERE name = 'password' OR name = 'oidc'; \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.sqlite3.up.sql new file mode 100644 index 00000000000..d94b0a922a3 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000000_credential_types_values.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '78c1b41d-8341-4507-aa60-aff1d4369670', 'password' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'password') \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.cockroach.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.cockroach.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.cockroach.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.cockroach.up.sql new file mode 100644 index 00000000000..de26838c371 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.cockroach.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.mysql.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.mysql.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.mysql.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.mysql.up.sql new file mode 100644 index 00000000000..de26838c371 --- /dev/null +++ 
b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.mysql.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.postgres.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.postgres.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.postgres.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.postgres.up.sql new file mode 100644 index 00000000000..de26838c371 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.postgres.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.sqlite3.down.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.sqlite3.down.sql new file mode 100644 index 00000000000..e69de29bb2d diff --git a/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.sqlite3.up.sql b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.sqlite3.up.sql new file mode 100644 index 00000000000..de26838c371 --- /dev/null +++ b/oryx/popx/stub/migrations/transactional/20201201161451000001_credential_types_values.sqlite3.up.sql @@ -0,0 +1 @@ +INSERT INTO identity_credential_types (id, name) SELECT '6fa5e2e0-bfce-4631-b62b-cf2b0252b289', 'oidc' WHERE NOT EXISTS ( SELECT * FROM identity_credential_types WHERE name = 'oidc'); \ No newline at end of file diff --git a/oryx/popx/transaction.go b/oryx/popx/transaction.go new file mode 100644 index 00000000000..194dfa45f16 --- /dev/null +++ b/oryx/popx/transaction.go @@ -0,0 +1,122 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package popx + +import ( + "context" + "runtime" + + "github.com/cockroachdb/cockroach-go/v2/crdb" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + + "github.com/ory/pop/v6" +) + +type transactionContextKey int + +const transactionKey transactionContextKey = 0 + +func WithTransaction(ctx context.Context, tx *pop.Connection) context.Context { + return context.WithValue(ctx, transactionKey, tx) +} + +func InTransaction(ctx context.Context) bool { + return ctx.Value(transactionKey) != nil +} + +func Transaction(ctx context.Context, connection *pop.Connection, callback func(context.Context, *pop.Connection) error) error { + c := ctx.Value(transactionKey) + if c != nil { + if conn, ok := c.(*pop.Connection); ok { + return errors.WithStack(callback(ctx, conn.WithContext(ctx))) + } + } + + if connection.Dialect.Name() == "cockroach" { + return connection.WithContext(ctx).Dialect.Lock(func() error { + transaction, err := connection.NewTransaction() + if err != nil { + return errors.WithStack(err) + } + + attempt := 0 + return 
errors.WithStack(crdb.ExecuteInTx(ctx, sqlxTxAdapter{transaction.TX.Tx}, func() error { + attempt++ + if attempt > 1 { + caller := caller() + transactionRetries.WithLabelValues(caller).Inc() + } + return errors.WithStack(callback(WithTransaction(ctx, transaction), transaction)) + })) + }) + } + + return errors.WithStack(connection.WithContext(ctx).Transaction(func(tx *pop.Connection) error { + return errors.WithStack(callback(WithTransaction(ctx, tx), tx)) + })) +} + +func GetConnection(ctx context.Context, connection *pop.Connection) *pop.Connection { + c := ctx.Value(transactionKey) + if c != nil { + if conn, ok := c.(*pop.Connection); ok { + return conn.WithContext(ctx) + } + } + return connection.WithContext(ctx) +} + +type sqlxTxAdapter struct { + *sqlx.Tx +} + +var _ crdb.Tx = sqlxTxAdapter{} + +func (s sqlxTxAdapter) Exec(ctx context.Context, query string, args ...interface{}) error { + _, err := s.Tx.ExecContext(ctx, query, args...) + return errors.WithStack(err) +} + +func (s sqlxTxAdapter) Commit(ctx context.Context) error { + return errors.WithStack(s.Tx.Commit()) +} + +func (s sqlxTxAdapter) Rollback(ctx context.Context) error { + return errors.WithStack(s.Tx.Rollback()) +} + +var ( + transactionRetries = prometheus.NewCounterVec(prometheus.CounterOpts{ + Name: "ory_x_popx_cockroach_transaction_retries_total", + Help: "Counts the number of automatic CockroachDB transaction retries", + }, []string{"caller"}) + TransactionRetries prometheus.Collector = transactionRetries + _ = transactionRetries.WithLabelValues(unknownCaller) // make sure the metric is always present + unknownCaller = "unknown" +) + +func caller() string { + pc := make([]uintptr, 3) + // The number stack frames to skip was determined by putting a breakpoint in + // ory/kratos and looking for the topmost frame which isn't from ory/x or + // ory/pop. + n := runtime.Callers(8, pc) + if n == 0 { + return unknownCaller + } + pc = pc[:n] + frames := runtime.CallersFrames(pc) + for { + frame, more := frames.Next() + if frame.Function != "" { + return frame.Function + } + if !more { + break + } + } + return unknownCaller +} diff --git a/oryx/profilex/profiling.go b/oryx/profilex/profiling.go new file mode 100644 index 00000000000..3e143e54335 --- /dev/null +++ b/oryx/profilex/profiling.go @@ -0,0 +1,40 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package profilex + +import ( + "os" + + "github.com/pkg/profile" +) + +type noop struct{} + +// Stop is a noop. +func (p *noop) Stop() {} + +// Profile parses the PROFILING environment variable and executes the proper profiling task. +func Profile() interface { + Stop() +} { + switch os.Getenv("PROFILING") { + case "cpu": + return profile.Start(profile.CPUProfile, profile.NoShutdownHook) + case "mem": + return profile.Start(profile.MemProfile, profile.NoShutdownHook) + case "mutex": + return profile.Start(profile.MutexProfile, profile.NoShutdownHook) + case "block": + return profile.Start(profile.BlockProfile, profile.NoShutdownHook) + } + return new(noop) +} + +// HelpMessage returns a string explaining how profiling works. +func HelpMessage() string { + return `- PROFILING: Set "PROFILING=cpu" to enable cpu profiling and "PROFILING=mem" to enable memory profiling. + It is not possible to do both at the same time. Profiling is disabled per default. 
+ + Example: PROFILING=cpu` +} diff --git a/oryx/prometheusx/handler.go b/oryx/prometheusx/handler.go new file mode 100644 index 00000000000..563d7cd7f86 --- /dev/null +++ b/oryx/prometheusx/handler.go @@ -0,0 +1,77 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package prometheusx + +import ( + "net/http" + + "github.com/julienschmidt/httprouter" + "github.com/prometheus/client_golang/prometheus/promhttp" + + "github.com/ory/herodot" +) + +const ( + MetricsPrometheusPath = "/metrics/prometheus" +) + +// Handler handles HTTP requests to health and version endpoints. +type Handler struct { + H herodot.Writer + VersionString string +} + +// NewHandler instantiates a handler. +func NewHandler( + h herodot.Writer, + version string, +) *Handler { + return &Handler{ + H: h, + VersionString: version, + } +} + +type router interface { + GET(path string, handle httprouter.Handle) +} + +// SetRoutes registers this handler's routes. +func (h *Handler) SetRoutes(r router) { + r.GET(MetricsPrometheusPath, h.Metrics) +} + +type muxrouter interface { + GET(path string, handle http.HandlerFunc) +} + +// SetMuxRoutes registers this handler's routes on a ServeMux. +func (h *Handler) SetMuxRoutes(mux muxrouter) { + mux.GET(MetricsPrometheusPath, promhttp.Handler().ServeHTTP) +} + +// Metrics outputs prometheus metrics +// +// swagger:route GET /metrics/prometheus metadata prometheus +// +// Get snapshot metrics from the service. If you're using k8s, you can then add annotations to +// your deployment like so: +// +// ``` +// metadata: +// +// annotations: +// prometheus.io/port: "4434" +// prometheus.io/path: "/metrics/prometheus" +// +// ``` +// +// Produces: +// - plain/text +// +// Responses: +// 200: emptyResponse +func (h *Handler) Metrics(rw http.ResponseWriter, r *http.Request, _ httprouter.Params) { + promhttp.Handler().ServeHTTP(rw, r) +} diff --git a/oryx/prometheusx/metrics.go b/oryx/prometheusx/metrics.go new file mode 100644 index 00000000000..ace0dbfdb8d --- /dev/null +++ b/oryx/prometheusx/metrics.go @@ -0,0 +1,152 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package prometheusx + +import ( + "net/http" + "strconv" + + grpcPrometheus "github.com/grpc-ecosystem/go-grpc-prometheus" + + "github.com/ory/x/httpx" + + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" +) + +// Metrics prototypes +type Metrics struct { + responseTime *prometheus.HistogramVec + totalRequests *prometheus.CounterVec + duration *prometheus.HistogramVec + responseSize *prometheus.HistogramVec + requestSize *prometheus.HistogramVec + handlerStatuses *prometheus.CounterVec +} + +const HTTPMetrics = "http" +const GRPCMetrics = "grpc" + +// NewMetrics creates new custom Prometheus metrics +func NewMetrics(app, metricsPrefix, version, hash, date string) *Metrics { + labels := map[string]string{ + "app": app, + "version": version, + "hash": hash, + "buildTime": date, + } + + if metricsPrefix != "" { + metricsPrefix += "_" + } + + pm := &Metrics{ + responseTime: prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Name: metricsPrefix + "response_time_seconds", + Help: "Description", + ConstLabels: labels, + }, + []string{"endpoint"}, + ), + totalRequests: prometheus.NewCounterVec(prometheus.CounterOpts{ + Name: metricsPrefix + "requests_total", + Help: "number of 
requests", + ConstLabels: labels, + }, []string{"code", "method", "endpoint"}), + duration: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Name: metricsPrefix + "requests_duration_seconds", + Help: "duration of a requests in seconds", + ConstLabels: labels, + }, []string{"code", "method", "endpoint"}), + responseSize: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Name: metricsPrefix + "response_size_bytes", + Help: "size of the responses in bytes", + ConstLabels: labels, + }, []string{"code", "method"}), + requestSize: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Name: metricsPrefix + "requests_size_bytes", + Help: "size of the requests in bytes", + ConstLabels: labels, + }, []string{"code", "method"}), + handlerStatuses: prometheus.NewCounterVec(prometheus.CounterOpts{ + Name: metricsPrefix + "requests_statuses_total", + Help: "count number of responses per status", + ConstLabels: labels, + }, []string{"method", "status_bucket"}), + } + + err := prometheus.Register(pm) + if e := new(prometheus.AlreadyRegisteredError); errors.As(err, e) { + return pm + } else if err != nil { + panic(err) + } + + grpcPrometheus.EnableHandlingTimeHistogram() + + return pm +} + +// Describe implements prometheus Collector interface. +func (h *Metrics) Describe(in chan<- *prometheus.Desc) { + h.duration.Describe(in) + h.totalRequests.Describe(in) + h.requestSize.Describe(in) + h.responseSize.Describe(in) + h.handlerStatuses.Describe(in) + h.responseTime.Describe(in) +} + +// Collect implements prometheus Collector interface. +func (h *Metrics) Collect(in chan<- prometheus.Metric) { + h.duration.Collect(in) + h.totalRequests.Collect(in) + h.requestSize.Collect(in) + h.responseSize.Collect(in) + h.handlerStatuses.Collect(in) + h.responseTime.Collect(in) +} + +func (h Metrics) instrumentHandlerStatusBucket(next http.Handler) http.HandlerFunc { + return func(rw http.ResponseWriter, r *http.Request) { + next.ServeHTTP(rw, r) + + status, _ := httpx.GetResponseMeta(rw) + + statusBucket := "unknown" + switch { + case status >= 200 && status <= 299: + statusBucket = "2xx" + case status >= 300 && status <= 399: + statusBucket = "3xx" + case status >= 400 && status <= 499: + statusBucket = "4xx" + case status >= 500 && status <= 599: + statusBucket = "5xx" + } + + h.handlerStatuses.With(prometheus.Labels{"method": r.Method, "status_bucket": statusBucket}). 
+ Inc() + } +} + +// Instrument will instrument any http.HandlerFunc with custom metrics +func (h Metrics) Instrument(rw http.ResponseWriter, next http.HandlerFunc, endpoint string) http.HandlerFunc { + labels := prometheus.Labels{} + labelsWithEndpoint := prometheus.Labels{"endpoint": endpoint} + if status, _ := httpx.GetResponseMeta(rw); status != 0 { + labels = prometheus.Labels{"code": strconv.Itoa(status)} + labelsWithEndpoint["code"] = labels["code"] + } + wrapped := promhttp.InstrumentHandlerResponseSize(h.responseSize.MustCurryWith(labels), next) + wrapped = promhttp.InstrumentHandlerCounter(h.totalRequests.MustCurryWith(labelsWithEndpoint), wrapped) + wrapped = promhttp.InstrumentHandlerDuration(h.duration.MustCurryWith(labelsWithEndpoint), wrapped) + wrapped = promhttp.InstrumentHandlerDuration(h.responseTime.MustCurryWith(prometheus.Labels{"endpoint": endpoint}), wrapped) + wrapped = promhttp.InstrumentHandlerRequestSize(h.requestSize.MustCurryWith(labels), wrapped) + wrapped = h.instrumentHandlerStatusBucket(wrapped) + + return wrapped.ServeHTTP +} diff --git a/oryx/prometheusx/middleware.go b/oryx/prometheusx/middleware.go new file mode 100644 index 00000000000..eea99c4cb0b --- /dev/null +++ b/oryx/prometheusx/middleware.go @@ -0,0 +1,108 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package prometheusx + +import ( + "net/http" + "regexp" + "strings" + "sync" + + grpcPrometheus "github.com/grpc-ecosystem/go-grpc-prometheus" + + "github.com/julienschmidt/httprouter" + "golang.org/x/net/context" + "google.golang.org/grpc" +) + +type MetricsManager struct { + prometheusMetrics *Metrics + routers struct { + data []*httprouter.Router + sync.Mutex + } +} + +func NewMetricsManager(app, version, hash, buildTime string) *MetricsManager { + return NewMetricsManagerWithPrefix(app, "", version, hash, buildTime) +} + +// NewMetricsManagerWithPrefix creates MetricsManager that uses metricsPrefix parameters as a prefix +// for all metrics registered within this middleware. Constants HttpMetrics or GrpcMetrics can be used +// respectively. Setting empty string in metricsPrefix will be equivalent to calling NewMetricsManager. +func NewMetricsManagerWithPrefix(app, metricsPrefix, version, hash, buildTime string) *MetricsManager { + return &MetricsManager{ + prometheusMetrics: NewMetrics(app, metricsPrefix, version, hash, buildTime), + } +} + +// Main middleware method to collect metrics for Prometheus. 
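// Usage sketch (illustrative only, not taken from the diff above): one way the
// MetricsManager might be wired as a negroni middleware. The app name, version
// strings, and port are placeholders; the import path follows the vendored
// github.com/ory/x module path used elsewhere in this changeset.
package main

import (
	"net/http"

	"github.com/julienschmidt/httprouter"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"github.com/urfave/negroni"

	"github.com/ory/x/prometheusx"
)

func main() {
	mm := prometheusx.NewMetricsManager("example-app", "v1.0.0", "deadbeef", "2024-01-01")

	router := httprouter.New()
	// Registering the router lets the middleware label requests with the matched
	// route template instead of the raw path, keeping label cardinality low.
	mm.RegisterRouter(router)
	router.GET(prometheusx.MetricsPrometheusPath, func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
		promhttp.Handler().ServeHTTP(w, r)
	})

	n := negroni.New()
	n.Use(mm) // MetricsManager satisfies negroni.Handler via ServeHTTP(rw, r, next).
	n.UseHandler(router)
	_ = http.ListenAndServe(":4434", n)
}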
+func (pmm *MetricsManager) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + pmm.prometheusMetrics.Instrument(rw, next, pmm.getLabelForPath(r))(rw, r) +} + +func (pmm *MetricsManager) StreamServerInterceptor(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + f := grpcPrometheus.StreamServerInterceptor + return f(srv, ss, info, handler) +} + +func (pmm *MetricsManager) UnaryServerInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + f := grpcPrometheus.UnaryServerInterceptor + return f(ctx, req, info, handler) +} + +func (pmm *MetricsManager) Register(server *grpc.Server) { + grpcPrometheus.Register(server) +} + +func (pmm *MetricsManager) RegisterRouter(router *httprouter.Router) { + pmm.routers.Lock() + defer pmm.routers.Unlock() + pmm.routers.data = append(pmm.routers.data, router) +} + +var paramPlaceHolderRE = regexp.MustCompile(`\{[a-zA-Z0-9_-]+\}`) + +func (pmm *MetricsManager) getLabelForPath(r *http.Request) string { + // If the request came through a http.ServeMux, it already has a pattern that we + // can use as a label. We just need to replace all path parameters with a generic + // placeholder and remove the trailing slash pattern. + if p := r.Pattern; p != "" { + return paramPlaceHolderRE.ReplaceAllString(strings.TrimSuffix(p, "/{$}"), "{param}") + } + + // looking for a match in one of registered routers + pmm.routers.Lock() + defer pmm.routers.Unlock() + for _, router := range pmm.routers.data { + handler, params, _ := router.Lookup(r.Method, r.URL.Path) + if handler != nil { + return reconstructEndpoint(r.URL.Path, params) + } + } + return "{unmatched}" +} + +// To reduce cardinality of labels, values of matched path parameters must be replaced with {param} +func reconstructEndpoint(path string, params httprouter.Params) string { + // if map is empty, then nothing to change in the path + if len(params) == 0 { + return path + } + + // construct a list of parameter values + paramValues := make(map[string]struct{}, len(params)) + for _, param := range params { + paramValues[param.Value] = struct{}{} + } + + parts := strings.Split(path, "/") + for index, part := range parts { + if _, ok := paramValues[part]; ok { + parts[index] = "{param}" + } + } + + return strings.Join(parts, "/") +} diff --git a/oryx/proxy/proxy.go b/oryx/proxy/proxy.go new file mode 100644 index 00000000000..3b2697a60b4 --- /dev/null +++ b/oryx/proxy/proxy.go @@ -0,0 +1,300 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package proxy + +import ( + "context" + "log" + "net/http" + "net/http/httputil" + + "github.com/pkg/errors" + + "github.com/rs/cors" + "go.opentelemetry.io/otel" +) + +type ( + RespMiddleware func(resp *http.Response, config *HostConfig, body []byte) ([]byte, error) + ReqMiddleware func(req *httputil.ProxyRequest, config *HostConfig, body []byte) ([]byte, error) + HostMapper func(ctx context.Context, r *http.Request) (context.Context, *HostConfig, error) + options struct { + hostMapper HostMapper + onResError func(*http.Response, error) error + onReqError func(*http.Request, error) + respMiddlewares []RespMiddleware + reqMiddlewares []ReqMiddleware + transport http.RoundTripper + errHandler func(http.ResponseWriter, *http.Request, error) + } + HostConfig struct { + // CorsEnabled is a flag to enable or disable CORS + // Default: false + CorsEnabled bool + // CorsOptions allows 
to configure CORS + // If left empty, no CORS headers will be set even when CorsEnabled is true + CorsOptions *cors.Options + // CookieDomain is the host under which cookies are set. + // If left empty, no cookie domain will be set + CookieDomain string + // UpstreamHost is the next upstream host the proxy will pass the request to. + // e.g. fluffy-bear-afiu23iaysd.oryapis.com + UpstreamHost string + // UpstreamScheme is the protocol used by the upstream service. + UpstreamScheme string + // TargetHost is the final target of the request. Should be the same as UpstreamHost + // if the request is directly passed to the target service. + TargetHost string + // TargetScheme is the final target's scheme + // (i.e. the scheme the target thinks it is running under) + TargetScheme string + // PathPrefix is a prefix that is prepended on the original host, + // but removed before forwarding. + PathPrefix string + // TrustForwardedHosts is a flag that indicates whether the proxy should trust the + // X-Forwarded-* headers or not. + TrustForwardedHeaders bool + // originalHost the original hostname the request is coming from. + // This value will be maintained internally by the proxy. + originalHost string + // originalScheme is the original scheme of the request. + // This value will be maintained internally by the proxy. + originalScheme string + // ForceOriginalSchemeHTTP forces the original scheme to be https if enabled. + ForceOriginalSchemeHTTPS bool + } + Options func(*options) + contextKey string +) + +const ( + hostConfigKey contextKey = "host config" +) + +func (c *HostConfig) setScheme(r *httputil.ProxyRequest) { + if c.ForceOriginalSchemeHTTPS { + c.originalScheme = "https" + } else if forwardedProto := r.In.Header.Get("X-Forwarded-Proto"); forwardedProto != "" { + c.originalScheme = forwardedProto + } else if r.In.TLS == nil { + c.originalScheme = "http" + } else { + c.originalScheme = "https" + } +} + +func (c *HostConfig) setHost(r *httputil.ProxyRequest) { + if forwardedHost := r.In.Header.Get("X-Forwarded-Host"); forwardedHost != "" { + c.originalHost = forwardedHost + } else { + c.originalHost = r.In.Host + } +} + +// rewriter is a custom internal function for altering a http.Request +func rewriter(o *options) func(*httputil.ProxyRequest) { + return func(r *httputil.ProxyRequest) { + ctx := r.Out.Context() + ctx, span := otel.GetTracerProvider().Tracer("").Start(ctx, "x.proxy") + defer span.End() + + ctx, c, err := o.getHostConfig(ctx, r.In) + if err != nil { + o.onReqError(r.Out, err) + return + } + + if c.TrustForwardedHeaders { + headers := []string{ + "X-Forwarded-Host", + "X-Forwarded-Proto", + "X-Forwarded-For", + } + for _, h := range headers { + if v := r.In.Header.Get(h); v != "" { + r.Out.Header.Set(h, v) + } + } + } + + c.setScheme(r) + c.setHost(r) + + headerRequestRewrite(r.Out, c) + + var body []byte + var cb *compressableBody + + if r.Out.ContentLength != 0 { + body, cb, err = readBody(r.Out.Header, r.Out.Body) + if err != nil { + o.onReqError(r.Out, err) + return + } + } + + for _, m := range o.reqMiddlewares { + if body, err = m(r, c, body); err != nil { + o.onReqError(r.Out, err) + return + } + } + + n, err := cb.Write(body) + if err != nil { + o.onReqError(r.Out, err) + return + } + + r.Out.Header.Del("Content-Length") + r.Out.ContentLength = int64(n) + r.Out.Body = cb + } +} + +// modifyResponse is a custom internal function for altering a http.Response +func modifyResponse(o *options) func(*http.Response) error { + return func(r *http.Response) error { + _, c, 
err := o.getHostConfig(r.Request.Context(), r.Request) + if err != nil { + return err + } + + if err := headerResponseRewrite(r, c); err != nil { + return o.onResError(r, err) + } + + body, cb, err := bodyResponseRewrite(r, c) + if err != nil { + return o.onResError(r, err) + } + + for _, m := range o.respMiddlewares { + if body, err = m(r, c, body); err != nil { + return o.onResError(r, err) + } + } + + n, err := cb.Write(body) + if err != nil { + return o.onResError(r, err) + } + + n, t, err := handleWebsocketResponse(n, cb, r.Body) + if err != nil { + return err + } + + r.Header.Del("Content-Length") + r.ContentLength = int64(n) + r.Body = t + return nil + } +} + +func WithOnError(onReqErr func(*http.Request, error), onResErr func(*http.Response, error) error) Options { + return func(o *options) { + o.onReqError = onReqErr + o.onResError = onResErr + } +} + +func WithReqMiddleware(middlewares ...ReqMiddleware) Options { + return func(o *options) { + o.reqMiddlewares = append(o.reqMiddlewares, middlewares...) + } +} + +func WithRespMiddleware(middlewares ...RespMiddleware) Options { + return func(o *options) { + o.respMiddlewares = append(o.respMiddlewares, middlewares...) + } +} + +func WithTransport(t http.RoundTripper) Options { + return func(o *options) { + o.transport = t + } +} + +func WithErrorHandler(eh func(w http.ResponseWriter, r *http.Request, err error)) Options { + return func(o *options) { + o.errHandler = eh + } +} + +func (o *options) getHostConfig(ctx context.Context, r *http.Request) (context.Context, *HostConfig, error) { + if cached, ok := ctx.Value(hostConfigKey).(*HostConfig); ok && cached != nil { + return ctx, cached, nil + } + ctx, c, err := o.hostMapper(ctx, r) + if err != nil { + return nil, nil, err + } + // cache the host config in the request context + // this will be passed on to the request and response proxy functions + ctx = context.WithValue(ctx, hostConfigKey, c) + return ctx, c, nil +} + +func (o *options) beforeProxyMiddleware(h http.Handler) http.Handler { + return http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + // get the hostmapper configurations before the request is proxied + ctx, c, err := o.getHostConfig(request.Context(), request) + if err != nil { + o.onReqError(request, err) + return + } + + // Add our Cors middleware. + // This middleware will only trigger if the host config has cors enabled on that request. 
+ if c.CorsEnabled && c.CorsOptions != nil { + cors.New(*c.CorsOptions).HandlerFunc(writer, request) + } + + h.ServeHTTP(writer, request.WithContext(ctx)) + }) +} + +func defaultErrorHandler(w http.ResponseWriter, r *http.Request, err error) { + switch { + case errors.Is(err, context.Canceled): + w.WriteHeader(499) // http://nginx.org/en/docs/dev/development_guide.html + case isTimeoutError(err): + w.WriteHeader(http.StatusGatewayTimeout) + default: + log.Printf("http: proxy error: %v", err) + w.WriteHeader(http.StatusBadGateway) + } +} + +func isTimeoutError(err error) bool { + var te interface{ Timeout() bool } = nil + return errors.As(err, &te) && te.Timeout() || errors.Is(err, context.DeadlineExceeded) +} + +// New creates a new Proxy +// A Proxy sets up a middleware with custom request and response modification handlers +func New(hostMapper HostMapper, opts ...Options) http.Handler { + o := &options{ + hostMapper: hostMapper, + onReqError: func(*http.Request, error) {}, + onResError: func(_ *http.Response, err error) error { return err }, + transport: http.DefaultTransport, + errHandler: defaultErrorHandler, + } + + for _, op := range opts { + op(o) + } + + rp := &httputil.ReverseProxy{ + Rewrite: rewriter(o), + ModifyResponse: modifyResponse(o), + Transport: o.transport, + ErrorHandler: o.errHandler, + } + + return o.beforeProxyMiddleware(rp) +} diff --git a/oryx/proxy/rewrites.go b/oryx/proxy/rewrites.go new file mode 100644 index 00000000000..ddd6d1d40df --- /dev/null +++ b/oryx/proxy/rewrites.go @@ -0,0 +1,163 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package proxy + +import ( + "bytes" + "compress/gzip" + "io" + "net/http" + "net/url" + "path" + "strings" + + "github.com/pkg/errors" +) + +type compressableBody struct { + buf bytes.Buffer + w io.WriteCloser +} + +// we require a read and write for websocket connections +var _ io.ReadWriteCloser = new(compressableBody) + +func (b *compressableBody) Close() error { + if b != nil { + b.buf.Reset() + if b.w != nil { + return b.w.Close() + } + } + return nil +} + +func (b *compressableBody) Write(d []byte) (int, error) { + if b == nil { + // this happens when the body is empty + return 0, nil + } + + var w io.Writer = &b.buf + if b.w != nil { + w = b.w + defer b.w.Close() + } + return w.Write(d) +} + +func (b *compressableBody) Read(p []byte) (n int, err error) { + if b == nil { + // this happens when the body is empty + return 0, io.EOF + } + return b.buf.Read(p) +} + +func headerRequestRewrite(req *http.Request, c *HostConfig) { + req.URL.Scheme = c.UpstreamScheme + req.URL.Host = c.UpstreamHost + req.URL.Path = strings.TrimPrefix(req.URL.Path, c.PathPrefix) + + if _, ok := req.Header["User-Agent"]; !ok { + // explicitly disable User-Agent so it's not set to default value + req.Header.Set("User-Agent", "") + } +} + +func headerResponseRewrite(resp *http.Response, c *HostConfig) error { + redir, err := resp.Location() + if err != nil { + if !errors.Is(err, http.ErrNoLocation) { + return errors.WithStack(err) + } + } else if redir.Host == c.TargetHost { + redir.Scheme = c.originalScheme + redir.Host = c.originalHost + redir.Path = path.Join(c.PathPrefix, redir.Path) + resp.Header.Set("Location", redir.String()) + } + + ReplaceCookieDomainAndSecure(resp, c.TargetHost, c.CookieDomain, c.originalScheme == "https") + + return nil +} + +// ReplaceCookieDomainAndSecure replaces the domain of all matching Set-Cookie headers in the response. 
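// Usage sketch (illustrative only, not taken from the diff above): a static
// HostMapper handed to proxy.New. Host names, path prefix, and listen address
// are placeholders; the upstream host is borrowed from the comment in HostConfig.
package main

import (
	"context"
	"net/http"

	"github.com/ory/x/proxy"
)

func main() {
	hostMapper := func(ctx context.Context, r *http.Request) (context.Context, *proxy.HostConfig, error) {
		return ctx, &proxy.HostConfig{
			UpstreamHost:   "fluffy-bear-afiu23iaysd.oryapis.com",
			UpstreamScheme: "https",
			TargetHost:     "fluffy-bear-afiu23iaysd.oryapis.com",
			TargetScheme:   "https",
			CookieDomain:   "example.com",
			PathPrefix:     "/.ory",
		}, nil
	}

	// proxy.New returns an http.Handler that rewrites requests, responses,
	// redirects, and cookies between the original and the upstream host.
	_ = http.ListenAndServe(":4000", proxy.New(hostMapper))
}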
+func ReplaceCookieDomainAndSecure(resp *http.Response, original, replacement string, secure bool) { + original, replacement = stripPort(original), stripPort(replacement) // cookies don't distinguish ports + + cookies := resp.Cookies() + resp.Header.Del("Set-Cookie") + for _, co := range cookies { + co.Domain = replacement + co.Secure = secure + if !secure { + co.SameSite = http.SameSiteLaxMode + } + resp.Header.Add("Set-Cookie", co.String()) + } +} + +func bodyResponseRewrite(resp *http.Response, c *HostConfig) ([]byte, *compressableBody, error) { + if resp.ContentLength == 0 { + return nil, nil, nil + } + + body, cb, err := readBody(resp.Header, resp.Body) + if err != nil { + return nil, nil, err + } + + if c.TargetScheme == "" { + c.TargetScheme = "https" + } + + return bytes.ReplaceAll(body, []byte(c.TargetScheme+"://"+c.TargetHost), []byte(c.originalScheme+"://"+c.originalHost+c.PathPrefix)), cb, nil +} + +func readBody(h http.Header, body io.ReadCloser) ([]byte, *compressableBody, error) { + defer body.Close() + + cb := &compressableBody{} + + switch h.Get("Content-Encoding") { + case "gzip": + var err error + body, err = gzip.NewReader(body) + if err != nil { + return nil, nil, errors.WithStack(err) + } + + cb.w = gzip.NewWriter(&cb.buf) + default: + // do nothing, we can read directly + } + + b, err := io.ReadAll(body) + if err != nil { + return nil, nil, errors.WithStack(err) + } + return b, cb, nil +} + +func handleWebsocketResponse(n int, cb *compressableBody, body io.ReadCloser) (int, io.ReadWriteCloser, error) { + var err error + readWriteCloser, ok := body.(io.ReadWriteCloser) + if ok { + if cb != nil { + n, err = readWriteCloser.Write(cb.buf.Bytes()) + if err != nil { + return 0, nil, errors.WithStack(err) + } + } + return n, readWriteCloser, nil + } + return n, cb, nil +} + +// stripPort removes the optional port from the host. +func stripPort(host string) string { + return (&url.URL{Host: host}).Hostname() +} diff --git a/oryx/proxy/stubs/auth.example.com.json b/oryx/proxy/stubs/auth.example.com.json new file mode 100644 index 00000000000..2dcdc3b110f --- /dev/null +++ b/oryx/proxy/stubs/auth.example.com.json @@ -0,0 +1,9 @@ +{ + "ui": { + "action": "https://auth.example.com" + }, + "callbacks": [ + "https://auth.example.com/path/to/resource", + "https://auth.example.com/path?q=https://localhost:8000" + ] +} diff --git a/oryx/randx/README.md b/oryx/randx/README.md new file mode 100644 index 00000000000..fb8c504c309 --- /dev/null +++ b/oryx/randx/README.md @@ -0,0 +1,10 @@ +`randx.RuneSequence` generates even distributions for the given character set +and length. All results are therefore also evenly distributed. + +## AlphaNum + +[Alphabet and Numeric](../docs/alpha_num.png) + +## AlphaNum + +[Alphabet and Numeric](../docs/num.png) diff --git a/oryx/randx/sequence.go b/oryx/randx/sequence.go new file mode 100644 index 00000000000..d862aaa93b1 --- /dev/null +++ b/oryx/randx/sequence.go @@ -0,0 +1,60 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package randx + +import ( + "crypto/rand" + "math/big" +) + +var rander = rand.Reader // random function + +var ( + // AlphaNum contains runes [abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789]. + AlphaNum = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") + // Alpha contains runes [abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ]. 
+ Alpha = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + // AlphaLowerNum contains runes [abcdefghijklmnopqrstuvwxyz0123456789]. + AlphaLowerNum = []rune("abcdefghijklmnopqrstuvwxyz0123456789") + // AlphaUpperNum contains runes [ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789]. + AlphaUpperNum = []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") + // AlphaLower contains runes [abcdefghijklmnopqrstuvwxyz]. + AlphaLower = []rune("abcdefghijklmnopqrstuvwxyz") + // AlphaUpperVowels contains runes [AEIOUY]. + AlphaUpperVowels = []rune("AEIOUY") + // AlphaUpperNoVowels contains runes [BCDFGHJKLMNPQRSTVWXZ]. + AlphaUpperNoVowels = []rune("BCDFGHJKLMNPQRSTVWXZ") + // AlphaUpper contains runes [ABCDEFGHIJKLMNOPQRSTUVWXYZ]. + AlphaUpper = []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + // Numeric contains runes [0123456789]. + Numeric = []rune("0123456789") + // AlphaNumNoAmbiguous is equivalent to AlphaNum but without visually ambiguous characters [0Oo1IlB8S5Z2]. + AlphaNumNoAmbiguous = []rune("abcdefghijkmnpqrstuvwxyzACDEFGHJKLMNPQRTUVWXY34679") +) + +// RuneSequence returns a random sequence using the defined allowed runes. +func RuneSequence(l int, allowedRunes []rune) (seq []rune, err error) { + c := big.NewInt(int64(len(allowedRunes))) + seq = make([]rune, l) + + for i := 0; i < l; i++ { + r, err := rand.Int(rander, c) + if err != nil { + return seq, err + } + rn := allowedRunes[r.Uint64()] + seq[i] = rn + } + + return seq, nil +} + +// MustString returns a random string sequence using the defined runes. Panics on error. +func MustString(l int, allowedRunes []rune) string { + seq, err := RuneSequence(l, allowedRunes) + if err != nil { + panic(err) + } + return string(seq) +} diff --git a/oryx/randx/strength/go.mod b/oryx/randx/strength/go.mod new file mode 100644 index 00000000000..dd84a72bd22 --- /dev/null +++ b/oryx/randx/strength/go.mod @@ -0,0 +1,23 @@ +module github.com/ory/x/randx/strength + +go 1.25 + +replace github.com/ory/x => ../.. 
+ +require ( + github.com/ory/x v0.0.729 + gonum.org/v1/plot v0.16.0 +) + +require ( + codeberg.org/go-fonts/liberation v0.5.0 // indirect + codeberg.org/go-latex/latex v0.1.0 // indirect + codeberg.org/go-pdf/fpdf v0.11.1 // indirect + git.sr.ht/~sbinet/gg v0.6.0 // indirect + github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b // indirect + github.com/campoy/embedmd v1.0.0 // indirect + github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + golang.org/x/image v0.30.0 // indirect + golang.org/x/text v0.31.0 // indirect +) diff --git a/oryx/randx/strength/go.sum b/oryx/randx/strength/go.sum new file mode 100644 index 00000000000..29a7256be69 --- /dev/null +++ b/oryx/randx/strength/go.sum @@ -0,0 +1,66 @@ +codeberg.org/go-fonts/dejavu v0.4.0 h1:2yn58Vkh4CFK3ipacWUAIE3XVBGNa0y1bc95Bmfx91I= +codeberg.org/go-fonts/dejavu v0.4.0/go.mod h1:abni088lmhQJvso2Lsb7azCKzwkfcnttl6tL1UTWKzg= +codeberg.org/go-fonts/latin-modern v0.4.0 h1:vkRCc1y3whKA7iL9Ep0fSGVuJfqjix0ica9UflHORO8= +codeberg.org/go-fonts/latin-modern v0.4.0/go.mod h1:BF68mZznJ9QHn+hic9ks2DaFl4sR5YhfM6xTYaP9vNw= +codeberg.org/go-fonts/liberation v0.5.0 h1:SsKoMO1v1OZmzkG2DY+7ZkCL9U+rrWI09niOLfQ5Bo0= +codeberg.org/go-fonts/liberation v0.5.0/go.mod h1:zS/2e1354/mJ4pGzIIaEtm/59VFCFnYC7YV6YdGl5GU= +codeberg.org/go-latex/latex v0.1.0 h1:hoGO86rIbWVyjtlDLzCqZPjNykpWQ9YuTZqAzPcfL3c= +codeberg.org/go-latex/latex v0.1.0/go.mod h1:LA0q/AyWIYrqVd+A9Upkgsb+IqPcmSTKc9Dny04MHMw= +codeberg.org/go-pdf/fpdf v0.11.1 h1:U8+coOTDVLxHIXZgGvkfQEi/q0hYHYvEHFuGNX2GzGs= +codeberg.org/go-pdf/fpdf v0.11.1/go.mod h1:Y0DGRAdZ0OmnZPvjbMp/1bYxmIPxm0ws4tfoPOc4LjU= +git.sr.ht/~sbinet/cmpimg v0.1.0 h1:E0zPRk2muWuCqSKSVZIWsgtU9pjsw3eKHi8VmQeScxo= +git.sr.ht/~sbinet/cmpimg v0.1.0/go.mod h1:FU12psLbF4TfNXkKH2ZZQ29crIqoiqTZmeQ7dkp/pxE= +git.sr.ht/~sbinet/gg v0.6.0 h1:RIzgkizAk+9r7uPzf/VfbJHBMKUr0F5hRFxTUGMnt38= +git.sr.ht/~sbinet/gg v0.6.0/go.mod h1:uucygbfC9wVPQIfrmwM2et0imr8L7KQWywX0xpFMm94= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= +github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b h1:slYM766cy2nI3BwyRiyQj/Ud48djTMtMebDqepE95rw= +github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= +github.com/campoy/embedmd v1.0.0 h1:V4kI2qTJJLf4J29RzI/MAt2c3Bl4dQSYPuflzwFH2hY= +github.com/campoy/embedmd v1.0.0/go.mod h1:oxyr9RCiSXg0M3VJ3ks0UGfp98BpSSGr0kpiX3MzVl8= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= +github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= +github.com/kisielk/gotool v1.0.0/go.mod 
h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4= +golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +gonum.org/v1/plot v0.16.0 h1:dK28Qx/Ky4VmPUN/2zeW0ELyM6ucDnBAj5yun7M9n1g= +gonum.org/v1/plot v0.16.0/go.mod h1:Xz6U1yDMi6Ni6aaXILqmVIb6Vro8E+K7Q/GeeH+Pn0c= 
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= +rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/oryx/randx/strength/main.go b/oryx/randx/strength/main.go new file mode 100644 index 00000000000..26c877c4ea0 --- /dev/null +++ b/oryx/randx/strength/main.go @@ -0,0 +1,101 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package main + +import ( + "fmt" + "sort" + + "gonum.org/v1/plot" + "gonum.org/v1/plot/plotter" + "gonum.org/v1/plot/plotutil" + "gonum.org/v1/plot/vg" + + "github.com/ory/x/randx" +) + +const iterations = 1000 * 100 + +type generate func(int, []rune) ([]rune, error) + +func main() { + draw(measureDistribution(iterations, randx.AlphaNum, randx.RuneSequence), "AlphaNum Distribution", "docs/alpha_num.png") + draw(measureDistribution(iterations, randx.Numeric, randx.RuneSequence), "Num Distribution", "docs/num.png") + draw(measureResultDistribution(100, 6, randx.Numeric, randx.RuneSequence), "Num Distribution", "docs/result_num.png") +} + +func measureResultDistribution(iterations int, length int, characters []rune, fn generate) map[string]int { + dist := make(map[string]int) + for index := 1; index <= iterations; index++ { + // status output to cli + if index%1000 == 0 { + fmt.Printf("\r%d / %d", index, iterations) + } + raw, err := fn(length, characters) + if err != nil { + panic(err) + } + dist[string(raw)] = dist[string(raw)] + 1 + } + return dist +} + +func measureDistribution(iterations int, characters []rune, fn generate) map[string]int { + dist := make(map[string]int) + for index := 1; index <= iterations; index++ { + // status output to cli + if index%1000 == 0 { + fmt.Printf("\r%d / %d", index, iterations) + } + raw, err := fn(100, characters) + if err != nil { + panic(err) + } + for _, s := range raw { + c := string(s) + i := dist[c] + dist[c] = i + 1 + } + } + return dist +} + +func draw(distribution map[string]int, title, filename string) { + keys, values := orderMap(distribution) + group := plotter.Values{} + for _, v := range values { + group = append(group, float64(v)) + } + + p := plot.New() + p.Title.Text = title + p.Y.Label.Text = "N" + + bars, err := plotter.NewBarChart(group, vg.Points(4)) + if err != nil { + panic(err) + } + bars.LineStyle.Width = vg.Length(0) + bars.Color = plotutil.Color(0) + + p.Add(bars) + p.NominalX(keys...) 
+ + if err := p.Save(300*vg.Millimeter, 150*vg.Millimeter, filename); err != nil { + panic(err) + } +} + +func orderMap(m map[string]int) (keys []string, values []int) { + keys = []string{} + values = []int{} + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + for _, key := range keys { + values = append(values, m[key]) + } + return keys, values +} diff --git a/oryx/reqlog/LICENSE b/oryx/reqlog/LICENSE new file mode 100644 index 00000000000..638544b3e63 --- /dev/null +++ b/oryx/reqlog/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Dan Buch and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/oryx/reqlog/external_latency.go b/oryx/reqlog/external_latency.go new file mode 100644 index 00000000000..53edda9b8b4 --- /dev/null +++ b/oryx/reqlog/external_latency.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package reqlog + +import ( + "context" + "sync/atomic" + "time" +) + +func withEnableExternalLatencyMeasurement(ctx context.Context) context.Context { + return context.WithValue(ctx, externalLatencyKey, new(int64)) +} + +func AccumulateExternalLatency(ctx context.Context, dur time.Duration) { + total, ok := ctx.Value(externalLatencyKey).(*int64) + if !ok { + return + } + atomic.AddInt64(total, int64(dur)) +} + +func getExternalLatency(ctx context.Context) time.Duration { + total, ok := ctx.Value(externalLatencyKey).(*int64) + if !ok { + return 0 + } + return time.Duration(atomic.LoadInt64(total)) +} + +type contextKey int + +const externalLatencyKey contextKey = 1 diff --git a/oryx/reqlog/middleware.go b/oryx/reqlog/middleware.go new file mode 100644 index 00000000000..59dc2817b6a --- /dev/null +++ b/oryx/reqlog/middleware.go @@ -0,0 +1,191 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package reqlog + +import ( + "net/http" + "sync" + "time" + + "github.com/sirupsen/logrus" + "github.com/urfave/negroni" + + "github.com/ory/x/logrusx" +) + +type timer interface { + Now() time.Time + Since(time.Time) time.Duration +} + +type realClock struct{} + +func (rc *realClock) Now() time.Time { + return time.Now() +} + +func (rc *realClock) Since(t time.Time) time.Duration { + return time.Since(t) +} + +// Middleware is a middleware handler that logs the request as it goes in and the response as it goes out. 
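// Usage sketch (illustrative only, not taken from the diff above): wrapping a
// plain http.Handler with the request logger. The excluded paths and port are
// placeholders.
package main

import (
	"net/http"

	"github.com/ory/x/reqlog"
)

func main() {
	mw := reqlog.NewMiddleware().ExcludePaths("/health/alive", "/health/ready")

	handler := mw.Wrap(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("ok"))
	}))

	// Each request is logged on entry and again on completion; the completion
	// entry includes any external latency recorded via AccumulateExternalLatency.
	_ = http.ListenAndServe(":8080", handler)
}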
+type Middleware struct { + // Logger is the log.Logger instance used to log messages with the Logger middleware + Logger *logrusx.Logger + // Name is the name of the application as recorded in latency metrics + Name string + Before func(*logrusx.Logger, *http.Request, string) *logrusx.Logger + After func(*logrusx.Logger, *http.Request, negroni.ResponseWriter, time.Duration, string) *logrusx.Logger + + logStarting bool + + clock timer + + logLevel logrus.Level + + // Silence log for specific URL paths + silencePaths map[string]bool + + sync.RWMutex +} + +// NewMiddleware returns a reqlog middleware with default settings +func NewMiddleware() *Middleware { + return NewCustomMiddleware(logrus.InfoLevel, &logrus.TextFormatter{}, "web") +} + +// NewCustomMiddleware returns a reqlog middleware with the given level and formatter +func NewCustomMiddleware(level logrus.Level, formatter logrus.Formatter, name string) *Middleware { + log := logrusx.New(name, "", logrusx.ForceFormatter(formatter), logrusx.ForceLevel(level)) + return &Middleware{ + Logger: log, + Name: name, + Before: DefaultBefore, + After: DefaultAfter, + + logLevel: logrus.InfoLevel, + logStarting: true, + clock: &realClock{}, + silencePaths: map[string]bool{}, + } +} + +// NewMiddlewareFromLogger returns a reqlog middleware which writes to a given logrus logger. +func NewMiddlewareFromLogger(logger *logrusx.Logger, name string) *Middleware { + return &Middleware{ + Logger: logger, + Name: name, + Before: DefaultBefore, + After: DefaultAfter, + + logLevel: logrus.InfoLevel, + logStarting: true, + clock: &realClock{}, + silencePaths: map[string]bool{}, + } +} + +// SetLogStarting accepts a bool to control the logging of "started handling +// request" prior to passing to the next middleware +func (m *Middleware) SetLogStarting(v bool) { + m.logStarting = v +} + +// ExcludePaths adds new URL paths to be ignored during logging. 
The URL u is parsed, hence the returned error +func (m *Middleware) ExcludePaths(paths ...string) *Middleware { + for _, path := range paths { + m.Lock() + m.silencePaths[path] = true + m.Unlock() + } + return m +} + +func (m *Middleware) Wrap(handler http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + m.ServeHTTP(rw, r, handler.ServeHTTP) + }) +} + +func (m *Middleware) WrapFunc(handler http.HandlerFunc) http.HandlerFunc { + return func(rw http.ResponseWriter, r *http.Request) { + m.ServeHTTP(rw, r, handler) + } +} + +func (m *Middleware) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { + if m.Before == nil { + m.Before = DefaultBefore + } + + if m.After == nil { + m.After = DefaultAfter + } + + logLevel := m.logLevel + m.RLock() + if _, ok := m.silencePaths[r.URL.Path]; ok { + logLevel = logrus.TraceLevel + } + m.RUnlock() + + start := m.clock.Now() + + // Try to get the real IP + remoteAddr := r.RemoteAddr + if realIP := r.Header.Get("X-Real-IP"); realIP != "" { + remoteAddr = realIP + } + + entry := m.Logger.NewEntry() + + entry = m.Before(entry, r, remoteAddr) + + if m.logStarting { + entry.Log(logLevel, "started handling request") + } + + nrw, ok := rw.(negroni.ResponseWriter) + if !ok { + nrw = negroni.NewResponseWriter(rw) + } + + r = r.WithContext(withEnableExternalLatencyMeasurement(r.Context())) + next(nrw, r) + + latency := m.clock.Since(start) + + m.After(entry, r, nrw, latency, m.Name).Log(logLevel, "completed handling request") +} + +// BeforeFunc is the func type used to modify or replace the *logrusx.Logger prior +// to calling the next func in the middleware chain +type BeforeFunc func(*logrusx.Logger, *http.Request, string) *logrusx.Logger + +// AfterFunc is the func type used to modify or replace the *logrusx.Logger after +// calling the next func in the middleware chain +type AfterFunc func(*logrusx.Logger, negroni.ResponseWriter, time.Duration, string) *logrusx.Logger + +// DefaultBefore is the default func assigned to *Middleware.Before +func DefaultBefore(entry *logrusx.Logger, req *http.Request, remoteAddr string) *logrusx.Logger { + return entry.WithRequest(req) +} + +// DefaultAfter is the default func assigned to *Middleware.After +func DefaultAfter(entry *logrusx.Logger, req *http.Request, res negroni.ResponseWriter, latency time.Duration, name string) *logrusx.Logger { + e := entry.WithRequest(req).WithField("http_response", map[string]any{ + "status": res.Status(), + "size": res.Size(), + "text_status": http.StatusText(res.Status()), + "took": latency, + "headers": entry.HTTPHeadersRedacted(res.Header()), + }) + if el := getExternalLatency(req.Context()); el > 0 { + e = e.WithFields(map[string]any{ + "took_internal": latency - el, + "took_external": el, + }) + } + return e +} diff --git a/oryx/requirex/assertx.go b/oryx/requirex/assertx.go new file mode 100644 index 00000000000..6dfb4dd3896 --- /dev/null +++ b/oryx/requirex/assertx.go @@ -0,0 +1,19 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package requirex + +import ( + "bytes" + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +func EqualAsJSON(t *testing.T, expected, actual interface{}, args ...interface{}) { + var eb, ab bytes.Buffer + require.NoError(t, json.NewEncoder(&eb).Encode(expected)) + require.NoError(t, json.NewEncoder(&ab).Encode(actual)) + require.JSONEq(t, eb.String(), ab.String(), args...) 
+} diff --git a/oryx/requirex/time.go b/oryx/requirex/time.go new file mode 100644 index 00000000000..a9b079e3efe --- /dev/null +++ b/oryx/requirex/time.go @@ -0,0 +1,23 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package requirex + +import ( + "time" + + "github.com/stretchr/testify/require" +) + +// EqualDuration fails if expected and actual are more distant than precision +// Note: The previous implementation incorrectly passed on durations bigger than time.maxDuration (i.e. with zero-time involved) and incorrectly failed on zero durations. +func EqualDuration(t require.TestingT, expected, actual, precision time.Duration) { + require.Truef(t, expected <= actual+precision && expected >= actual-precision, "expected %s to be within %s of %s", actual, precision, expected) +} + +// EqualTime fails if expected and actual are more distant than precision +// Deprecated: use require.WithinDuration instead +// Note: The previous implementation incorrectly passed on durations bigger than time.maxDuration (i.e. with zero-time involved) and incorrectly failed on zero durations. +func EqualTime(t require.TestingT, expected, actual time.Time, precision time.Duration) { + require.WithinDuration(t, expected, actual, precision) +} diff --git a/oryx/resilience/retry.go b/oryx/resilience/retry.go new file mode 100644 index 00000000000..8ca6511e2e2 --- /dev/null +++ b/oryx/resilience/retry.go @@ -0,0 +1,39 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package resilience provides helpers for dealing with resilience. +package resilience + +import ( + "time" + + "github.com/pkg/errors" + + "github.com/ory/x/logrusx" +) + +// Retry executes a f until no error is returned or failAfter is reached. +func Retry(logger *logrusx.Logger, maxWait time.Duration, failAfter time.Duration, f func() error) (err error) { + var lastStart time.Time + err = errors.New("did not connect") + loopWait := time.Millisecond * 100 + retryStart := time.Now().UTC() + for retryStart.Add(failAfter).After(time.Now().UTC()) { + lastStart = time.Now().UTC() + if err = f(); err == nil { + return nil + } + + if lastStart.Add(maxWait * 2).Before(time.Now().UTC()) { + retryStart = time.Now().UTC() + } + + logger.WithError(err).Infof("Retrying in %f seconds...", loopWait.Seconds()) + time.Sleep(loopWait) + loopWait = loopWait * time.Duration(int64(2)) + if loopWait > maxWait { + loopWait = maxWait + } + } + return err +} diff --git a/oryx/safecast/safecast.go b/oryx/safecast/safecast.go new file mode 100644 index 00000000000..947b517ffab --- /dev/null +++ b/oryx/safecast/safecast.go @@ -0,0 +1,14 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package safecast + +import "math" + +// Clamp if needed. +func Uint64ToInt64(in uint64) int64 { + if in > math.MaxInt64 { + return math.MaxInt64 + } + return int64(in) +} diff --git a/oryx/serverx/404.go b/oryx/serverx/404.go new file mode 100644 index 00000000000..5e5a68be850 --- /dev/null +++ b/oryx/serverx/404.go @@ -0,0 +1,43 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package serverx + +import ( + _ "embed" + "net/http" + + "github.com/ory/herodot/httputil" +) + +//go:embed 404.html +var page404HTML string + +//go:embed 404.json +var page404JSON string + +// DefaultNotFoundHandler is a default handler for handling 404 errors. 
+var DefaultNotFoundHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + var contentType, body string + switch httputil.NegotiateContentType(r, []string{ + "text/html", + "text/plain", + "application/json", + }, "text/html") { + case "text/plain": + contentType = "text/plain" + body = "Error 404 - The requested route does not exist. Make sure you are using the right path, domain, and port." + case "application/json": + contentType = "application/json" + body = page404JSON + default: + fallthrough + case "text/html": + contentType = "text/html" + body = page404HTML + } + + w.Header().Set("Content-Type", contentType+"; charset=utf-8") + w.WriteHeader(http.StatusNotFound) + _, _ = w.Write([]byte(body)) +}) diff --git a/oryx/serverx/404.html b/oryx/serverx/404.html new file mode 100644 index 00000000000..8742c2fb4ac --- /dev/null +++ b/oryx/serverx/404.html @@ -0,0 +1,56 @@ + + + + + 404 - Route not found + + + +
+    <!-- HTML markup and inline styles omitted in this rendering -->
+    Error 404
+    The requested route does not exist. Make sure you are using the right
+    path, domain, and port.
+ + diff --git a/oryx/serverx/404.json b/oryx/serverx/404.json new file mode 100644 index 00000000000..5f46c1c0687 --- /dev/null +++ b/oryx/serverx/404.json @@ -0,0 +1,7 @@ +{ + "error": { + "code": 404, + "message": "Not Found", + "reason": "The requested route does not exist. Make sure you are using the right path, domain, and port." + } +} diff --git a/oryx/serverx/redir.go b/oryx/serverx/redir.go new file mode 100644 index 00000000000..845a77dd525 --- /dev/null +++ b/oryx/serverx/redir.go @@ -0,0 +1,17 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package serverx + +import ( + "net/http" + + "github.com/julienschmidt/httprouter" +) + +// PermanentRedirect permanently redirects (302) a path to another one. +func PermanentRedirect(to string) func(rw http.ResponseWriter, r *http.Request, _ httprouter.Params) { + return func(rw http.ResponseWriter, r *http.Request, _ httprouter.Params) { + http.Redirect(rw, r, to, http.StatusPermanentRedirect) + } +} diff --git a/oryx/servicelocatorx/options.go b/oryx/servicelocatorx/options.go new file mode 100644 index 00000000000..9609bbcc835 --- /dev/null +++ b/oryx/servicelocatorx/options.go @@ -0,0 +1,83 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package servicelocatorx + +import ( + "github.com/urfave/negroni" + "google.golang.org/grpc" + + "github.com/ory/x/contextx" + "github.com/ory/x/logrusx" +) + +type ( + Options struct { + logger *logrusx.Logger + contextualizer contextx.Contextualizer + httpMiddlewares []negroni.Handler + grpcUnaryInterceptors []grpc.UnaryServerInterceptor + grpcStreamInterceptors []grpc.StreamServerInterceptor + } + Option func(o *Options) +) + +func WithLogger(l *logrusx.Logger) Option { + return func(o *Options) { + o.logger = l + } +} + +func WithContextualizer(ctxer contextx.Contextualizer) Option { + return func(o *Options) { + o.contextualizer = ctxer + } +} + +func WithHTTPMiddlewares(m ...negroni.Handler) Option { + return func(o *Options) { + o.httpMiddlewares = m + } +} + +func WithGRPCUnaryInterceptors(i ...grpc.UnaryServerInterceptor) Option { + return func(o *Options) { + o.grpcUnaryInterceptors = i + } +} + +func WithGRPCStreamInterceptors(i ...grpc.StreamServerInterceptor) Option { + return func(o *Options) { + o.grpcStreamInterceptors = i + } +} + +func (o *Options) Logger() *logrusx.Logger { + return o.logger +} + +func (o *Options) Contextualizer() contextx.Contextualizer { + return o.contextualizer +} + +func (o *Options) HTTPMiddlewares() []negroni.Handler { + return o.httpMiddlewares +} + +func (o *Options) GRPCUnaryInterceptors() []grpc.UnaryServerInterceptor { + return o.grpcUnaryInterceptors +} + +func (o *Options) GRPCStreamInterceptors() []grpc.StreamServerInterceptor { + return o.grpcStreamInterceptors +} + +func NewOptions(options ...Option) *Options { + o := &Options{ + contextualizer: &contextx.Default{}, + } + for _, opt := range options { + opt(o) + } + return o +} diff --git a/oryx/sjsonx/set.go b/oryx/sjsonx/set.go new file mode 100644 index 00000000000..b6f37fad5e5 --- /dev/null +++ b/oryx/sjsonx/set.go @@ -0,0 +1,36 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sjsonx + +import ( + "github.com/pkg/errors" + "github.com/tidwall/sjson" +) + +// SetBytes sets multiple key value pairs in the json object using sjson.SetBytes. 
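+// The input slice is copied, not modified in place. A short sketch of the
+// expected behavior (the path "b.c" uses sjson's dot syntax; the values are
+// illustrative):
+//
+//	out, err := sjsonx.SetBytes([]byte(`{"a":1}`), map[string]interface{}{"b.c": 2})
+//	// out: {"a":1,"b":{"c":2}} (the input slice is left untouched)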
+func SetBytes(in []byte, vs map[string]interface{}) (out []byte, err error) { + out = make([]byte, len(in)) + copy(out, in) + for k, v := range vs { + out, err = sjson.SetBytes(out, k, v) + if err != nil { + return nil, errors.WithStack(err) + } + } + + return out, nil +} + +// Set sets multiple key value pairs in the json object using sjson.Set. +func Set(in string, vs map[string]interface{}) (out string, err error) { + out = in + for k, v := range vs { + out, err = sjson.Set(out, k, v) + if err != nil { + return "", errors.WithStack(err) + } + } + + return out, nil +} diff --git a/oryx/snapshotx/.snapshots/TestDeleteMatches-file=1.json-fn.json b/oryx/snapshotx/.snapshots/TestDeleteMatches-file=1.json-fn.json new file mode 100644 index 00000000000..4bc224c3400 --- /dev/null +++ b/oryx/snapshotx/.snapshots/TestDeleteMatches-file=1.json-fn.json @@ -0,0 +1,27 @@ +{ + "foo": { + "other": "fdsa" + }, + "nested": { + "nested": { + "arr": [ + { + }, + { + } + ] + } + }, + "arr": [ + { + }, + { + "arr": [ + { + }, + { + } + ] + } + ] +} diff --git a/oryx/snapshotx/.snapshots/TestDeleteMatches-file=2.json-fn.json b/oryx/snapshotx/.snapshots/TestDeleteMatches-file=2.json-fn.json new file mode 100644 index 00000000000..9926439f751 --- /dev/null +++ b/oryx/snapshotx/.snapshots/TestDeleteMatches-file=2.json-fn.json @@ -0,0 +1,34 @@ +{ + "created_at": "1234", + "updated_at": "1234", + "nested": { + "created_at": 1234, + "nested": { + "created_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + }, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + ] +} diff --git a/oryx/snapshotx/.snapshots/TestDeleteMatches-file=3.json-fn.json b/oryx/snapshotx/.snapshots/TestDeleteMatches-file=3.json-fn.json new file mode 100644 index 00000000000..5e9b1c9808f --- /dev/null +++ b/oryx/snapshotx/.snapshots/TestDeleteMatches-file=3.json-fn.json @@ -0,0 +1,28 @@ +{ + "updated_at": "1234", + "nested": { + "nested": { + "arr": [ + { + }, + { + "updated_at": 1234 + } + ] + } + }, + "arr": [ + { + }, + { + "updated_at": 1234, + "arr": [ + { + }, + { + "updated_at": 1234 + } + ] + } + ] +} diff --git a/oryx/snapshotx/fixtures/1.json b/oryx/snapshotx/fixtures/1.json new file mode 100644 index 00000000000..a0d0535ef56 --- /dev/null +++ b/oryx/snapshotx/fixtures/1.json @@ -0,0 +1,47 @@ +{ + "ignore_nested": [ + "updated_at", + "created_at" + ], + "ignore_exact": [ + "foo.id" + ], + "content": { + "foo": { + "id": "asdf", + "other": "fdsa" + }, + "created_at": "1234", + "updated_at": "1234", + "nested":{ + "created_at": 1234, + "nested": { + "created_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + }, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + ] + } +} \ No newline at end of file diff --git a/oryx/snapshotx/fixtures/2.json b/oryx/snapshotx/fixtures/2.json new file mode 100644 index 00000000000..dbe84c070e2 --- /dev/null +++ b/oryx/snapshotx/fixtures/2.json @@ -0,0 +1,38 @@ +{ + "ignore_nested": [ + ], + "content": { + "created_at": "1234", + "updated_at": "1234", + "nested":{ + "created_at": 1234, + "nested": { + "created_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + }, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + 
} + ] + } +} \ No newline at end of file diff --git a/oryx/snapshotx/fixtures/3.json b/oryx/snapshotx/fixtures/3.json new file mode 100644 index 00000000000..bc58d3f9e81 --- /dev/null +++ b/oryx/snapshotx/fixtures/3.json @@ -0,0 +1,39 @@ +{ + "ignore_nested": [ + "created_at" + ], + "content": { + "created_at": "1234", + "updated_at": "1234", + "nested": { + "created_at": 1234, + "nested": { + "created_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + }, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234, + "arr": [ + { + "created_at": 1234 + }, + { + "updated_at": 1234 + } + ] + } + ] + } +} \ No newline at end of file diff --git a/oryx/snapshotx/snapshot.go b/oryx/snapshotx/snapshot.go new file mode 100644 index 00000000000..e8b0b6d21f8 --- /dev/null +++ b/oryx/snapshotx/snapshot.go @@ -0,0 +1,144 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package snapshotx + +import ( + "bytes" + "encoding/json" + "fmt" + "slices" + "strings" + "testing" + + "github.com/bradleyjkemp/cupaloy/v2" + "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" + "github.com/tidwall/sjson" +) + +type ( + Opt = func(*options) + options struct { + modifiers []func(t *testing.T, raw []byte) []byte + name string + } +) + +func ExceptPaths(keys ...string) Opt { + return func(o *options) { + o.modifiers = append(o.modifiers, func(t *testing.T, raw []byte) []byte { + for _, key := range keys { + var err error + raw, err = sjson.DeleteBytes(raw, key) + require.NoError(t, err) + } + return raw + }) + } +} + +func ExceptNestedKeys(nestedKeys ...string) Opt { + return func(o *options) { + o.modifiers = append(o.modifiers, func(t *testing.T, raw []byte) []byte { + parsed := gjson.ParseBytes(raw) + require.True(t, parsed.IsObject() || parsed.IsArray()) + return deleteMatches(t, "", parsed, nestedKeys, []string{}, raw) + }) + } +} + +func WithReplacement(str, replace string) Opt { + return func(o *options) { + o.modifiers = append(o.modifiers, func(t *testing.T, raw []byte) []byte { + return bytes.ReplaceAll(raw, []byte(str), []byte(replace)) + }) + } +} + +func WithName(name string) Opt { + return func(o *options) { + o.name = name + } +} + +func newOptions(opts ...Opt) *options { + o := &options{} + for _, opt := range opts { + opt(o) + } + return o +} + +func (o *options) applyModifiers(t *testing.T, compare []byte) []byte { + for _, modifier := range o.modifiers { + compare = modifier(t, compare) + } + return compare +} + +var snapshot = cupaloy.New(cupaloy.SnapshotFileExtension(".json")) + +func SnapshotTJSON[C ~string | ~[]byte](t *testing.T, compare C, opts ...Opt) { + SnapshotT(t, json.RawMessage(compare), opts...) +} + +func SnapshotT(t *testing.T, actual any, opts ...Opt) { + t.Helper() + compare, err := json.MarshalIndent(actual, "", " ") + require.NoErrorf(t, err, "%+v", actual) + + o := newOptions(opts...) + compare = o.applyModifiers(t, compare) + + if o.name == "" { + snapshot.SnapshotT(t, compare) + } else { + name := strings.ReplaceAll(t.Name()+"_"+o.name, "/", "-") + require.NoError(t, snapshot.SnapshotWithName(name, compare)) + } +} + +// SnapshotTExcept is deprecated in favor of SnapshotT with Opt. 
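+// For example, instead of SnapshotTExcept(t, v, []string{"id"}), one would
+// now write (a sketch; "id" and "created_at" are placeholder keys):
+//
+//	snapshotx.SnapshotT(t, v,
+//		snapshotx.ExceptPaths("id"),
+//		snapshotx.ExceptNestedKeys("created_at"))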
+// +// DEPRECATED: please use SnapshotT instead +func SnapshotTExcept(t *testing.T, actual interface{}, except []string) { + t.Helper() + compare, err := json.MarshalIndent(actual, "", " ") + require.NoError(t, err, "%+v", actual) + for _, e := range except { + compare, err = sjson.DeleteBytes(compare, e) + require.NoError(t, err, "%s", e) + } + + snapshot.SnapshotT(t, compare) +} + +func deleteMatches(t *testing.T, key string, result gjson.Result, matches []string, parents []string, content []byte) []byte { + path := parents + if key != "" { + path = append(parents, key) + } + + if result.IsObject() { + result.ForEach(func(key, value gjson.Result) bool { + content = deleteMatches(t, key.String(), value, matches, path, content) + return true + }) + } else if result.IsArray() { + var i int + result.ForEach(func(_, value gjson.Result) bool { + content = deleteMatches(t, fmt.Sprintf("%d", i), value, matches, path, content) + i++ + return true + }) + } + + if slices.Contains(matches, key) { + content, err := sjson.DeleteBytes(content, strings.Join(path, ".")) + require.NoError(t, err) + return content + } + + return content +} diff --git a/oryx/sqlcon/connector.go b/oryx/sqlcon/connector.go new file mode 100644 index 00000000000..a21539cd3ca --- /dev/null +++ b/oryx/sqlcon/connector.go @@ -0,0 +1,23 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package sqlcon provides helpers for dealing with SQL connectivity. +package sqlcon + +import ( + "runtime" + "strings" +) + +// GetDriverName returns the driver name of a given DSN. +func GetDriverName(dsn string) string { + return strings.Split(dsn, "://")[0] +} +func maxParallelism() int { + maxProcs := runtime.GOMAXPROCS(0) + numCPU := runtime.NumCPU() + if maxProcs < numCPU { + return maxProcs + } + return numCPU +} diff --git a/oryx/sqlcon/dockertest/cockroach.go b/oryx/sqlcon/dockertest/cockroach.go new file mode 100644 index 00000000000..c2d1820fc55 --- /dev/null +++ b/oryx/sqlcon/dockertest/cockroach.go @@ -0,0 +1,22 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package dockertest + +import ( + "testing" + + "github.com/cockroachdb/cockroach-go/v2/testserver" + "github.com/stretchr/testify/require" +) + +func NewLocalTestCRDBServer(t testing.TB) string { + ts, err := testserver.NewTestServer(testserver.CustomVersionOpt("25.3.3")) + require.NoError(t, err) + t.Cleanup(ts.Stop) + + require.NoError(t, ts.WaitForInit()) + + ts.PGURL().Scheme = "cockroach" + return ts.PGURL().String() +} diff --git a/oryx/sqlcon/dockertest/onexit.go b/oryx/sqlcon/dockertest/onexit.go new file mode 100644 index 00000000000..671fa37d04a --- /dev/null +++ b/oryx/sqlcon/dockertest/onexit.go @@ -0,0 +1,57 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package dockertest + +import ( + "os" + "os/signal" + "sync" + "syscall" +) + +const interruptedExitCode = 130 + +// OnExit helps with cleaning up docker test. +type OnExit struct { + sync.Mutex + once sync.Once + handlers []func() +} + +// NewOnExit create a new OnExit instance. +func NewOnExit() *OnExit { + return &OnExit{ + handlers: make([]func(), 0), + } +} + +// Add adds a task that is executed on SIGINT, SIGKILL, SIGTERM. 
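+// (Only SIGINT and SIGTERM can actually be trapped; SIGKILL is not catchable.)
+// A typical TestMain pattern (a sketch, not prescribed by this package):
+//
+//	func TestMain(m *testing.M) {
+//		onExit := dockertest.Register()
+//		onExit.Add(func() { /* extra cleanup */ })
+//		onExit.Exit(m.Run())
+//	}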
+func (at *OnExit) Add(f func()) { + at.Lock() + defer at.Unlock() + at.handlers = append(at.handlers, f) + at.once.Do(func() { + go func() { + c := make(chan os.Signal, 1) + signal.Notify(c, syscall.SIGINT, syscall.SIGTERM) + <-c + at.Exit(interruptedExitCode) + }() + }) +} + +// Exit wraps os.Exit +func (at *OnExit) Exit(status int) { + at.execute() + os.Exit(status) +} + +func (at *OnExit) execute() { + at.Lock() + defer at.Unlock() + for _, f := range at.handlers { + f() + } + at.handlers = make([]func(), 0) +} diff --git a/oryx/sqlcon/dockertest/test_helper.go b/oryx/sqlcon/dockertest/test_helper.go new file mode 100644 index 00000000000..f1d851644f6 --- /dev/null +++ b/oryx/sqlcon/dockertest/test_helper.go @@ -0,0 +1,311 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package dockertest + +import ( + "cmp" + "fmt" + "io" + "log" + "os" + "regexp" + "strings" + "sync" + "testing" + "time" + + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/filters" + "github.com/docker/docker/client" + "github.com/stretchr/testify/require" + + "github.com/ory/pop/v6" + + "github.com/ory/dockertest/v3" + dc "github.com/ory/dockertest/v3/docker" + "github.com/ory/x/logrusx" + "github.com/ory/x/resilience" + "github.com/ory/x/stringsx" +) + +type dockerPool interface { + Purge(r *dockertest.Resource) error + Run(repository, tag string, env []string) (*dockertest.Resource, error) + RunWithOptions(opts *dockertest.RunOptions, hcOpts ...func(*dc.HostConfig)) (*dockertest.Resource, error) +} + +var ( + pool dockerPool + resources []*dockertest.Resource + mux sync.Mutex +) + +func init() { + var err error + pool, err = dockertest.NewPool("") + if err != nil { + panic(err) + } +} + +// KillAllTestDatabases deletes all test databases. +func KillAllTestDatabases() { + mux.Lock() + defer mux.Unlock() + for _, r := range resources { + if err := pool.Purge(r); err != nil { + log.Printf("Failed to purge resource: %s", err) + } + } + + resources = nil +} + +// Register sets up OnExit. +func Register() *OnExit { + onexit := NewOnExit() + onexit.Add(func() { + KillAllTestDatabases() + }) + return onexit +} + +func ConnectPop(t require.TestingT, url string) (c *pop.Connection) { + require.NoError(t, resilience.Retry(logrusx.New("", ""), time.Second*5, time.Minute*5, func() error { + var err error + c, err = pop.NewConnection(&pop.ConnectionDetails{ + URL: url, + }) + if err != nil { + log.Printf("could not create pop connection") + return err + } + if err := c.Open(); err != nil { + // an Open error probably means we have a problem with the connections config + log.Printf("could not open pop connection: %+v", err) + return err + } + return c.RawQuery("select version()").Exec() + })) + return +} + +// ## PostgreSQL ## + +func startPostgreSQL(version string) (*dockertest.Resource, error) { + resource, err := pool.Run("postgres", cmp.Or(version, "16"), []string{"PGUSER=postgres", "POSTGRES_PASSWORD=secret", "POSTGRES_DB=postgres"}) + if err == nil { + mux.Lock() + resources = append(resources, resource) + mux.Unlock() + } + return resource, err +} + +// RunTestPostgreSQL runs a PostgreSQL database and returns the URL to it. +// If a docker container is started for the database, the container be removed +// at the end of the test. 
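+// A typical test setup (a sketch; running migrations is an assumption and not
+// part of this package):
+//
+//	func TestPersister(t *testing.T) {
+//		dsn := dockertest.RunTestPostgreSQL(t)
+//		c := dockertest.ConnectPop(t, dsn)
+//		// run migrations / seed fixtures against c here
+//		_ = c
+//	}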
+func RunTestPostgreSQL(t testing.TB) string { + if dsn := os.Getenv("TEST_DATABASE_POSTGRESQL"); dsn != "" { + t.Logf("Skipping Docker setup because environment variable TEST_DATABASE_POSTGRESQL is set to: %s", dsn) + return dsn + } + + u, cleanup, err := runPosgreSQLCleanup("") + require.NoError(t, err) + t.Cleanup(cleanup) + + return u +} + +func runPosgreSQLCleanup(version string) (string, func(), error) { + resource, err := startPostgreSQL(version) + if err != nil { + return "", func() {}, err + } + + return fmt.Sprintf("postgres://postgres:secret@127.0.0.1:%s/postgres?sslmode=disable", resource.GetPort("5432/tcp")), + func() { _ = pool.Purge(resource) }, nil +} + +// RunTestPostgreSQLWithVersion connects to a PostgreSQL database . +func RunTestPostgreSQLWithVersion(t testing.TB, version string) string { + if dsn := os.Getenv("TEST_DATABASE_POSTGRESQL"); dsn != "" { + return dsn + } + + resource, err := startPostgreSQL(version) + require.NoError(t, err) + return fmt.Sprintf("postgres://postgres:secret@127.0.0.1:%s/postgres?sslmode=disable", resource.GetPort("5432/tcp")) +} + +// ## MySQL ## + +func startMySQL(version string) (*dockertest.Resource, error) { + resource, err := pool.RunWithOptions(&dockertest.RunOptions{ + Repository: "mysql", + Tag: cmp.Or(version, "8.0"), + Env: []string{ + "MYSQL_ROOT_PASSWORD=secret", + "MYSQL_ROOT_HOST=%", + }, + }) + if err != nil { + return nil, err + } + mux.Lock() + resources = append(resources, resource) + mux.Unlock() + return resource, nil +} + +func runMySQLCleanup(version string) (string, func(), error) { + resource, err := startMySQL(version) + if err != nil { + return "", func() {}, err + } + + return fmt.Sprintf("mysql://root:secret@tcp(localhost:%s)/mysql?parseTime=true&multiStatements=true", resource.GetPort("3306/tcp")), + func() { _ = pool.Purge(resource) }, nil +} + +// RunTestMySQL runs a MySQL database and returns the URL to it. +// If a docker container is started for the database, the container be removed +// at the end of the test. +func RunTestMySQL(t testing.TB) string { + if dsn := os.Getenv("TEST_DATABASE_MYSQL"); dsn != "" { + t.Logf("Skipping Docker setup because environment variable TEST_DATABASE_MYSQL is set to: %s", dsn) + return dsn + } + + u, cleanup, err := runMySQLCleanup("") + require.NoError(t, err) + t.Cleanup(cleanup) + + return u +} + +// RunTestMySQLWithVersion runs a MySQL database in the specified version and returns the URL to it. +// If a docker container is started for the database, the container be removed +// at the end of the test. 
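+// For example (the version tag is illustrative and must exist on Docker Hub):
+//
+//	dsn := dockertest.RunTestMySQLWithVersion(t, "8.4")
+//	c := dockertest.ConnectPop(t, dsn)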
+func RunTestMySQLWithVersion(t testing.TB, version string) string { + if dsn := os.Getenv("TEST_DATABASE_MYSQL"); dsn != "" { + t.Logf("Skipping Docker setup because environment variable TEST_DATABASE_MYSQL is set to: %s", dsn) + return dsn + } + + u, cleanup, err := runMySQLCleanup(version) + require.NoError(t, err) + t.Cleanup(cleanup) + + return u +} + +// ## CockroachDB + +func startCockroachDB(version string) (*dockertest.Resource, error) { + resource, err := pool.RunWithOptions(&dockertest.RunOptions{ + Repository: "cockroachdb/cockroach", + Tag: cmp.Or(version, "latest-v25.4"), + Cmd: []string{"start-single-node", "--insecure"}, + }) + if err == nil { + mux.Lock() + resources = append(resources, resource) + mux.Unlock() + } + return resource, err +} + +func runCockroachDBWithVersionCleanup(version string) (string, func(), error) { + resource, err := startCockroachDB(version) + if err != nil { + return "", func() {}, err + } + + return fmt.Sprintf("cockroach://root@localhost:%s/defaultdb?sslmode=disable", resource.GetPort("26257/tcp")), + func() { _ = pool.Purge(resource) }, + nil +} + +// RunTestCockroachDB runs a CockroachDB database and returns the URL to it. +// If a docker container is started for the database, the container be removed +// at the end of the test. +func RunTestCockroachDB(t testing.TB) string { + return RunTestCockroachDBWithVersion(t, "") +} + +// RunTestCockroachDBWithVersion runs a CockroachDB database and returns the URL to it. +// If a docker container is started for the database, the container be removed +// at the end of the test. +func RunTestCockroachDBWithVersion(t testing.TB, version string) string { + if dsn := os.Getenv("TEST_DATABASE_COCKROACHDB"); dsn != "" { + t.Logf("Skipping Docker setup because environment variable TEST_DATABASE_COCKROACHDB is set to: %s", dsn) + return dsn + } + + u, cleanup, err := runCockroachDBWithVersionCleanup(version) + require.NoError(t, err) + t.Cleanup(cleanup) + + return u +} + +func DumpSchema(t testing.TB, c *pop.Connection) string { + name, database, port := c.Dialect.Name(), c.Dialect.Details().Database, c.Dialect.Details().Port + t.Logf("Dumping schema for dialect %s, database %s on port %s", name, database, port) + + var cmd []string + var appendToDump string + switch dialects := stringsx.SwitchExact(name); { + case dialects.AddCase("sqlite3"): + return dumpSQLiteSchema(t, c) + case dialects.AddCase("postgres"): + cmd = []string{"pg_dump", "--username", "postgres", "--schema-only", "--dbname", database} + // we need to set the search path because the postgres dump always unsets it + appendToDump = "SET search_path TO public;\n" + case dialects.AddCase("mysql"): + cmd = []string{"mysqldump", "--user", "root", "--password=secret", "--no-data", database} + case dialects.AddCase("cockroach"): + cmd = []string{"cockroach", "sql", "--insecure", "--database", database, "--execute", "SHOW CREATE ALL TABLES; SHOW CREATE ALL TYPES;", "--format", "raw"} + default: + t.Log(dialects.ToUnknownCaseErr()) + t.FailNow() + return "" + } + + cli, err := client.NewClientWithOpts(client.FromEnv) + require.NoError(t, err) + containers, err := cli.ContainerList(t.Context(), container.ListOptions{ + Filters: filters.NewArgs(filters.Arg("publish", port)), + }) + require.NoError(t, err) + require.Lenf(t, containers, 1, "expected exactly one %s container with port %s", name, port) + + process, err := cli.ContainerExecCreate(t.Context(), containers[0].ID, container.ExecOptions{ + Tty: true, + AttachStdout: true, + Cmd: cmd, + }) + 
require.NoError(t, err) + + resp, err := cli.ContainerExecAttach(t.Context(), process.ID, container.ExecAttachOptions{ + Tty: true, + }) + require.NoError(t, err) + dump, err := io.ReadAll(resp.Reader) + require.NoErrorf(t, err, "%s", dump) + + d := string(dump) + appendToDump + d = regexp.MustCompile(`(--|#|\\|mysqldump|SHOW CREATE)[^\n]*\n`).ReplaceAllLiteralString(d, "") // comments and other non-schema lines + d = strings.ReplaceAll(d, "\r\n", "\n") + d = regexp.MustCompile(`\n\n+`).ReplaceAllLiteralString(d, "\n\n") + return d +} + +func dumpSQLiteSchema(t testing.TB, c *pop.Connection) string { + var sqls []string + require.NoError(t, c.RawQuery("SELECT sql FROM sqlite_master WHERE type IN ('table', 'index', 'trigger', 'view') AND name NOT LIKE 'sqlite_%' ORDER BY name").All(&sqls)) + return strings.Join(sqls, ";\n") + ";\n" +} diff --git a/oryx/sqlcon/error.go b/oryx/sqlcon/error.go new file mode 100644 index 00000000000..81f7fca854d --- /dev/null +++ b/oryx/sqlcon/error.go @@ -0,0 +1,96 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sqlcon + +import ( + "database/sql" + "net/http" + + "google.golang.org/grpc/codes" + + "github.com/go-sql-driver/mysql" + "github.com/jackc/pgconn" + "github.com/lib/pq" + "github.com/pkg/errors" + + "github.com/ory/herodot" +) + +var ( + // ErrUniqueViolation is returned when^a SQL INSERT / UPDATE command returns a conflict. + ErrUniqueViolation = &herodot.DefaultError{ + CodeField: http.StatusConflict, + GRPCCodeField: codes.AlreadyExists, + StatusField: http.StatusText(http.StatusConflict), + ErrorField: "Unable to insert or update resource because a resource with that value exists already", + } + // ErrNoRows is returned when a SQL SELECT statement returns no rows. + ErrNoRows = &herodot.DefaultError{ + CodeField: http.StatusNotFound, + GRPCCodeField: codes.NotFound, + StatusField: http.StatusText(http.StatusNotFound), + ErrorField: "Unable to locate the resource", + } + // ErrConcurrentUpdate is returned when the database is unable to serialize access due to a concurrent update. + ErrConcurrentUpdate = &herodot.DefaultError{ + CodeField: http.StatusBadRequest, + GRPCCodeField: codes.Aborted, + StatusField: http.StatusText(http.StatusBadRequest), + ErrorField: "Unable to serialize access due to a concurrent update in another session", + } + ErrNoSuchTable = &herodot.DefaultError{ + CodeField: http.StatusInternalServerError, + GRPCCodeField: codes.Internal, + StatusField: http.StatusText(http.StatusInternalServerError), + ErrorField: "Unable to locate the table", + } +) + +func handlePostgres(err error, sqlState string) error { + switch sqlState { + case "23505": // "unique_violation" + return errors.WithStack(ErrUniqueViolation.WithWrap(err)) + case "40001", // "serialization_failure" in CRDB + "CR000": // "serialization_failure" + return errors.WithStack(ErrConcurrentUpdate.WithWrap(err)) + case "42P01": // "no such table" + return errors.WithStack(ErrNoSuchTable.WithWrap(err)) + } + return errors.WithStack(err) +} + +type stater interface { + SQLState() string +} + +// HandleError returns the right sqlcon.Err* depending on the input error. 
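+// Callers usually pass every database error through HandleError and then
+// branch on the sentinel errors, for example (a sketch; c is a *pop.Connection
+// and m, id are illustrative):
+//
+//	if err := sqlcon.HandleError(c.Find(&m, id)); errors.Is(err, sqlcon.ErrNoRows) {
+//		// handle "not found"
+//	}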
+func HandleError(err error) error { + if err == nil { + return nil + } + + var st stater + if errors.Is(err, sql.ErrNoRows) { + return errors.WithStack(ErrNoRows) + } else if errors.As(err, &st) { + return errors.WithStack(handlePostgres(err, st.SQLState())) + } else if e := new(pq.Error); errors.As(err, &e) { + return errors.WithStack(handlePostgres(err, string(e.Code))) + } else if e := new(pgconn.PgError); errors.As(err, &e) { + return errors.WithStack(handlePostgres(err, e.Code)) + } else if e := new(mysql.MySQLError); errors.As(err, &e) { + switch e.Number { + case 1062: + return errors.WithStack(ErrUniqueViolation.WithWrap(err)) + case 1146: + return errors.WithStack(ErrNoSuchTable.WithWrap(e)) + } + } + + if err := handleSqlite(err); err != nil { + return errors.WithStack(err) + } + + return errors.WithStack(err) +} diff --git a/oryx/sqlcon/error_nosqlite.go b/oryx/sqlcon/error_nosqlite.go new file mode 100644 index 00000000000..1df58a72da3 --- /dev/null +++ b/oryx/sqlcon/error_nosqlite.go @@ -0,0 +1,12 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build !sqlite +// +build !sqlite + +package sqlcon + +// handleSqlite handles the error iff (if and only if) it is an sqlite error +func handleSqlite(_ error) error { + return nil +} diff --git a/oryx/sqlcon/error_sqlite.go b/oryx/sqlcon/error_sqlite.go new file mode 100644 index 00000000000..60c432ee0c2 --- /dev/null +++ b/oryx/sqlcon/error_sqlite.go @@ -0,0 +1,40 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build sqlite +// +build sqlite + +package sqlcon + +import ( + "strings" + + "github.com/mattn/go-sqlite3" + "github.com/pkg/errors" +) + +// handleSqlite handles the error iff (if and only if) it is an sqlite error +func handleSqlite(err error) error { + if e := new(sqlite3.Error); errors.As(err, e) { + switch e.ExtendedCode { + case sqlite3.ErrConstraintUnique: + fallthrough + case sqlite3.ErrConstraintPrimaryKey: + return errors.WithStack(ErrUniqueViolation.WithWrap(err)) + + } + + switch e.Code { + case sqlite3.ErrError: + if strings.Contains(err.Error(), "no such table") { + return errors.WithStack(ErrNoSuchTable.WithWrap(err)) + } + case sqlite3.ErrLocked: + return errors.WithStack(ErrConcurrentUpdate.WithWrap(err)) + } + + return errors.WithStack(err) + } + + return nil +} diff --git a/oryx/sqlcon/message.go b/oryx/sqlcon/message.go new file mode 100644 index 00000000000..d6b44e8bf73 --- /dev/null +++ b/oryx/sqlcon/message.go @@ -0,0 +1,87 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sqlcon + +// HelpMessage returns a string explaining how to set up SQL using environment variables. +func HelpMessage() string { + return `- DATABASE_URL: A DSN to a persistent backend. Various backends are supported: + + - Changes are lost on process death (ephemeral storage): + + - Memory: If DATABASE_URL is "memory", data will be written to memory and is lost when you restart this instance. + Example: DATABASE_URL=memory + + - Changes are kept after process death (persistent storage): + + - SQL Databases: Officially, PostgreSQL, MySQL and CockroachDB are supported. This project works best with PostgreSQL. + + - PostgreSQL: If DATABASE_URL is a DSN starting with postgres://, PostgreSQL will be used as storage backend. 
+ Example: DATABASE_URL=postgres://user:password@host:123/database + + Additionally, the following query/DSN parameters are supported: + + * max_conns (number): Sets the maximum number of open connections to the database. Defaults to the number of CPU cores times 2. + * max_idle_conns (number): Sets the maximum number of connections in the idle. Defaults to the number of CPU cores. + * max_conn_lifetime (duratino): Sets the maximum amount of time ("ms", "s", "m", "h") a connection may be reused. + Defaults to 0s (disabled). + * sslmode (string): Whether or not to use SSL (default is require) + * disable - No SSL + * require - Always SSL (skip verification) + * verify-ca - Always SSL (verify that the certificate presented by the + server was signed by a trusted CA) + * verify-full - Always SSL (verify that the certification presented by + the server was signed by a trusted CA and the server host name + matches the one in the certificate) + * fallback_application_name (string): An application_name to fall back to if one isn't provided. + * connect_timeout (number): Maximum wait for connection, in seconds. Zero or + not specified means wait indefinitely. + * sslcert (string): Cert file location. The file must contain PEM encoded data. + * sslkey (string): Key file location. The file must contain PEM encoded data. + * sslrootcert (string): The location of the root certificate file. The file + must contain PEM encoded data. + Example: DATABASE_URL=postgres://user:password@host:123/database?sslmode=verify-full + + - MySQL: If DATABASE_URL is a DSN starting with mysql:// MySQL will be used as storage backend. + Be aware that the ?parseTime=true parameter is mandatory, or timestamps will not work. + Example: DATABASE_URL=mysql://user:password@tcp(host:123)/database?parseTime=true + + Additionally, the following query/DSN parameters are supported: + * collation (string): Sets the collation used for client-server interaction on connection. In contrast to charset, + collation does not issue additional queries. If the specified collation is unavailable on the target server, + the connection will fail. + * loc (string): Sets the location for time.Time values. Note that this sets the location for time.Time values + but does not change MySQL's time_zone setting. For that set the time_zone DSN parameter. Please keep in mind, + that param values must be url.QueryEscape'ed. Alternatively you can manually replace the / with %2F. + For example US/Pacific would be loc=US%2FPacific. + * maxAllowedPacket (number): Max packet size allowed in bytes. The default value is 4 MiB and should be + adjusted to match the server settings. maxAllowedPacket=0 can be used to automatically fetch the max_allowed_packet variable from server on every connection. + * readTimeout (duration): I/O read timeout. The value must be a decimal number with a unit suffix + ("ms", "s", "m", "h"), such as "30s", "0.5m" or "1m30s". + * timeout (duration): Timeout for establishing connections, aka dial timeout. The value must be a decimal number with a unit suffix + ("ms", "s", "m", "h"), such as "30s", "0.5m" or "1m30s". + * tls (bool / string): tls=true enables TLS / SSL encrypted connection to the server. Use skip-verify if + you want to use a self-signed or invalid certificate (server side). + * writeTimeout (duration): I/O write timeout. The value must be a decimal number with a unit suffix + ("ms", "s", "m", "h"), such as "30s", "0.5m" or "1m30s". 
+ Example: DATABASE_URL=mysql://user:password@tcp(host:123)/database?parseTime=true&writeTimeout=123s + + - CockroachDB: If DATABASE_URL is a DSN starting with cockroach://, CockroachDB will be used as storage backend. + Example: DATABASE_URL=cockroach://user:password@host:123/database + + Additionally, the following query/DSN parameters are supported: + * sslmode (string): Whether or not to use SSL (default is require) + * disable - No SSL + * require - Always SSL (skip verification) + * verify-ca - Always SSL (verify that the certificate presented by the + server was signed by a trusted CA) + * verify-full - Always SSL (verify that the certification presented by + the server was signed by a trusted CA and the server host name + matches the one in the certificate) + * application_name (string): An initial value for the application_name session variable. + * sslcert (string): Cert file location. The file must contain PEM encoded data. + * sslkey (string): Key file location. The file must contain PEM encoded data. + * sslrootcert (string): The location of the root certificate file. The file + must contain PEM encoded data. + Example: DATABASE_URL=cockroach://user:password@host:123/database?sslmode=verify-full` +} diff --git a/oryx/sqlcon/parse_opts.go b/oryx/sqlcon/parse_opts.go new file mode 100644 index 00000000000..b8adccd8494 --- /dev/null +++ b/oryx/sqlcon/parse_opts.go @@ -0,0 +1,118 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sqlcon + +import ( + "fmt" + "net/url" + "strconv" + "strings" + "time" + + "github.com/ory/x/logrusx" +) + +// ParseConnectionOptions parses values for max_conns, max_idle_conns, max_conn_lifetime from DSNs. +// It also returns the URI without those query parameters. +func ParseConnectionOptions(l *logrusx.Logger, dsn string) (maxConns int, maxIdleConns int, maxConnLifetime, maxIdleConnTime time.Duration, cleanedDSN string) { + maxConns = maxParallelism() * 2 + maxIdleConns = maxParallelism() + cleanedDSN = dsn + + hostPath, rawQuery, ok := strings.Cut(dsn, "?") + if !ok { + l. + WithField("sql_max_connections", maxConns). + WithField("sql_max_idle_connections", maxIdleConns). + WithField("sql_max_connection_lifetime", maxConnLifetime). + WithField("sql_max_idle_connection_time", maxIdleConnTime). + Debugf("No SQL connection options have been defined, falling back to default connection options.") + return + } + + query, err := url.ParseQuery(rawQuery) + if err != nil { + l. + WithField("sql_max_connections", maxConns). + WithField("sql_max_idle_connections", maxIdleConns). + WithField("sql_max_connection_lifetime", maxConnLifetime). + WithField("sql_max_idle_connection_time", maxIdleConnTime). + WithError(err). 
+ Warnf("Unable to parse SQL DSN query, falling back to default connection options.") + return + } + + if v := query.Get("max_conns"); v != "" { + s, err := strconv.ParseInt(v, 10, 64) + if err != nil { + l.WithError(err).Warnf(`SQL DSN query parameter "max_conns" value %v could not be parsed to int, falling back to default value %d`, v, maxConns) + } else { + maxConns = int(s) + } + query.Del("max_conns") + } + + if v := query.Get("max_idle_conns"); v != "" { + s, err := strconv.ParseInt(v, 10, 64) + if err != nil { + l.WithError(err).Warnf(`SQL DSN query parameter "max_idle_conns" value %v could not be parsed to int, falling back to default value %d`, v, maxIdleConns) + } else { + maxIdleConns = int(s) + } + query.Del("max_idle_conns") + } + + if v := query.Get("max_conn_lifetime"); v != "" { + s, err := time.ParseDuration(v) + if err != nil { + l.WithError(err).Warnf(`SQL DSN query parameter "max_conn_lifetime" value %v could not be parsed to duration, falling back to default value %d`, v, maxConnLifetime) + } else { + maxConnLifetime = s + } + query.Del("max_conn_lifetime") + } + + if v := query.Get("max_conn_idle_time"); v != "" { + s, err := time.ParseDuration(v) + if err != nil { + l.WithError(err).Warnf(`SQL DSN query parameter "max_conn_idle_time" value %v could not be parsed to duration, falling back to default value %d`, v, maxIdleConnTime) + } else { + maxIdleConnTime = s + } + query.Del("max_conn_idle_time") + } + cleanedDSN = fmt.Sprintf("%s?%s", hostPath, query.Encode()) + + return +} + +// FinalizeDSN will return a finalized DSN URI. +func FinalizeDSN(l *logrusx.Logger, dsn string) string { + if !strings.HasPrefix(dsn, "mysql://") { + return dsn + } + + var q url.Values + hostPath, query, ok := strings.Cut(dsn, "?") + + if !ok { + q = make(url.Values) + } else { + var err error + q, err = url.ParseQuery(query) + if err != nil { + l.WithError(err).Warnf("Unable to parse SQL DSN query, could not finalize the DSN URI.") + return dsn + } + } + + q.Set("multiStatements", "true") + q.Set("parseTime", "true") + + // This causes an UPDATE to return the number of matching rows instead of + // the number of rows changed. This ensures compatibility with PostgreSQL and SQLite behavior. 
+ q.Set("clientFoundRows", "true") + + return fmt.Sprintf("%s?%s", hostPath, q.Encode()) +} diff --git a/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryArgs-case=cockroach.json b/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryArgs-case=cockroach.json new file mode 100644 index 00000000000..51b3ae7053d --- /dev/null +++ b/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryArgs-case=cockroach.json @@ -0,0 +1,14 @@ +{ + "TableName": "\"test_models\"", + "ColumnsDecl": "\"created_at\", \"id\", \"int\", \"nid\", \"null_time_ptr\", \"string\", \"updated_at\"", + "Columns": [ + "created_at", + "id", + "int", + "nid", + "null_time_ptr", + "string", + "updated_at" + ], + "Placeholders": "(?, ?, ?, ?, ?, ?, ?),\n(?, gen_random_uuid(), ?, ?, ?, ?, ?),\n(?, gen_random_uuid(), ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, gen_random_uuid(), ?, ?, ?, ?, ?),\n(?, gen_random_uuid(), ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, gen_random_uuid(), ?, ?, ?, ?, ?),\n(?, gen_random_uuid(), ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?)" +} diff --git a/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryArgs-case=testModel.json b/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryArgs-case=testModel.json new file mode 100644 index 00000000000..db458b94e26 --- /dev/null +++ b/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryArgs-case=testModel.json @@ -0,0 +1,14 @@ +{ + "TableName": "\"test_models\"", + "ColumnsDecl": "\"created_at\", \"id\", \"int\", \"nid\", \"null_time_ptr\", \"string\", \"updated_at\"", + "Columns": [ + "created_at", + "id", + "int", + "nid", + "null_time_ptr", + "string", + "updated_at" + ], + "Placeholders": "(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?),\n(?, ?, ?, ?, ?, ?, ?)" +} diff --git a/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryValues-case=testModel-case=cockroach.json b/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryValues-case=testModel-case=cockroach.json new file mode 100644 index 00000000000..c5bdc385c20 --- /dev/null +++ b/oryx/sqlxx/batch/.snapshots/Test_buildInsertQueryValues-case=testModel-case=cockroach.json @@ -0,0 +1,16 @@ +[ + "0001-01-01T00:00:00Z", + "0001-01-01T00:00:00Z", + "string", + 42, + null, + { + "ID": "00000000-0000-0000-0000-000000000000", + "NID": "00000000-0000-0000-0000-000000000000", + "String": "string", + "Int": 42, + "NullTimePtr": null, + "created_at": "0001-01-01T00:00:00Z", + "updated_at": "0001-01-01T00:00:00Z" + } +] diff --git a/oryx/sqlxx/batch/create.go b/oryx/sqlxx/batch/create.go new file mode 100644 index 00000000000..9b0f9d29b79 --- /dev/null +++ b/oryx/sqlxx/batch/create.go @@ -0,0 +1,306 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package batch + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "sort" + "strings" + "time" + + "github.com/jmoiron/sqlx/reflectx" + + "github.com/ory/x/dbal" + + "github.com/gofrs/uuid" + "github.com/pkg/errors" + + "github.com/ory/pop/v6" + + "github.com/ory/x/otelx" + "github.com/ory/x/sqlcon" + + "github.com/ory/x/sqlxx" +) + +type ( + insertQueryArgs struct { + TableName string + ColumnsDecl string + Columns []string + Placeholders string + } + quoter interface { + Quote(key string) string + } + TracerConnection struct { + Tracer *otelx.Tracer + Connection *pop.Connection + } +) + +func 
buildInsertQueryArgs[T any](ctx context.Context, dialect string, mapper *reflectx.Mapper, quoter quoter, models []*T) insertQueryArgs { + var ( + v T + model = pop.NewModel(v, ctx) + + columns []string + quotedColumns []string + placeholders []string + placeholderRow []string + ) + + for _, col := range model.Columns().Cols { + columns = append(columns, col.Name) + placeholderRow = append(placeholderRow, "?") + } + + // We sort for the sole reason that the test snapshots are deterministic. + sort.Strings(columns) + + for _, col := range columns { + quotedColumns = append(quotedColumns, quoter.Quote(col)) + } + + // We generate a list (for every row one) of VALUE statements here that + // will be substituted by their column values later: + // + // (?, ?, ?, ?), + // (?, ?, ?, ?), + // (?, ?, ?, ?) + for _, m := range models { + m := reflect.ValueOf(m) + + pl := make([]string, len(placeholderRow)) + copy(pl, placeholderRow) + + // There is a special case - when using CockroachDB we want to generate + // UUIDs using "gen_random_uuid()" which ends up in a VALUE statement of: + // + // (gen_random_uuid(), ?, ?, ?), + for k := range placeholderRow { + if columns[k] != "id" { + continue + } + + field := mapper.FieldByName(m, columns[k]) + val, ok := field.Interface().(uuid.UUID) + if !ok { + continue + } + + if val == uuid.Nil && dialect == dbal.DriverCockroachDB { + pl[k] = "gen_random_uuid()" + break + } + } + + placeholders = append(placeholders, fmt.Sprintf("(%s)", strings.Join(pl, ", "))) + } + + return insertQueryArgs{ + TableName: quoter.Quote(model.TableName()), + ColumnsDecl: strings.Join(quotedColumns, ", "), + Columns: columns, + Placeholders: strings.Join(placeholders, ",\n"), + } +} + +func buildInsertQueryValues[T any](dialect string, mapper *reflectx.Mapper, columns []string, models []*T, nowFunc func() time.Time) (values []any, err error) { + for _, m := range models { + m := reflect.ValueOf(m) + + now := nowFunc() + // Append model fields to args + for _, c := range columns { + field := mapper.FieldByName(m, c) + + switch c { + case "created_at": + if pop.IsZeroOfUnderlyingType(field.Interface()) { + field.Set(reflect.ValueOf(now)) + } + case "updated_at": + field.Set(reflect.ValueOf(now)) + case "id": + if value, ok := field.Interface().(uuid.UUID); ok && value != uuid.Nil { + break // breaks switch, not for + } else if value, ok := field.Interface().(string); ok && len(value) > 0 { + break // breaks switch, not for + } else if dialect == dbal.DriverCockroachDB { + // This is a special case: + // 1. We're using cockroach + // 2. It's the primary key field ("ID") + // 3. A UUID was not yet set. + // + // If all these conditions meet, the VALUE statement will look as such: + // + // (gen_random_uuid(), ?, ?, ?, ...) + // + // For that reason, we do not add the ID value to the list of arguments, + // because one of the arguments is using a built-in and thus doesn't need a value. + continue // break switch, not for + } + + id, err := uuid.NewV4() + if err != nil { + return nil, err + } + field.Set(reflect.ValueOf(id)) + } + + values = append(values, field.Interface()) + + // Special-handling for *sqlxx.NullTime: mapper.FieldByName sets this to a zero time.Time, + // but we want a nil pointer instead. 
+ if i, ok := field.Interface().(*sqlxx.NullTime); ok { + if time.Time(*i).IsZero() { + field.Set(reflect.Zero(field.Type())) + } + } + } + } + + return values, nil +} + +type createOptions struct { + onConflict string +} + +type option func(*createOptions) + +func OnConflictDoNothing() func(*createOptions) { + return func(o *createOptions) { + o.onConflict = "ON CONFLICT DO NOTHING" + } +} + +// CreateFromSlice is a helper around Create that accepts a slice of models +// instead of a slice of model pointers. +func CreateFromSlice[T any](ctx context.Context, p *TracerConnection, models []T, opts ...option) (err error) { + var ptrs []*T + for k := range models { + ptrs = append(ptrs, &models[k]) + } + return Create(ctx, p, ptrs, opts...) +} + +// Create batch-inserts the given models into the database using a single INSERT statement. +// The models are either all created or none. +func Create[T any](ctx context.Context, p *TracerConnection, models []*T, opts ...option) (err error) { + ctx, span := p.Tracer.Tracer().Start(ctx, "persistence.sql.batch.Create") + defer otelx.End(span, &err) + + if len(models) == 0 { + return nil + } + + options := &createOptions{} + for _, opt := range opts { + opt(options) + } + + var v T + model := pop.NewModel(v, ctx) + + conn := p.Connection + quoter, ok := conn.Dialect.(quoter) + if !ok { + return errors.Errorf("store is not a quoter: %T", conn.Store) + } + + queryArgs := buildInsertQueryArgs(ctx, conn.Dialect.Name(), conn.TX.Mapper, quoter, models) + values, err := buildInsertQueryValues(conn.Dialect.Name(), conn.TX.Mapper, queryArgs.Columns, models, func() time.Time { return time.Now().UTC().Truncate(time.Microsecond) }) + if err != nil { + return err + } + + var returningClause string + if conn.Dialect.Name() != dbal.DriverMySQL { + // PostgreSQL, CockroachDB, SQLite support RETURNING. + returningClause = fmt.Sprintf("RETURNING %s", model.IDField()) + } + + query := conn.Dialect.TranslateSQL(fmt.Sprintf( + "INSERT INTO %s (%s) VALUES\n%s\n%s\n%s", + queryArgs.TableName, + queryArgs.ColumnsDecl, + queryArgs.Placeholders, + options.onConflict, + returningClause, + )) + + rows, err := conn.TX.QueryContext(ctx, query, values...) + if err != nil { + return sqlcon.HandleError(err) + } + defer rows.Close() + + // Hydrate the models from the RETURNING clause. + // + // Databases not supporting RETURNING will just return 0 rows. + count := 0 + for rows.Next() { + if err := rows.Err(); err != nil { + return sqlcon.HandleError(err) + } + + if err := setModelID(rows, pop.NewModel(models[count], ctx)); err != nil { + return err + } + count++ + } + + if err := rows.Err(); err != nil { + return sqlcon.HandleError(err) + } + + if err := rows.Close(); err != nil { + return sqlcon.HandleError(err) + } + + return sqlcon.HandleError(err) +} + +// setModelID was copy & pasted from pop. It basically sets +// the primary key to the given value read from the SQL row. 
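+// A sketch of using the exported batch API defined above (TestModel, tracer,
+// and popConn are assumptions, not part of this package):
+//
+//	conn := &batch.TracerConnection{Tracer: tracer, Connection: popConn}
+//	err := batch.Create(ctx, conn, []*TestModel{m1, m2}, batch.OnConflictDoNothing())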
+func setModelID(row *sql.Rows, model *pop.Model) error { + el := reflect.ValueOf(model.Value).Elem() + fbn := el.FieldByName("ID") + if !fbn.IsValid() { + return errors.New("model does not have a field named id") + } + + pkt, err := model.PrimaryKeyType() + if err != nil { + return errors.WithStack(err) + } + + switch pkt { + case "UUID": + var id uuid.UUID + if err := row.Scan(&id); err != nil { + return errors.WithStack(err) + } + fbn.Set(reflect.ValueOf(id)) + default: + var id interface{} + if err := row.Scan(&id); err != nil { + return errors.WithStack(err) + } + v := reflect.ValueOf(id) + switch fbn.Kind() { + case reflect.Int, reflect.Int64: + fbn.SetInt(v.Int()) + default: + fbn.Set(reflect.ValueOf(id)) + } + } + + return nil +} diff --git a/oryx/sqlxx/expand.go b/oryx/sqlxx/expand.go new file mode 100644 index 00000000000..8f9020d0eae --- /dev/null +++ b/oryx/sqlxx/expand.go @@ -0,0 +1,34 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sqlxx + +// Expandable controls what fields to expand for projects. +type Expandable string + +// Expandables is a list of Expandable values. +type Expandables []Expandable + +// String returns a string representation of the Expandable. +func (e Expandable) String() string { + return string(e) +} + +// ToEager returns the fields used by pop's Eager command. +func (e Expandables) ToEager() []string { + var s []string + for _, e := range e { + s = append(s, e.String()) + } + return s +} + +// Has returns true if the Expandable is in the list. +func (e Expandables) Has(search Expandable) bool { + for _, e := range e { + if e == search { + return true + } + } + return false +} diff --git a/oryx/sqlxx/sqlxx.go b/oryx/sqlxx/sqlxx.go new file mode 100644 index 00000000000..cad35962924 --- /dev/null +++ b/oryx/sqlxx/sqlxx.go @@ -0,0 +1,168 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sqlxx + +import ( + "fmt" + "net/url" + "reflect" + "slices" + "strings" + + "github.com/pkg/errors" +) + +func keys(t any, exclude []string) []string { + tt := reflect.TypeOf(t) + if tt.Kind() == reflect.Pointer { + tt = tt.Elem() + } + ks := make([]string, 0, tt.NumField()) + for i := range tt.NumField() { + f := tt.Field(i) + key, _, _ := strings.Cut(f.Tag.Get("db"), ",") + if key != "" && key != "-" && !slices.Contains(exclude, key) { + ks = append(ks, key) + } + } + return ks +} + +// NamedInsertArguments returns columns and arguments for SQL INSERT statements based on a struct's tags. Does +// not work with nested structs or maps! +// +// type st struct { +// Foo string `db:"foo"` +// Bar string `db:"bar,omitempty"` +// Baz string `db:"-"` +// Zab string +// } +// columns, arguments := NamedInsertArguments(new(st)) +// query := fmt.Sprintf("INSERT INTO foo (%s) VALUES (%s)", columns, arguments) +// // INSERT INTO foo (foo, bar) VALUES (:foo, :bar) +func NamedInsertArguments(t any, exclude ...string) (columns string, arguments string) { + keys := keys(t, exclude) + return strings.Join(keys, ", "), + ":" + strings.Join(keys, ", :") +} + +// NamedUpdateArguments returns columns and arguments for SQL UPDATE statements based on a struct's tags. Does +// not work with nested structs or maps! 
+// +// type st struct { +// Foo string `db:"foo"` +// Bar string `db:"bar,omitempty"` +// Baz string `db:"-"` +// Zab string +// } +// query := fmt.Sprintf("UPDATE foo SET %s", NamedUpdateArguments(new(st))) +// // UPDATE foo SET foo=:foo, bar=:bar +func NamedUpdateArguments(t any, exclude ...string) string { + keys := keys(t, exclude) + statements := make([]string, len(keys)) + + for k, key := range keys { + statements[k] = fmt.Sprintf("%s=:%s", key, key) + } + + return strings.Join(statements, ", ") +} + +func OnConflictDoNothing(dialect string, columnNoop string) string { + if dialect == "mysql" { + return fmt.Sprintf(" ON DUPLICATE KEY UPDATE `%s` = `%s` ", columnNoop, columnNoop) + } else { + return ` ON CONFLICT DO NOTHING ` + } +} + +// ExtractSchemeFromDSN returns the scheme (e.g. `mysql`, `postgres`, etc) component in a DSN string, +// as well as the remaining part of the DSN after the scheme separator. +// It is an error to not have a scheme present. +// This makes sense in the context of a DSN to be able to identify which database is in use. +func ExtractSchemeFromDSN(dsn string) (string, string, error) { + scheme, afterSchemeSeparator, schemeSeparatorFound := strings.Cut(dsn, "://") + if !schemeSeparatorFound { + return "", "", errors.New("invalid DSN: missing scheme separator") + } + if scheme == "" { + return "", "", errors.New("invalid DSN: empty scheme") + } + + return scheme, afterSchemeSeparator, nil +} + +// ExtractDbNameFromDSN returns the database name component in a DSN string. +func ExtractDbNameFromDSN(dsn string) (string, error) { + _, afterScheme, err := ExtractSchemeFromDSN(dsn) + if err != nil { + return "", err + } + + _, afterSlash, slashFound := strings.Cut(afterScheme, "/") + if !slashFound { + return "", nil + } + + dbName, _, _ := strings.Cut(afterSlash, "?") + return dbName, nil +} + +// ReplaceSchemeInDSN replaces the scheme (e.g. `mysql`, `postgres`, etc) in a DSN string with another one. +// This is necessary for example when using `cockroach` as a scheme, but using the postgres driver to connect to the database, +// and this driver only accepts `postgres` as a scheme. +func ReplaceSchemeInDSN(dsn string, newScheme string) (string, error) { + _, afterSchemeSeparator, err := ExtractSchemeFromDSN(dsn) + if err != nil { + return "", errors.WithStack(err) + } + + return newScheme + "://" + afterSchemeSeparator, nil +} + +// DSNRedacted parses a database DSN and returns a redacted form as a string. +// It replaces any password with "xxxxx" just like `url.Redacted()`. +// Only the password is redacted, not the username. +// This function is necessary because MySQL uses a DSN format not compatible with `url.Parse`. +// Additionally and as a consequence of the point above, the scheme is expected to be present and non-empty. +// This function is less strict that `url.Parse` in the case of MySQL. +// It also does not escape any characters in the username, whereas `url.String()`/`url.Redacted` does. +func DSNRedacted(dsn string) (string, error) { + scheme, afterSchemeSeparator, err := ExtractSchemeFromDSN(dsn) + if err != nil { + return "", errors.WithStack(err) + } + + // If this is not MySQL, we simply delegate the work to `url.Parse`. 
+ if scheme != "mysql" { + u, err := url.Parse(dsn) + if err != nil { + return "", errors.WithStack(err) + } + return u.Redacted(), nil + } + + // MySQL has a weird DSN syntax not conforming to a standard URL, of the form: + // `[username[:password]@][protocol[(address)]]/dbname[?param1=value1&...¶mN=valueN]` + // We only need to parse up to `@` in order to redact the password. The rest is left as-is. + + usernamePassword, afterUsernamePassword, usernamePasswordSeparatorFound := strings.Cut(afterSchemeSeparator, "@") + if !usernamePasswordSeparatorFound { + afterUsernamePassword = afterSchemeSeparator + } + + username, password, hasPassword := strings.Cut(usernamePassword, ":") + // We only insert a redacted password in the final result if a password was provided in the input. + // This behavior matches the one of `url.Redacted()`. + if hasPassword { + password = ":xxxxx" + } + + res := scheme + "://" + if usernamePasswordSeparatorFound { + res += username + password + "@" + } + res += afterUsernamePassword + return res, nil +} diff --git a/oryx/sqlxx/types.go b/oryx/sqlxx/types.go new file mode 100644 index 00000000000..5b4bad26b73 --- /dev/null +++ b/oryx/sqlxx/types.go @@ -0,0 +1,588 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sqlxx + +import ( + "database/sql" + "database/sql/driver" + "encoding/json" + "fmt" + "slices" + "strings" + "time" + + "github.com/tidwall/gjson" + + "github.com/pkg/errors" +) + +// Duration represents a JSON and SQL compatible time.Duration. +// swagger:type string +type Duration time.Duration + +// MarshalJSON returns m as the JSON encoding of m. +func (ns Duration) MarshalJSON() ([]byte, error) { + return json.Marshal(time.Duration(ns).String()) +} + +// UnmarshalJSON sets *m to a copy of data. +func (ns *Duration) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + p, err := time.ParseDuration(s) + if err != nil { + return err + } + + *ns = Duration(p) + return nil +} + +// StringSliceJSONFormat represents []string{} which is encoded to/from JSON for SQL storage. +type StringSliceJSONFormat []string + +// Scan implements the Scanner interface. +func (m *StringSliceJSONFormat) Scan(value interface{}) error { + var val string + switch v := value.(type) { + case nil: + *m = StringSliceJSONFormat{} + return nil + case string: + val = v + case []byte: + val = string(v) + default: + return errors.Errorf("cannot scan %#v into StringSliceJSONFormat", value) + } + if len(val) == 0 { + val = "[]" + } + + if parsed := gjson.Parse(val); parsed.Type == gjson.Null { + val = "[]" + } else if !parsed.IsArray() { + return errors.Errorf("expected JSON value to be an array but got type: %s", parsed.Type.String()) + } + + return errors.WithStack(json.Unmarshal([]byte(val), &m)) +} + +// Value implements the driver Valuer interface. +func (m StringSliceJSONFormat) Value() (driver.Value, error) { + if len(m) == 0 { + return "[]", nil + } + + encoded, err := json.Marshal(&m) + return string(encoded), errors.WithStack(err) +} + +// StringSlicePipeDelimiter de/encodes the string slice to/from a SQL string. +type StringSlicePipeDelimiter []string + +// Scan implements the Scanner interface. +func (n *StringSlicePipeDelimiter) Scan(value interface{}) error { + var s sql.NullString + if err := s.Scan(value); err != nil { + return err + } + *n = scanStringSlice('|', s.String) + return nil +} + +// Value implements the driver Valuer interface. 
+func (n StringSlicePipeDelimiter) Value() (driver.Value, error) { + return valueStringSlice('|', n), nil +} + +func scanStringSlice(delimiter rune, value interface{}) []string { + escaped := false + s := fmt.Sprintf("%s", value) + splitted := strings.FieldsFunc(s, func(r rune) bool { + if r == '\\' { + escaped = !escaped + } else if escaped && r != delimiter { + escaped = false + } + return !escaped && r == delimiter + }) + for k, v := range splitted { + splitted[k] = strings.ReplaceAll(v, "\\"+string(delimiter), string(delimiter)) + } + return splitted +} + +func valueStringSlice(delimiter rune, value []string) string { + replace := make([]string, len(value)) + for k, v := range value { + replace[k] = strings.ReplaceAll(v, string(delimiter), "\\"+string(delimiter)) + } + return strings.Join(replace, string(delimiter)) +} + +// NullBool represents a bool that may be null. +// NullBool implements the Scanner interface so +// swagger:type bool +// swagger:model nullBool +type NullBool struct { + Bool bool + Valid bool // Valid is true if Bool is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullBool) Scan(value interface{}) error { + d := sql.NullBool{} + if err := d.Scan(value); err != nil { + return err + } + + ns.Bool = d.Bool + ns.Valid = d.Valid + return nil +} + +// Value implements the driver Valuer interface. +func (ns NullBool) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return ns.Bool, nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (ns NullBool) MarshalJSON() ([]byte, error) { + if !ns.Valid { + return []byte("null"), nil + } + return json.Marshal(ns.Bool) +} + +// UnmarshalJSON sets *m to a copy of data. +func (ns *NullBool) UnmarshalJSON(data []byte) error { + if ns == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + if len(data) == 0 || string(data) == "null" { + return nil + } + ns.Valid = true + return errors.WithStack(json.Unmarshal(data, &ns.Bool)) +} + +// FalsyNullBool represents a bool that may be null. +// It JSON decodes to false if null. +// +// swagger:type bool +// swagger:model falsyNullBool +type FalsyNullBool struct { + Bool bool + Valid bool // Valid is true if Bool is not NULL +} + +// Scan implements the Scanner interface. +func (ns *FalsyNullBool) Scan(value interface{}) error { + d := sql.NullBool{} + if err := d.Scan(value); err != nil { + return err + } + + ns.Bool = d.Bool + ns.Valid = d.Valid + return nil +} + +// Value implements the driver Valuer interface. +func (ns FalsyNullBool) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return ns.Bool, nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (ns FalsyNullBool) MarshalJSON() ([]byte, error) { + if !ns.Valid { + return []byte("false"), nil + } + return json.Marshal(ns.Bool) +} + +// UnmarshalJSON sets *m to a copy of data. +func (ns *FalsyNullBool) UnmarshalJSON(data []byte) error { + if ns == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + if len(data) == 0 || string(data) == "null" { + return nil + } + ns.Valid = true + return errors.WithStack(json.Unmarshal(data, &ns.Bool)) +} + +// swagger:type string +// swagger:model nullString +type NullString string + +// MarshalJSON returns m as the JSON encoding of m. +func (ns NullString) MarshalJSON() ([]byte, error) { + return json.Marshal(string(ns)) +} + +// UnmarshalJSON sets *m to a copy of data. 
+func (ns *NullString) UnmarshalJSON(data []byte) error { + if ns == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + if len(data) == 0 { + return nil + } + return errors.WithStack(json.Unmarshal(data, (*string)(ns))) +} + +// Scan implements the Scanner interface. +func (ns *NullString) Scan(value interface{}) error { + var v sql.NullString + if err := (&v).Scan(value); err != nil { + return err + } + *ns = NullString(v.String) + return nil +} + +// Value implements the driver Valuer interface. +func (ns NullString) Value() (driver.Value, error) { + if len(ns) == 0 { + return sql.NullString{}.Value() + } + return sql.NullString{Valid: true, String: string(ns)}.Value() +} + +// String implements the Stringer interface. +func (ns NullString) String() string { + return string(ns) +} + +// NullTime implements sql.NullTime functionality. +// +// swagger:model nullTime +// required: false +type NullTime time.Time + +// Scan implements the Scanner interface. +func (ns *NullTime) Scan(value interface{}) error { + var v sql.NullTime + if err := (&v).Scan(value); err != nil { + return err + } + *ns = NullTime(v.Time) + return nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (ns NullTime) MarshalJSON() ([]byte, error) { + var t *time.Time + if !time.Time(ns).IsZero() { + tt := time.Time(ns) + t = &tt + } + return json.Marshal(t) +} + +// UnmarshalJSON sets *m to a copy of data. +func (ns *NullTime) UnmarshalJSON(data []byte) error { + var t time.Time + if err := json.Unmarshal(data, &t); err != nil { + return err + } + *ns = NullTime(t) + return nil +} + +// Value implements the driver Valuer interface. +func (ns NullTime) Value() (driver.Value, error) { + return sql.NullTime{Valid: !time.Time(ns).IsZero(), Time: time.Time(ns)}.Value() +} + +// MapStringInterface represents a map[string]interface that works well with JSON, SQL, and Swagger. +type MapStringInterface map[string]interface{} + +// Scan implements the Scanner interface. +func (n *MapStringInterface) Scan(value interface{}) error { + v := fmt.Sprintf("%s", value) + if len(v) == 0 { + return nil + } + return errors.WithStack(json.Unmarshal([]byte(v), n)) +} + +// Value implements the driver Valuer interface. +func (n MapStringInterface) Value() (driver.Value, error) { + value, err := json.Marshal(n) + if err != nil { + return nil, errors.WithStack(err) + } + return string(value), nil +} + +// JSONArrayRawMessage represents a json.RawMessage which only accepts arrays that works well with JSON, SQL, and Swagger. +type JSONArrayRawMessage json.RawMessage + +// Scan implements the Scanner interface. +func (m *JSONArrayRawMessage) Scan(value interface{}) error { + val := fmt.Sprintf("%s", value) + if len(val) == 0 { + val = "[]" + } + + if parsed := gjson.Parse(val); parsed.Type == gjson.Null { + val = "[]" + } else if !parsed.IsArray() { + return errors.Errorf("expected JSON value to be an array but got type: %s", parsed.Type.String()) + } + + *m = []byte(val) + return nil +} + +// Value implements the driver Valuer interface. +func (m JSONArrayRawMessage) Value() (driver.Value, error) { + if len(m) == 0 { + return "[]", nil + } + + if parsed := gjson.ParseBytes(m); parsed.Type == gjson.Null { + return "[]", nil + } else if !parsed.IsArray() { + return nil, errors.Errorf("expected JSON value to be an array but got type: %s", parsed.Type.String()) + } + + return string(m), nil +} + +// JSONRawMessage represents a json.RawMessage that works well with JSON, SQL, and Swagger. 
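+//
+// A rough sketch of the SQL round-trip behavior (values are illustrative):
+//
+//	var m JSONRawMessage
+//	_ = m.Scan(nil)             // m == JSONRawMessage("null"): SQL NULL maps to JSON null
+//	_ = m.Scan(`{"foo":"bar"}`) // m holds the raw JSON bytes unchanged
+//	v, _ := m.Value()           // v == `{"foo":"bar"}`; an empty message is stored as "null"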
+type JSONRawMessage json.RawMessage + +// Scan implements the Scanner interface. +func (m *JSONRawMessage) Scan(value interface{}) error { + switch v := value.(type) { + case []byte: + *m = slices.Clone(v) + case string: + *m = JSONRawMessage(v) + case nil: + *m = JSONRawMessage("null") + default: + return errors.Errorf("cannot scan %T into JSONRawMessage", value) + } + return nil +} + +// Value implements the driver Valuer interface. +func (m JSONRawMessage) Value() (driver.Value, error) { + if len(m) == 0 { + return "null", nil + } + return string(m), nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (m JSONRawMessage) MarshalJSON() ([]byte, error) { + if len(m) == 0 { + return []byte("null"), nil + } + return m, nil +} + +// UnmarshalJSON sets *m to a copy of data. +func (m *JSONRawMessage) UnmarshalJSON(data []byte) error { + if m == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + *m = append((*m)[0:0], data...) + return nil +} + +// NullJSONRawMessage represents a json.RawMessage that works well with JSON, SQL, and Swagger and is NULLable- +// +// swagger:model nullJsonRawMessage +type NullJSONRawMessage json.RawMessage + +// Scan implements the Scanner interface. +func (m *NullJSONRawMessage) Scan(value any) error { + return (*JSONRawMessage)(m).Scan(value) +} + +// Value implements the driver Valuer interface. +func (m NullJSONRawMessage) Value() (driver.Value, error) { + if len(m) == 0 || string(m) == "null" { + return nil, nil + } + return string(m), nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (m NullJSONRawMessage) MarshalJSON() ([]byte, error) { + if len(m) == 0 { + return []byte("null"), nil + } + return m, nil +} + +// UnmarshalJSON sets *m to a copy of data. +func (m *NullJSONRawMessage) UnmarshalJSON(data []byte) error { + if m == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + *m = append((*m)[0:0], data...) + return nil +} + +// JSONScan is a generic helper for retrieving a SQL JSON-encoded value. +func JSONScan(dst, value any) error { + // Note: raw is a string (not []byte) because the MySQL driver reuses byte slices across scans. + // Using strings avoids the need to manually copy the byte slice. + var raw string + switch v := value.(type) { + case nil: + raw = "null" + case string: + raw = v + case []byte: + raw = string(v) + default: + return fmt.Errorf("unable to scan type %T as JSON into %T", value, dst) + } + if err := json.Unmarshal([]byte(raw), dst); err != nil { + return fmt.Errorf("unable to decode JSON payload into %T: %w", dst, err) + } + return nil +} + +// NullInt64 represents an int64 that may be null. +// swagger:model nullInt64 +type NullInt64 struct { + Int int64 + Valid bool // Valid is true if Duration is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullInt64) Scan(value interface{}) error { + d := sql.NullInt64{} + if err := d.Scan(value); err != nil { + return err + } + + ns.Int = d.Int64 + ns.Valid = d.Valid + return nil +} + +// Value implements the driver Valuer interface. +func (ns NullInt64) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return ns.Int, nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (ns NullInt64) MarshalJSON() ([]byte, error) { + if !ns.Valid { + return []byte("null"), nil + } + return json.Marshal(ns.Int) +} + +// UnmarshalJSON sets *m to a copy of data. 
+func (ns *NullInt64) UnmarshalJSON(data []byte) error { + if ns == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + if len(data) == 0 || string(data) == "null" { + return nil + } + ns.Valid = true + return errors.WithStack(json.Unmarshal(data, &ns.Int)) +} + +// NullDuration represents a nullable JSON and SQL compatible time.Duration. +// +// swagger:type string +// swagger:model nullDuration +type NullDuration struct { + Duration time.Duration + Valid bool +} + +// Scan implements the Scanner interface. +func (ns *NullDuration) Scan(value interface{}) error { + d := sql.NullInt64{} + if err := d.Scan(value); err != nil { + return err + } + + ns.Duration = time.Duration(d.Int64) + ns.Valid = d.Valid + return nil +} + +// Value implements the driver Valuer interface. +func (ns NullDuration) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return int64(ns.Duration), nil +} + +// MarshalJSON returns m as the JSON encoding of m. +func (ns NullDuration) MarshalJSON() ([]byte, error) { + if !ns.Valid { + return []byte("null"), nil + } + + return json.Marshal(ns.Duration.String()) +} + +// UnmarshalJSON sets *m to a copy of data. +func (ns *NullDuration) UnmarshalJSON(data []byte) error { + if ns == nil { + return errors.New("json.RawMessage: UnmarshalJSON on nil pointer") + } + + if len(data) == 0 || string(data) == "null" { + return nil + } + + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + p, err := time.ParseDuration(s) + if err != nil { + return err + } + + ns.Duration = p + ns.Valid = true + return nil +} + +func (ns Duration) IsZero() bool { return ns == 0 } +func (m StringSliceJSONFormat) IsZero() bool { return len(m) == 0 } +func (n StringSlicePipeDelimiter) IsZero() bool { return len(n) == 0 } +func (ns NullBool) IsZero() bool { return !ns.Valid || !ns.Bool } +func (ns FalsyNullBool) IsZero() bool { return !ns.Valid || !ns.Bool } +func (ns NullString) IsZero() bool { return len(ns) == 0 } +func (ns NullTime) IsZero() bool { return time.Time(ns).IsZero() } +func (n MapStringInterface) IsZero() bool { return len(n) == 0 } +func (m JSONArrayRawMessage) IsZero() bool { return len(m) == 0 || string(m) == "[]" } +func (m JSONRawMessage) IsZero() bool { return len(m) == 0 || string(m) == "null" } +func (m NullJSONRawMessage) IsZero() bool { return len(m) == 0 || string(m) == "null" } +func (ns NullInt64) IsZero() bool { return !ns.Valid || ns.Int == 0 } +func (ns NullDuration) IsZero() bool { return !ns.Valid || ns.Duration == 0 } diff --git a/oryx/stringslice/filter.go b/oryx/stringslice/filter.go new file mode 100644 index 00000000000..2ebbee64b94 --- /dev/null +++ b/oryx/stringslice/filter.go @@ -0,0 +1,30 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringslice + +import ( + "slices" + "strings" + "unicode" +) + +// Filter applies the provided filter function and removes all items from the slice for which the filter function returns true. 
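+//
+// For example (illustrative):
+//
+//	Filter([]string{"a", "", "b"}, func(s string) bool { return s == "" }) // []string{"a", "b"}
+//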
+// Deprecated: use slices.DeleteFunc instead (changes semantics: the original slice is modified) +func Filter(values []string, filter func(string) bool) []string { + return slices.DeleteFunc(slices.Clone(values), filter) +} + +// TrimEmptyFilter applies the strings.TrimFunc function and removes all empty strings +// Deprecated: use slices.DeleteFunc instead (changes semantics: the original slice is modified) +func TrimEmptyFilter(values []string, trim func(rune) bool) (ret []string) { + return Filter(values, func(value string) bool { + return strings.TrimFunc(value, trim) == "" + }) +} + +// TrimSpaceEmptyFilter applies the strings.TrimSpace function and removes all empty strings +// Deprecated: use slices.DeleteFunc with strings.TrimSpace instead (changes semantics: the original slice is modified) +func TrimSpaceEmptyFilter(values []string) []string { + return TrimEmptyFilter(values, unicode.IsSpace) +} diff --git a/oryx/stringslice/has.go b/oryx/stringslice/has.go new file mode 100644 index 00000000000..e863fa84b1f --- /dev/null +++ b/oryx/stringslice/has.go @@ -0,0 +1,22 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringslice + +import ( + "slices" + "strings" +) + +// Has returns true if the needle is in the haystack (case-sensitive) +// Deprecated: use slices.Contains instead +func Has(haystack []string, needle string) bool { + return slices.Contains(haystack, needle) +} + +// HasI returns true if the needle is in the haystack (case-insensitive) +func HasI(haystack []string, needle string) bool { + return slices.ContainsFunc(haystack, func(value string) bool { + return strings.EqualFold(value, needle) + }) +} diff --git a/oryx/stringslice/merge.go b/oryx/stringslice/merge.go new file mode 100644 index 00000000000..fe0c887b7bb --- /dev/null +++ b/oryx/stringslice/merge.go @@ -0,0 +1,12 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringslice + +import "slices" + +// Merge merges several string slices into one. +// Deprecated: use slices.Concat instead +func Merge(parts ...[]string) []string { + return slices.Concat(parts...) +} diff --git a/oryx/stringslice/reverse.go b/oryx/stringslice/reverse.go new file mode 100644 index 00000000000..ca205500693 --- /dev/null +++ b/oryx/stringslice/reverse.go @@ -0,0 +1,14 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringslice + +import "slices" + +// Reverse reverses the order of a string slice +// Deprecated: use slices.Reverse instead (changes semantics) +func Reverse(s []string) []string { + c := slices.Clone(s) + slices.Reverse(c) + return c +} diff --git a/oryx/stringslice/unique.go b/oryx/stringslice/unique.go new file mode 100644 index 00000000000..7a649d45f8f --- /dev/null +++ b/oryx/stringslice/unique.go @@ -0,0 +1,20 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringslice + +// Unique returns the given string slice with unique values, preserving order. +// Consider using slices.Compact with slices.Sort instead when you don't care about order. 
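+//
+// For example (illustrative):
+//
+//	Unique([]string{"foo", "bar", "foo"}) // []string{"foo", "bar"}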
+func Unique(i []string) []string { + u := make([]string, 0, len(i)) + m := make(map[string]struct{}, len(i)) + + for _, val := range i { + if _, ok := m[val]; !ok { + m[val] = struct{}{} + u = append(u, val) + } + } + + return u +} diff --git a/oryx/stringsx/case.go b/oryx/stringsx/case.go new file mode 100644 index 00000000000..45048b319bd --- /dev/null +++ b/oryx/stringsx/case.go @@ -0,0 +1,26 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +import "unicode" + +// ToLowerInitial converts a string's first character to lower case. +func ToLowerInitial(s string) string { + if s == "" { + return "" + } + a := []rune(s) + a[0] = unicode.ToLower(a[0]) + return string(a) +} + +// ToUpperInitial converts a string's first character to upper case. +func ToUpperInitial(s string) string { + if s == "" { + return "" + } + a := []rune(s) + a[0] = unicode.ToUpper(a[0]) + return string(a) +} diff --git a/oryx/stringsx/coalesce.go b/oryx/stringsx/coalesce.go new file mode 100644 index 00000000000..2dc4b8e38ea --- /dev/null +++ b/oryx/stringsx/coalesce.go @@ -0,0 +1,12 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +import "cmp" + +// Coalesce returns the first non-empty string value +// Deprecated: use cmp.Or instead +func Coalesce(str ...string) string { + return cmp.Or(str...) +} diff --git a/oryx/stringsx/default.go b/oryx/stringsx/default.go new file mode 100644 index 00000000000..1eac9f0e398 --- /dev/null +++ b/oryx/stringsx/default.go @@ -0,0 +1,11 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +import "cmp" + +// Deprecated: use cmp.Or instead +func DefaultIfEmpty(s string, defaultValue string) string { + return cmp.Or(s, defaultValue) +} diff --git a/oryx/stringsx/ptr.go b/oryx/stringsx/ptr.go new file mode 100644 index 00000000000..990aa3f8e58 --- /dev/null +++ b/oryx/stringsx/ptr.go @@ -0,0 +1,9 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +// Deprecated: use pointerx.Ptr instead +func GetPointer(s string) *string { + return &s +} diff --git a/oryx/stringsx/split.go b/oryx/stringsx/split.go new file mode 100644 index 00000000000..132d20b25f3 --- /dev/null +++ b/oryx/stringsx/split.go @@ -0,0 +1,16 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +import "strings" + +// Splitx is a special case of strings.Split +// which returns an empty slice if the string is empty +func Splitx(s, sep string) []string { + if s == "" { + return []string{} + } + + return strings.Split(s, sep) +} diff --git a/oryx/stringsx/switch_case.go b/oryx/stringsx/switch_case.go new file mode 100644 index 00000000000..dc5cb7fef71 --- /dev/null +++ b/oryx/stringsx/switch_case.go @@ -0,0 +1,90 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +import ( + "fmt" + "slices" + "strings" +) + +type ( + RegisteredCases struct { + cases []string + actual string + } + errUnknownCase struct { + *RegisteredCases + } + RegisteredPrefixes struct { + prefixes []string + actual string + } + errUnknownPrefix struct { + *RegisteredPrefixes + } +) + +var ( + ErrUnknownCase = errUnknownCase{} + ErrUnknownPrefix = errUnknownPrefix{} +) + +func SwitchExact(actual string) *RegisteredCases { + return &RegisteredCases{ + actual: actual, + } +} + +func SwitchPrefix(actual string) *RegisteredPrefixes { + return &RegisteredPrefixes{ + actual: actual, + } +} + +func (r 
*RegisteredCases) AddCase(cases ...string) bool { + r.cases = append(r.cases, cases...) + return slices.Contains(cases, r.actual) +} + +func (r *RegisteredPrefixes) HasPrefix(prefixes ...string) bool { + r.prefixes = append(r.prefixes, prefixes...) + return slices.ContainsFunc(prefixes, func(s string) bool { + return strings.HasPrefix(r.actual, s) + }) +} + +func (r *RegisteredCases) String() string { + return "[" + strings.Join(r.cases, ", ") + "]" +} + +func (r *RegisteredPrefixes) String() string { + return "[" + strings.Join(r.prefixes, ", ") + "]" +} + +func (r *RegisteredCases) ToUnknownCaseErr() error { + return errUnknownCase{r} +} + +func (r *RegisteredPrefixes) ToUnknownPrefixErr() error { + return errUnknownPrefix{r} +} + +func (e errUnknownCase) Error() string { + return fmt.Sprintf("expected one of %s but got %s", e.String(), e.actual) +} + +func (e errUnknownCase) Is(err error) bool { + _, ok := err.(errUnknownCase) + return ok +} + +func (e errUnknownPrefix) Error() string { + return fmt.Sprintf("expected %s to have one of the prefixes %s", e.actual, e.String()) +} + +func (e errUnknownPrefix) Is(err error) bool { + _, ok := err.(errUnknownPrefix) + return ok +} diff --git a/oryx/stringsx/truncate.go b/oryx/stringsx/truncate.go new file mode 100644 index 00000000000..86c102164da --- /dev/null +++ b/oryx/stringsx/truncate.go @@ -0,0 +1,21 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package stringsx + +import "unicode/utf8" + +// TruncateByteLen returns string truncated at the end with the length specified +func TruncateByteLen(s string, length int) string { + if length <= 0 || len(s) <= length { + return s + } + + res := s[:length] + + // in case we cut in the middle of an utf8 rune, we have to remove the last byte as well until it fits + for !utf8.ValidString(res) { + res = res[:len(res)-1] + } + return res +} diff --git a/oryx/swaggerx/error.go b/oryx/swaggerx/error.go new file mode 100644 index 00000000000..e825145f775 --- /dev/null +++ b/oryx/swaggerx/error.go @@ -0,0 +1,35 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package swaggerx + +import ( + "encoding/json" + "errors" + "fmt" + "net/http" + + "github.com/go-openapi/runtime" +) + +func FormatSwaggerError(err error) string { + var e *runtime.APIError + if errors.As(err, &e) { + body, err := json.MarshalIndent(e, "\t", " ") + if err != nil { + body = []byte(fmt.Sprintf("%+v", e.Response)) + } + + switch e.Code { + case http.StatusForbidden: + return fmt.Sprintf("The service responded with status code 403 indicating that you lack permission to access the resource. The full error details are:\n\n\t%s\n\n", body) + case http.StatusUnauthorized: + return fmt.Sprintf("The service responded with status code 401 indicating that you forgot to include credentials (e.g. token, TLS certificate, ...) in the HTTP request. The full error details are:\n\n\t%s\n\n", body) + case http.StatusNotFound: + return fmt.Sprintf("The service responded with status code 404 indicating that the resource does not exist. Check that the URL is correct (are you using the correct admin/public/... endpoint?) and that the resource exists. 
The full error details are:\n\n\t%s\n\n", body) + default: + return fmt.Sprintf("Unable to complete operation %s because the server responded with status code %d:\n\n\t%s\n", e.OperationName, e.Code, body) + } + } + return fmt.Sprintf("%+v", err) +} diff --git a/oryx/templatex/regex.go b/oryx/templatex/regex.go new file mode 100644 index 00000000000..32a21780ecb --- /dev/null +++ b/oryx/templatex/regex.go @@ -0,0 +1,137 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package compiler offers a regexp compiler which compiles regex templates to regexp.Regexp +// +// reg, err := compiler.CompileRegex("foo:bar.baz:<[0-9]{2,10}>", '<', '>') +// // if err != nil ... +// reg.MatchString("foo:bar.baz:123") +// +// reg, err := compiler.CompileRegex("/foo/bar/url/{[a-z]+}", '{', '}') +// // if err != nil ... +// reg.MatchString("/foo/bar/url/abz") +// +// This package is adapts github.com/gorilla/mux/regexp.go + +package templatex + +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license as follows: + +//Copyright (c) 2012 Rodrigo Moraes. All rights reserved. +// +//Redistribution and use in source and binary forms, with or without +//modification, are permitted provided that the following conditions are +//met: +// +//* Redistributions of source code must retain the above copyright +//notice, this list of conditions and the following disclaimer. +//* Redistributions in binary form must reproduce the above +//copyright notice, this list of conditions and the following disclaimer +//in the documentation and/or other materials provided with the +//distribution. +//* Neither the name of Google Inc. nor the names of its +//contributors may be used to endorse or promote products derived from +//this software without specific prior written permission. +// +//THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +//"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +//LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +//A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +//OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +//SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +//LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +//DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +//THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +//(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +//OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import ( + "bytes" + "fmt" + "regexp" + + "github.com/pkg/errors" +) + +// delimiterIndices returns the first level delimiter indices from a string. +// It returns an error in case of unbalanced delimiters. +func delimiterIndices(s string, delimiterStart, delimiterEnd byte) ([]int, error) { + var level, idx int + idxs := make([]int, 0) + for i := 0; i < len(s); i++ { + switch s[i] { + case delimiterStart: + if level++; level == 1 { + idx = i + } + case delimiterEnd: + if level--; level == 0 { + idxs = append(idxs, idx, i+1) + } else if level < 0 { + return nil, errors.Errorf("unbalanced braces in: %s", s) + } + } + } + + if level != 0 { + return nil, errors.Errorf("unbalanced braces in: %s", s) + } + + return idxs, nil +} + +// CompileRegex parses a template and returns a Regexp. +// +// You can define your own delimiters. It is e.g. 
common to use curly braces {} but I recommend using characters +// which have no special meaning in Regex, e.g.: <, > +// +// reg, err := templatex.CompileRegex("foo:bar.baz:<[0-9]{2,10}>", '<', '>') +// // if err != nil ... +// reg.MatchString("foo:bar.baz:123") +func CompileRegex(tpl string, delimiterStart, delimiterEnd byte) (*regexp.Regexp, error) { + // Check if it is well-formed. + idxs, errBraces := delimiterIndices(tpl, delimiterStart, delimiterEnd) + if errBraces != nil { + return nil, errBraces + } + varsR := make([]*regexp.Regexp, len(idxs)/2) + pattern := bytes.NewBufferString("") + if err := pattern.WriteByte('^'); err != nil { + return nil, errors.WithStack(err) + } + + var end int + var err error + for i := 0; i < len(idxs); i += 2 { + // Set all values we are interested in. + raw := tpl[end:idxs[i]] + end = idxs[i+1] + patt := tpl[idxs[i]+1 : end-1] + // Build the regexp pattern. + varIdx := i / 2 + fmt.Fprintf(pattern, "%s(%s)", regexp.QuoteMeta(raw), patt) + varsR[varIdx], err = regexp.Compile(fmt.Sprintf("^%s$", patt)) + if err != nil { + return nil, errors.WithStack(err) + } + } + + // Add the remaining. + raw := tpl[end:] + if _, err := pattern.WriteString(regexp.QuoteMeta(raw)); err != nil { + return nil, errors.WithStack(err) + } + if err := pattern.WriteByte('$'); err != nil { + return nil, errors.WithStack(err) + } + + // Compile full regexp. + reg, errCompile := regexp.Compile(pattern.String()) + if errCompile != nil { + return nil, errors.WithStack(errCompile) + } + + return reg, nil +} diff --git a/oryx/testingx/helpers.go b/oryx/testingx/helpers.go new file mode 100644 index 00000000000..17d57b24b6d --- /dev/null +++ b/oryx/testingx/helpers.go @@ -0,0 +1,41 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +// Package testingx contains helper functions and extensions used when writing tests in Ory. +package testingx + +import ( + "io" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/require" +) + +// ReadAll reads all bytes from the reader and returns them as a byte slice. +func ReadAll(t testing.TB, r io.Reader) []byte { + body, err := io.ReadAll(r) + require.NoError(t, err) + return body +} + +// ReadAllString reads all bytes from the reader and returns them as a string. +func ReadAllString(t testing.TB, r io.Reader) string { + return string(ReadAll(t, r)) +} + +// RepoRootPath returns the absolute path of the closest parent directory that has a go.mod file relative to the caller. 
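+// A minimal usage sketch inside a test (the fixture path is hypothetical):
+//
+//	func TestSomething(t *testing.T) {
+//		fixture := filepath.Join(RepoRootPath(t), "internal", "testdata", "fixture.json")
+//		_ = fixture // an absolute path that is stable regardless of the test's package location
+//	}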
+func RepoRootPath(t require.TestingT) (repoRoot string) { + _, fpath, _, _ := runtime.Caller(1) + for dir := filepath.Dir(filepath.FromSlash(fpath)); dir != filepath.Dir(dir); dir = filepath.Dir(dir) { + modPath := filepath.Join(dir, "go.mod") + if _, err := os.Stat(modPath); err == nil { + repoRoot = dir + break + } + } + require.NotEmptyf(t, repoRoot, "could not determine repo root using path: %q", fpath) + return repoRoot +} diff --git a/oryx/tlsx/cert.go b/oryx/tlsx/cert.go new file mode 100644 index 00000000000..aa0a28fa201 --- /dev/null +++ b/oryx/tlsx/cert.go @@ -0,0 +1,401 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package tlsx + +import ( + "bytes" + "context" + "crypto" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/rand" + "crypto/rsa" + "crypto/tls" + "crypto/x509" + "crypto/x509/pkix" + "encoding/base64" + "encoding/pem" + "fmt" + "io" + "math/big" + "os" + "path/filepath" + "slices" + "sync/atomic" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + + "github.com/ory/x/watcherx" +) + +// ErrNoCertificatesConfigured is returned when no TLS configuration was found. +var ErrNoCertificatesConfigured = errors.New("no tls configuration was found") + +// ErrInvalidCertificateConfiguration is returned when an invalid TLS configuration was found. +var ErrInvalidCertificateConfiguration = errors.New("tls configuration is invalid") + +// HTTPSCertificate returns loads a HTTP over TLS Certificate by looking at environment variables. +func HTTPSCertificate() ([]tls.Certificate, error) { + prefix := "HTTPS_TLS" + return Certificate( + os.Getenv(prefix+"_CERT"), os.Getenv(prefix+"_KEY"), + os.Getenv(prefix+"_CERT_PATH"), os.Getenv(prefix+"_KEY_PATH"), + ) +} + +// HTTPSCertificateHelpMessage returns a help message for configuring HTTP over TLS Certificates. +func HTTPSCertificateHelpMessage() string { + return CertificateHelpMessage("HTTPS_TLS") +} + +// CertificateHelpMessage returns a help message for configuring TLS Certificates. +func CertificateHelpMessage(prefix string) string { + return `- ` + prefix + `_CERT_PATH: The path to the TLS certificate (pem encoded). + Example: ` + prefix + `_CERT_PATH=~/cert.pem + +- ` + prefix + `_KEY_PATH: The path to the TLS private key (pem encoded). + Example: ` + prefix + `_KEY_PATH=~/key.pem + +- ` + prefix + `_CERT: Base64 encoded (without padding) string of the TLS certificate (PEM encoded) to be used for HTTP over TLS (HTTPS). + Example: ` + prefix + `_CERT="-----BEGIN CERTIFICATE-----\nMIIDZTCCAk2gAwIBAgIEV5xOtDANBgkqhkiG9w0BAQ0FADA0MTIwMAYDVQQDDClP..." + +- ` + prefix + `_KEY: Base64 encoded (without padding) string of the private key (PEM encoded) to be used for HTTP over TLS (HTTPS). + Example: ` + prefix + `_KEY="-----BEGIN ENCRYPTED PRIVATE KEY-----\nMIIFDjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDg..." +` +} + +// CertificateFromBase64 loads a TLS certificate from a base64-encoded string of +// the PEM representations of the cert and key. 
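+//
+// A minimal sketch, assuming HTTPS_TLS_CERT and HTTPS_TLS_KEY hold base64-encoded PEM data:
+//
+//	cert, err := CertificateFromBase64(os.Getenv("HTTPS_TLS_CERT"), os.Getenv("HTTPS_TLS_KEY"))
+//	if err != nil {
+//		// handle the error
+//	}
+//	cfg := &tls.Config{Certificates: []tls.Certificate{cert}}
+//	_ = cfg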
+func CertificateFromBase64(certBase64, keyBase64 string) (tls.Certificate, error) { + certPEM, err := base64.StdEncoding.DecodeString(certBase64) + if err != nil { + return tls.Certificate{}, fmt.Errorf("unable to base64 decode the TLS certificate: %v", err) + } + keyPEM, err := base64.StdEncoding.DecodeString(keyBase64) + if err != nil { + return tls.Certificate{}, fmt.Errorf("unable to base64 decode the TLS private key: %v", err) + } + cert, err := tls.X509KeyPair(certPEM, keyPEM) + if err != nil { + return tls.Certificate{}, fmt.Errorf("unable to load X509 key pair: %v", err) + } + return cert, nil +} + +// [deprecated] Certificate returns a TLS Certificate by looking at its +// arguments. If both certPEMBase64 and keyPEMBase64 are not empty and contain +// base64-encoded PEM representations of a cert and key, respectively, that key +// pair is returned. Otherwise, if certPath and keyPath point to PEM files, the +// key pair is loaded from those. Returns ErrNoCertificatesConfigured if all +// arguments are empty, and ErrInvalidCertificateConfiguration if the arguments +// are inconsistent. +// +// This function is deprecated. Use CertificateFromBase64 or GetCertificate +// instead. +func Certificate( + certPEMBase64, keyPEMBase64 string, + certPath, keyPath string, +) ([]tls.Certificate, error) { + if certPEMBase64 == "" && keyPEMBase64 == "" && certPath == "" && keyPath == "" { + return nil, errors.WithStack(ErrNoCertificatesConfigured) + } + + if certPEMBase64 != "" && keyPEMBase64 != "" { + cert, err := CertificateFromBase64(certPEMBase64, keyPEMBase64) + if err != nil { + return nil, errors.WithStack(err) + } + return []tls.Certificate{cert}, nil + } + + if certPath != "" && keyPath != "" { + cert, err := tls.LoadX509KeyPair(certPath, keyPath) + if err != nil { + return nil, fmt.Errorf("unable to load X509 key pair from files: %v", err) + } + return []tls.Certificate{cert}, nil + } + + return nil, errors.WithStack(ErrInvalidCertificateConfiguration) +} + +type CertFunc = func(*tls.ClientHelloInfo) (*tls.Certificate, error) + +// GetCertificate returns a function for use with +// "net/tls".Config.GetCertificate. +// +// The certificate and private key are read from the specified filesystem paths. +// The certificate file is watched for changes, upon which the cert+key are +// reloaded in the background. Errors during reloading are deduplicated and +// reported through the errs channel if it is not nil. When the provided context +// is canceled, background reloading stops and the errs channel is closed. +// +// The returned function always yields the latest successfully loaded +// certificate; ClientHelloInfo is unused. +func GetCertificate( + ctx context.Context, + certPath, keyPath string, + errs chan<- error, +) (CertFunc, error) { + if certPath == "" || keyPath == "" { + return nil, errors.WithStack(ErrNoCertificatesConfigured) + } + cert, err := tls.LoadX509KeyPair(certPath, keyPath) + if err != nil { + return nil, errors.WithStack(fmt.Errorf("unable to load X509 key pair from files: %v", err)) + } + var store atomic.Value + store.Store(&cert) + + events := make(chan watcherx.Event) + // The cert could change without the key changing, but not the other way around. + // Hence, we only watch the cert. 
+ _, err = watcherx.WatchFile(ctx, certPath, events) + if err != nil { + return nil, errors.WithStack(err) + } + go func() { + if errs != nil { + defer close(errs) + } + var lastReportedError string + for { + select { + case <-ctx.Done(): + return + + case event := <-events: + var err error + switch event := event.(type) { + case *watcherx.ChangeEvent: + var cert tls.Certificate + cert, err = tls.LoadX509KeyPair(certPath, keyPath) + if err == nil { + store.Store(&cert) + lastReportedError = "" + continue + } + err = fmt.Errorf("unable to load X509 key pair from files: %v", err) + + case *watcherx.ErrorEvent: + err = fmt.Errorf("file watch: %v", event) + default: + continue + } + + if err.Error() == lastReportedError { // same message as before: don't spam the error channel + continue + } + // fresh error + select { + case errs <- errors.WithStack(err): + lastReportedError = err.Error() + case <-time.After(500 * time.Millisecond): + } + } + } + }() + + return func(*tls.ClientHelloInfo) (*tls.Certificate, error) { + if cert, ok := store.Load().(*tls.Certificate); ok { + return cert, nil + } + return nil, errors.WithStack(ErrNoCertificatesConfigured) + }, nil +} + +// PublicKey returns the public key for a given private key, or nil. +func PublicKey(key crypto.PrivateKey) interface{ Equal(x crypto.PublicKey) bool } { + switch k := key.(type) { + case *rsa.PrivateKey: + return &k.PublicKey + case *ecdsa.PrivateKey: + return &k.PublicKey + case ed25519.PrivateKey: + return k.Public().(ed25519.PublicKey) + default: + return nil + } +} + +// CreateSelfSignedTLSCertificate creates a self-signed TLS certificate. +func CreateSelfSignedTLSCertificate(key interface{}, opts ...CertificateOpts) (*tls.Certificate, error) { + c, err := CreateSelfSignedCertificate(key, opts...) + if err != nil { + return nil, err + } + + block, err := PEMBlockForKey(key) + if err != nil { + return nil, err + } + + pemCert := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: c.Raw}) + pemKey := pem.EncodeToMemory(block) + cert, err := tls.X509KeyPair(pemCert, pemKey) + if err != nil { + return nil, err + } + + return &cert, nil +} + +// CreateSelfSignedCertificate creates a self-signed x509 certificate. 
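+//
+// A minimal sketch (assuming an RSA key is acceptable for the caller):
+//
+//	key, _ := rsa.GenerateKey(rand.Reader, 2048)
+//	cert, err := CreateSelfSignedCertificate(key)
+//	if err != nil {
+//		// handle the error
+//	}
+//	_ = cert // a self-signed *x509.Certificate valid for 31 days with DNS name "localhost"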
+func CreateSelfSignedCertificate(key interface{}, opts ...CertificateOpts) (cert *x509.Certificate, err error) { + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + return cert, errors.Errorf("failed to generate serial number: %s", err) + } + + certificate := &x509.Certificate{ + SerialNumber: serialNumber, + Subject: pkix.Name{ + Organization: []string{"ORY GmbH"}, + CommonName: "ORY", + }, + Issuer: pkix.Name{ + Organization: []string{"ORY GmbH"}, + CommonName: "ORY", + }, + NotBefore: time.Now().UTC(), + NotAfter: time.Now().UTC().Add(time.Hour * 24 * 31), + KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageClientAuth}, + BasicConstraintsValid: true, + IsCA: true, + DNSNames: []string{"localhost"}, + } + for _, opt := range opts { + opt(certificate) + } + + der, err := x509.CreateCertificate(rand.Reader, certificate, certificate, PublicKey(key), key) + if err != nil { + return cert, errors.Errorf("failed to create certificate: %s", err) + } + + cert, err = x509.ParseCertificate(der) + if err != nil { + return cert, errors.Errorf("failed to encode private key: %s", err) + } + return cert, nil +} + +// PEMBlockForKey returns a PEM-encoded block for key. +func PEMBlockForKey(key interface{}) (*pem.Block, error) { + b, err := x509.MarshalPKCS8PrivateKey(key) + if err != nil { + return nil, errors.WithStack(err) + } + return &pem.Block{Type: "PRIVATE KEY", Bytes: b}, nil +} + +// NewClientCert creates a new client TLS certificate signed by the given CA. +func NewClientCert(CAcert *x509.Certificate, CAkey crypto.PrivateKey, opts ...CertificateOpts) (*tls.Certificate, error) { + if !slices.Contains(CAcert.ExtKeyUsage, x509.ExtKeyUsageClientAuth) { + return nil, errors.Errorf("the CA certificate does not have the client authentication extended key usage (OID 1.3.6.1.5.5.7.3.2) set") + } + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + return nil, errors.Errorf("failed to generate serial number: %s", err) + } + + key, err := rsa.GenerateKey(rand.Reader, 3072) + if err != nil { + return nil, errors.Errorf("failed to generate private key: %s", err) + } + + template := &x509.Certificate{ + SerialNumber: serialNumber, + Subject: pkix.Name{ + Organization: []string{"Ory GmbH"}, + CommonName: "ORY", + }, + Issuer: CAcert.Subject, + NotBefore: time.Now().UTC(), + NotAfter: CAcert.NotAfter, + KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth}, + BasicConstraintsValid: true, + IsCA: false, + } + for _, opt := range opts { + opt(template) + } + + der, err := x509.CreateCertificate(rand.Reader, template, CAcert, PublicKey(key), CAkey) + if err != nil { + return nil, errors.Errorf("failed to create certificate: %s", err) + } + + pemCert := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: der}) + pemBlock, err := PEMBlockForKey(key) + if err != nil { + return nil, err + } + pemKey := pem.EncodeToMemory(pemBlock) + + cert, err := tls.X509KeyPair(pemCert, pemKey) + if err != nil { + return nil, errors.WithStack(err) + } + return &cert, nil +} + +type CertificateOpts func(*x509.Certificate) + +// CreateSelfSignedCertificateForTest writes a new, self-signed TLS +// certificate+key (in PEM format) to a temporary 
location on disk and returns +// the paths to both, and the respective contents in base64 encoding. The +// files are automatically cleaned up when the given *testing.T concludes its +// tests. +func CreateSelfSignedCertificateForTest(t testing.TB) (certPath, keyPath, certBase64, keyBase64 string) { + tmpDir := t.TempDir() + + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + + cert, err := CreateSelfSignedCertificate(privateKey) + require.NoError(t, err) + + // write cert + certFile, err := os.Create(filepath.Join(tmpDir, "cert.pem")) + require.NoError(t, err) + certPath = certFile.Name() + + var buf bytes.Buffer + enc := base64.NewEncoder(base64.StdEncoding, &buf) + require.NoErrorf(t, pem.Encode( + io.MultiWriter(enc, certFile), + &pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}, + ), "Failed to write data to %q", certPath) + require.NoError(t, enc.Close()) + require.NoErrorf(t, certFile.Close(), "Error closing %q", certPath) + certBase64 = buf.String() + + // write key + keyFile, err := os.Create(filepath.Join(tmpDir, "key.pem")) + require.NoError(t, err) + keyPath = keyFile.Name() + buf.Reset() + enc = base64.NewEncoder(base64.StdEncoding, &buf) + + privBytes, err := x509.MarshalPKCS8PrivateKey(privateKey) + require.NoError(t, err) + + require.NoErrorf(t, pem.Encode( + io.MultiWriter(enc, keyFile), + &pem.Block{Type: "PRIVATE KEY", Bytes: privBytes}, + ), "Failed to write data to %q", keyPath) + require.NoError(t, enc.Close()) + require.NoErrorf(t, keyFile.Close(), "Error closing %q", keyPath) + keyBase64 = buf.String() + + return +} diff --git a/oryx/tlsx/termination.go b/oryx/tlsx/termination.go new file mode 100644 index 00000000000..aae1548ca7c --- /dev/null +++ b/oryx/tlsx/termination.go @@ -0,0 +1,95 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package tlsx + +import ( + "net" + "net/http" + "strings" + + "github.com/pkg/errors" + "github.com/urfave/negroni" + + "github.com/ory/herodot" + "github.com/ory/x/healthx" + "github.com/ory/x/logrusx" + "github.com/ory/x/prometheusx" +) + +type dependencies interface { + logrusx.Provider + Writer() herodot.Writer +} + +// EnforceTLSRequests creates a middleware that enforces TLS for incoming HTTP requests. +// It allows termination (non-HTTPS traffic) from specific CIDR ranges provided in the `allowTerminationFrom` slice. +// If the request is not secure and does not match the allowed CIDR ranges, an error response is returned. +// The middleware also validates the `X-Forwarded-Proto` header to ensure it is set to "https". 
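+//
+// A rough wiring sketch (d, the CIDR range, and the negroni setup are illustrative):
+//
+//	enforcer, err := EnforceTLSRequests(d, []string{"10.0.0.0/8"})
+//	if err != nil {
+//		// handle the error
+//	}
+//	n := negroni.New(enforcer)
+//	_ = n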
+func EnforceTLSRequests(d dependencies, allowTerminationFrom []string) (negroni.Handler, error) {
+	networks := make([]*net.IPNet, 0, len(allowTerminationFrom))
+	for _, rn := range allowTerminationFrom {
+		_, network, err := net.ParseCIDR(rn)
+		if err != nil {
+			return nil, errors.WithStack(err)
+		}
+		networks = append(networks, network)
+	}
+
+	return negroni.HandlerFunc(func(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) {
+		if r.TLS != nil ||
+			r.URL.Path == healthx.AliveCheckPath ||
+			r.URL.Path == healthx.ReadyCheckPath ||
+			r.URL.Path == prometheusx.MetricsPrometheusPath {
+			next(rw, r)
+			return
+		}
+
+		if len(networks) == 0 {
+			d.Logger().WithRequest(r).WithError(errors.New("TLS termination is not enabled")).Error("Could not serve http connection")
+			d.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.New("can not serve request over insecure http"))
+			return
+		}
+
+		if err := matchesRange(r, networks); err != nil {
+			d.Logger().WithRequest(r).WithError(err).Warnln("Could not serve http connection")
+			d.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.New("can not serve request over insecure http"))
+			return
+		}
+
+		proto := r.Header.Get("X-Forwarded-Proto")
+		if proto == "" {
+			d.Logger().WithRequest(r).WithError(errors.New("X-Forwarded-Proto header is missing")).Error("Could not serve http connection")
+			d.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.New("can not serve request over insecure http"))
+			return
+		} else if proto != "https" {
+			d.Logger().WithRequest(r).WithError(errors.Errorf("expected X-Forwarded-Proto header to be https but got: %s", proto)).Error("Could not serve http connection")
+			d.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.Errorf("expected X-Forwarded-Proto header to be https but got: %s", proto))
+			return
+		}
+
+		next(rw, r)
+	}), nil
+}
+
+func matchesRange(r *http.Request, networks []*net.IPNet) error {
+	remoteIP, _, err := net.SplitHostPort(r.RemoteAddr)
+	if err != nil {
+		return errors.WithStack(err)
+	}
+
+	check := []string{remoteIP}
+	for fwd := range strings.SplitSeq(r.Header.Get("X-Forwarded-For"), ",") {
+		check = append(check, strings.TrimSpace(fwd))
+	}
+
+	for _, ipNet := range networks {
+		for _, ip := range check {
+			addr := net.ParseIP(ip)
+			if ipNet.Contains(addr) {
+				return nil
+			}
+		}
+	}
+	return errors.Errorf("neither remote address nor any x-forwarded-for values match CIDR ranges %+v: %v", networks, check)
+}
diff --git a/oryx/tools/listx/main.go b/oryx/tools/listx/main.go
new file mode 100644
index 00000000000..2f259f77473
--- /dev/null
+++ b/oryx/tools/listx/main.go
@@ -0,0 +1,45 @@
+// Copyright © 2023 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+package main
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"github.com/ory/x/cmdx"
+)
+
+func main() {
+	args := os.Args
+	if len(args) != 2 {
+		cmdx.Fatalf("Expects exactly one input parameter")
+	}
+	err := filepath.Walk(args[1], func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+
+		if info.IsDir() {
+			return nil
+		}
+
+		if strings.Contains(path, "vendor") {
+			return nil
+		}
+
+		if filepath.Ext(path) == ".go" {
+			p, err := filepath.Abs(filepath.Join(args[1], path))
+			if err != nil {
+				return err
+			}
+			fmt.Println(p)
+		}
+
+		return nil
+	})
+
+	cmdx.Must(err, "%s", err)
+}
diff --git a/oryx/urlx/copy.go b/oryx/urlx/copy.go
new file mode 100644
index 00000000000..2e558cb23cd
--- /dev/null
+++ b/oryx/urlx/copy.go
@@ -0,0 +1,24 @@
+// Copyright © 2023 Ory Corp
+// SPDX-License-Identifier: Apache-2.0 + +package urlx + +import "net/url" + +// Copy returns a copy of the input url. +func Copy(src *url.URL) *url.URL { + var out = new(url.URL) + *out = *src + return out +} + +// CopyWithQuery returns a copy of the input url with the given query parameters +func CopyWithQuery(src *url.URL, query url.Values) *url.URL { + out := Copy(src) + q := out.Query() + for k := range query { + q.Set(k, query.Get(k)) + } + out.RawQuery = q.Encode() + return out +} diff --git a/oryx/urlx/extract.go b/oryx/urlx/extract.go new file mode 100644 index 00000000000..15e47359b48 --- /dev/null +++ b/oryx/urlx/extract.go @@ -0,0 +1,137 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package urlx + +import ( + "context" + "net" + "net/url" + "strings" + "sync" + "time" +) + +// hostCache caches DNS lookup results for hostnames to avoid repeated lookups. +// The cache is thread-safe and stores true/false whether a hostname resolves to public IPs. +type hostCache struct { + mu sync.RWMutex + cache map[string]bool +} + +// get retrieves a cached value for a hostname. Returns value and whether it was found. +func (hc *hostCache) get(hostname string) (bool, bool) { + hc.mu.RLock() + defer hc.mu.RUnlock() + isPublic, found := hc.cache[hostname] + return isPublic, found +} + +// set stores the lookup result for a hostname. +func (hc *hostCache) set(hostname string, isPublic bool) { + hc.mu.Lock() + defer hc.mu.Unlock() + hc.cache[hostname] = isPublic +} + +// localCache lives for the lifetime of the main process. The cache +// size is not expected to grow more than a few hundred bytes. +var localCache = &hostCache{ + cache: make(map[string]bool), +} + +// ExtractPublicAddress iterates over parameters and extracts the first public +// address found. Parameter values are assumed to be in priority order. Returns +// an empty string if only private addresses are available. +func ExtractPublicAddress(values ...string) string { + for _, value := range values { + if value == "" || value == "*" { + continue + } + host := value + + // parse URL addresses + if u, err := url.Parse(value); err == nil && len(u.Host) > 1 { + host = removeWildcardsFromHostname(u.Host) + } + + // strip port on both URL and non-URL addresses + hostname, _, err := net.SplitHostPort(host) + if err != nil { + hostname = host + } + + // for IP addresses + if ip := net.ParseIP(hostname); ip != nil { + if !isPrivateIP(ip) { + return host + } + continue + } + + // for hostnames, first check cache + if isPublic, found := localCache.get(hostname); found { + if isPublic { + return host + } + continue + } + + // otherwise, perform DNS lookup & cache result + isPublic := isPublicHostname(hostname) + localCache.set(hostname, isPublic) + if isPublic { + return host + } + } + + return "" +} + +// isPrivateIP checks if an IP address is private (RFC 1918/4193). +func isPrivateIP(ip net.IP) bool { + return ip.IsPrivate() || + ip.IsLoopback() || + ip.IsLinkLocalUnicast() || + ip.IsLinkLocalMulticast() || + ip.IsUnspecified() // 0.0.0.0 or :: +} + +// isPublicHostname performs DNS lookup to determine if hostname resolves to public IPs. +// Returns true if at least one resolved IP is public, false if all are private or lookup fails. 
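+//
+// For example (illustrative; real results depend on DNS):
+//
+//	isPublicHostname("localhost")   // false, no lookup is performed
+//	isPublicHostname("example.org") // true if at least one resolved IP is public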
+func isPublicHostname(hostname string) bool { + // avoid DNS lookup if localhost + lower := strings.ToLower(hostname) + if lower == "localhost" { + return false + } + + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + defer cancel() + + ips, err := net.DefaultResolver.LookupIPAddr(ctx, hostname) + if err != nil { + return false + } + + for _, ip := range ips { + if !isPrivateIP(ip.IP) { + return true + } + } + + return false +} + +// removeWildcardsFromHostname removes wildcard segments from a hostname string +// by splitting on dots and filtering out asterisk-only segments. +func removeWildcardsFromHostname(hostname string) string { + sep := strings.Split(hostname, ".") + clean := make([]string, 0, len(sep)) + for _, s := range sep { + if s != "*" && s != "" { + clean = append(clean, s) + } + } + return strings.Join(clean, ".") +} diff --git a/oryx/urlx/join.go b/oryx/urlx/join.go new file mode 100644 index 00000000000..90eeb15a001 --- /dev/null +++ b/oryx/urlx/join.go @@ -0,0 +1,52 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package urlx + +import ( + "net/url" + "path" + + "github.com/ory/x/cmdx" +) + +// MustJoin joins the paths of two URLs. Fatals if first is not a DSN. +func MustJoin(first string, parts ...string) string { + u, err := url.Parse(first) + if err != nil { + cmdx.Fatalf("Unable to parse %s: %s", first, err) + } + return AppendPaths(u, parts...).String() +} + +// AppendPaths appends the provided paths to the url. +// Paths are intentionally *not* URL encoded. +// The caller is responsible for url encoding, possibly selectively, the required path components with `url.PathEscape`. +func AppendPaths(u *url.URL, paths ...string) (ep *url.URL) { + ep = Copy(u) + if len(paths) == 0 { + return ep + } + + ep.Path = path.Join(append([]string{ep.Path}, paths...)...) + + last := paths[len(paths)-1] + if last != "" && last[len(last)-1] == '/' { + ep.Path = ep.Path + "/" + } + + return ep +} + +// SetQuery appends the provided url values to the DSN's query string. +func SetQuery(u *url.URL, query url.Values) (ep *url.URL) { + ep = Copy(u) + q := ep.Query() + + for k := range query { + q.Set(k, query.Get(k)) + } + + ep.RawQuery = q.Encode() + return ep +} diff --git a/oryx/urlx/parse.go b/oryx/urlx/parse.go new file mode 100644 index 00000000000..d6a7706150e --- /dev/null +++ b/oryx/urlx/parse.go @@ -0,0 +1,119 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package urlx + +import ( + "net/url" + "regexp" + "strings" + + "github.com/ory/x/logrusx" +) + +// winPathRegex is a regex for [DRIVE-LETTER]: +var winPathRegex = regexp.MustCompile("^[A-Za-z]:.*") + +// Parse parses rawURL into a URL structure with special handling for file:// URLs +// +// File URLs with relative paths (file://../file, ../file) will be returned as a +// url.URL object without the Scheme set to "file". This is because the file +// scheme does not support relative paths. Make sure to check for +// both "file" or "" (an empty string) in URL.Scheme if you are looking for +// a file path. +// +// Use the companion function GetURLFilePath() to get a file path suitable +// for the current operating system. 
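+//
+// Illustrative results (shown as Scheme, Path):
+//
+//	Parse("file:///etc/config.yaml") // "file", "/etc/config.yaml"
+//	Parse("file://../config.yaml")   // "", "../config.yaml" (relative file path, scheme dropped)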
+func Parse(rawURL string) (*url.URL, error) { + lcRawURL := strings.ToLower(rawURL) + if strings.HasPrefix(lcRawURL, "file:///") { + return url.Parse(rawURL) + } + + // Normally the first part after file:// is a hostname, but since + // this is often misused we interpret the URL like a normal path + // by removing the "file://" from the beginning (if it exists) + rawURL = trimPrefixIC(rawURL, "file://") + + if winPathRegex.MatchString(rawURL) { + // Windows path + return url.Parse("file:///" + rawURL) + } + + if strings.HasPrefix(lcRawURL, "\\\\") { + // Windows UNC path + // We extract the hostname and create an appropriate file:// URL + // based on the hostname and the path + host, path := extractUNCPathParts(rawURL) + // It is safe to replace the \ with / here because this is POSIX style path + return url.Parse("file://" + host + strings.ReplaceAll(path, "\\", "/")) + } + + parsed, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + // Since go1.19: + // + // > The URL type now distinguishes between URLs with no authority and URLs with an empty authority. + // > For example, http:///path has an empty authority (host), while http:/path has none. + // + // See https://golang.org/doc/go1.19#net/url for more details. + parsed.OmitHost = false + return parsed, nil +} + +// ParseOrPanic parses a url or panics. +func ParseOrPanic(in string) *url.URL { + out, err := url.Parse(in) + if err != nil { + panic(err.Error()) + } + return out +} + +// ParseOrFatal parses a url or fatals. +func ParseOrFatal(l *logrusx.Logger, in string) *url.URL { + out, err := url.Parse(in) + if err != nil { + l.WithError(err).Fatalf("Unable to parse url: %s", in) + } + return out +} + +// ParseRequestURIOrPanic parses a request uri or panics. +func ParseRequestURIOrPanic(in string) *url.URL { + out, err := url.ParseRequestURI(in) + if err != nil { + panic(err.Error()) + } + return out +} + +// ParseRequestURIOrFatal parses a request uri or fatals. +func ParseRequestURIOrFatal(l *logrusx.Logger, in string) *url.URL { + out, err := url.ParseRequestURI(in) + if err != nil { + l.WithError(err).Fatalf("Unable to parse url: %s", in) + } + return out +} + +func extractUNCPathParts(uncPath string) (host, path string) { + parts := strings.Split(strings.TrimPrefix(uncPath, "\\\\"), "\\") + host = parts[0] + if len(parts) > 0 { + path = "\\" + strings.Join(parts[1:], "\\") + } + return host, path +} + +// trimPrefixIC returns s without the provided leading prefix string using +// case insensitive matching. +// If s doesn't start with prefix, s is returned unchanged. 
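+//
+// For example (illustrative):
+//
+//	trimPrefixIC("FILE://foo", "file://") // "foo"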
+func trimPrefixIC(s, prefix string) string { + if strings.HasPrefix(strings.ToLower(s), prefix) { + return s[len(prefix):] + } + return s +} diff --git a/oryx/urlx/path.go b/oryx/urlx/path.go new file mode 100644 index 00000000000..8ceaddcbc2c --- /dev/null +++ b/oryx/urlx/path.go @@ -0,0 +1,19 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build !windows +// +build !windows + +package urlx + +import ( + "net/url" +) + +// GetURLFilePath returns the path of a URL that is compatible with the runtime os filesystem +func GetURLFilePath(u *url.URL) string { + if u == nil { + return "" + } + return u.Path +} diff --git a/oryx/urlx/path_windows.go b/oryx/urlx/path_windows.go new file mode 100644 index 00000000000..cf100d22b71 --- /dev/null +++ b/oryx/urlx/path_windows.go @@ -0,0 +1,37 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +//go:build windows +// +build windows + +package urlx + +import ( + "net/url" + "path/filepath" + "strings" +) + +// GetURLFilePath returns the path of a URL that is compatible with the runtime os filesystem +func GetURLFilePath(u *url.URL) string { + if u == nil { + return "" + } + if !(u.Scheme == "file" || u.Scheme == "") { + return u.Path + } + + fPath := u.Path + if u.Host != "" { + // Make UNC Path + fPath = "\\\\" + u.Host + filepath.FromSlash(fPath) + return fPath + } + fPathTrimmed := strings.TrimLeft(fPath, "/") + if winPathRegex.MatchString(fPathTrimmed) { + // On Windows we should remove the initial path separator in case this + // is a normal path (for example: "\c:\" -> "c:\"") + fPath = fPathTrimmed + } + return filepath.FromSlash(fPath) +} diff --git a/oryx/uuidx/uuid.go b/oryx/uuidx/uuid.go new file mode 100644 index 00000000000..e405746daca --- /dev/null +++ b/oryx/uuidx/uuid.go @@ -0,0 +1,11 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package uuidx + +import "github.com/gofrs/uuid" + +// NewV4 returns a new randomly generated UUID or panics. +func NewV4() uuid.UUID { + return uuid.Must(uuid.NewV4()) +} diff --git a/oryx/watcherx/definitions.go b/oryx/watcherx/definitions.go new file mode 100644 index 00000000000..ec5739560ab --- /dev/null +++ b/oryx/watcherx/definitions.go @@ -0,0 +1,67 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package watcherx + +import ( + "context" + "fmt" + "net/url" +) + +type ( + errSchemeUnknown struct { + scheme string + } + EventChannel chan Event + Watcher interface { + // DispatchNow fires the watcher and causes an event. + // + // WARNING: The returned channel must be read or no further events will + // be propagated due to a deadlock. 
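+		//
+		// A rough sketch (assuming w is a running Watcher):
+		//
+		//	done, err := w.DispatchNow()
+		//	if err == nil {
+		//		<-done // blocks until dispatch completes; receives the number of events sent
+		//	}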
+ DispatchNow() (<-chan int, error) + } + dispatcher struct { + trigger chan struct{} + done chan int + } +) + +var ( + // ErrSchemeUnknown is just for checking with errors.Is() + ErrSchemeUnknown = &errSchemeUnknown{} + ErrWatcherNotRunning = fmt.Errorf("watcher is not running") +) + +func (e *errSchemeUnknown) Is(other error) bool { + _, ok := other.(*errSchemeUnknown) + return ok +} + +func (e *errSchemeUnknown) Error() string { + return fmt.Sprintf("unknown scheme '%s' to watch", e.scheme) +} + +func newDispatcher() *dispatcher { + return &dispatcher{ + trigger: make(chan struct{}), + done: make(chan int), + } +} + +func (d *dispatcher) DispatchNow() (<-chan int, error) { + if d.trigger == nil { + return nil, ErrWatcherNotRunning + } + d.trigger <- struct{}{} + return d.done, nil +} + +func Watch(ctx context.Context, u *url.URL, c EventChannel) (Watcher, error) { + switch u.Scheme { + // see urlx.Parse for why the empty string is also file + case "file", "": + return WatchFile(ctx, u.Path, c) + } + return nil, &errSchemeUnknown{u.Scheme} +} diff --git a/oryx/watcherx/directory.go b/oryx/watcherx/directory.go new file mode 100644 index 00000000000..722e505fd59 --- /dev/null +++ b/oryx/watcherx/directory.go @@ -0,0 +1,129 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package watcherx + +import ( + "context" + "os" + "path/filepath" + + "github.com/fsnotify/fsnotify" + "github.com/pkg/errors" +) + +func WatchDirectory(ctx context.Context, dir string, c EventChannel) (Watcher, error) { + w, err := fsnotify.NewWatcher() + if err != nil { + return nil, errors.WithStack(err) + } + subDirs := make(map[string]struct{}) + if err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return errors.WithStack(err) + } + if info.IsDir() { + if err := w.Add(path); err != nil { + return errors.WithStack(err) + } + subDirs[path] = struct{}{} + } + return nil + }); err != nil { + return nil, err + } + + d := newDispatcher() + go streamDirectoryEvents(ctx, w, c, d.trigger, d.done, dir, subDirs) + return d, nil +} + +func handleEvent(e fsnotify.Event, w *fsnotify.Watcher, c EventChannel, subDirs map[string]struct{}) { + if e.Has(fsnotify.Remove) { + if _, ok := subDirs[e.Name]; ok { + // we do not want any event on deletion of a directory + delete(subDirs, e.Name) + return + } + c <- &RemoveEvent{ + source: source(e.Name), + } + return + } else if e.Has(fsnotify.Write | fsnotify.Create) { + if stats, err := os.Stat(e.Name); err != nil { + c <- &ErrorEvent{ + error: errors.WithStack(err), + source: source(e.Name), + } + return + } else if stats.IsDir() { + if err := w.Add(e.Name); err != nil { + c <- &ErrorEvent{ + error: errors.WithStack(err), + source: source(e.Name), + } + } + subDirs[e.Name] = struct{}{} + return + } + + //#nosec G304 -- false positive + data, err := os.ReadFile(e.Name) + if err != nil { + c <- &ErrorEvent{ + error: err, + source: source(e.Name), + } + } else { + c <- &ChangeEvent{ + data: data, + source: source(e.Name), + } + } + } +} + +func streamDirectoryEvents(ctx context.Context, w *fsnotify.Watcher, c EventChannel, sendNow <-chan struct{}, sendNowDone chan<- int, dir string, subDirs map[string]struct{}) { + for { + select { + case <-ctx.Done(): + _ = w.Close() + return + case e := <-w.Events: + handleEvent(e, w, c, subDirs) + case <-sendNow: + var eventsSent int + + if err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + 
} + if !info.IsDir() { + //#nosec G304 -- false positive + data, err := os.ReadFile(path) + if err != nil { + c <- &ErrorEvent{ + error: err, + source: source(path), + } + } else { + c <- &ChangeEvent{ + data: data, + source: source(path), + } + } + eventsSent++ + } + return nil + }); err != nil { + c <- &ErrorEvent{ + error: err, + source: source(dir), + } + eventsSent++ + } + + sendNowDone <- eventsSent + } + } +} diff --git a/oryx/watcherx/event.go b/oryx/watcherx/event.go new file mode 100644 index 00000000000..a4065c067fe --- /dev/null +++ b/oryx/watcherx/event.go @@ -0,0 +1,109 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package watcherx + +import ( + "bytes" + "encoding/json" + "fmt" + "io" +) + +type ( + Event interface { + // MarshalJSON is required to work multiple times + json.Marshaler + + Reader() io.Reader + Source() string + String() string + setSource(string) + } + source string + ErrorEvent struct { + error + source + } + ChangeEvent struct { + data []byte + source + } + RemoveEvent struct { + source + } + serialEventType string + serialEvent struct { + Type serialEventType `json:"type"` + Data []byte `json:"data"` + Source source `json:"source"` + } +) + +func NewErrorEvent(err error, source_ string) *ErrorEvent { + return &ErrorEvent{ + error: err, + source: source(source_), + } +} + +const ( + serialTypeChange serialEventType = "change" + serialTypeRemove serialEventType = "remove" + serialTypeError serialEventType = "error" +) + +func (e *ErrorEvent) Reader() io.Reader { + return bytes.NewBufferString(e.Error()) +} + +func (e *ErrorEvent) MarshalJSON() ([]byte, error) { + return json.Marshal(serialEvent{ + Type: serialTypeError, + Data: []byte(e.Error()), + Source: e.source, + }) +} + +func (e *ErrorEvent) String() string { + return fmt.Sprintf("error: %+v; source: %s", e.error, e.source) +} + +func (e source) Source() string { + return string(e) +} + +func (e *source) setSource(nsrc string) { + *e = source(nsrc) +} + +func (e *ChangeEvent) Reader() io.Reader { + return bytes.NewBuffer(e.data) +} + +func (e *ChangeEvent) MarshalJSON() ([]byte, error) { + return json.Marshal(serialEvent{ + Type: serialTypeChange, + Data: e.data, + Source: e.source, + }) +} + +func (e *ChangeEvent) String() string { + return fmt.Sprintf("data: %s; source: %s", e.data, e.source) +} + +func (e *RemoveEvent) Reader() io.Reader { + return nil +} + +func (e *RemoveEvent) MarshalJSON() ([]byte, error) { + return json.Marshal(serialEvent{ + Type: serialTypeRemove, + Source: e.source, + }) +} + +func (e *RemoveEvent) String() string { + return fmt.Sprintf("removed source: %s", e.source) +} diff --git a/oryx/watcherx/file.go b/oryx/watcherx/file.go new file mode 100644 index 00000000000..c1bcd411ae6 --- /dev/null +++ b/oryx/watcherx/file.go @@ -0,0 +1,177 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package watcherx + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "github.com/fsnotify/fsnotify" + "github.com/pkg/errors" +) + +// WatchFile spawns a background goroutine to watch file, reporting any changes +// to c. Watching stops when ctx is canceled. 
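+// If file is a symlink, the symlink target is watched in addition to the
+// directory containing file, so that atomic symlink swaps (as Kubernetes does
+// for mounted ConfigMap updates) are detected.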
+func WatchFile(ctx context.Context, file string, c EventChannel) (Watcher, error) { + watcher, err := fsnotify.NewWatcher() + if err != nil { + return nil, errors.WithStack(err) + } + dir := filepath.Dir(file) + if err := watcher.Add(dir); err != nil { + return nil, errors.WithStack(err) + } + resolvedFile, err := filepath.EvalSymlinks(file) + if err != nil { + if pathError := new(os.PathError); !errors.As(err, &pathError) { + return nil, errors.WithStack(err) + } + // The file does not exist. The watcher should still watch the directory + // to get notified about file creation. + resolvedFile = "" + } else if resolvedFile != file { + // If `resolvedFile` != `file` then `file` is a symlink and we have to explicitly watch the referenced file. + // This is because fsnotify follows symlinks and watches the destination file, not the symlink + // itself. That is at least the case for unix systems. See: https://github.com/fsnotify/fsnotify/issues/199 + if err := watcher.Add(file); err != nil { + return nil, errors.WithStack(err) + } + } + d := newDispatcher() + go streamFileEvents(ctx, watcher, c, d.trigger, d.done, file, resolvedFile) + return d, nil +} + +// streamFileEvents watches for file changes and supports symlinks which requires several workarounds due to limitations of fsnotify. +// Argument `resolvedFile` is the resolved symlink path of the file, or it is the watchedFile name itself. If `resolvedFile` is empty, then the watchedFile does not exist. +func streamFileEvents(ctx context.Context, watcher *fsnotify.Watcher, c EventChannel, sendNow <-chan struct{}, sendNowDone chan<- int, watchedFile, resolvedFile string) { + eventSource := source(watchedFile) + removeDirectFileWatcher := func() { + _ = watcher.Remove(watchedFile) + } + addDirectFileWatcher := func() { + // check if the watchedFile (symlink) exists + // if it does not the dir watcher will notify us when it gets created + if _, err := os.Lstat(watchedFile); err == nil { + if err := watcher.Add(watchedFile); err != nil { + c <- &ErrorEvent{ + error: errors.WithStack(err), + source: eventSource, + } + } + } + } + defer watcher.Close() + for { + select { + case <-ctx.Done(): + return + case <-sendNow: + if resolvedFile == "" { + // The file does not exist. Announce this by sending a RemoveEvent. + c <- &RemoveEvent{eventSource} + } else { + // The file does exist. Announce the current content by sending a ChangeEvent. 
+ // #nosec G304 -- false positive + data, err := os.ReadFile(watchedFile) + if err != nil { + select { + case c <- &ErrorEvent{ + error: errors.WithStack(err), + source: eventSource, + }: + case <-ctx.Done(): + return + } + continue + } + select { + case c <- &ChangeEvent{ + data: data, + source: eventSource, + }: + case <-ctx.Done(): + return + } + } + + // in any of the above cases we send exactly one event + select { + case sendNowDone <- 1: + case <-ctx.Done(): + return + } + case e, ok := <-watcher.Events: + if !ok { + return + } + list := watcher.WatchList() + fmt.Println(list) + // filter events to only watch watchedFile + // e.Name contains the name of the watchedFile (regardless whether it is a symlink), not the resolved file name + if filepath.Clean(e.Name) == watchedFile { + recentlyResolvedFile, err := filepath.EvalSymlinks(watchedFile) + // when there is no error the file exists and any symlinks can be resolved + if err != nil { + // check if the watchedFile (or the file behind the symlink) was removed + if _, ok := err.(*os.PathError); ok { + select { + case c <- &RemoveEvent{eventSource}: + case <-ctx.Done(): + return + } + removeDirectFileWatcher() + continue + } + select { + case c <- &ErrorEvent{ + error: errors.WithStack(err), + source: eventSource, + }: + case <-ctx.Done(): + return + } + continue + } + // This catches following three cases: + // 1. the watchedFile was written or created + // 2. the watchedFile is a symlink and has changed (k8s config map updates) + // 3. the watchedFile behind the symlink was written or created + switch { + case recentlyResolvedFile != resolvedFile: + resolvedFile = recentlyResolvedFile + // watch the symlink again to update the actually watched file + removeDirectFileWatcher() + addDirectFileWatcher() + // we fallthrough because we also want to read the file in this case + fallthrough + case e.Has(fsnotify.Write | fsnotify.Create): + // #nosec G304 -- false positive + data, err := os.ReadFile(watchedFile) + if err != nil { + select { + case c <- &ErrorEvent{ + error: errors.WithStack(err), + source: eventSource, + }: + case <-ctx.Done(): + return + } + continue + } + select { + case c <- &ChangeEvent{ + data: data, + source: eventSource, + }: + case <-ctx.Done(): + return + } + } + } + } + } +} diff --git a/oryx/watcherx/integrationtest/.dockerignore b/oryx/watcherx/integrationtest/.dockerignore new file mode 100644 index 00000000000..515faedd929 --- /dev/null +++ b/oryx/watcherx/integrationtest/.dockerignore @@ -0,0 +1,7 @@ +event_logger.yml +configmap.yml + +Makefile +README.md + +eventlog_snapshot diff --git a/oryx/watcherx/integrationtest/.gitignore b/oryx/watcherx/integrationtest/.gitignore new file mode 100644 index 00000000000..2533ea23e03 --- /dev/null +++ b/oryx/watcherx/integrationtest/.gitignore @@ -0,0 +1 @@ +tmp_snapshot diff --git a/oryx/watcherx/integrationtest/Dockerfile b/oryx/watcherx/integrationtest/Dockerfile new file mode 100644 index 00000000000..85547268b9d --- /dev/null +++ b/oryx/watcherx/integrationtest/Dockerfile @@ -0,0 +1,21 @@ +FROM golang:1.25-alpine AS builder + +RUN apk -U --no-cache add build-base + +WORKDIR /go/src/github.com/ory/x + +ADD go.mod go.mod +ADD go.sum go.sum + +RUN go mod download + +ADD . . 
+
+RUN go build -o /usr/bin/eventlogger ./watcherx/integrationtest
+
+FROM alpine:3.22
+
+COPY --from=builder /usr/bin/eventlogger /usr/bin/eventlogger
+
+ENTRYPOINT ["eventlogger"]
+CMD ["/etc/config/mock-config"]
diff --git a/oryx/watcherx/integrationtest/Makefile b/oryx/watcherx/integrationtest/Makefile
new file mode 100644
index 00000000000..7725e79936e
--- /dev/null
+++ b/oryx/watcherx/integrationtest/Makefile
@@ -0,0 +1,65 @@
+SHELL=/bin/bash -euo pipefail
+
+CLUSTER_NAME=watcherx-integration-test
+SNAPSHOT_FILE=eventlog_snapshot
+
+define generate_snapshot
+	sleep 5
+	make update
+	sleep 1
+	kubectl logs eventlogger --context kind-${CLUSTER_NAME} >> $(1)
+	make apply
+	sleep 1
+	kubectl logs eventlogger --context kind-${CLUSTER_NAME} >> $(1)
+	make update
+	sleep 1
+	kubectl logs eventlogger --context kind-${CLUSTER_NAME} >> $(1)
+endef
+
+.PHONY: build
+build:
+	docker build -f Dockerfile -t eventlogger:latest ../..
+
+.PHONY: create
+create:
+	kind create cluster --name ${CLUSTER_NAME} --wait 1m || true
+
+.PHONY: load
+load:
+	kind load docker-image eventlogger:latest --name ${CLUSTER_NAME}
+
+.PHONY: apply
+apply:
+	kubectl apply -f configmap.yml -f event_logger.yml --context kind-${CLUSTER_NAME}
+
+.PHONY: delete
+delete:
+	kind delete cluster --name ${CLUSTER_NAME}
+
+.PHONY: setup
+setup: build create load apply
+
+.PHONY: snapshot
+snapshot: setup container-restart
+	rm ${SNAPSHOT_FILE}
+	${call generate_snapshot,$(SNAPSHOT_FILE)}
+
+.PHONY: check
+check: setup container-restart
+	rm tmp_snapshot || true
+	${call generate_snapshot,tmp_snapshot}
+	diff tmp_snapshot ${SNAPSHOT_FILE}
+
+.PHONY: logs
+logs:
+	kubectl logs eventlogger --context kind-${CLUSTER_NAME}
+
+.PHONY: container-restart
+container-restart:
+	kubectl delete -f event_logger.yml --context kind-${CLUSTER_NAME}
+	kubectl apply -f event_logger.yml --context kind-${CLUSTER_NAME}
+
+.PHONY: update
+update:
+	cat configmap.yml | sed 's/somevalue/othervalue/' | kubectl apply -f - --context kind-${CLUSTER_NAME}
+	cat event_logger.yml | sed 's/somevalue/othervalue/' | kubectl apply -f - --context kind-${CLUSTER_NAME}
diff --git a/oryx/watcherx/integrationtest/README.md b/oryx/watcherx/integrationtest/README.md
new file mode 100644
index 00000000000..4dedd3badfc
--- /dev/null
+++ b/oryx/watcherx/integrationtest/README.md
@@ -0,0 +1,27 @@
+# Integration Test for watcherx/FileWatcher
+
+As Kubernetes has a special way of updating mounted ConfigMap values, we want to
+make sure our file watcher is compatible with it.
+
+## Prerequisites
+
+The listed versions are known to work.
+
+- kind (v0.8.1)
+- kubectl (v1.18.5)
+- docker (v19.03.12-ce)
+- make (v4.3)
+
+## Structure
+
+The `main.go` just logs all events it receives. It is deployed to a kind Kubernetes
+cluster together with a ConfigMap that gets updated during the test. For details
+on the test steps, have a look at the `Makefile`.
+
+## Running
+
+To generate the log snapshot, run `make snapshot`. That snapshot should be
+committed. To check whether the FileWatcher works, run `make check`. For debugging
+purposes, the individual setup steps have descriptive make target names and can be
+run separately. It is safe to delete the cluster at any point or to rerun snapshot
+generation.
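A minimal usage sketch of how the urlx and watcherx pieces introduced in this change compose: urlx.Parse normalizes a plain path or file:// location into a *url.URL, and watcherx.Watch picks the watcher implementation from the URL scheme. It assumes the packages remain importable as github.com/ory/x/urlx and github.com/ory/x/watcherx (the module path the integration test's main.go also uses), and /etc/config/mock-config is only an example path.

package main

import (
	"context"
	"fmt"
	"io"

	"github.com/ory/x/urlx"
	"github.com/ory/x/watcherx"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Parse accepts plain paths as well as file:// URLs.
	u, err := urlx.Parse("file:///etc/config/mock-config") // example path only
	if err != nil {
		panic(err)
	}

	events := make(watcherx.EventChannel)
	// Watch dispatches on the URL scheme; "file" and "" use WatchFile.
	w, err := watcherx.Watch(ctx, u, events)
	if err != nil {
		panic(err)
	}

	// Emit the current state once. The returned channel reports how many
	// events were sent and must be drained to avoid blocking the watcher.
	done, err := w.DispatchNow()
	if err != nil {
		panic(err)
	}
	go func() { <-done }()

	for e := range events {
		switch ev := e.(type) {
		case *watcherx.ChangeEvent:
			data, _ := io.ReadAll(ev.Reader())
			fmt.Printf("changed %s: %s\n", ev.Source(), data)
		case *watcherx.RemoveEvent:
			fmt.Printf("removed %s\n", ev.Source())
		case *watcherx.ErrorEvent:
			fmt.Printf("error from %s: %s\n", ev.Source(), ev.Error())
		}
	}
}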
diff --git a/oryx/watcherx/integrationtest/configmap.yml b/oryx/watcherx/integrationtest/configmap.yml
new file mode 100644
index 00000000000..b8c5cee2896
--- /dev/null
+++ b/oryx/watcherx/integrationtest/configmap.yml
@@ -0,0 +1,6 @@
+kind: ConfigMap
+apiVersion: v1
+metadata:
+  name: changing-config
+data:
+  mock-config: somevalue
diff --git a/oryx/watcherx/integrationtest/event_logger.yml b/oryx/watcherx/integrationtest/event_logger.yml
new file mode 100644
index 00000000000..acd9641658b
--- /dev/null
+++ b/oryx/watcherx/integrationtest/event_logger.yml
@@ -0,0 +1,19 @@
+kind: Pod
+apiVersion: v1
+metadata:
+  name: eventlogger
+  annotations:
+    variant: somevalue
+spec:
+  containers:
+    - name: eventlogger
+      image: eventlogger:latest
+      imagePullPolicy: Never
+      volumeMounts:
+        - name: changing-config
+          mountPath: /etc/config
+  restartPolicy: Never
+  volumes:
+    - name: changing-config
+      configMap:
+        name: changing-config
diff --git a/oryx/watcherx/integrationtest/eventlog_snapshot b/oryx/watcherx/integrationtest/eventlog_snapshot
new file mode 100644
index 00000000000..c9ab6a737b4
--- /dev/null
+++ b/oryx/watcherx/integrationtest/eventlog_snapshot
@@ -0,0 +1,21 @@
+watching file /etc/config/mock-config
+got change event:
+Data: othervalue,
+Src: /etc/config/mock-config
+watching file /etc/config/mock-config
+got change event:
+Data: othervalue,
+Src: /etc/config/mock-config
+got change event:
+Data: somevalue,
+Src: /etc/config/mock-config
+watching file /etc/config/mock-config
+got change event:
+Data: othervalue,
+Src: /etc/config/mock-config
+got change event:
+Data: somevalue,
+Src: /etc/config/mock-config
+got change event:
+Data: othervalue,
+Src: /etc/config/mock-config
diff --git a/oryx/watcherx/integrationtest/main.go b/oryx/watcherx/integrationtest/main.go
new file mode 100644
index 00000000000..5a853438dd5
--- /dev/null
+++ b/oryx/watcherx/integrationtest/main.go
@@ -0,0 +1,47 @@
+// Copyright © 2023 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+package main
+
+import (
+	"context"
+	"fmt"
+	"io"
+	"os"
+
+	"github.com/ory/x/watcherx"
+)
+
+func main() {
+	if len(os.Args) != 2 {
+		_, _ = fmt.Fprintf(os.Stderr, "expected 1 command line argument but got %d\n", len(os.Args)-1)
+		os.Exit(1)
+	}
+	c := make(chan watcherx.Event)
+	ctx, cancel := context.WithCancel(context.Background())
+	_, err := watcherx.WatchFile(ctx, os.Args[1], c)
+	if err != nil {
+		_, _ = fmt.Fprintf(os.Stderr, "could not initialize file watcher: %+v\n", err)
+		os.Exit(1)
+	}
+	fmt.Printf("watching file %s\n", os.Args[1])
+	defer cancel()
+	for {
+		switch e := (<-c).(type) {
+		case *watcherx.ChangeEvent:
+			var data []byte
+			data, err = io.ReadAll(e.Reader())
+			if err != nil {
+				_, _ = fmt.Fprintf(os.Stderr, "could not read data: %+v\n", err)
+				os.Exit(1)
+			}
+			fmt.Printf("got change event:\nData: %s,\nSrc: %s\n", data, e.Source())
+		case *watcherx.RemoveEvent:
+			fmt.Printf("got remove event:\nSrc: %s\n", e.Source())
+		case *watcherx.ErrorEvent:
+			fmt.Printf("got error event:\nError: %s\n", e.Error())
+		default:
+			fmt.Println("got unknown event")
+		}
+	}
+}
diff --git a/oryx/watcherx/test_helpers.go b/oryx/watcherx/test_helpers.go
new file mode 100644
index 00000000000..8960458bbc4
--- /dev/null
+++ b/oryx/watcherx/test_helpers.go
@@ -0,0 +1,80 @@
+// Copyright © 2023 Ory Corp
+// SPDX-License-Identifier: Apache-2.0
+
+package watcherx
+
+import (
+	"os"
+	"path"
+	"path/filepath"
+	"runtime"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/require"
+)
+
+func 
KubernetesAtomicWrite(t *testing.T, dir, fileName, content string) { + // atomic write according to https://github.com/kubernetes/kubernetes/blob/master/pkg/volume/util/atomic_writer.go + const ( + dataDirName = "..data" + newDataDirName = "..data_tmp" + ) + // (2) + dataDirPath := filepath.Join(dir, dataDirName) + oldTsDir, err := os.Readlink(dataDirPath) + if err != nil { + require.True(t, os.IsNotExist(err), "%+v", err) + // although Readlink() returns "" on err, don't be fragile by relying on it (since it's not specified in docs) + // empty oldTsDir indicates that it didn't exist + oldTsDir = "" + } + oldTsPath := filepath.Join(dir, oldTsDir) + + // (3) we are not interested in the case where a file gets deleted as we just operate on one file + // (4) we assume the file needs an update + + // (5) + tsDir, err := os.MkdirTemp(dir, time.Now().UTC().Format("..2006_01_02_15_04_05.")) + require.NoError(t, err) + tsDirName := filepath.Base(tsDir) + + // (6) + require.NoError( + t, + os.WriteFile(path.Join(tsDir, fileName), []byte(content), 0600), + ) + + // (7) + _, err = os.Readlink(filepath.Join(dir, fileName)) + if err != nil && os.IsNotExist(err) { + // The link into the data directory for this path doesn't exist; create it + require.NoError( + t, + os.Symlink(filepath.Join(dataDirName, fileName), filepath.Join(dir, fileName)), + ) + } + + // (8) + newDataDirPath := filepath.Join(dir, newDataDirName) + require.NoError( + t, + os.Symlink(tsDirName, newDataDirPath), + ) + + // (9) + if runtime.GOOS == "windows" { + require.NoError(t, os.Remove(dataDirPath)) + require.NoError(t, os.Symlink(tsDirName, dataDirPath)) + require.NoError(t, os.Remove(newDataDirPath)) + } else { + require.NoError(t, os.Rename(newDataDirPath, dataDirPath)) + } + + // (10) in our case there is nothing to remove + + // (11) + if len(oldTsDir) > 0 { + require.NoError(t, os.RemoveAll(oldTsPath)) + } +} diff --git a/package-lock.json b/package-lock.json index b06e51b354c..3435ec57607 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,82 +1,45 @@ { "name": "@oryd/hydra", "version": "0.0.0", - "lockfileVersion": 2, + "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@oryd/hydra", "version": "0.0.0", "dependencies": { - "@openapitools/openapi-generator-cli": "^2.1.18", - "conventional-changelog-cli": "~2.2.2", - "doctoc": "^2.2.1" + "@openapitools/openapi-generator-cli": "2.25.2" }, "devDependencies": { - "cypress": "^9.7.0", - "dayjs": "^1.10.6", - "jsonwebtoken": "^8.5.1", - "license-checker": "^25.0.1", + "cypress": "9.7.0", + "dayjs": "1.11.18", + "doctoc": "^2.2.1", + "jsonwebtoken": "8.5.1", + "license-checker": "25.0.1", "ory-prettier-styles": "1.3.0", - "prettier": "2.7.1", - "prettier-plugin-packagejson": "^2.2.18", - "standard": "^12.0.1", - "uuid": "^8.3.2", - "wait-on": "^3.2.0" + "prettier": "3.6.2", + "prettier-plugin-packagejson": "2.5.19", + "standard": "17.1.2", + "uuid": "8.3.2", + "wait-on": "9.0.1" } }, - "node_modules/@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", - "dependencies": { - "@babel/highlight": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - 
"integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", - "dependencies": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight/node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "dev": true, - "optional": true, - "engines": { - "node": ">=0.1.90" + "node_modules/@borewit/text-codec": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.1.1.tgz", + "integrity": "sha512-5L/uBxmjaCIX5h8Z+uu+kA9BQLkc/Wl06UGR5ajNRxu+/XjonB5i8JpgFMrPj3LXTCPA0pv8yxUvbUi+QthGGA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" } }, "node_modules/@cypress/request": { - "version": "2.88.10", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", - "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", + "version": "2.88.12", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.12.tgz", + "integrity": "sha512-tOn+0mDZxASFM+cuAP9szGUGPI1HwWVSvdzm7V4cCsPdFTx6qMj29CwaQmRAMIEhORIUBFBsYROYJcveK4uOjA==", "dev": true, + "license": "Apache-2.0", "dependencies": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -91,9 +54,9 @@ "json-stringify-safe": "~5.0.1", "mime-types": "~2.1.19", "performance-now": "^2.1.0", - "qs": "~6.5.2", + "qs": "~6.10.3", "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", + "tough-cookie": "^4.1.3", "tunnel-agent": "^0.6.0", "uuid": "^8.3.2" }, @@ -101,240 +64,369 @@ "node": ">= 6" } }, - "node_modules/@cypress/request/node_modules/http-signature": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", - "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "node_modules/@cypress/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", "dev": true, + "license": "MIT", "dependencies": { - "assert-plus": "^1.0.0", - "jsprim": "^2.0.2", - "sshpk": "^1.14.1" + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" }, "engines": { - "node": ">=0.10" + "node": ">= 0.12" } }, - "node_modules/@cypress/request/node_modules/jsprim": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", - "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + 
"node_modules/@cypress/xvfb": { + "version": "1.2.4", "dev": true, - "engines": [ - "node >=0.6.0" - ], + "license": "MIT", "dependencies": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" + "debug": "^3.1.0", + "lodash.once": "^4.1.1" } }, - "node_modules/@cypress/request/node_modules/tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", "dev": true, + "license": "MIT", "dependencies": { - "psl": "^1.1.28", - "punycode": "^2.1.1" + "eslint-visitor-keys": "^3.4.3" }, "engines": { - "node": ">=0.8" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, - "node_modules/@cypress/xvfb": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", - "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", "dev": true, - "dependencies": { - "debug": "^3.1.0", - "lodash.once": "^4.1.1" + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, - "node_modules/@hutson/parse-repository-url": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@hutson/parse-repository-url/-/parse-repository-url-3.0.2.tgz", - "integrity": "sha512-H9XAx3hc0BQHY6l+IFSWHDySypcXsvsuLhgYLUGywmJ5pswRVQJUHpOsobnLYp2ZUaUlKiKDrgWWhosOwAEM8Q==", + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, "engines": { - "node": ">=6.9.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "node_modules/@eslint/eslintrc/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "dev": true, + "license": "MIT", "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" + "ms": "^2.1.3" }, "engines": { - "node": ">= 8" + 
"node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "node_modules/@eslint/eslintrc/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true, + "license": "MIT" + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 8" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "node_modules/@hapi/address": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@hapi/address/-/address-5.1.1.tgz", + "integrity": "sha512-A+po2d/dVoY7cYajycYI43ZbYMXukuopIsqCjh5QzsBCipDtdofHntljDlpccMjIfTy6UOkg+5KPriwYch2bXA==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" + "@hapi/hoek": "^11.0.2" }, "engines": { - "node": ">= 8" + "node": ">=14.0.0" } }, - "node_modules/@nuxtjs/opencollective": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@nuxtjs/opencollective/-/opencollective-0.3.2.tgz", - "integrity": "sha512-um0xL3fO7Mf4fDxcqx9KryrB7zgRM5JSlvGN5AGkP6JLM5XEKyjeAiPbNxdXVXQ16isuAhYpvP88NgL2BGd6aA==", + "node_modules/@hapi/formula": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@hapi/formula/-/formula-3.0.2.tgz", + "integrity": "sha512-hY5YPNXzw1He7s0iqkRQi+uMGh383CGdyyIGYtB+W5N3KHPXoqychklvHhKCC9M3Xtv0OCs/IHw+r4dcHtBYWw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@hapi/hoek": { + "version": "11.0.7", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-11.0.7.tgz", + "integrity": "sha512-HV5undWkKzcB4RZUusqOpcgxOaq6VOAH7zhhIr2g3G8NF/MlFO75SjOr2NfuSx0Mh40+1FqCkagKLJRykUWoFQ==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@hapi/pinpoint": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@hapi/pinpoint/-/pinpoint-2.0.1.tgz", + "integrity": "sha512-EKQmr16tM8s16vTT3cA5L0kZZcTMU5DUOZTuvpnY738m+jyP3JIUj+Mm1xc1rsLkGBQ/gVnfKYPwOmPg1tUR4Q==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@hapi/tlds": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@hapi/tlds/-/tlds-1.1.3.tgz", + "integrity": "sha512-QIvUMB5VZ8HMLZF9A2oWr3AFM430QC8oGd0L35y2jHpuW6bIIca6x/xL7zUf4J7L9WJ3qjz+iJII8ncaeMbpSg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@hapi/topo": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-6.0.2.tgz", + "integrity": "sha512-KR3rD5inZbGMrHmgPxsJ9dbi6zEK+C3ZwUwTa+eMwWLz7oijWUTWD2pMSNNYJAU6Qq+65NkxXjqHr/7LM2Xkqg==", + "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "chalk": "^4.1.0", - "consola": "^2.15.0", - "node-fetch": "^2.6.1" - 
}, - "bin": { - "opencollective": "bin/opencollective.js" + "@hapi/hoek": "^11.0.2" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" }, "engines": { - "node": ">=8.0.0", - "npm": ">=5.0.0" + "node": ">=10.10.0" } }, - "node_modules/@nuxtjs/opencollective/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "node_modules/@humanwhocodes/config-array/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", "dependencies": { - "color-convert": "^2.0.1" + "ms": "^2.1.3" }, "engines": { - "node": ">=8" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "type": "github", + "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@nuxtjs/opencollective/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": 
"sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "license": "MIT", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "@isaacs/balanced-match": "^4.0.1" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "node": "20 || >=22" } }, - "node_modules/@nuxtjs/opencollective/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "license": "ISC", "dependencies": { - "color-name": "~1.1.4" + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" }, "engines": { - "node": ">=7.0.0" + "node": ">=12" } }, - "node_modules/@nuxtjs/opencollective/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.1.0", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } }, - "node_modules/@nuxtjs/opencollective/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@nuxtjs/opencollective/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" }, "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@openapitools/openapi-generator-cli": { - "version": "2.4.26", - "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.4.26.tgz", - "integrity": "sha512-O42H9q1HWGoIpcpMaUu318b6bmOgcjP3MieHwOrFdoG3KyttceBGlbLf9Kbf7WM91WSNCDXum7cnEKASuoGjAg==", - "hasInstallScript": true, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "license": "MIT", "dependencies": { - "@nestjs/common": "8.2.6", - "@nestjs/core": "8.2.6", - "@nuxtjs/opencollective": "0.3.2", - "chalk": "4.1.2", - "commander": "8.3.0", - "compare-versions": "3.6.0", - "concurrently": "6.5.1", - "console.table": "0.10.0", - "fs-extra": "10.0.0", - 
"glob": "7.1.6", - "inquirer": "8.2.0", - "lodash": "4.17.21", - "reflect-metadata": "0.1.13", - "rxjs": "7.5.2", - "tslib": "2.0.3" + "ansi-regex": "^6.0.1" }, - "bin": { - "openapi-generator-cli": "main.js" + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" }, "engines": { - "node": ">=10.0.0" + "node": ">=12" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/openapi_generator" + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@lukeed/csprng": { + "version": "1.1.0", + "license": "MIT", + "engines": { + "node": ">=8" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/@nestjs/common": { - "version": "8.2.6", - "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-8.2.6.tgz", - "integrity": "sha512-flLYSXunxcKyjbYddrhwbc49uE705MxBt85rS3mHyhDbAIPSGGeZEqME44YyAzCg1NTfJSNe7ztmOce5kNkb9A==", + "node_modules/@nestjs/common": { + "version": "11.1.9", + "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-11.1.9.tgz", + "integrity": "sha512-zDntUTReRbAThIfSp3dQZ9kKqI+LjgLp5YZN5c1bgNRDuoeLySAoZg46Bg1a+uV8TMgIRziHocglKGNzr6l+bQ==", + "license": "MIT", + "peer": true, "dependencies": { - "axios": "0.24.0", + "file-type": "21.1.0", "iterare": "1.2.1", - "tslib": "2.3.1", - "uuid": "8.3.2" + "load-esm": "1.0.3", + "tslib": "2.8.1", + "uid": "2.0.2" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/nest" }, "peerDependencies": { - "cache-manager": "*", - "class-transformer": "*", - "class-validator": "*", - "reflect-metadata": "^0.1.12", + "class-transformer": ">=0.4.1", + "class-validator": ">=0.13.2", + "reflect-metadata": "^0.1.12 || ^0.2.0", "rxjs": "^7.1.0" }, "peerDependenciesMeta": { - "cache-manager": { - "optional": true - }, "class-transformer": { "optional": true }, @@ -343,35 +435,39 @@ } } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/@nestjs/common/node_modules/tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" + "node_modules/@nestjs/common/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/@nestjs/core": { - "version": "8.2.6", - "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-8.2.6.tgz", - "integrity": "sha512-NwPcEIMmCsucs3QaDlQvkoU1FlFM2wm/WjaqLQhkSoIEmAR1gNtBo88f5io5cpMwCo1k5xYhqGlaSl6TfngwWQ==", + "node_modules/@nestjs/core": { + "version": "11.1.9", + "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-11.1.9.tgz", + "integrity": "sha512-a00B0BM4X+9z+t3UxJqIZlemIwCQdYoPKrMcM+ky4z3pkqqG1eTWexjs+YXpGObnLnjtMPVKWlcZHp3adDYvUw==", "hasInstallScript": true, + "license": "MIT", "dependencies": { - "@nuxtjs/opencollective": "0.3.2", + "@nuxt/opencollective": "0.4.1", "fast-safe-stringify": "2.1.1", "iterare": "1.2.1", - "object-hash": "2.2.0", - "path-to-regexp": "3.2.0", - "tslib": "2.3.1", - "uuid": "8.3.2" + 
"path-to-regexp": "8.3.0", + "tslib": "2.8.1", + "uid": "2.0.2" + }, + "engines": { + "node": ">= 20" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/nest" }, "peerDependencies": { - "@nestjs/common": "^8.0.0", - "@nestjs/microservices": "^8.0.0", - "@nestjs/platform-express": "^8.0.0", - "@nestjs/websockets": "^8.0.0", - "reflect-metadata": "^0.1.12", + "@nestjs/common": "^11.0.0", + "@nestjs/microservices": "^11.0.0", + "@nestjs/platform-express": "^11.0.0", + "@nestjs/websockets": "^11.0.0", + "reflect-metadata": "^0.1.12 || ^0.2.0", "rxjs": "^7.1.0" }, "peerDependenciesMeta": { @@ -386,51 +482,94 @@ } } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/@nestjs/core/node_modules/tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" + "node_modules/@nestjs/core/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "engines": { - "node": ">=8" + "node": ">= 8" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", "dependencies": { - "color-convert": "^2.0.1" + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" }, "engines": { - "node": ">=8" + "node": ">= 8" + } + }, + "node_modules/@nuxt/opencollective": { + "version": "0.4.1", + "resolved": 
"https://registry.npmjs.org/@nuxt/opencollective/-/opencollective-0.4.1.tgz", + "integrity": "sha512-GXD3wy50qYbxCJ652bDrDzgMr3NFEkIS374+IgFQKkCvk9yiYcLvX2XDYr7UyQxf4wK0e+yqDYRubZ0DtOxnmQ==", + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "bin": { + "opencollective": "bin/opencollective.js" + }, + "engines": { + "node": "^14.18.0 || >=16.10.0", + "npm": ">=5.10.0" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/chalk": { + "node_modules/@nuxt/opencollective/node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + "node_modules/@nuxtjs/opencollective": { + "version": "0.3.2", + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "consola": "^2.15.0", + "node-fetch": "^2.6.1" + }, + "bin": { + "opencollective": "bin/opencollective.js" + }, + "engines": { + "node": ">=8.0.0", + "npm": ">=5.0.0" + } + }, + "node_modules/@nuxtjs/opencollective/node_modules/chalk": { "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -442,155 +581,251 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/chardet": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", - "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==" + "node_modules/@nuxtjs/opencollective/node_modules/has-flag": { + "version": "4.0.0", + "license": "MIT", + "engines": { + "node": ">=8" + } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "node_modules/@nuxtjs/opencollective/node_modules/supports-color": { + "version": "7.2.0", + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, "engines": { - "node": ">= 10" + "node": ">=8" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/@openapitools/openapi-generator-cli": { + "version": "2.25.2", + "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.25.2.tgz", + "integrity": "sha512-TXElbW1NXCy0EECXiO5AD2ZzT1dmaCs41Z8t3pBUGaJf8zgF/Lm0P6GRhVEpw29iHBNjZcy8nrgQ1acUfuCdng==", + "hasInstallScript": true, + "license": "Apache-2.0", "dependencies": { - "color-name": "~1.1.4" + "@nestjs/axios": "4.0.1", + "@nestjs/common": "11.1.9", + "@nestjs/core": "11.1.9", + "@nuxtjs/opencollective": "0.3.2", + "axios": "1.13.2", + "chalk": "4.1.2", + "commander": "8.3.0", + "compare-versions": "6.1.1", + "concurrently": "9.2.1", + 
"console.table": "0.10.0", + "fs-extra": "11.3.2", + "glob": "13.0.0", + "inquirer": "8.2.7", + "proxy-agent": "6.5.0", + "reflect-metadata": "0.2.2", + "rxjs": "7.8.2", + "tslib": "2.8.1" + }, + "bin": { + "openapi-generator-cli": "main.js" }, "engines": { - "node": ">=7.0.0" + "node": ">=16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/openapi_generator" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "node_modules/@openapitools/openapi-generator-cli/node_modules/@inquirer/external-editor": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "chardet": "^2.1.0", + "iconv-lite": "^0.6.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "node_modules/@openapitools/openapi-generator-cli/node_modules/@nestjs/axios": { + "version": "4.0.1", + "license": "MIT", + "peerDependencies": { + "@nestjs/common": "^10.0.0 || ^11.0.0", + "axios": "^1.3.1", + "rxjs": "^7.0.0" + } + }, + "node_modules/@openapitools/openapi-generator-cli/node_modules/ansi-escapes": { + "version": "4.3.2", + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, "engines": { - "node": ">= 12" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/external-editor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", - "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "node_modules/@openapitools/openapi-generator-cli/node_modules/chalk": { + "version": "4.1.2", + "license": "MIT", "dependencies": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@openapitools/openapi-generator-cli/node_modules/chardet": { + "version": "2.1.0", + "license": "MIT" + }, + "node_modules/@openapitools/openapi-generator-cli/node_modules/cli-width": { + "version": "3.0.0", + "license": "ISC", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@openapitools/openapi-generator-cli/node_modules/commander": { + "version": "8.3.0", + "license": "MIT", + "engines": { + "node": ">= 12" } }, "node_modules/@openapitools/openapi-generator-cli/node_modules/fs-extra": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", - "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "version": "11.3.2", + "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" }, "engines": { - "node": ">=12" + "node": ">=14.14" } }, 
"node_modules/@openapitools/openapi-generator-cli/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/@openapitools/openapi-generator-cli/node_modules/iconv-lite": { + "version": "0.6.3", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@openapitools/openapi-generator-cli/node_modules/inquirer": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.0.tgz", - "integrity": "sha512-0crLweprevJ02tTuA6ThpoAERAGyVILC4sS74uib58Xf/zSr1/ZWtmm7D5CI+bSQEaA04f0K7idaHpQbSWgiVQ==", + "version": "8.2.7", + "license": "MIT", "dependencies": { + "@inquirer/external-editor": "^1.0.0", "ansi-escapes": "^4.2.1", "chalk": "^4.1.1", "cli-cursor": "^3.1.0", "cli-width": "^3.0.0", - "external-editor": "^3.0.3", "figures": "^3.0.0", "lodash": "^4.17.21", "mute-stream": "0.0.8", "ora": "^5.4.1", "run-async": "^2.4.0", - "rxjs": "^7.2.0", + "rxjs": "^7.5.5", "string-width": "^4.1.0", "strip-ansi": "^6.0.0", - "through": "^2.3.6" + "through": "^2.3.6", + "wrap-ansi": "^6.0.1" }, "engines": { - "node": ">=8.0.0" + "node": ">=12.0.0" } }, "node_modules/@openapitools/openapi-generator-cli/node_modules/mute-stream": { "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" + "license": "ISC" }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/rxjs": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.2.tgz", - "integrity": "sha512-PwDt186XaL3QN5qXj/H9DGyHhP3/RYYgZZwqBv9Tv8rsAaiwFH1IsJJlcgD37J7UW5a6O67qX0KWKS3/pu0m4w==", + "node_modules/@openapitools/openapi-generator-cli/node_modules/supports-color": { + "version": "7.2.0", + "license": "MIT", "dependencies": { - "tslib": "^2.1.0" + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/rxjs/node_modules/tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" + "node_modules/@openapitools/openapi-generator-cli/node_modules/tslib": { + "version": "2.8.1", + "license": "0BSD" }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "node_modules/@openapitools/openapi-generator-cli/node_modules/wrap-ansi": { + "version": "6.2.0", + "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.1" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dependencies": { - "has-flag": 
"^4.0.0" - }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" } }, - "node_modules/@openapitools/openapi-generator-cli/node_modules/tslib": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.0.3.tgz", - "integrity": "sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ==" + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "dev": true, + "license": "MIT" }, "node_modules/@textlint/ast-node-types": { "version": "12.2.2", - "resolved": "https://registry.npmjs.org/@textlint/ast-node-types/-/ast-node-types-12.2.2.tgz", - "integrity": "sha512-VQAXUSGdmEajHXrMxeM9ZTS8UBJSVB0ghJFHpFfqYKlcDsjIqClSmTprY6521HoCoSLoUIGBxTC3jQyUMJFIWw==" + "dev": true, + "license": "MIT" }, "node_modules/@textlint/markdown-to-ast": { "version": "12.2.2", - "resolved": "https://registry.npmjs.org/@textlint/markdown-to-ast/-/markdown-to-ast-12.2.2.tgz", - "integrity": "sha512-OP0cnGCzt8Bbfhn8fO/arQSHBhmuXB4maSXH8REJAtKRpTADWOrbuxAOaI9mjQ7EMTDiml02RZ9MaELQAWAsqQ==", + "dev": true, + "license": "MIT", "dependencies": { "@textlint/ast-node-types": "^12.2.2", "debug": "^4.3.4", @@ -605,8 +840,8 @@ }, "node_modules/@textlint/markdown-to-ast/node_modules/debug": { "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -619,84 +854,120 @@ } } }, - "node_modules/@types/glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", - "dev": true, + "node_modules/@tokenizer/inflate": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.3.1.tgz", + "integrity": "sha512-4oeoZEBQdLdt5WmP/hx1KZ6D3/Oid/0cUb2nk4F0pTDAWy+KCH3/EnAkZF/bvckWo8I33EqBm01lIPgmgc8rCA==", + "license": "MIT", "dependencies": { - "@types/minimatch": "*", - "@types/node": "*" + "debug": "^4.4.1", + "fflate": "^0.8.2", + "token-types": "^6.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/@tokenizer/inflate/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + 
"peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, + "node_modules/@tokenizer/inflate/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "license": "MIT" + }, + "node_modules/@tootallnate/quickjs-emscripten": { + "version": "0.23.0", + "license": "MIT" + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/mdast": { "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", - "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", + "dev": true, + "license": "MIT", "dependencies": { "@types/unist": "*" } }, - "node_modules/@types/minimatch": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", - "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", - "dev": true - }, - "node_modules/@types/minimist": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz", - "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==" - }, "node_modules/@types/node": { "version": "14.17.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.17.5.tgz", - "integrity": "sha512-bjqH2cX/O33jXT/UmReo2pM7DIJREPMnarixbQ57DOOzzFaI6D2+IcwaJQaJpv0M1E9TIhPCYVxrkcityLjlqA==", - "dev": true - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", - "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==" + "dev": true, + "license": "MIT" }, "node_modules/@types/sinonjs__fake-timers": { "version": "8.1.1", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", - "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/sizzle": { "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.2.tgz", - "integrity": "sha512-7EJYyKTL7tFR8+gDbB6Wwz/arpGa0Mywk1TJbNzKzHtzbwVmY4HR9WqS5VV7dsBUKQmPNr192jHr/VpBluj/hg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/unist": { "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", - "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" + "dev": true, + "license": "MIT" }, "node_modules/@types/yauzl": { "version": "2.9.2", - "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", - "integrity": 
"sha512-8uALY5LTvSuHgloDVUvWP3pIauILm+8/0pDMokuDYIoNsOkSwd5AiHBTSEJjKTDcZr5z8UpgOWZkxBF4iJftoA==", "dev": true, + "license": "MIT", "optional": true, "dependencies": { "@types/node": "*" } }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, "node_modules/abbrev": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/acorn": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", - "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, + "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -705,21 +976,26 @@ } }, "node_modules/acorn-jsx": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.0.1.tgz", - "integrity": "sha512-HJ7CfNHrfJLlNTzIEUTj43LNWGkqpRLxm3YjAlcD0ACydk9XynzYsCBHxut+iqt+1aBXkx9UP/w/ZqMr13XIzg==", - "dev": true + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } }, - "node_modules/add-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/add-stream/-/add-stream-1.0.0.tgz", - "integrity": "sha512-qQLMr+8o0WC4FZGQTcJiKBVC59JylcPSrTtk6usvmIDFUOCKegapy1VHQwRbFMOFyb/inzUVqHs+eMYKDM1YeQ==" + "node_modules/agent-base": { + "version": "7.1.3", + "license": "MIT", + "engines": { + "node": ">= 14" + } }, "node_modules/aggregate-error": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "dev": true, + "license": "MIT", "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -733,6 +1009,7 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -744,65 +1021,71 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/ajv-keywords": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.0.tgz", - "integrity": "sha512-aUjdRFISbuFOl0EIZc+9e4FfZp0bDZgAdOOf30bJmw8VM9v84SHyVyxDfbWxpGYbdZD/9XoKxfHVNmxPkhwyGw==", - "dev": true - }, "node_modules/anchor-markdown-header": { "version": "0.6.0", - "resolved": "https://registry.npmjs.org/anchor-markdown-header/-/anchor-markdown-header-0.6.0.tgz", - "integrity": 
"sha512-v7HJMtE1X7wTpNFseRhxsY/pivP4uAJbidVhPT+yhz4i/vV1+qx371IXuV9V7bN6KjFtheLJxqaSm0Y/8neJTA==", + "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "~10.1.0" } }, "node_modules/anchor-markdown-header/node_modules/emoji-regex": { "version": "10.1.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.1.0.tgz", - "integrity": "sha512-xAEnNCT3w2Tg6MA7ly6QqYJvEoY1tm9iIjJ3yMKK9JPlWuRHAMoe5iETwQnx3M9TVbFMfsrBgWKR+IsmswwNjg==" + "dev": true, + "license": "MIT" }, "node_modules/ansi-colors": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, - "node_modules/ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, "engines": { - "node": ">=0.10.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true, + "node_modules/ansi-styles/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, "engines": { - "node": ">=0.10.0" + "node": ">=7.0.0" } }, + "node_modules/ansi-styles/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, "node_modules/arch": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", "dev": true, "funding": [ { @@ -817,72 +1100,195 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" }, "node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": 
"sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", "dev": true, + "license": "MIT", "dependencies": { - "sprintf-js": "~1.0.2" + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/array-find-index": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", - "integrity": "sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/array-ify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", - "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==" - }, "node_modules/array-includes": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.0.3.tgz", - "integrity": "sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0=", + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", + "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", "dev": true, + "license": "MIT", "dependencies": { - "define-properties": "^1.1.2", - "es-abstract": "^1.7.0" + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.0", + "es-object-atoms": "^1.1.1", + "get-intrinsic": "^1.3.0", + "is-string": "^1.1.1", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "node_modules/array.prototype.findlastindex": { + "version": "1.2.6", + 
"resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz", + "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-shim-unscopables": "^1.1.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/asap": { "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "version": "0.2.6", + "resolved": 
"https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", "dev": true, + "license": "MIT", "dependencies": { "safer-buffer": "~2.1.0" } @@ -890,105 +1296,115 @@ "node_modules/assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.8" } }, + "node_modules/ast-types": { + "version": "0.13.4", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ast-types/node_modules/tslib": { + "version": "2.8.1", + "license": "0BSD" + }, "node_modules/astral-regex": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/async": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", - "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==", - "dev": true + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } }, "node_modules/asynckit": { "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true + "license": "MIT" }, "node_modules/at-least-node": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", "dev": true, + "license": "ISC", "engines": { "node": ">= 4.0.0" } }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "*" } }, "node_modules/aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": 
"sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", - "dev": true - }, - "node_modules/axios": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.24.0.tgz", - "integrity": "sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==", - "dependencies": { - "follow-redirects": "^1.14.4" - } - }, - "node_modules/babel-code-frame": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", - "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.13.2.tgz", + "integrity": "sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw==", "dev": true, - "dependencies": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - } + "license": "MIT" }, - "node_modules/babel-code-frame/node_modules/chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, + "node_modules/axios": { + "version": "1.12.0", + "license": "MIT", + "peer": true, "dependencies": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" } }, - "node_modules/babel-code-frame/node_modules/supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true, - "engines": { - "node": ">=0.8.0" - } + "node_modules/axios/node_modules/proxy-from-env": { + "version": "1.1.0", + "license": "MIT" }, "node_modules/bail": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", - "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -996,13 +1412,11 @@ }, "node_modules/balanced-match": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + "dev": true, + "license": "MIT" }, "node_modules/base64-js": { "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "funding": [ { "type": "github", @@ -1016,21 +1430,29 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" + }, + "node_modules/basic-ftp": { + "version": "5.0.5", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "tweetnacl": "^0.14.3" } }, "node_modules/bl": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": 
"sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "license": "MIT", "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", @@ -1039,41 +1461,25 @@ }, "node_modules/blob-util": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", - "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", - "dev": true + "dev": true, + "license": "Apache-2.0" }, "node_modules/bluebird": { "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, - "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/buffer": { "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "funding": [ { "type": "github", @@ -1088,6 +1494,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" @@ -1095,9 +1502,8 @@ }, "node_modules/buffer-crc32": { "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", "dev": true, + "license": "MIT", "engines": { "node": "*" } @@ -1105,73 +1511,107 @@ "node_modules/buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=", - "dev": true + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/builtins": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", + "integrity": "sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.0.0" + } + }, + "node_modules/builtins/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } }, "node_modules/cachedir": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", - "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", 
"dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/caller-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-0.1.0.tgz", - "integrity": "sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8=", + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", "dev": true, + "license": "MIT", "dependencies": { - "callsites": "^0.2.0" + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/callsites": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-0.2.0.tgz", - "integrity": "sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, "engines": { - "node": ">=6" + "node": ">= 0.4" } }, - "node_modules/camelcase-keys": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", - "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "node_modules/call-bound": { + "version": "1.0.4", + "dev": true, + "license": "MIT", "dependencies": { - "camelcase": "^5.3.1", - "map-obj": "^4.0.0", - "quick-lru": "^4.0.1" + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" }, "engines": { - "node": ">=8" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" } }, "node_modules/caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", + "dev": true, + "license": "Apache-2.0" }, "node_modules/ccount": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", - "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -1179,8 +1619,8 @@ }, "node_modules/chalk": { "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "license": "MIT", 
"dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -1192,8 +1632,8 @@ }, "node_modules/chalk/node_modules/ansi-styles": { "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^1.9.0" }, @@ -1203,8 +1643,8 @@ }, "node_modules/character-entities": { "version": "1.2.4", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", - "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -1212,8 +1652,8 @@ }, "node_modules/character-entities-legacy": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", - "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -1221,53 +1661,50 @@ }, "node_modules/character-reference-invalid": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", - "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/chardet": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz", - "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=", - "dev": true - }, "node_modules/check-more-types": { "version": "2.24.0", "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", - "integrity": "sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA=", + "integrity": "sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } }, "node_modules/ci-info": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.2.0.tgz", - "integrity": "sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A==", - "dev": true - }, - "node_modules/circular-json": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/circular-json/-/circular-json-0.3.3.tgz", - "integrity": "sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A==", - "dev": true + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } }, "node_modules/clean-stack": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, 
"node_modules/cli-cursor": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "license": "MIT", "dependencies": { "restore-cursor": "^3.1.0" }, @@ -1276,9 +1713,8 @@ } }, "node_modules/cli-spinners": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.6.1.tgz", - "integrity": "sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g==", + "version": "2.9.2", + "license": "MIT", "engines": { "node": ">=6" }, @@ -1287,10 +1723,9 @@ } }, "node_modules/cli-table3": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.2.tgz", - "integrity": "sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw==", + "version": "0.6.1", "dev": true, + "license": "MIT", "dependencies": { "string-width": "^4.2.0" }, @@ -1298,14 +1733,13 @@ "node": "10.* || >= 12.*" }, "optionalDependencies": { - "@colors/colors": "1.5.0" + "colors": "1.4.0" } }, "node_modules/cli-truncate": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", - "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", "dev": true, + "license": "MIT", "dependencies": { "slice-ansi": "^3.0.0", "string-width": "^4.2.0" @@ -1317,73 +1751,43 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cli-width": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", - "dev": true - }, - "node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "node_modules/cliui/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/cliui/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/clone": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", + "license": "MIT", "engines": { "node": ">=0.8" } }, "node_modules/color-convert": { "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "license": "MIT", "dependencies": { "color-name": "1.1.3" } }, "node_modules/color-name": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + "dev": true, + "license": "MIT" 
}, "node_modules/colorette": { "version": "1.2.2", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz", - "integrity": "sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w==", - "dev": true + "dev": true, + "license": "MIT" }, - "node_modules/combined-stream": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", - "integrity": "sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==", + "node_modules/colors": { + "version": "1.4.0", "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "license": "MIT", "dependencies": { "delayed-stream": "~1.0.0" }, @@ -1396,77 +1800,53 @@ "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 6" } }, "node_modules/common-tags": { "version": "1.8.0", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", - "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", "dev": true, + "license": "MIT", "engines": { "node": ">=4.0.0" } }, - "node_modules/compare-func": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz", - "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==", - "dependencies": { - "array-ify": "^1.0.0", - "dot-prop": "^5.1.0" - } - }, "node_modules/compare-versions": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/compare-versions/-/compare-versions-3.6.0.tgz", - "integrity": "sha512-W6Af2Iw1z4CB7q4uU4hv646dW9GQuBM+YpC0UvUCWSD8w90SJjp+ujJuXaEMtAXBtSqGfMPuFOVn4/+FlaqfBA==" + "version": "6.1.1", + "license": "MIT" }, "node_modules/concat-map": { "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "dev": true, + "license": "MIT" }, "node_modules/concurrently": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-6.5.1.tgz", - "integrity": "sha512-FlSwNpGjWQfRwPLXvJ/OgysbBxPkWpiVjy1042b0U7on7S7qwwMIILRj7WTN1mTgqa582bG6NFuScOoh6Zgdag==", + "version": "9.2.1", + "license": "MIT", "dependencies": { - "chalk": "^4.1.0", - "date-fns": "^2.16.1", - "lodash": "^4.17.21", - "rxjs": "^6.6.3", - "spawn-command": "^0.0.2-1", - "supports-color": "^8.1.0", - "tree-kill": "^1.2.2", - "yargs": "^16.2.0" + "chalk": "4.1.2", + "rxjs": "7.8.2", + "shell-quote": "1.8.3", + "supports-color": "8.1.1", + "tree-kill": "1.2.2", + "yargs": "17.7.2" }, "bin": { - "concurrently": "bin/concurrently.js" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/concurrently/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" + "conc": "dist/bin/concurrently.js", + "concurrently": "dist/bin/concurrently.js" }, "engines": { - "node": ">=8" + "node": ">=18" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + 
"url": "https://github.com/open-cli-tools/concurrently?sponsor=1" } }, "node_modules/concurrently/node_modules/chalk": { "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -1480,8 +1860,7 @@ }, "node_modules/concurrently/node_modules/chalk/node_modules/supports-color": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -1489,45 +1868,28 @@ "node": ">=8" } }, - "node_modules/concurrently/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/concurrently/node_modules/cliui": { + "version": "8.0.1", + "license": "ISC", "dependencies": { - "color-name": "~1.1.4" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" }, "engines": { - "node": ">=7.0.0" + "node": ">=12" } }, - "node_modules/concurrently/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "node_modules/concurrently/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/concurrently/node_modules/rxjs": { - "version": "6.6.7", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", - "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", - "dependencies": { - "tslib": "^1.9.0" - }, - "engines": { - "npm": ">=2.0.0" - } - }, "node_modules/concurrently/node_modules/supports-color": { "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -1538,342 +1900,62 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/consola": { - "version": "2.15.3", - "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", - "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" - }, - "node_modules/console.table": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/console.table/-/console.table-0.10.0.tgz", - "integrity": "sha1-CRcCVYiHW+/XDPLv9L7yxuLXXQQ=", - "dependencies": { - "easy-table": "1.1.0" - }, - "engines": { - "node": "> 0.10" - } - }, - "node_modules/contains-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", - "dev": true, - "engines": { - "node": ">=0.10.0" - 
} - }, - "node_modules/conventional-changelog": { - "version": "3.1.25", - "resolved": "https://registry.npmjs.org/conventional-changelog/-/conventional-changelog-3.1.25.tgz", - "integrity": "sha512-ryhi3fd1mKf3fSjbLXOfK2D06YwKNic1nC9mWqybBHdObPd8KJ2vjaXZfYj1U23t+V8T8n0d7gwnc9XbIdFbyQ==", - "dependencies": { - "conventional-changelog-angular": "^5.0.12", - "conventional-changelog-atom": "^2.0.8", - "conventional-changelog-codemirror": "^2.0.8", - "conventional-changelog-conventionalcommits": "^4.5.0", - "conventional-changelog-core": "^4.2.1", - "conventional-changelog-ember": "^2.0.9", - "conventional-changelog-eslint": "^3.0.9", - "conventional-changelog-express": "^2.0.6", - "conventional-changelog-jquery": "^3.0.11", - "conventional-changelog-jshint": "^2.0.9", - "conventional-changelog-preset-loader": "^2.3.4" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-angular": { - "version": "5.0.13", - "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz", - "integrity": "sha512-i/gipMxs7s8L/QeuavPF2hLnJgH6pEZAttySB6aiQLWcX3puWDL3ACVmvBhJGxnAy52Qc15ua26BufY6KpmrVA==", - "dependencies": { - "compare-func": "^2.0.0", - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-atom": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/conventional-changelog-atom/-/conventional-changelog-atom-2.0.8.tgz", - "integrity": "sha512-xo6v46icsFTK3bb7dY/8m2qvc8sZemRgdqLb/bjpBsH2UyOS8rKNTgcb5025Hri6IpANPApbXMg15QLb1LJpBw==", - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-cli": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/conventional-changelog-cli/-/conventional-changelog-cli-2.2.2.tgz", - "integrity": "sha512-8grMV5Jo8S0kP3yoMeJxV2P5R6VJOqK72IiSV9t/4H5r/HiRqEBQ83bYGuz4Yzfdj4bjaAEhZN/FFbsFXr5bOA==", - "dependencies": { - "add-stream": "^1.0.0", - "conventional-changelog": "^3.1.24", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "tempfile": "^3.0.0" - }, - "bin": { - "conventional-changelog": "cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-codemirror": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/conventional-changelog-codemirror/-/conventional-changelog-codemirror-2.0.8.tgz", - "integrity": "sha512-z5DAsn3uj1Vfp7po3gpt2Boc+Bdwmw2++ZHa5Ak9k0UKsYAO5mH1UBTN0qSCuJZREIhX6WU4E1p3IW2oRCNzQw==", - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-conventionalcommits": { - "version": "4.6.3", - "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-4.6.3.tgz", - "integrity": "sha512-LTTQV4fwOM4oLPad317V/QNQ1FY4Hju5qeBIM1uTHbrnCE+Eg4CdRZ3gO2pUeR+tzWdp80M2j3qFFEDWVqOV4g==", - "dependencies": { - "compare-func": "^2.0.0", - "lodash": "^4.17.15", - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/conventional-changelog-core/-/conventional-changelog-core-4.2.4.tgz", - "integrity": "sha512-gDVS+zVJHE2v4SLc6B0sLsPiloR0ygU7HaDW14aNJE1v4SlqJPILPl/aJC7YdtRE4CybBf8gDwObBvKha8Xlyg==", - "dependencies": { - "add-stream": "^1.0.0", - "conventional-changelog-writer": "^5.0.0", - "conventional-commits-parser": "^3.2.0", - "dateformat": "^3.0.0", - 
"get-pkg-repo": "^4.0.0", - "git-raw-commits": "^2.0.8", - "git-remote-origin-url": "^2.0.0", - "git-semver-tags": "^4.1.1", - "lodash": "^4.17.15", - "normalize-package-data": "^3.0.0", - "q": "^1.5.1", - "read-pkg": "^3.0.0", - "read-pkg-up": "^3.0.0", - "through2": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha512-YFzFrVvpC6frF1sz8psoHDBGF7fLPc+llq/8NB43oagqWkx8ar5zYtsTORtOjw9W2RHLpWP+zTWwBvf1bCmcSw==", - "dependencies": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-changelog-core/node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-ember": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/conventional-changelog-ember/-/conventional-changelog-ember-2.0.9.tgz", - "integrity": "sha512-ulzIReoZEvZCBDhcNYfDIsLTHzYHc7awh+eI44ZtV5cx6LVxLlVtEmcO+2/kGIHGtw+qVabJYjdI5cJOQgXh1A==", - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-eslint": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/conventional-changelog-eslint/-/conventional-changelog-eslint-3.0.9.tgz", - "integrity": "sha512-6NpUCMgU8qmWmyAMSZO5NrRd7rTgErjrm4VASam2u5jrZS0n38V7Y9CzTtLT2qwz5xEChDR4BduoWIr8TfwvXA==", - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-express": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/conventional-changelog-express/-/conventional-changelog-express-2.0.6.tgz", - "integrity": "sha512-SDez2f3iVJw6V563O3pRtNwXtQaSmEfTCaTBPCqn0oG0mfkq0rX4hHBq5P7De2MncoRixrALj3u3oQsNK+Q0pQ==", - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-jquery": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/conventional-changelog-jquery/-/conventional-changelog-jquery-3.0.11.tgz", - "integrity": "sha512-x8AWz5/Td55F7+o/9LQ6cQIPwrCjfJQ5Zmfqi8thwUEKHstEn4kTIofXub7plf1xvFA2TqhZlq7fy5OmV6BOMw==", - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - 
"node": ">=10" - } - }, - "node_modules/conventional-changelog-jshint": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/conventional-changelog-jshint/-/conventional-changelog-jshint-2.0.9.tgz", - "integrity": "sha512-wMLdaIzq6TNnMHMy31hql02OEQ8nCQfExw1SE0hYL5KvU+JCTuPaDO+7JiogGT2gJAxiUGATdtYYfh+nT+6riA==", + "node_modules/concurrently/node_modules/yargs": { + "version": "17.7.2", + "license": "MIT", "dependencies": { - "compare-func": "^2.0.0", - "q": "^1.5.1" + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" }, "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-preset-loader": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-2.3.4.tgz", - "integrity": "sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g==", - "engines": { - "node": ">=10" + "node": ">=12" } }, - "node_modules/conventional-changelog-writer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-5.0.1.tgz", - "integrity": "sha512-5WsuKUfxW7suLblAbFnxAcrvf6r+0b7GvNaWUwUIk0bXMnENP/PEieGKVUQrjPqwPT4o3EPAASBXiY6iHooLOQ==", - "dependencies": { - "conventional-commits-filter": "^2.0.7", - "dateformat": "^3.0.0", - "handlebars": "^4.7.7", - "json-stringify-safe": "^5.0.1", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "semver": "^6.0.0", - "split": "^1.0.0", - "through2": "^4.0.0" - }, - "bin": { - "conventional-changelog-writer": "cli.js" - }, + "node_modules/concurrently/node_modules/yargs-parser": { + "version": "21.1.1", + "license": "ISC", "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-writer/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "bin": { - "semver": "bin/semver.js" + "node": ">=12" } }, - "node_modules/conventional-commits-filter": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-2.0.7.tgz", - "integrity": "sha512-ASS9SamOP4TbCClsRHxIHXRfcGCnIoQqkvAzCSbZzTFLfcTqJVugB0agRgsEELsqaeWgsXv513eS116wnlSSPA==", - "dependencies": { - "lodash.ismatch": "^4.4.0", - "modify-values": "^1.0.0" - }, - "engines": { - "node": ">=10" - } + "node_modules/consola": { + "version": "2.15.3", + "license": "MIT" }, - "node_modules/conventional-commits-parser": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.2.4.tgz", - "integrity": "sha512-nK7sAtfi+QXbxHCYfhpZsfRtaitZLIA6889kFIouLvz6repszQDgxBu7wf2WbU+Dco7sAnNCJYERCwt54WPC2Q==", + "node_modules/console.table": { + "version": "0.10.0", + "license": "MIT", "dependencies": { - "is-text-path": "^1.0.1", - "JSONStream": "^1.0.4", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "split2": "^3.0.0", - "through2": "^4.0.0" - }, - "bin": { - "conventional-commits-parser": "cli.js" + "easy-table": "1.1.0" }, "engines": { - "node": ">=10" + "node": "> 0.10" } }, - "node_modules/core-js": { - "version": "2.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.5.tgz", - "integrity": 
"sha512-klh/kDpwX8hryYL14M9w/xei6vrv6sE8gTHDG7/T/+SEovB/G4ejwcfE/CBzO6Edsu+OETZMZ3wcX/EjUkrl5A==", - "dev": true - }, "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==", + "dev": true, + "license": "MIT" }, "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" }, "engines": { - "node": ">=4.8" + "node": ">= 8" } }, "node_modules/cypress": { @@ -1882,6 +1964,7 @@ "integrity": "sha512-+1EE1nuuuwIt/N1KXRR2iWHU+OiIt7H28jJDyyI4tiUftId/DrXYEwoDa5+kH2pki1zxnA0r6HrUGHV5eLbF5Q==", "dev": true, "hasInstallScript": true, + "license": "MIT", "dependencies": { "@cypress/request": "^2.88.10", "@cypress/xvfb": "^1.2.4", @@ -1933,26 +2016,10 @@ "node": ">=12.0.0" } }, - "node_modules/cypress/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, "node_modules/cypress/node_modules/chalk": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -1966,9 +2033,8 @@ }, "node_modules/cypress/node_modules/chalk/node_modules/supports-color": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -1976,29 +2042,10 @@ "node": ">=8" } }, - "node_modules/cypress/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/cypress/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, "node_modules/cypress/node_modules/debug": { - "version": "4.3.2", - 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.4", "dev": true, + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -2013,36 +2060,16 @@ }, "node_modules/cypress/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/cypress/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/cypress/node_modules/semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "version": "7.7.2", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -2052,9 +2079,8 @@ }, "node_modules/cypress/node_modules/supports-color": { "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -2065,218 +2091,211 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/cypress/node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", "dev": true, + "license": "MIT", "dependencies": { - "rimraf": "^3.0.0" + "assert-plus": "^1.0.0" }, "engines": { - "node": ">=8.17.0" + "node": ">=0.10" } }, - "node_modules/dargs": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/dargs/-/dargs-7.0.0.tgz", - "integrity": "sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg==", + "node_modules/data-uri-to-buffer": { + "version": "6.0.2", + "license": "MIT", "engines": { - "node": ">=8" + "node": ">= 14" } }, - "node_modules/dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", "dev": true, + "license": "MIT", "dependencies": { - "assert-plus": "^1.0.0" + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" }, "engines": { - "node": 
">=0.10" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/date-fns": { - "version": "2.28.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.28.0.tgz", - "integrity": "sha512-8d35hViGYx/QH0icHYCeLmsLmMUheMmTyV9Fcm6gvNwdw31yXXH+O85sOBJ+OLnLQMKZowvpKb6FgMIQjcpvQw==", + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, "engines": { - "node": ">=0.11" + "node": ">= 0.4" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/date-fns" + "url": "https://github.com/sponsors/inspect-js" } }, - "node_modules/dateformat": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", - "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, "engines": { - "node": "*" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/dayjs": { - "version": "1.10.6", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.6.tgz", - "integrity": "sha512-AztC/IOW4L1Q41A86phW5Thhcrco3xuAA+YX/BLpLWWjRcTj5TOt/QImBLmCKlrF7u7k47arTnOyL6GnbG8Hvw==", - "dev": true + "version": "1.11.18", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.18.tgz", + "integrity": "sha512-zFBQ7WFRvVRhKcWoUh+ZA1g2HVgUbsZm9sbddh8EC5iv93sui8DVVz1Npvz+r6meo9VKfa8NyLWBsQK1VvIKPA==", + "dev": true, + "license": "MIT" }, "node_modules/debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, - "node_modules/debug-log": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/debug-log/-/debug-log-1.0.1.tgz", - "integrity": "sha1-IwdjLUwEOCuN+KMvcLiVBG1SdF8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/debuglog": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz", - "integrity": "sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==", "dev": true, + "license": "MIT", "engines": { "node": "*" } }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz", - "integrity": "sha512-ocLWuYzRPoS9bfiSdDd3cxvrzovVMZnRDVEzAs+hWIVXGDbHxWMECij2OBuyB/An0FFW/nLuq6Kv1i/YC5Qfzg==", - "dependencies": { - "decamelize": "^1.1.0", - "map-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys/node_modules/map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/deep-is": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", - "dev": true + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" }, "node_modules/defaults": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", - "integrity": "sha1-xlYFHpgX2f8I7YgUd/P+QBnz730=", + "license": "MIT", "dependencies": { "clone": "^1.0.2" } }, - "node_modules/define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, + "license": "MIT", "dependencies": { - "object-keys": "^1.0.12" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/deglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/deglob/-/deglob-2.1.1.tgz", - "integrity": "sha512-2kjwuGGonL7gWE1XU4Fv79+vVzpoQCl0V+boMwWtOQJV2AGDabCwez++nB1Nli/8BabAfZQ/UuHPlp6AymKdWw==", + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "dev": true, + "license": "MIT", "dependencies": { - "find-root": "^1.0.0", - "glob": "^7.0.5", - "ignore": "^3.0.9", - "pkg-config": "^1.1.0", - "run-parallel": "^1.1.2", - "uniq": "^1.0.1" + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/deglob/node_modules/ignore": { - "version": "3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", - "dev": true + "node_modules/degenerator": { + "version": "5.0.1", + "license": "MIT", + "dependencies": { + "ast-types": "^0.13.4", + "escodegen": "^2.1.0", + "esprima": "^4.0.1" + }, + "engines": { + "node": ">= 14" + } }, "node_modules/delayed-stream": { "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true, + "license": "MIT", "engines": { "node": ">=0.4.0" } }, "node_modules/detect-indent": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", - "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-7.0.2.tgz", + "integrity": "sha512-y+8xyqdGLL+6sh0tVeHcfP/QDd8gUgbasolJJpY7NgeQGSZ739bDtSiaiDgtoicy+mtYB81dKLxO9xRhCyIB3A==", "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/detect-newline": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", - "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-4.0.1.tgz", + "integrity": "sha512-qE3Veg1YXzGHQhlA6jzebZN2qVf6NX+A7m7qlhCGG30dJixrAQhYOsJjsnBjJkCSmuOPpCk30145fr8FV0bzog==", "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/dezalgo": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", - "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", "dev": true, + "license": "ISC", "dependencies": { "asap": "^2.0.0", "wrappy": "1" } }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/doctoc": { "version": "2.2.1", - "resolved": "https://registry.npmjs.org/doctoc/-/doctoc-2.2.1.tgz", - "integrity": "sha512-qNJ1gsuo7hH40vlXTVVrADm6pdg30bns/Mo7Nv1SxuXSM1bwF9b4xQ40a6EFT/L1cI+Yylbyi8MPI4G4y7XJzQ==", + "dev": true, + "license": "MIT", "dependencies": { "@textlint/markdown-to-ast": "^12.1.1", "anchor-markdown-header": "^0.6.0", @@ -2290,21 +2309,22 @@ } }, "node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, "engines": { - "node": ">=0.10.0" + "node": ">=6.0.0" } }, "node_modules/dom-serializer": { "version": "1.4.1", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", - "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", + "dev": true, + "license": "MIT", "dependencies": { "domelementtype": "^2.0.1", "domhandler": "^4.2.0", @@ -2316,27 +2336,27 @@ }, 
"node_modules/dom-serializer/node_modules/entities": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", - "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", + "dev": true, + "license": "BSD-2-Clause", "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/domelementtype": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "dev": true, "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } - ] + ], + "license": "BSD-2-Clause" }, "node_modules/domhandler": { "version": "4.3.1", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", - "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { "domelementtype": "^2.2.0" }, @@ -2349,8 +2369,8 @@ }, "node_modules/domutils": { "version": "2.8.0", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", - "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { "dom-serializer": "^1.0.1", "domelementtype": "^2.2.0", @@ -2360,21 +2380,25 @@ "url": "https://github.com/fb55/domutils?sponsor=1" } }, - "node_modules/dot-prop": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "node_modules/dunder-proto": { + "version": "1.0.1", + "license": "MIT", "dependencies": { - "is-obj": "^2.0.0" + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "license": "MIT" + }, "node_modules/easy-table": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/easy-table/-/easy-table-1.1.0.tgz", - "integrity": "sha1-hvmrTBAvA3G3KXuSplHVgkvIy3M=", + "license": "MIT", "optionalDependencies": { "wcwidth": ">=1.0.1" } @@ -2382,8 +2406,9 @@ "node_modules/ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", "dev": true, + "license": "MIT", "dependencies": { "jsbn": "~0.1.0", "safer-buffer": "^2.1.0" @@ -2394,29 +2419,28 @@ "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "safe-buffer": "^5.0.1" } }, "node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "license": "MIT" }, "node_modules/end-of-stream": { "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": 
"sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dev": true, + "license": "MIT", "dependencies": { "once": "^1.4.0" } }, "node_modules/enquirer": { "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", "dev": true, + "license": "MIT", + "peer": true, "dependencies": { "ansi-colors": "^4.1.1" }, @@ -2426,8 +2450,8 @@ }, "node_modules/entities": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", - "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==", + "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=0.12" }, @@ -2436,370 +2460,792 @@ } }, "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", "dependencies": { "is-arrayish": "^0.2.1" } }, "node_modules/es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": 
"^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-iterator-helpers": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.1.tgz", + "integrity": "sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.6", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "iterator.prototype": "^1.1.4", + "safe-array-concat": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", "dev": true, + "license": "MIT", "dependencies": { - "es-to-primitive": "^1.2.0", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" } }, "node_modules/es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", "dev": true, + "license": "MIT", "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/escalade": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/escape-string-regexp": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + 
"license": "MIT", "engines": { "node": ">=0.8.0" } }, - "node_modules/eslint": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.4.0.tgz", - "integrity": "sha512-UIpL91XGex3qtL6qwyCQJar2j3osKxK9e3ano3OcGEIRM4oWIpCkDg9x95AXEC2wMs7PnxzOkPZ2gq+tsMS9yg==", - "dev": true, + "node_modules/escodegen": { + "version": "2.1.0", + "license": "BSD-2-Clause", "dependencies": { - "ajv": "^6.5.0", - "babel-code-frame": "^6.26.0", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^3.1.0", - "doctrine": "^2.1.0", - "eslint-scope": "^4.0.0", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^4.0.0", - "esquery": "^1.0.1", + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", "esutils": "^2.0.2", - "file-entry-cache": "^2.0.0", - "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.7.0", - "ignore": "^4.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", "imurmurhash": "^0.1.4", - "inquirer": "^5.2.0", - "is-resolvable": "^1.1.0", - "js-yaml": "^3.11.0", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.5", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", - "pluralize": "^7.0.0", - "progress": "^2.0.0", - "regexpp": "^2.0.0", - "require-uncached": "^1.0.3", - "semver": "^5.5.0", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", - "table": "^4.0.3", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" }, "engines": { - "node": "^6.14.0 || ^8.10.0 || >=9.10.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint-config-standard": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-12.0.0.tgz", - "integrity": "sha512-COUz8FnXhqFitYj4DTqHzidjIL/t4mumGZto5c7DrBpvWoie+Sn3P4sLEzUGeYhRElWuFEf8K1S1EfvD1vixCQ==", - "dev": true + 
"version": "17.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz", + "integrity": "sha512-IwHwmaBNtDK4zDHQukFDW5u/aTb8+meQWZvNFWkiGmbWjD6bqyuSSBxxXKkCftCUzc1zwCH2m/baCNDLGmuO5Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "eslint": "^8.0.1", + "eslint-plugin-import": "^2.25.2", + "eslint-plugin-n": "^15.0.0 || ^16.0.0 ", + "eslint-plugin-promise": "^6.0.0" + } }, "node_modules/eslint-config-standard-jsx": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-6.0.2.tgz", - "integrity": "sha512-D+YWAoXw+2GIdbMBRAzWwr1ZtvnSf4n4yL0gKGg7ShUOGXkSOLerI17K4F6LdQMJPNMoWYqepzQD/fKY+tXNSg==", - "dev": true + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-11.0.0.tgz", + "integrity": "sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "peerDependencies": { + "eslint": "^8.8.0", + "eslint-plugin-react": "^7.28.0" + } }, "node_modules/eslint-import-resolver-node": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz", - "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, + "license": "MIT", "dependencies": { - "debug": "^2.6.9", - "resolve": "^1.5.0" + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" } }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "node_modules/eslint-import-resolver-node/node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", "dev": true, + "license": "MIT", "dependencies": { - "ms": "2.0.0" + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-import-resolver-node/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, "node_modules/eslint-module-utils": { - "version": "2.4.0", - 
"resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.4.0.tgz", - "integrity": "sha512-14tltLm38Eu3zS+mt0KvILC3q8jyIAH518MlG+HO0p+yK885Lb1UHTY/UgR91eOyGdmxAPb+OLoW4znqIT6Ndw==", + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz", + "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==", "dev": true, + "license": "MIT", "dependencies": { - "debug": "^2.6.8", - "pkg-dir": "^2.0.0" + "debug": "^3.2.7" }, "engines": { "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } } }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "node_modules/eslint-plugin-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", + "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", "dev": true, + "license": "MIT", "dependencies": { - "ms": "2.0.0" + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=4.19.1" } }, - "node_modules/eslint-module-utils/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, - "node_modules/eslint-plugin-es": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-1.4.0.tgz", - "integrity": "sha512-XfFmgFdIUDgvaRAlaXUkxrRg5JSADoRC8IkKLc/cISeR3yHVMefFHQZpcyXXEUUPHfy5DwviBcrfqlyqEwlQVw==", + "node_modules/eslint-plugin-es/node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", "dev": true, + "license": "MIT", "dependencies": { - "eslint-utils": "^1.3.0", - "regexpp": "^2.0.1" + "eslint-visitor-keys": "^1.1.0" }, "engines": { - "node": ">=6.5.0" + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" } }, - "node_modules/eslint-plugin-import": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.14.0.tgz", - "integrity": "sha512-FpuRtniD/AY6sXByma2Wr0TXvXJ4nA/2/04VPlfpmUDPOpOY264x+ILiwnrk/k4RINgDAyFZByxqPUbSQ5YE7g==", + "node_modules/eslint-plugin-es/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", "dev": true, - "dependencies": { - "contains-path": "^0.1.0", - "debug": "^2.6.8", - "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.1", - "eslint-module-utils": "^2.2.0", - "has": "^1.0.1", - "lodash": "^4.17.4", - "minimatch": "^3.0.3", - "read-pkg-up": "^2.0.0", - "resolve": "^1.6.0" - }, + "license": "Apache-2.0", "engines": { "node": ">=4" } }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "2.6.9", - 
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "dependencies": { - "ms": "2.0.0" + "node_modules/eslint-plugin-import": { + "version": "2.32.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz", + "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.9", + "array.prototype.findlastindex": "^1.2.6", + "array.prototype.flat": "^1.3.3", + "array.prototype.flatmap": "^1.3.3", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.12.1", + "hasown": "^2.0.2", + "is-core-module": "^2.16.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.1", + "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.9", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" } }, "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, + "license": "Apache-2.0", "dependencies": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" }, "engines": { "node": ">=0.10.0" } }, - "node_modules/eslint-plugin-import/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true + "node_modules/eslint-plugin-import/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } }, - "node_modules/eslint-plugin-node": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-7.0.1.tgz", - "integrity": "sha512-lfVw3TEqThwq0j2Ba/Ckn2ABdwmL5dkOgAux1rvOk6CO7A6yGyPI2+zIxN6FyNkp1X1X/BSvKOceD6mBWSj4Yw==", + "node_modules/eslint-plugin-n": { + "version": "15.7.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz", + "integrity": "sha512-jDex9s7D/Qial8AGVIHq4W7NswpUD5DPDL2RH8Lzd9EloWUuvUkHfv4FRLMipH5q2UtyurorBkPeNi1wVWNh3Q==", "dev": true, + "license": "MIT", + "peer": true, "dependencies": { - "eslint-plugin-es": "^1.3.1", - "eslint-utils": "^1.3.1", - "ignore": "^4.0.2", - "minimatch": "^3.0.4", - "resolve": "^1.8.1", - "semver": "^5.5.0" + "builtins": "^5.0.1", + "eslint-plugin-es": "^4.1.0", + "eslint-utils": "^3.0.0", + "ignore": "^5.1.1", + "is-core-module": "^2.11.0", + "minimatch": "^3.1.2", + "resolve": "^1.22.1", + "semver": "^7.3.8" }, "engines": { - "node": ">=6" + "node": ">=12.22.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + 
"peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-n/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/eslint-plugin-promise": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.0.1.tgz", - "integrity": "sha512-Si16O0+Hqz1gDHsys6RtFRrW7cCTB6P7p3OJmKp3Y3dxpQE2qwOA7d3xnV+0mBmrPoi0RBnxlCKvqu70te6wjg==", + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", "dev": true, + "license": "ISC", + "peer": true, "engines": { - "node": ">=6" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" } }, "node_modules/eslint-plugin-react": { - "version": "7.11.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.11.1.tgz", - "integrity": "sha512-cVVyMadRyW7qsIUh3FHp3u6QHNhOgVrLQYdQEB1bPWBsgbNCHdFAeNMquBMCcZJu59eNthX053L70l7gRt4SCw==", + "version": "7.37.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.5.tgz", + "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", "dev": true, + "license": "MIT", + "peer": true, "dependencies": { - "array-includes": "^3.0.3", + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + "array.prototype.flatmap": "^1.3.3", + "array.prototype.tosorted": "^1.1.4", "doctrine": "^2.1.0", - "has": "^1.0.3", - "jsx-ast-utils": "^2.0.1", - "prop-types": "^15.6.2" + "es-iterator-helpers": "^1.2.1", + "estraverse": "^5.3.0", + "hasown": "^2.0.2", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.9", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.1", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.12", + "string.prototype.repeat": "^1.0.0" }, "engines": { "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, - "node_modules/eslint-plugin-standard": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.0.tgz", - "integrity": "sha512-OwxJkR6TQiYMmt1EsNRMe5qG3GsbjlcOhbGUBY4LtavF9DsLaTcoR+j2Tdjqi23oUwKNUqX7qcn5fPStafMdlA==", - "dev": true + "node_modules/eslint-plugin-react/node_modules/doctrine": { + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-react/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } }, "node_modules/eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" }, "engines": { - "node": ">=4.0.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", - "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", "dev": true, + "license": "MIT", "dependencies": { - "eslint-visitor-keys": "^1.1.0" + "eslint-visitor-keys": "^2.0.0" }, "engines": { - "node": ">=6" + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" } }, "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", - "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=4" + "node": ">=10" } }, "node_modules/eslint-visitor-keys": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", - "integrity": "sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=4" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint/node_modules/ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint/node_modules/strip-ansi": { + "node_modules/eslint/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/eslint/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^3.0.0" + "has-flag": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/espree": { - "version": "4.1.0", - 
"resolved": "https://registry.npmjs.org/espree/-/espree-4.1.0.tgz", - "integrity": "sha512-I5BycZW6FCVIub93TeVY1s7vjhP9CY6cXCznIRfiig7nRviKZYdRnj/sHEWC6A7WE9RDWOFq9+7OsWSYz8qv2w==", + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "acorn": "^6.0.2", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": ">=6.0.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/esprima": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, + "license": "BSD-2-Clause", "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" @@ -2809,58 +3255,55 @@ } }, "node_modules/esquery": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", - "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "estraverse": "^4.0.0" + "estraverse": "^5.1.0" }, "engines": { - "node": ">=0.6" + "node": ">=0.10" } }, "node_modules/esrecurse": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", - "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "estraverse": "^4.1.0" + "estraverse": "^5.2.0" }, "engines": { "node": ">=4.0" } }, "node_modules/estraverse": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", - "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=", - "dev": true, + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "license": "BSD-2-Clause", "engines": { - "node": ">=0.10.0" + "node": ">=4.0" } }, "node_modules/esutils": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", - "dev": true, "engines": { "node": ">=0.10.0" } }, "node_modules/eventemitter2": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.4.tgz", - "integrity": "sha512-HLU3NDY6wARrLCEwyGKRBvuWYyvW6mHYv72SJJAH3iJN3a6eVUvkjFkcxah1bcTgGVBBrFdIopBJPhCQFMLyXw==", - "dev": true + "version": "6.4.7", + "dev": true, + "license": "MIT" }, "node_modules/execa": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", - "integrity": 
"sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", "dev": true, + "license": "MIT", "dependencies": { "cross-spawn": "^7.0.0", "get-stream": "^5.0.0", @@ -2879,108 +3322,33 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/execa/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "node_modules/executable": { + "version": "4.1.1", "dev": true, + "license": "MIT", "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" + "pify": "^2.2.0" }, "engines": { - "node": ">= 8" + "node": ">=4" } }, - "node_modules/execa/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "node_modules/extend": { + "version": "3.0.2", "dev": true, - "engines": { - "node": ">=8" - } + "license": "MIT" }, - "node_modules/execa/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "node_modules/extract-zip": { + "version": "2.0.1", "dev": true, + "license": "BSD-2-Clause", "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/execa/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/execa/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/executable": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", - "dev": true, - "dependencies": { - "pify": "^2.2.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - }, - "node_modules/external-editor": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", - "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==", - "dev": true, - "dependencies": { - "chardet": "^0.4.0", - "iconv-lite": "^0.4.17", - "tmp": "^0.0.33" - }, - "engines": { - "node": ">=0.12" - } - }, - "node_modules/extract-zip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", - "integrity": 
"sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", - "dev": true, - "dependencies": { - "debug": "^4.1.1", - "get-stream": "^5.1.0", - "yauzl": "^2.10.0" - }, - "bin": { - "extract-zip": "cli.js" + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" }, "engines": { "node": ">= 10.17.0" @@ -2990,10 +3358,9 @@ } }, "node_modules/extract-zip/node_modules/debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.4", "dev": true, + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -3009,64 +3376,54 @@ "node_modules/extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", "dev": true, "engines": [ "node >=0.6.0" - ] + ], + "license": "MIT" }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "node_modules/fast-glob": { - "version": "3.2.11", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", - "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", "dev": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } + "license": "MIT" }, "node_modules/fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", - "dev": true + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" }, "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" }, "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "license": "MIT" }, "node_modules/fastq": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", - "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": 
"sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", "dev": true, + "license": "ISC", "dependencies": { "reusify": "^1.0.4" } }, "node_modules/fault": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", - "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", + "dev": true, + "license": "MIT", "dependencies": { "format": "^0.2.0" }, @@ -3077,17 +3434,39 @@ }, "node_modules/fd-slicer": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", "dev": true, + "license": "MIT", "dependencies": { "pend": "~1.2.0" } }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "license": "MIT" + }, "node_modules/figures": { "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "license": "MIT", "dependencies": { "escape-string-regexp": "^1.0.5" }, @@ -3099,72 +3478,84 @@ } }, "node_modules/file-entry-cache": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-2.0.0.tgz", - "integrity": "sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E=", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, + "license": "MIT", "dependencies": { - "flat-cache": "^1.2.1", - "object-assign": "^4.0.1" + "flat-cache": "^3.0.4" }, "engines": { - "node": ">=0.10.0" + "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, + "node_modules/file-type": { + "version": "21.1.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.1.0.tgz", + "integrity": "sha512-boU4EHmP3JXkwDo4uhyBhTt5pPstxB6eEXKJBu2yu2l7aAMMm7QQYQEzssJmKReZYrFdFOJS8koVo6bXIBGDqA==", + "license": "MIT", "dependencies": { - "to-regex-range": "^5.0.1" + "@tokenizer/inflate": "^0.3.1", + "strtok3": "^10.3.1", + "token-types": "^6.0.0", + "uint8array-extras": "^1.4.0" }, "engines": { - "node": ">=8" + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" } }, - "node_modules/find-root": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", - "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==", - "dev": true - }, "node_modules/find-up": { - "version": 
"2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", "dependencies": { - "locate-path": "^2.0.0" + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/flat-cache": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.4.tgz", - "integrity": "sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", "dev": true, + "license": "MIT", "dependencies": { - "circular-json": "^0.3.1", - "graceful-fs": "^4.1.2", - "rimraf": "~2.6.2", - "write": "^0.2.1" + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" }, "engines": { - "node": ">=0.10.0" + "node": "^10.12.0 || >=12.0.0" } }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, "node_modules/follow-redirects": { - "version": "1.14.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz", - "integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==", + "version": "1.15.11", "funding": [ { "type": "individual", "url": "https://github.com/sponsors/RubenVerborgh" } ], + "license": "MIT", "engines": { "node": ">=4.0" }, @@ -3174,42 +3565,81 @@ } } }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "*" } }, "node_modules/form-data": { - "version": "2.3.3", - "resolved": 
"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, + "version": "4.0.4", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { - "node": ">= 0.12" + "node": ">= 6" } }, "node_modules/format": { "version": "0.2.2", - "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", - "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", + "dev": true, "engines": { "node": ">=0.4.x" } }, "node_modules/fs-extra": { "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", "dev": true, + "license": "MIT", "dependencies": { "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", @@ -3220,108 +3650,101 @@ "node": ">=10" } }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + "version": "1.1.2", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/get-caller-file": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/get-pkg-repo": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/get-pkg-repo/-/get-pkg-repo-4.2.1.tgz", - "integrity": "sha512-2+QbHjFRfGB74v/pYWjd5OhU3TDIC2Gv/YKUTk/tCvAz0pkn/Mz6P3uByuBimLOcPvN2jYdScl3xGFSrx0jEcA==", + 
"node_modules/get-intrinsic": { + "version": "1.3.0", + "license": "MIT", "dependencies": { - "@hutson/parse-repository-url": "^3.0.0", - "hosted-git-info": "^4.0.0", - "through2": "^2.0.0", - "yargs": "^16.2.0" - }, - "bin": { - "get-pkg-repo": "src/cli.js" + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { - "node": ">=6.9.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-pkg-repo/node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "node_modules/get-proto": { + "version": "1.0.1", + "license": "MIT", "dependencies": { - "lru-cache": "^6.0.0" + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" }, "engines": { - "node": ">=10" - } - }, - "node_modules/get-pkg-repo/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/get-pkg-repo/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "node_modules/get-pkg-repo/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/get-pkg-repo/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" + "node": ">= 0.4" } }, "node_modules/get-stdin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", - "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz", + "integrity": "sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==", "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/get-stream": { "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", "dev": true, + 
"license": "MIT", "dependencies": { "pump": "^3.0.0" }, @@ -3332,11 +3755,61 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-uri": { + "version": "6.0.4", + "license": "MIT", + "dependencies": { + "basic-ftp": "^5.0.2", + "data-uri-to-buffer": "^6.0.2", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/get-uri/node_modules/debug": { + "version": "4.4.1", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/get-uri/node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, "node_modules/getos": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", "dev": true, + "license": "MIT", "dependencies": { "async": "^3.2.0" } @@ -3344,126 +3817,110 @@ "node_modules/getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", "dev": true, + "license": "MIT", "dependencies": { "assert-plus": "^1.0.0" } }, "node_modules/git-hooks-list": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/git-hooks-list/-/git-hooks-list-1.0.3.tgz", - "integrity": "sha512-Y7wLWcrLUXwk2noSka166byGCvhMtDRpgHdzCno1UQv/n/Hegp++a2xBWJL1lJarnKD3SWaljD+0z1ztqxuKyQ==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/git-hooks-list/-/git-hooks-list-4.1.1.tgz", + "integrity": "sha512-cmP497iLq54AZnv4YRAEMnEyQ1eIn4tGKbmswqwmFV4GBnAqE8NLtWxxdXa++AalfgL5EBH4IxTPyquEuGY/jA==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/fisker/git-hooks-list?sponsor=1" } }, - "node_modules/git-raw-commits": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.11.tgz", - "integrity": "sha512-VnctFhw+xfj8Va1xtfEqCUD2XDrbAPSJx+hSrE5K7fGdjZruW7XV+QOrN7LF/RJyvspRiD2I0asWsxFp0ya26A==", + "node_modules/glob": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-12.0.0.tgz", + "integrity": "sha512-5Qcll1z7IKgHr5g485ePDdHcNQY0k2dtv/bjYy0iuyGxQw2qSOiiXUXJ+AYQpg3HNoUMHqAruX478Jeev7UULw==", + "license": "BlueOak-1.0.0", "dependencies": { - "dargs": "^7.0.0", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "split2": "^3.0.0", - "through2": "^4.0.0" + "foreground-child": "^3.3.1", + "jackspeak": "^4.1.1", + "minimatch": "^10.1.1", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^2.0.0" }, "bin": { - "git-raw-commits": "cli.js" + "glob": "dist/esm/bin.mjs" }, "engines": { - "node": ">=10" + "node": "20 || >=22" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" } }, - "node_modules/git-remote-origin-url": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/git-remote-origin-url/-/git-remote-origin-url-2.0.0.tgz", - "integrity": "sha512-eU+GGrZgccNJcsDH5LkXR3PB9M958hxc7sbA8DFJjrv9j4L2P/eZfKhM+QD6wyzpiv+b1BpK0XrYCxkovtjSLw==", + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", "dependencies": { - "gitconfiglocal": "^1.0.0", - "pify": "^2.3.0" + "is-glob": "^4.0.3" }, "engines": { - "node": ">=4" + "node": ">=10.13.0" } }, - "node_modules/git-semver-tags": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/git-semver-tags/-/git-semver-tags-4.1.1.tgz", - "integrity": "sha512-OWyMt5zBe7xFs8vglMmhM9lRQzCWL3WjHtxNNfJTMngGym7pC1kh8sP6jevfydJ6LP3ZvGxfb6ABYgPUM0mtsA==", + "node_modules/glob/node_modules/minimatch": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", + "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", + "license": "BlueOak-1.0.0", "dependencies": { - "meow": "^8.0.0", - "semver": "^6.0.0" - }, - "bin": { - "git-semver-tags": "cli.js" + "@isaacs/brace-expansion": "^5.0.0" }, "engines": { - "node": ">=10" - } - }, - "node_modules/git-semver-tags/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "bin": { - "semver": "bin/semver.js" + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/gitconfiglocal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gitconfiglocal/-/gitconfiglocal-1.0.0.tgz", - "integrity": "sha512-spLUXeTAVHxDtKsJc8FkFVgFtMdEN9qPGpL23VfSHx4fP4+Ds097IXLvymbnDH8FnmxX5Nr9bPw3A+AQ6mWEaQ==", - "dependencies": { - "ini": "^1.3.2" - } - }, - "node_modules/gitconfiglocal/node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" - }, - "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "node_modules/global-dirs": { + "version": "3.0.0", + "dev": true, + "license": "MIT", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "ini": "2.0.0" }, "engines": { - "node": "*" + "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": 
"sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, + "license": "MIT", "dependencies": { - "is-glob": "^4.0.1" + "type-fest": "^0.20.2" }, "engines": { - "node": ">= 6" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/global-dirs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", - "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", + "node_modules/globals/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, - "dependencies": { - "ini": "2.0.0" - }, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -3471,156 +3928,135 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/globals": { - "version": "11.11.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.11.0.tgz", - "integrity": "sha512-WHq43gS+6ufNOEqlrDBxVEbb8ntfXrfAUU2ZOpCxrBdGKW3gyv8mCxAfIBD0DroPKGrJ2eSsXsLtY9MPntsyTw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/globby": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.0.tgz", - "integrity": "sha512-3LifW9M4joGZasyYPz2A1U74zbC/45fvpXUvO/9KbSa+VV0aGZarWkfdgKyR9sExNP0t0x0ss/UMJpNpcaTspw==", + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", "dev": true, + "license": "MIT", "dependencies": { - "@types/glob": "^7.1.1", - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.0.3", - "glob": "^7.1.3", - "ignore": "^5.1.1", - "merge2": "^1.2.3", - "slash": "^3.0.0" + "define-properties": "^1.2.1", + "gopd": "^1.0.1" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/globby/node_modules/ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", - "dev": true, + "node_modules/gopd": { + "version": "1.2.0", + "license": "MIT", "engines": { - "node": ">= 4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/graceful-fs": { "version": "4.2.6", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==" + "license": "ISC" }, - "node_modules/handlebars": { - "version": "4.7.7", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.7.tgz", - "integrity": "sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==", - "dependencies": { - "minimist": "^1.2.5", - "neo-async": "^2.6.0", - "source-map": "^0.6.1", - "wordwrap": "^1.0.0" - }, - "bin": { - "handlebars": "bin/handlebars" - }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + 
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=0.4.7" + "node": ">= 0.4" }, - "optionalDependencies": { - "uglify-js": "^3.1.4" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "node_modules/has-flag": { + "version": "3.0.0", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, - "node_modules/har-validator": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, + "license": "MIT", "dependencies": { - "ajv": "^6.5.5", - "har-schema": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/hard-rejection": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", - "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", - "engines": { - "node": ">=6" - } - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" + "es-define-property": "^1.0.0" }, - "engines": { - "node": ">= 0.4.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^2.0.0" + "dunder-proto": "^1.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "node_modules/has-symbols": { + "version": "1.1.0", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", 
- "dev": true, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/hoek": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-5.0.4.tgz", - "integrity": "sha512-Alr4ZQgoMlnere5FZJsIyfIjORBqZll5POhDsF4q64dPuJR6rNxXdDxtHSQq8OXRurhmx+PWYEE8bXRROY8h0w==", - "dev": true, + "node_modules/hasown": { + "version": "2.0.2", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, "engines": { - "node": ">=8.9.0" + "node": ">= 0.4" } }, "node_modules/hosted-git-info": { "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==" + "dev": true, + "license": "ISC" }, "node_modules/htmlparser2": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-7.2.0.tgz", - "integrity": "sha512-H7MImA4MS6cw7nbyURtLPO1Tms7C5H602LRETv95z1MxO/7CP7rDVROehUYeYBUYEON94NXXDEPmZuq+hX4sog==", + "dev": true, "funding": [ "https://github.com/fb55/htmlparser2?sponsor=1", { @@ -3628,6 +4064,7 @@ "url": "https://github.com/sponsors/fb55" } ], + "license": "MIT", "dependencies": { "domelementtype": "^2.0.1", "domhandler": "^4.2.2", @@ -3635,45 +4072,91 @@ "entities": "^3.0.1" } }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/http-proxy-agent/node_modules/debug": { + "version": "4.4.1", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/http-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, "node_modules/http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", "dev": true, + "license": "MIT", "dependencies": { "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" }, "engines": { - "node": ">=0.8", - "npm": ">=1.3.7" + "node": ">=0.10" } }, - "node_modules/human-signals": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", - "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", - "dev": true, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, "engines": { - "node": ">=8.12.0" + "node": ">= 14" } }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.4.1", + "license": "MIT", 
"dependencies": { - "safer-buffer": ">= 2.1.2 < 3" + "ms": "^2.1.3" }, "engines": { - "node": ">=0.10.0" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, + "node_modules/human-signals": { + "version": "1.1.1", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8.12.0" } }, "node_modules/ieee754": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", "funding": [ { "type": "github", @@ -3687,204 +4170,203 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "BSD-3-Clause" }, "node_modules/ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, + "license": "MIT", "engines": { "node": ">= 4" } }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.19" } }, "node_modules/indent-string": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, "node_modules/inherits": { "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "license": "ISC" }, "node_modules/ini": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } }, - "node_modules/inquirer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-5.2.0.tgz", - "integrity": "sha512-E9BmnJbAKLPGonz0HeWHtbKf+EeSP93paWO3ZYoUpq/aowXvYGjjCSuashhXPpzbArIjBbji39THkxTz9ZeEUQ==", + 
"node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", "dev": true, + "license": "MIT", "dependencies": { - "ansi-escapes": "^3.0.0", - "chalk": "^2.0.0", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^2.1.0", - "figures": "^2.0.0", - "lodash": "^4.3.0", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^5.5.2", - "string-width": "^2.1.0", - "strip-ansi": "^4.0.0", - "through": "^2.3.6" + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" }, "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/inquirer/node_modules/ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", - "dev": true, - "engines": { - "node": ">=4" + "node": ">= 0.4" } }, - "node_modules/inquirer/node_modules/cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "dev": true, + "node_modules/ip-address": { + "version": "9.0.5", + "license": "MIT", "dependencies": { - "restore-cursor": "^2.0.0" + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" }, "engines": { - "node": ">=4" + "node": ">= 12" } }, - "node_modules/inquirer/node_modules/figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", - "dev": true, - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=4" - } + "node_modules/ip-address/node_modules/jsbn": { + "version": "1.1.0", + "license": "MIT" }, - "node_modules/inquirer/node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "node_modules/ip-address/node_modules/sprintf-js": { + "version": "1.1.3", + "license": "BSD-3-Clause" + }, + "node_modules/is-alphabetical": { + "version": "1.0.4", "dev": true, - "engines": { - "node": ">=4" + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/inquirer/node_modules/onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "node_modules/is-alphanumerical": { + "version": "1.0.4", "dev": true, + "license": "MIT", "dependencies": { - "mimic-fn": "^1.0.0" + "is-alphabetical": "^1.0.0", + "is-decimal": "^1.0.0" }, - "engines": { - "node": ">=4" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/inquirer/node_modules/restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", "dev": true, + "license": "MIT", "dependencies": { - 
"onetime": "^2.0.0", - "signal-exit": "^3.0.2" + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" }, "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/inquirer/node_modules/string-width": { + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-async-function": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", "dev": true, + "license": "MIT", "dependencies": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" }, "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/inquirer/node_modules/strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^3.0.0" + "has-bigints": "^1.0.2" }, "engines": { - "node": ">=4" - } - }, - "node_modules/is-alphabetical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", - "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", + "node": ">= 0.4" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-alphanumerical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", - "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "license": "MIT", "dependencies": { - "is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0" + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": 
"sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" - }, "node_modules/is-buffer": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", - "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", + "dev": true, "funding": [ { "type": "github", @@ -3899,55 +4381,92 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-ci": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz", - "integrity": "sha512-kDXyttuLeslKAHYL/K28F2YkM3x5jvFPEw3yXbRptXydjD9rpLEz+C5K5iutY9ZiUu6AP41JdvRQwF4Iqs4ZCQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", "dev": true, + "license": "MIT", "dependencies": { - "ci-info": "^3.1.1" + "ci-info": "^3.2.0" }, "bin": { "is-ci": "bin.js" } }, "node_modules/is-core-module": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", - "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "license": "MIT", "dependencies": { - "has": "^1.0.3" + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-decimal": { "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", - "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -3958,23 +4477,59 @@ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/is-generator-function": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", + "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-proto": "^1.0.0", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, + "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" }, @@ -3984,8 +4539,8 @@ }, "node_modules/is-hexadecimal": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", - "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -3993,9 +4548,8 @@ }, "node_modules/is-installed-globally": { "version": "0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", "dev": true, + "license": "MIT", "dependencies": { "global-dirs": "^3.0.0", "is-path-inside": "^3.0.2" @@ -4009,106 +4563,187 @@ }, "node_modules/is-interactive": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", - "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - 
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", "dev": true, + "license": "MIT", "engines": { - "node": ">=0.12.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-path-inside": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/is-plain-obj": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/is-regex": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", - "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", "dev": true, + "license": "MIT", "dependencies": { - "has": "^1.0.1" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-resolvable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", - "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==", - "dev": true + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" 
+ }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/is-stream": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", - "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-symbol": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", - "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", "dev": true, + "license": "MIT", "dependencies": { - "has-symbols": "^1.0.0" + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-text-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-1.0.1.tgz", - "integrity": "sha512-xFuJpne9oFz5qDaodwmmG08e3CawH/2ZV8Qqza1Ko7Sk8POWbkRdwIoAWVhqvq0XeUzANEhKo2n0IXUGBm7A/w==", + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "license": "MIT", "dependencies": { - "text-extensions": "^1.0.0" + "which-typed-array": "^1.1.16" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", + "dev": true, + "license": "MIT" }, "node_modules/is-unicode-supported": { "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "license": "MIT", "engines": { "node": ">=10" }, @@ -4116,71 +4751,144 @@ 
"url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/isemail": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", - "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", "dev": true, + "license": "MIT", "dependencies": { - "punycode": "2.x.x" + "call-bound": "^1.0.3" }, "engines": { - "node": ">=4.0.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT" + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" }, "node_modules/isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", + "dev": true, + "license": "MIT" }, "node_modules/iterare": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/iterare/-/iterare-1.2.1.tgz", - "integrity": "sha512-RKYVTCjAnRthyJes037NX/IiqeidgN1xc3j1RjFfECFp28A1GVwK9nA+i0rJPaHqSZwygLzRnFlzUuHFoWWy+Q==", + "license": "ISC", "engines": { "node": ">=6" } }, + "node_modules/iterator.prototype": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", + "integrity": "sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "get-proto": "^1.0.0", + "has-symbols": "^1.1.0", + "set-function-name": "^2.0.2" 
+ }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/jackspeak": { + "version": "4.1.1", + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/joi": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/joi/-/joi-13.7.0.tgz", - "integrity": "sha512-xuY5VkHfeOYK3Hdi91ulocfuFopwgbSORmIwzcwHKESQhC7w1kD5jaVSPnqDxS2I8t3RZ9omCKAxNwXN5zG1/Q==", + "version": "18.0.1", + "resolved": "https://registry.npmjs.org/joi/-/joi-18.0.1.tgz", + "integrity": "sha512-IiQpRyypSnLisQf3PwuN2eIHAsAIGZIrLZkd4zdvIar2bDyhM91ubRjy8a3eYablXsh9BeI/c7dmPYHca5qtoA==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "hoek": "5.x.x", - "isemail": "3.x.x", - "topo": "3.x.x" + "@hapi/address": "^5.1.1", + "@hapi/formula": "^3.0.2", + "@hapi/hoek": "^11.0.7", + "@hapi/pinpoint": "^2.0.1", + "@hapi/tlds": "^1.1.1", + "@hapi/topo": "^6.0.2", + "@standard-schema/spec": "^1.0.0" }, "engines": { - "node": ">=8.9.0" + "node": ">= 20" } }, "node_modules/js-tokens": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", - "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=", - "dev": true + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" }, "node_modules/js-yaml": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", - "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "dev": true, + "license": "MIT", "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" + "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" @@ -4189,46 +4897,73 @@ "node_modules/jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" }, "node_modules/json-parse-better-errors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true, + "license": "MIT" }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" + "dev": true, + "license": 
"MIT" }, "node_modules/json-schema": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "dev": true + "dev": true, + "license": "(AFL-2.1 OR BSD-3-Clause)" }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" }, "node_modules/json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true, + "license": "ISC" + }, + "node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } }, "node_modules/jsonfile": { "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "license": "MIT", "dependencies": { "universalify": "^2.0.0" }, @@ -4236,34 +4971,12 @@ "graceful-fs": "^4.1.6" } }, - "node_modules/jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", - "engines": [ - "node >= 0.2.0" - ] - }, - "node_modules/JSONStream": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", - "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", - "dependencies": { - "jsonparse": "^1.2.0", - "through": ">=2.2.7 <3" - }, - "bin": { - "JSONStream": "bin.js" - }, - "engines": { - "node": "*" - } - }, "node_modules/jsonwebtoken": { "version": "8.5.1", "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", "dev": true, + "license": "MIT", "dependencies": { "jws": "^3.2.2", "lodash.includes": "^4.3.0", @@ -4282,78 +4995,89 @@ } }, "node_modules/jsprim": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", - "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": 
"sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", "dev": true, + "engines": [ + "node >=0.6.0" + ], + "license": "MIT", "dependencies": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", "json-schema": "0.4.0", "verror": "1.10.0" - }, - "engines": { - "node": ">=0.6.0" } }, "node_modules/jsx-ast-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.1.0.tgz", - "integrity": "sha512-yDGDG2DS4JcqhA6blsuYbtsT09xL8AoLuUR2Gb5exrw7UEM19sBcOTq+YBBhrNbl0PUC4R4LnFu+dHg2HKeVvA==", + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", + "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", "dev": true, + "license": "MIT", "dependencies": { - "array-includes": "^3.0.3" + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "object.assign": "^4.1.4", + "object.values": "^1.1.6" }, "engines": { "node": ">=4.0" } }, "node_modules/jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.2.tgz", + "integrity": "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==", "dev": true, + "license": "MIT", "dependencies": { - "buffer-equal-constant-time": "1.0.1", + "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "node_modules/jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz", + "integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==", "dev": true, + "license": "MIT", "dependencies": { - "jwa": "^1.4.1", + "jwa": "^1.4.2", "safe-buffer": "^5.0.1" } }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "engines": { - "node": ">=0.10.0" + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" } }, "node_modules/lazy-ass": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", - "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", + "integrity": "sha512-cc8oEVoctTvsFZ/Oje/kGnHbpWHYBe8IAJe4C0QNc3t8uM/0Y8+erSz/7Y1ALuXTEZTMvxXwO6YbX1ey3ujiZw==", "dev": true, + "license": "MIT", "engines": { "node": "> 0.8" } }, "node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, + "license": "MIT", "dependencies": { - 
"prelude-ls": "~1.1.2", - "type-check": "~0.3.2" + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" }, "engines": { "node": ">= 0.8.0" @@ -4361,9 +5085,8 @@ }, "node_modules/license-checker": { "version": "25.0.1", - "resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz", - "integrity": "sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "chalk": "^2.4.1", "debug": "^3.1.0", @@ -4380,16 +5103,10 @@ "license-checker": "bin/license-checker" } }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" - }, "node_modules/listr2": { "version": "3.10.0", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.10.0.tgz", - "integrity": "sha512-eP40ZHihu70sSmqFNbNy2NL1YwImmlMmPh9WO5sLmPDleurMHt3n+SwEWNu2kzKScexZnkyFtc1VI0z/TGlmpw==", "dev": true, + "license": "MIT", "dependencies": { "cli-truncate": "^2.1.0", "colorette": "^1.2.2", @@ -4408,9 +5125,8 @@ }, "node_modules/listr2/node_modules/rxjs": { "version": "6.6.7", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", - "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "tslib": "^1.9.0" }, @@ -4418,96 +5134,141 @@ "npm": ">=2.0.0" } }, + "node_modules/load-esm": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/load-esm/-/load-esm-1.0.3.tgz", + "integrity": "sha512-v5xlu8eHD1+6r8EHTg6hfmO97LN8ugKtiXcy5e6oN72iD2r6u0RPfLl6fxM+7Wnh2ZRq15o0russMst44WauPA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + }, + { + "type": "buymeacoffee", + "url": "https://buymeacoffee.com/borewit" + } + ], + "license": "MIT", + "engines": { + "node": ">=13.2.0" + } + }, "node_modules/load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-5.3.0.tgz", + "integrity": "sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==", + "dev": true, + "license": "MIT", "dependencies": { - "graceful-fs": "^4.1.2", + "graceful-fs": "^4.1.15", "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" + "pify": "^4.0.1", + "strip-bom": "^3.0.0", + "type-fest": "^0.3.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/load-json-file/node_modules/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==", + "dev": true, + "license": "MIT", + "dependencies": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" }, "engines": { "node": ">=4" } }, "node_modules/load-json-file/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": 
"sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=6" + } + }, + "node_modules/load-json-file/node_modules/type-fest": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", + "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=6" } }, "node_modules/locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", "dependencies": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" + "p-locate": "^5.0.0" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/lodash": { "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "license": "MIT" }, "node_modules/lodash.includes": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", - "integrity": "sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.isboolean": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", - "integrity": "sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.isinteger": { "version": "4.0.4", - "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", - "integrity": "sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=", - "dev": true - }, - "node_modules/lodash.ismatch": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", - "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==" + "dev": true, + "license": "MIT" }, "node_modules/lodash.isnumber": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", - "integrity": "sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.isplainobject": { "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.isstring": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" }, "node_modules/lodash.once": { "version": "4.1.1", 
- "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/log-symbols": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "license": "MIT", "dependencies": { "chalk": "^4.1.0", "is-unicode-supported": "^0.1.0" @@ -4519,24 +5280,9 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/log-symbols/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, "node_modules/log-symbols/node_modules/chalk": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -4548,34 +5294,16 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/log-symbols/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/log-symbols/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "node_modules/log-symbols/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/log-symbols/node_modules/supports-color": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -4585,9 +5313,8 @@ }, "node_modules/log-update": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", - "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", "dev": true, + "license": "MIT", "dependencies": { "ansi-escapes": "^4.3.0", "cli-cursor": "^3.1.0", @@ -4603,9 +5330,8 @@ }, "node_modules/log-update/node_modules/ansi-escapes": { "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, + "license": "MIT", "dependencies": { "type-fest": "^0.21.3" }, @@ -4616,53 
+5342,10 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/log-update/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/log-update/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/log-update/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/log-update/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, "node_modules/log-update/node_modules/slice-ansi": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "astral-regex": "^2.0.0", @@ -4675,23 +5358,10 @@ "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, - "node_modules/log-update/node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/log-update/node_modules/wrap-ansi": { "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -4703,8 +5373,8 @@ }, "node_modules/longest-streak": { "version": "2.0.4", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", - "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -4715,6 +5385,7 @@ "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "dev": true, + "license": "MIT", "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, @@ -4722,32 +5393,10 @@ "loose-envify": "cli.js" } }, - "node_modules/lru-cache": { - "version": 
"6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/map-obj": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", - "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/markdown-table": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", - "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "dev": true, + "license": "MIT", "dependencies": { "repeat-string": "^1.0.0" }, @@ -4756,10 +5405,17 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/mdast-util-find-and-replace": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz", - "integrity": "sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==", + "dev": true, + "license": "MIT", "dependencies": { "escape-string-regexp": "^4.0.0", "unist-util-is": "^4.0.0", @@ -4772,8 +5428,8 @@ }, "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -4783,8 +5439,8 @@ }, "node_modules/mdast-util-footnote": { "version": "0.1.7", - "resolved": "https://registry.npmjs.org/mdast-util-footnote/-/mdast-util-footnote-0.1.7.tgz", - "integrity": "sha512-QxNdO8qSxqbO2e3m09KwDKfWiLgqyCurdWTQ198NpbZ2hxntdc+VKS4fDJCmNWbAroUdYnSthu+XbZ8ovh8C3w==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-to-markdown": "^0.6.0", "micromark": "~2.11.0" @@ -4796,8 +5452,8 @@ }, "node_modules/mdast-util-from-markdown": { "version": "0.8.5", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.5.tgz", - "integrity": "sha512-2hkTXtYYnr+NubD/g6KGBS/0mFmBcifAsI0yIWRiRo0PjVs6SSOSOdtzbp6kSGnShDN6G5aWZpKQ2lWRy27mWQ==", + "dev": true, + "license": "MIT", "dependencies": { "@types/mdast": "^3.0.0", "mdast-util-to-string": "^2.0.0", @@ -4812,8 +5468,8 @@ }, "node_modules/mdast-util-frontmatter": { "version": "0.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-0.2.0.tgz", - "integrity": "sha512-FHKL4w4S5fdt1KjJCwB0178WJ0evnyyQr5kXTM3wrOVpytD0hrkvd+AOOjU9Td8onOejCkmZ+HQRT3CZ3coHHQ==", + "dev": true, + "license": "MIT", "dependencies": { "micromark-extension-frontmatter": "^0.2.0" }, @@ -4824,8 +5480,8 @@ }, "node_modules/mdast-util-gfm": { "version": "0.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz", - "integrity": "sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==", + "dev": true, + "license": "MIT", 
"dependencies": { "mdast-util-gfm-autolink-literal": "^0.1.0", "mdast-util-gfm-strikethrough": "^0.2.0", @@ -4840,8 +5496,8 @@ }, "node_modules/mdast-util-gfm-autolink-literal": { "version": "0.1.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz", - "integrity": "sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==", + "dev": true, + "license": "MIT", "dependencies": { "ccount": "^1.0.0", "mdast-util-find-and-replace": "^1.1.0", @@ -4854,8 +5510,8 @@ }, "node_modules/mdast-util-gfm-strikethrough": { "version": "0.2.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz", - "integrity": "sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-to-markdown": "^0.6.0" }, @@ -4866,8 +5522,8 @@ }, "node_modules/mdast-util-gfm-table": { "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz", - "integrity": "sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==", + "dev": true, + "license": "MIT", "dependencies": { "markdown-table": "^2.0.0", "mdast-util-to-markdown": "~0.6.0" @@ -4879,8 +5535,8 @@ }, "node_modules/mdast-util-gfm-task-list-item": { "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz", - "integrity": "sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-to-markdown": "~0.6.0" }, @@ -4891,8 +5547,8 @@ }, "node_modules/mdast-util-to-markdown": { "version": "0.6.5", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz", - "integrity": "sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==", + "dev": true, + "license": "MIT", "dependencies": { "@types/unist": "^2.0.0", "longest-streak": "^2.0.0", @@ -4908,343 +5564,108 @@ }, "node_modules/mdast-util-to-string": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", - "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", + "dev": true, + "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/meow": { - "version": "8.1.2", - "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz", - "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==", + "node_modules/merge-stream": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/micromark": { + "version": "2.11.4", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/minimist": "^1.2.0", - "camelcase-keys": "^6.2.2", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "4.1.0", - "normalize-package-data": "^3.0.0", - "read-pkg-up": "^7.0.1", - "redent": "^3.0.0", - "trim-newlines": "^3.0.0", - "type-fest": "^0.18.0", - 
"yargs-parser": "^20.2.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "debug": "^4.0.0", + "parse-entities": "^2.0.0" } }, - "node_modules/meow/node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "node_modules/micromark-extension-footnote": { + "version": "0.3.2", + "dev": true, + "license": "MIT", "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" + "micromark": "~2.11.0" }, - "engines": { - "node": ">=8" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/meow/node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "node_modules/micromark-extension-frontmatter": { + "version": "0.2.2", + "dev": true, + "license": "MIT", "dependencies": { - "lru-cache": "^6.0.0" + "fault": "^1.0.0" }, - "engines": { - "node": ">=10" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/meow/node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "node_modules/micromark-extension-gfm": { + "version": "0.3.3", + "dev": true, + "license": "MIT", "dependencies": { - "p-locate": "^4.1.0" + "micromark": "~2.11.0", + "micromark-extension-gfm-autolink-literal": "~0.5.0", + "micromark-extension-gfm-strikethrough": "~0.6.5", + "micromark-extension-gfm-table": "~0.4.0", + "micromark-extension-gfm-tagfilter": "~0.3.0", + "micromark-extension-gfm-task-list-item": "~0.3.0" }, - "engines": { - "node": ">=8" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/meow/node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "0.5.7", + "dev": true, + "license": "MIT", "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" + "micromark": "~2.11.3" }, - "engines": { - "node": ">=10" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/meow/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "0.6.5", + "dev": true, + "license": "MIT", "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" + "micromark": "~2.11.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - 
"node_modules/meow/node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "node_modules/micromark-extension-gfm-table": { + "version": "0.4.3", + "dev": true, + "license": "MIT", "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "engines": { - "node": ">=6" - } - }, - "node_modules/meow/node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow/node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/read-pkg": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", - "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^2.5.0", - "parse-json": "^5.0.0", - "type-fest": "^0.6.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/read-pkg-up": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", - "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", - "dependencies": { - "find-up": "^4.1.0", - "read-pkg": "^5.2.0", - "type-fest": "^0.8.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow/node_modules/read-pkg-up/node_modules/type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/read-pkg/node_modules/hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==" - }, - "node_modules/meow/node_modules/read-pkg/node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": 
"^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/meow/node_modules/read-pkg/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/meow/node_modules/read-pkg/node_modules/type-fest": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", - "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/meow/node_modules/type-fest": { - "version": "0.18.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", - "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromark": { - "version": "2.11.4", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", - "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "dependencies": { - "debug": "^4.0.0", - "parse-entities": "^2.0.0" - } - }, - "node_modules/micromark-extension-footnote": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/micromark-extension-footnote/-/micromark-extension-footnote-0.3.2.tgz", - "integrity": "sha512-gr/BeIxbIWQoUm02cIfK7mdMZ/fbroRpLsck4kvFtjbzP4yi+OPVbnukTc/zy0i7spC2xYE/dbX1Sur8BEDJsQ==", - "dependencies": { - "micromark": "~2.11.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-frontmatter": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-0.2.2.tgz", - "integrity": "sha512-q6nPLFCMTLtfsctAuS0Xh4vaolxSFUWUWR6PZSrXXiRy+SANGllpcqdXFv2z07l0Xz/6Hl40hK0ffNCJPH2n1A==", - "dependencies": { - "fault": "^1.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm": { - "version": 
"0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz", - "integrity": "sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==", - "dependencies": { - "micromark": "~2.11.0", - "micromark-extension-gfm-autolink-literal": "~0.5.0", - "micromark-extension-gfm-strikethrough": "~0.6.5", - "micromark-extension-gfm-table": "~0.4.0", - "micromark-extension-gfm-tagfilter": "~0.3.0", - "micromark-extension-gfm-task-list-item": "~0.3.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-autolink-literal": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz", - "integrity": "sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==", - "dependencies": { - "micromark": "~2.11.3" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-strikethrough": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz", - "integrity": "sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==", - "dependencies": { - "micromark": "~2.11.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-table": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz", - "integrity": "sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==", - "dependencies": { - "micromark": "~2.11.0" + "micromark": "~2.11.0" }, "funding": { "type": "opencollective", @@ -5253,8 +5674,8 @@ }, "node_modules/micromark-extension-gfm-tagfilter": { "version": "0.3.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", - "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", + "dev": true, + "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -5262,8 +5683,8 @@ }, "node_modules/micromark-extension-gfm-task-list-item": { "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz", - "integrity": "sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==", + "dev": true, + "license": "MIT", "dependencies": { "micromark": "~2.11.0" }, @@ -5274,8 +5695,8 @@ }, "node_modules/micromark/node_modules/debug": { "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -5288,33 +5709,16 @@ } } }, - "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "dependencies": { - "braces": 
"^3.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/mime-db": { "version": "1.38.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.38.0.tgz", - "integrity": "sha512-bqVioMFFzc2awcdJZIzR3HjZFX20QhilVS7hytkKrv7xFAn8bM1gzc/FOX2awLISvWe0PV8ptFKcon+wZ5qYkg==", - "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { "version": "2.1.22", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.22.tgz", - "integrity": "sha512-aGl6TZGnhm/li6F7yx82bJiBZwgiEa4Hf6CNr8YO+r5UHr53tSTYZb102zyU50DOWWKeOv0uQLRL0/9EiKWCog==", - "dev": true, + "license": "MIT", "dependencies": { "mime-db": "~1.38.0" }, @@ -5322,27 +5726,10 @@ "node": ">= 0.6" } }, - "node_modules/mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", - "engines": { - "node": ">=4" - } - }, "node_modules/minimatch": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -5351,36 +5738,24 @@ } }, "node_modules/minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" - }, - "node_modules/minimist-options": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", - "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0", - "kind-of": "^6.0.3" - }, - "engines": { - "node": ">= 6" + "version": "1.2.8", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minimist-options/node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "node_modules/minipass": { + "version": "7.1.2", + "license": "ISC", "engines": { - "node": ">=0.10.0" + "node": ">=16 || 14 >=14.17" } }, "node_modules/mkdirp": { "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "dev": true, + "license": "MIT", "dependencies": { "minimist": "^1.2.5" }, @@ -5388,46 +5763,28 @@ "mkdirp": "bin/cmd.js" } }, - "node_modules/modify-values": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz", - "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ms": { 
"version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true + "node_modules/netmask": { + "version": "2.0.2", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } }, "node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "version": "2.7.0", + "license": "MIT", "dependencies": { "whatwg-url": "^5.0.0" }, @@ -5445,9 +5802,8 @@ }, "node_modules/nopt": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", - "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", "dev": true, + "license": "ISC", "dependencies": { "abbrev": "1", "osenv": "^0.1.4" @@ -5458,8 +5814,8 @@ }, "node_modules/normalize-package-data": { "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { "hosted-git-info": "^2.1.4", "resolve": "^1.10.0", @@ -5469,15 +5825,13 @@ }, "node_modules/npm-normalize-package-bin": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", - "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/npm-run-path": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.0.0" }, @@ -5485,39 +5839,25 @@ "node": ">=8" } }, - "node_modules/npm-run-path/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true, - "engines": { - "node": "*" - } - }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/object-hash": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", - "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==", + "node_modules/object-inspect": { + "version": "1.13.4", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 6" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/object-keys": { @@ -5525,22 +5865,112 @@ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" } }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "license": "MIT", "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", + "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": 
"sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.values": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", + "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "license": "MIT", "dependencies": { "mimic-fn": "^2.1.0" }, @@ -5553,24 +5983,24 @@ }, "node_modules/onetime/node_modules/mimic-fn": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/optionator": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", - "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, + "license": "MIT", "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.4", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "wordwrap": "~1.0.0" + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" @@ -5578,8 +6008,7 @@ }, "node_modules/ora": { "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "license": "MIT", "dependencies": { "bl": "^4.1.0", "chalk": "^4.1.0", @@ -5598,32 +6027,9 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ora/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/ora/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, 
"node_modules/ora/node_modules/chalk": { "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -5635,45 +6041,16 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/ora/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/ora/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "node_modules/ora/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/ora/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/ora/node_modules/supports-color": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -5683,32 +6060,28 @@ }, "node_modules/ory-prettier-styles": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/ory-prettier-styles/-/ory-prettier-styles-1.3.0.tgz", - "integrity": "sha512-Vfn0G6CyLaadwcCamwe1SQCf37ZQfBDgMrhRI70dE/2fbE3Q43/xu7K5c32I5FGt/EliroWty5yBjmdkj0eWug==", "dev": true }, "node_modules/os-homedir": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/os-tmpdir": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/osenv": { "version": "0.1.5", - "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", - "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", "dev": true, + "license": "ISC", "dependencies": { "os-homedir": "^1.0.0", "os-tmpdir": "^1.0.0" @@ -5716,37 +6089,63 @@ }, "node_modules/ospath": { "version": "1.2.2", - "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", - "integrity": "sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs=", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", "dependencies": { - "p-try": "^1.0.0" + "yocto-queue": "^0.1.0" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", "dependencies": { - "p-limit": "^1.1.0" + "p-limit": "^3.0.2" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-map": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "dev": true, + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -5758,17 +6157,83 @@ } }, "node_modules/p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=6" + } + }, + "node_modules/pac-proxy-agent": { + "version": "7.2.0", + "license": "MIT", + "dependencies": { + "@tootallnate/quickjs-emscripten": "^0.23.0", + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "get-uri": "^6.0.1", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.6", + "pac-resolver": "^7.0.1", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/pac-proxy-agent/node_modules/debug": { + "version": "4.4.1", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/pac-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, + "node_modules/pac-resolver": { + "version": "7.0.1", + "license": "MIT", + "dependencies": { + "degenerator": "^5.0.0", + "netmask": "^2.0.2" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "license": 
"BlueOak-1.0.0" + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" } }, "node_modules/parse-entities": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", - "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", + "dev": true, + "license": "MIT", "dependencies": { "character-entities": "^1.0.0", "character-entities-legacy": "^1.0.0", @@ -5782,87 +6247,82 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/parse-json": { + "node_modules/path-exists": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dependencies": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - }, + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, - "node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "node_modules/path-parse": { + "version": "1.0.7", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "2.0.0", + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, "engines": { - "node": ">=0.10.0" + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true - }, - "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "11.1.0", + "license": "ISC", "engines": { - "node": ">=4" + "node": "20 || >=22" } }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, "node_modules/path-to-regexp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-3.2.0.tgz", - "integrity": 
"sha512-jczvQbCUS7XmS7o+y1aEO9OBVFeZBQ1MDSEqmO7xSoPgOPoowY/SxLpZ6Vh97/8qHZOteiCKb7gkG9gA2ZUxJA==" - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "engines": { - "node": ">=8" + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", + "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/pend": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "dev": true, + "license": "MIT" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, + "license": "MIT", + "peer": true, "engines": { - "node": ">=8.6" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/jonschlinkert" @@ -5870,101 +6330,152 @@ }, "node_modules/pify": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/pkg-conf": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-2.1.0.tgz", - "integrity": "sha1-ISZRTKbyq/69FoWW3xi6V4Z/AFg=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-3.1.0.tgz", + "integrity": "sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==", "dev": true, + "license": "MIT", "dependencies": { - "find-up": "^2.0.0", - "load-json-file": "^4.0.0" + "find-up": "^3.0.0", + "load-json-file": "^5.2.0" }, "engines": { - "node": ">=4" + "node": ">=6" } }, - "node_modules/pkg-config": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/pkg-config/-/pkg-config-1.1.1.tgz", - "integrity": "sha1-VX7yLXPaPIg3EHdmxS6tq94pj+Q=", + "node_modules/pkg-conf/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dev": true, + "license": "MIT", "dependencies": { - "debug-log": "^1.0.0", - "find-root": "^1.0.0", - "xtend": "^4.0.1" + "locate-path": "^3.0.0" }, "engines": { - "node": ">=0.10" + "node": ">=6" } }, - "node_modules/pkg-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - 
"integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "node_modules/pkg-conf/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dev": true, + "license": "MIT", "dependencies": { - "find-up": "^2.1.0" + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" } }, - "node_modules/pluralize": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-7.0.0.tgz", - "integrity": "sha512-ARhBOdzS3e41FbkW/XWrTEtukqqLoK5+Z/4UeDaLuSW+39JPeFgs4gCGqsrJHVZX0fUrx//4OF0K1CUGwlIFow==", + "node_modules/pkg-conf/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, - "engines": { - "node": ">=4" + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-conf/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-conf/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" } }, "node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } }, "node_modules/prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", "dev": true, + "license": "MIT", + "peer": true, "bin": { - "prettier": "bin-prettier.js" + "prettier": "bin/prettier.cjs" }, "engines": { - "node": ">=10.13.0" + "node": ">=14" }, "funding": { "url": "https://github.com/prettier/prettier?sponsor=1" } }, "node_modules/prettier-plugin-packagejson": { - 
"version": "2.2.18", - "resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.2.18.tgz", - "integrity": "sha512-iBjQ3IY6IayFrQHhXvg+YvKprPUUiIJ04Vr9+EbeQPfwGajznArIqrN33c5bi4JcIvmLHGROIMOm9aYakJj/CA==", + "version": "2.5.19", + "resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.5.19.tgz", + "integrity": "sha512-Qsqp4+jsZbKMpEGZB1UP1pxeAT8sCzne2IwnKkr+QhUe665EXUo3BAvTf1kAPCqyMv9kg3ZmO0+7eOni/C6Uag==", "dev": true, + "license": "MIT", "dependencies": { - "sort-package-json": "1.57.0" + "sort-package-json": "3.4.0", + "synckit": "0.11.11" }, "peerDependencies": { "prettier": ">= 1.16.0" + }, + "peerDependenciesMeta": { + "prettier": { + "optional": true + } } }, "node_modules/pretty-bytes": { "version": "5.6.0", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", - "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" }, @@ -5972,99 +6483,155 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "dev": true, + "license": "MIT", "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", - "react-is": "^16.8.1" + "react-is": "^16.13.1" + } + }, + "node_modules/proxy-agent": { + "version": "6.5.0", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.6", + "lru-cache": "^7.14.1", + "pac-proxy-agent": "^7.1.0", + "proxy-from-env": "^1.1.0", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/debug": { + "version": "4.4.1", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/proxy-agent/node_modules/lru-cache": { + "version": "7.18.3", + "license": "ISC", + "engines": { + "node": ">=12" } }, + "node_modules/proxy-agent/node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, + "node_modules/proxy-agent/node_modules/proxy-from-env": { + "version": "1.1.0", + "license": "MIT" + }, "node_modules/proxy-from-env": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", - "integrity": 
"sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/psl": { - "version": "1.1.31", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz", - "integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw==", - "dev": true + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz", + "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "punycode": "^2.3.1" + }, + "funding": { + "url": "https://github.com/sponsors/lupomontero" + } }, "node_modules/pump": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "dev": true, + "license": "MIT", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "node_modules/punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "version": "2.3.1", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/q": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==", - "engines": { - "node": ">=0.6.0", - "teleport": ">=0.2.0" - } - }, "node_modules/qs": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", - "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.4.tgz", + "integrity": "sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g==", "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.4" + }, "engines": { "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/quick-lru": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", - "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", - "engines": { - "node": ">=8" - } + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" }, "node_modules/react-is": { - "version": "16.8.6", - "resolved": 
"https://registry.npmjs.org/react-is/-/react-is-16.8.6.tgz", - "integrity": "sha512-aUk3bHfZ2bRSVFFbbeVS4i+lNPZr3/WM5jT2J5omUVV1zzcs1nAaf3l51ctA5FFvCRbhrH0bdAsRRQddFJZPtA==", - "dev": true + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true, + "license": "MIT" }, "node_modules/read-installed": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/read-installed/-/read-installed-4.0.3.tgz", - "integrity": "sha512-O03wg/IYuV/VtnK2h/KXEt9VIbMUFbk3ERG0Iu4FhLZw0EP0T9znqrYDGn6ncbEsXUFaUjiVAWXHzxwt3lhRPQ==", "dev": true, + "license": "ISC", "dependencies": { "debuglog": "^1.0.1", "read-package-json": "^2.0.0", @@ -6079,9 +6646,8 @@ }, "node_modules/read-package-json": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.2.tgz", - "integrity": "sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==", "dev": true, + "license": "ISC", "dependencies": { "glob": "^7.1.1", "json-parse-even-better-errors": "^2.3.0", @@ -6089,108 +6655,9 @@ "npm-normalize-package-bin": "^1.0.0" } }, - "node_modules/read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", - "dependencies": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", - "dev": true, - "dependencies": { - "find-up": "^2.0.0", - "read-pkg": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "strip-bom": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dev": true, - "dependencies": { - "error-ex": "^1.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/read-pkg-up/node_modules/path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "dev": true, - "dependencies": { - "pify": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", - "dev": true, - "dependencies": { - "load-json-file": "^2.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": 
"sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", - "engines": { - "node": ">=4" - } - }, "node_modules/readable-stream": { "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "license": "MIT", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -6202,9 +6669,8 @@ }, "node_modules/readdir-scoped-modules": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", - "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", "dev": true, + "license": "ISC", "dependencies": { "debuglog": "^1.0.1", "dezalgo": "^1.0.0", @@ -6212,36 +6678,72 @@ "once": "^1.3.0" } }, - "node_modules/redent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", - "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "node_modules/reflect-metadata": { + "version": "0.2.2", + "license": "Apache-2.0", + "peer": true + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "license": "MIT", "dependencies": { - "indent-string": "^4.0.0", - "strip-indent": "^3.0.0" + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/reflect-metadata": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz", - "integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==" + "node_modules/regexp.prototype.flags": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/regexpp": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "version": "3.2.0", + "resolved": 
"https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", "dev": true, + "license": "MIT", "engines": { - "node": ">=6.5.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" } }, "node_modules/remark-footnotes": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-3.0.0.tgz", - "integrity": "sha512-ZssAvH9FjGYlJ/PBVKdSmfyPc3Cz4rTWgZLI4iE/SX8Nt5l3o3oEjv3wwG5VD7xOjktzdwp5coac+kJV9l4jgg==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-footnote": "^0.1.0", "micromark-extension-footnote": "^0.3.0" @@ -6253,8 +6755,8 @@ }, "node_modules/remark-frontmatter": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-3.0.0.tgz", - "integrity": "sha512-mSuDd3svCHs+2PyO29h7iijIZx4plX0fheacJcAoYAASfgzgVIcXGYSq9GFyYocFLftQs8IOmmkgtOovs6d4oA==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-frontmatter": "^0.2.0", "micromark-extension-frontmatter": "^0.2.0" @@ -6266,8 +6768,8 @@ }, "node_modules/remark-gfm": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", - "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-gfm": "^0.1.0", "micromark-extension-gfm": "^0.3.0" @@ -6279,8 +6781,8 @@ }, "node_modules/remark-parse": { "version": "9.0.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-9.0.0.tgz", - "integrity": "sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==", + "dev": true, + "license": "MIT", "dependencies": { "mdast-util-from-markdown": "^0.8.0" }, @@ -6291,104 +6793,55 @@ }, "node_modules/repeat-string": { "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", - "engines": { - "node": ">=0.10" - } - }, - "node_modules/request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", "dev": true, - "dependencies": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - }, + "license": "MIT", "engines": { - "node": ">= 4" + "node": ">=0.10" } }, "node_modules/request-progress": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", - "integrity": "sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4=", "dev": true, + "license": "MIT", "dependencies": { "throttleit": "^1.0.0" } }, - "node_modules/request/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": 
"sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "dev": true, - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/require-directory": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/require-uncached": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz", - "integrity": "sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM=", + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", "dev": true, - "dependencies": { - "caller-path": "^0.1.0", - "resolve-from": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } + "license": "MIT" }, "node_modules/resolve": { "version": "1.10.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz", - "integrity": "sha512-3sUr9aq5OfSg2S9pNtPA9hL1FVEAjvfOC4leW0SNf/mpnaakz2a9femSd6LqAww2RaFctwyf1lCqnTHuF1rxDg==", + "dev": true, + "license": "MIT", "dependencies": { "path-parse": "^1.0.6" } }, "node_modules/resolve-from": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-1.0.1.tgz", - "integrity": "sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, "node_modules/restore-cursor": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "license": "MIT", "dependencies": { "onetime": "^5.1.0", "signal-exit": "^3.0.2" @@ -6398,63 +6851,102 @@ } }, "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", "dev": true, + "license": "MIT", "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" } }, "node_modules/rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, + "license": "ISC", "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" + }, + "funding": 
{ + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/run-async": { "version": "2.4.1", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", - "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "license": "MIT", "engines": { "node": ">=0.12.0" } }, "node_modules/run-parallel": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.1.9.tgz", - "integrity": "sha512-DEqnSRTDw/Tc3FXf49zedI638Z9onwUotBMiUFKmrO2sdFKIbXamXGQ3Axd4qgphxKB4kw/qP1w5kTxnfU1B9Q==", - "dev": true - }, - "node_modules/rx": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/rx/-/rx-4.1.0.tgz", - "integrity": "sha1-pfE/957zt0D+MKqAP7CfmIBdR4I=", - "dev": true - }, - "node_modules/rxjs": { - "version": "5.5.12", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-5.5.12.tgz", - "integrity": "sha512-xx2itnL5sBbqeeiVgNPVuQQ1nC8Jp2WfNJhXWHmElW9YmrpS9UVnNzhP3EH3HFqexO5Tlp8GhYY+WEcqcVMvGw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/rxjs/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", "dev": true, + "license": "MIT", "dependencies": { - "symbol-observable": "1.0.1" + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", + "isarray": "^2.0.5" }, "engines": { - "npm": ">=2.0.0" + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/safe-buffer": { "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", "funding": [ { "type": "github", @@ -6468,153 +6960,347 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" + }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + 
"isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/safer-buffer": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "license": "MIT" }, "node_modules/semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==", + "version": "5.7.2", + "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver" } }, - "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, + "license": "MIT", "dependencies": { - "shebang-regex": "^1.0.0" + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, - "node_modules/shebang-regex": { + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, - "node_modules/signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } }, - "node_modules/slash": { + "node_modules/shebang-regex": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/slice-ansi": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", - "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "node_modules/shell-quote": { + "version": "1.8.3", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", "dev": true, + "license": "MIT", "dependencies": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "node_modules/side-channel-list": { + "version": "1.0.0", "dev": true, + "license": "MIT", "dependencies": { - "color-convert": "^2.0.1" + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" }, "engines": { - "node": ">=8" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/slice-ansi/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/side-channel-map": { + "version": "1.0.1", "dev": true, + "license": "MIT", "dependencies": { - "color-name": "~1.1.4" + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" }, "engines": { - "node": ">=7.0.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/slice-ansi/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.2", + "license": "ISC" + }, + "node_modules/slice-ansi": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } }, "node_modules/slide": { "version": "1.1.6", - "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", - "integrity": "sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==", "dev": true, + "license": "ISC", "engines": { "node": "*" } }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.8.5", + "license": "MIT", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.5", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/socks-proxy-agent/node_modules/debug": { + "version": "4.4.1", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/socks-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "license": "MIT" + }, "node_modules/sort-object-keys": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/sort-object-keys/-/sort-object-keys-1.1.3.tgz", "integrity": "sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/sort-package-json": { - "version": "1.57.0", - "resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-1.57.0.tgz", - "integrity": "sha512-FYsjYn2dHTRb41wqnv+uEqCUvBpK3jZcTp9rbz2qDTmel7Pmdtf+i2rLaaPMRZeSVM60V3Se31GyWFpmKs4Q5Q==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-3.4.0.tgz", + "integrity": "sha512-97oFRRMM2/Js4oEA9LJhjyMlde+2ewpZQf53pgue27UkbEXfHJnDzHlUxQ/DWUkzqmp7DFwJp8D+wi/TYeQhpA==", "dev": true, + "license": "MIT", "dependencies": { - "detect-indent": "^6.0.0", - "detect-newline": "3.1.0", - "git-hooks-list": "1.0.3", - "globby": "10.0.0", - "is-plain-obj": "2.1.0", - "sort-object-keys": "^1.1.3" + "detect-indent": "^7.0.1", + "detect-newline": "^4.0.1", + "git-hooks-list": "^4.0.0", + "is-plain-obj": "^4.1.0", + "semver": "^7.7.1", + "sort-object-keys": "^1.1.3", + "tinyglobby": "^0.2.12" }, "bin": { "sort-package-json": "cli.js" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/sort-package-json/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/sort-package-json/node_modules/semver": { + "version": "7.7.2", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/source-map": { "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "optional": true, "engines": { "node": ">=0.10.0" } }, - "node_modules/spawn-command": { - "version": "0.0.2-1", - "resolved": "https://registry.npmjs.org/spawn-command/-/spawn-command-0.0.2-1.tgz", - "integrity": "sha1-YvXpRmmBwbeW3Fkpk34RycaSG9A=" - }, "node_modules/spdx-compare": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/spdx-compare/-/spdx-compare-1.0.0.tgz", - "integrity": "sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==", "dev": true, + "license": "MIT", "dependencies": { "array-find-index": "^1.0.2", "spdx-expression-parse": "^3.0.0", @@ -6623,8 +7309,8 @@ }, "node_modules/spdx-correct": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, + "license": "Apache-2.0", "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" @@ -6632,13 +7318,13 @@ }, "node_modules/spdx-exceptions": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==" + "dev": true, + "license": "CC-BY-3.0" }, "node_modules/spdx-expression-parse": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "license": "MIT", "dependencies": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" @@ -6646,56 +7332,30 @@ }, "node_modules/spdx-license-ids": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz", - "integrity": "sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA==" + "dev": true, + "license": "CC0-1.0" }, "node_modules/spdx-ranges": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.1.1.tgz", - "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==", - "dev": true + "dev": true, + "license": "(MIT AND CC-BY-3.0)" }, "node_modules/spdx-satisfies": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-4.0.1.tgz", - "integrity": "sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA==", "dev": true, + "license": "MIT", "dependencies": { "spdx-compare": "^1.0.0", "spdx-expression-parse": "^3.0.0", "spdx-ranges": "^2.0.0" } }, - "node_modules/split": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", - 
"dependencies": { - "through": "2" - }, - "engines": { - "node": "*" - } - }, - "node_modules/split2": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", - "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", - "dependencies": { - "readable-stream": "^3.0.0" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - }, "node_modules/sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.18.0.tgz", + "integrity": "sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ==", "dev": true, + "license": "MIT", "dependencies": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -6707,402 +7367,484 @@ "safer-buffer": "^2.0.2", "tweetnacl": "~0.14.0" }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, "engines": { "node": ">=0.10.0" } }, "node_modules/standard": { - "version": "12.0.1", - "resolved": "https://registry.npmjs.org/standard/-/standard-12.0.1.tgz", - "integrity": "sha512-UqdHjh87OG2gUrNCSM4QRLF5n9h3TFPwrCNyVlkqu31Hej0L/rc8hzKqVvkb2W3x0WMq7PzZdkLfEcBhVOR6lg==", + "version": "17.1.2", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.1.2.tgz", + "integrity": "sha512-WLm12WoXveKkvnPnPnaFUUHuOB2cUdAsJ4AiGHL2G0UNMrcRAWY2WriQaV8IQ3oRmYr0AWUbLNr94ekYFAHOrA==", "dev": true, - "dependencies": { - "eslint": "~5.4.0", - "eslint-config-standard": "12.0.0", - "eslint-config-standard-jsx": "6.0.2", - "eslint-plugin-import": "~2.14.0", - "eslint-plugin-node": "~7.0.1", - "eslint-plugin-promise": "~4.0.0", - "eslint-plugin-react": "~7.11.1", - "eslint-plugin-standard": "~4.0.0", - "standard-engine": "~9.0.0" + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "eslint": "^8.41.0", + "eslint-config-standard": "17.1.0", + "eslint-config-standard-jsx": "^11.0.0", + "eslint-plugin-import": "^2.27.5", + "eslint-plugin-n": "^15.7.0", + "eslint-plugin-promise": "^6.1.1", + "eslint-plugin-react": "^7.36.1", + "standard-engine": "^15.1.0", + "version-guard": "^1.1.1" }, "bin": { - "standard": "bin/cmd.js" + "standard": "bin/cmd.cjs" }, "engines": { - "node": ">=4" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/standard-engine": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-9.0.0.tgz", - "integrity": "sha512-ZfNfCWZ2Xq67VNvKMPiVMKHnMdvxYzvZkf1AH8/cw2NLDBm5LRsxMqvEJpsjLI/dUosZ3Z1d6JlHDp5rAvvk2w==", + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-15.1.0.tgz", + "integrity": "sha512-VHysfoyxFu/ukT+9v49d4BRXIokFRZuH3z1VRxzFArZdjSCFpro6rEIU3ji7e4AoAtuSfKBkiOmsrDqKW5ZSRw==", "dev": true, - "dependencies": { - "deglob": "^2.1.0", - "get-stdin": "^6.0.0", - "minimist": "^1.1.0", - "pkg-conf": "^2.0.0" - } - }, - "node_modules/string_decoder": { - "version": 
"1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", - "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "get-stdin": "^8.0.0", + "minimist": "^1.2.6", + "pkg-conf": "^3.1.0", + "xdg-basedir": "^4.0.0" }, "engines": { - "node": ">=8" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/string-width/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, - "node_modules/string-width/node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "node_modules/string_decoder": { + "version": "1.3.0", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, "engines": { "node": ">=8" } }, - "node_modules/strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "license": "MIT", "dependencies": { - "ansi-regex": "^2.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "node_modules/string.prototype.matchall": { + "version": "4.0.12", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.12.tgz", + "integrity": "sha512-6CC9uyBL+/48dYizRf7H7VAYCMCNTBeM78x/VTUe9bFEaxBepPJDa1Ow99LqI/1yF7kuy7Q3cQsYMrcjGUcskA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + 
"define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "regexp.prototype.flags": "^1.5.3", + "set-function-name": "^2.0.2", + "side-channel": "^1.1.0" + }, "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", "dev": true, - "engines": { - "node": ">=6" + "license": "MIT", + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" } }, - "node_modules/strip-indent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", - "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "node_modules/string.prototype.trim": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + "license": "MIT", "dependencies": { - "min-indent": "^1.0.0" + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "node_modules/string.prototype.trimend": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "license": "MIT", "dependencies": { - "has-flag": 
"^3.0.0" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/symbol-observable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.0.1.tgz", - "integrity": "sha1-g0D8RwLDEi310iKI+IKD9RPT/dQ=", - "dev": true, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/table": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/table/-/table-4.0.3.tgz", - "integrity": "sha512-S7rnFITmBH1EnyKcvxBh1LjYeQMmnZtCXSEbHcH6S0NoKit24ZuFO/T1vDcLdYsLQkM188PVVhQmzKIuThNkKg==", - "dev": true, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "license": "MIT", "dependencies": { - "ajv": "^6.0.1", - "ajv-keywords": "^3.0.0", - "chalk": "^2.1.0", - "lodash": "^4.17.4", - "slice-ansi": "1.0.0", - "string-width": "^2.1.1" + "ansi-regex": "^5.0.1" }, "engines": { - "node": ">=4.0.0" + "node": ">=8" } }, - "node_modules/table/node_modules/ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, - "node_modules/table/node_modules/is-fullwidth-code-point": { + "node_modules/strip-final-newline": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=6" } }, - "node_modules/table/node_modules/slice-ansi": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-1.0.0.tgz", - "integrity": "sha512-POqxBK6Lb3q6s047D/XsDVNPnF9Dl8JSaqe9h9lURl0OdNqy/ujDrOiIHtsqXMGbWWTIomRzAMaTyawAU//Reg==", + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, - "dependencies": { - "is-fullwidth-code-point": "^2.0.0" - }, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/table/node_modules/string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, + "node_modules/strtok3": { + "version": "10.3.4", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-10.3.4.tgz", + "integrity": 
"sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==", + "license": "MIT", "dependencies": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" + "@tokenizer/token": "^0.3.0" }, "engines": { - "node": ">=4" + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" } }, - "node_modules/table/node_modules/strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "node_modules/supports-color": { + "version": "5.5.0", "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^3.0.0" + "has-flag": "^3.0.0" }, "engines": { "node": ">=4" } }, - "node_modules/temp-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz", - "integrity": "sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==", + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/tempfile": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tempfile/-/tempfile-3.0.0.tgz", - "integrity": "sha512-uNFCg478XovRi85iD42egu+eSFUmmka750Jy7L5tfHI5hQKKtbPnxaSaXAbBqCDYrw3wx4tXjKwci4/QmsZJxw==", + "node_modules/synckit": { + "version": "0.11.11", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.11.tgz", + "integrity": "sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==", + "dev": true, + "license": "MIT", "dependencies": { - "temp-dir": "^2.0.0", - "uuid": "^3.3.2" + "@pkgr/core": "^0.2.9" }, "engines": { - "node": ">=8" - } - }, - "node_modules/tempfile/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, - "node_modules/text-extensions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", - "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==", - "engines": { - "node": ">=0.10" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" } }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT" }, "node_modules/throttleit": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", - "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/through": { "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + "license": "MIT" }, - "node_modules/through2": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", - "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "dependencies": { - "readable-stream": "3" - } - }, - "node_modules/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", "dependencies": { - "os-tmpdir": "~1.0.2" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { - "node": ">=0.6.0" + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" } }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, + "license": "MIT", "engines": { - "node": ">=8.0" + "node": ">=14.14" } }, - "node_modules/topo": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/topo/-/topo-3.0.3.tgz", - "integrity": "sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ==", - "dev": true, + "node_modules/token-types": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.1.tgz", + "integrity": "sha512-kh9LVIWH5CnL63Ipf0jhlBIy0UsrMj/NJDfpsy1SqOXlLKEVyXXYrnFxFT1yOOYVGBSApeVnjPw/sBz5BfEjAQ==", + "license": "MIT", "dependencies": { - "hoek": "6.x.x" + "@borewit/text-codec": "^0.1.0", + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + 
"engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" } }, - "node_modules/topo/node_modules/hoek": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-6.1.3.tgz", - "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==", - "dev": true - }, "node_modules/tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz", + "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "psl": "^1.1.24", - "punycode": "^1.4.1" + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" }, "engines": { - "node": ">=0.8" + "node": ">=6" } }, - "node_modules/tough-cookie/node_modules/punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true + "node_modules/tough-cookie/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.0.0" + } }, "node_modules/tr46": { "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" + "license": "MIT" }, "node_modules/traverse": { "version": "0.6.7", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.7.tgz", - "integrity": "sha512-/y956gpUo9ZNCb99YjxG7OaslxZWHfCHAUUfshwqOXmxUIvqLjVO581BT+gM59+QV9tFe6/CGG53tsA1Y7RSdg==", + "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/tree-kill": { "version": "1.2.2", - "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", - "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", + "license": "MIT", "bin": { "tree-kill": "cli.js" } }, "node_modules/treeify": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/treeify/-/treeify-1.1.0.tgz", - "integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.6" } }, - "node_modules/trim-newlines": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", - "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", - "engines": { - "node": ">=8" - } - }, "node_modules/trough": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", - "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", + "dev": true, + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "resolved": 
"https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, "node_modules/tslib": { "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + "dev": true, + "license": "0BSD" }, "node_modules/tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "dev": true, + "license": "Apache-2.0", "dependencies": { "safe-buffer": "^5.0.1" }, @@ -7113,16 +7855,18 @@ "node_modules/tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "dev": true, + "license": "Unlicense" }, "node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, + "license": "MIT", "dependencies": { - "prelude-ls": "~1.1.2" + "prelude-ls": "^1.2.1" }, "engines": { "node": ">= 0.8.0" @@ -7130,8 +7874,7 @@ }, "node_modules/type-fest": { "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -7139,71 +7882,179 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/uglify-js": { - "version": "3.17.4", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.17.4.tgz", - "integrity": "sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==", - "optional": true, - "bin": { - "uglifyjs": "bin/uglifyjs" + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" }, "engines": { - "node": ">=0.8.0" + "node": ">= 0.4" } }, - "node_modules/underscore": { - "version": "1.13.6", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz", - "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" - }, - "node_modules/unified": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz", - "integrity": 
"sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ==", + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "license": "MIT", "dependencies": { - "bail": "^1.0.0", - "extend": "^3.0.0", - "is-buffer": "^2.0.0", - "is-plain-obj": "^2.0.0", - "trough": "^1.0.0", - "vfile": "^4.0.0" + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/uniq": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=", - "dev": true - }, - "node_modules/unist-util-is": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", - "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", + "node_modules/typed-array-byte-offset": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/unist-util-stringify-position": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", - "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "license": "MIT", "dependencies": { - "@types/unist": "^2.0.2" + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/uid": { + "version": "2.0.2", + "license": "MIT", + "dependencies": { + "@lukeed/csprng": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/uint8array-extras": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/uint8array-extras/-/uint8array-extras-1.5.0.tgz", + "integrity": "sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==", + "license": "MIT", + "engines": { + 
"node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unbox-primitive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/underscore": { + "version": "1.13.6", + "dev": true, + "license": "MIT" + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "license": "MIT", + "optional": true + }, + "node_modules/unified": { + "version": "9.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "bail": "^1.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^2.0.0", + "trough": "^1.0.0", + "vfile": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.2" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-visit-parents": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", - "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", + "dev": true, + "license": "MIT", "dependencies": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0" @@ -7215,58 +8066,66 @@ }, "node_modules/universalify": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", + "license": "MIT", "engines": { "node": ">= 10.0.0" } }, "node_modules/untildify": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/update-section": { "version": "0.3.3", - "resolved": "https://registry.npmjs.org/update-section/-/update-section-0.3.3.tgz", - "integrity": "sha512-BpRZMZpgXLuTiKeiu7kK0nIPwGdyrqrs6EDSaXtjD/aQ2T+qVo9a5hRC3HN3iJjCMxNT/VxoLGQ7E/OzE5ucnw==" + "dev": true, + "license": "MIT" }, "node_modules/uri-js": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" } }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "license": "MIT" }, "node_modules/util-extend": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz", - "integrity": "sha512-mLs5zAK+ctllYBj+iAQvlDCwoxU/WDOUaJkcFudeiAX6OajC6BKXJUa9a+tbtkC11dz2Ufb7h0lyvIOVn4LADA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/uuid": { "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "license": "MIT", "bin": { "uuid": "dist/bin/uuid" } }, "node_modules/validate-npm-package-license": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "license": "Apache-2.0", "dependencies": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" @@ -7275,21 +8134,32 @@ "node_modules/verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", "dev": true, "engines": [ "node >=0.6.0" ], + "license": "MIT", "dependencies": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", "extsprintf": "^1.2.0" } }, + "node_modules/version-guard": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.3.tgz", + "integrity": "sha512-JwPr6erhX53EWH/HCSzfy1tTFrtPXUe927wdM1jqBBeYp1OM+qPHjWbsvv6pIBduqdgxxS+ScfG7S28pzyr2DQ==", + "dev": true, + "license": "0BSD", + "engines": { + "node": ">=0.10.48" + } + }, "node_modules/vfile": { "version": "4.2.1", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", - "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", + "dev": true, + "license": "MIT", "dependencies": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", @@ -7303,8 +8173,8 @@ }, "node_modules/vfile-message": { "version": "2.0.4", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", - "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", + "dev": true, + "license": "MIT", "dependencies": { "@types/unist": "^2.0.0", "unist-util-stringify-position": "^2.0.0" @@ -7315,5890 +8185,241 @@ } }, "node_modules/wait-on": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-3.2.0.tgz", - "integrity": 
"sha512-QUGNKlKLDyY6W/qHdxaRlXUAgLPe+3mLL/tRByHpRNcHs/c7dZXbu+OnJWGNux6tU1WFh/Z8aEwvbuzSAu79Zg==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-9.0.1.tgz", + "integrity": "sha512-noeCAI+XbqWMXY23sKril0BSURhuLYarkVXwJv1uUWwoojZJE7pmX3vJ7kh7SZaNgPGzfsCSQIZM/AGvu0Q9pA==", "dev": true, + "license": "MIT", "dependencies": { - "core-js": "^2.5.7", - "joi": "^13.0.0", - "minimist": "^1.2.0", - "request": "^2.88.0", - "rx": "^4.1.0" + "axios": "^1.12.2", + "joi": "^18.0.1", + "lodash": "^4.17.21", + "minimist": "^1.2.8", + "rxjs": "^7.8.2" }, "bin": { "wait-on": "bin/wait-on" }, "engines": { - "node": ">=4.0.0" + "node": ">=20.0.0" } }, "node_modules/wcwidth": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", - "integrity": "sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g=", + "license": "MIT", "dependencies": { "defaults": "^1.0.3" } }, "node_modules/webidl-conversions": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" + "license": "BSD-2-Clause" }, "node_modules/whatwg-url": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "license": "MIT", "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", "dependencies": { "isexe": "^2.0.0" }, "bin": { - "which": "bin/which" + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, - "node_modules/wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" - }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "node_modules/which-boxed-primitive": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "license": "MIT", "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" }, "engines": { - "node": ">=10" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" + "url": "https://github.com/sponsors/ljharb" } }, - 
"node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" }, "engines": { - "node": ">=8" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wrap-ansi/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "license": "MIT", "dependencies": { - "color-name": "~1.1.4" + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" }, "engines": { - "node": ">=7.0.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wrap-ansi/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.0" + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "node_modules/write": { - "version": "0.2.1", - 
"resolved": "https://registry.npmjs.org/write/-/write-0.2.1.tgz", - "integrity": "sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c=", + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, - "dependencies": { - "mkdirp": "^0.5.1" - }, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/xtend": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=", + "node_modules/wrap-ansi": { + "version": "7.0.0", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, "engines": { - "node": ">=0.4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, "engines": { "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "license": "ISC" }, - "node_modules/yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dependencies": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - }, + "node_modules/xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "dev": true, + "license": "MIT", "engines": { - "node": ">=10" + "node": ">=8" } }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "node_modules/y18n": { + "version": "5.0.8", + "license": "ISC", "engines": { "node": ">=10" } }, "node_modules/yauzl": { "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", "dev": true, + "license": "MIT", "dependencies": { "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } }, - "node_modules/zwitch": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", - "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==", - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/wooorm" - } - } - }, - "dependencies": { - "@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", - "requires": { - "@babel/highlight": "^7.18.6" - } - }, - "@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==" - }, - "@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", - "requires": { - "@babel/helper-validator-identifier": "^7.18.6", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - }, - "dependencies": { - "js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - } - } - }, - "@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "dev": true, - "optional": true - }, - "@cypress/request": { - "version": "2.88.10", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", - "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "http-signature": "~1.3.6", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.5.0", - "tunnel-agent": "^0.6.0", - "uuid": "^8.3.2" + "license": "MIT", + "engines": { + "node": ">=10" }, - "dependencies": { - "http-signature": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", - "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^2.0.2", - "sshpk": "^1.14.1" - } - }, - "jsprim": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", - "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" - } - }, - "tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": 
"sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - } - } - } - }, - "@cypress/xvfb": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", - "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", - "dev": true, - "requires": { - "debug": "^3.1.0", - "lodash.once": "^4.1.1" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "@hutson/parse-repository-url": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@hutson/parse-repository-url/-/parse-repository-url-3.0.2.tgz", - "integrity": "sha512-H9XAx3hc0BQHY6l+IFSWHDySypcXsvsuLhgYLUGywmJ5pswRVQJUHpOsobnLYp2ZUaUlKiKDrgWWhosOwAEM8Q==" - }, - "@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "node_modules/zwitch": { + "version": "1.0.5", "dev": true, - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - "@nuxtjs/opencollective": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@nuxtjs/opencollective/-/opencollective-0.3.2.tgz", - "integrity": "sha512-um0xL3fO7Mf4fDxcqx9KryrB7zgRM5JSlvGN5AGkP6JLM5XEKyjeAiPbNxdXVXQ16isuAhYpvP88NgL2BGd6aA==", - "requires": { - "chalk": "^4.1.0", - "consola": "^2.15.0", - "node-fetch": "^2.6.1" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "has-flag": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "@openapitools/openapi-generator-cli": { - "version": "2.4.26", - "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.4.26.tgz", - "integrity": "sha512-O42H9q1HWGoIpcpMaUu318b6bmOgcjP3MieHwOrFdoG3KyttceBGlbLf9Kbf7WM91WSNCDXum7cnEKASuoGjAg==", - "requires": { - "@nestjs/common": "8.2.6", - "@nestjs/core": "8.2.6", - "@nuxtjs/opencollective": "0.3.2", - "chalk": "4.1.2", - "commander": "8.3.0", - "compare-versions": "3.6.0", - "concurrently": "6.5.1", - "console.table": "0.10.0", - "fs-extra": "10.0.0", - "glob": "7.1.6", - "inquirer": "8.2.0", - "lodash": "4.17.21", - "reflect-metadata": "0.1.13", - "rxjs": "7.5.2", - "tslib": "2.0.3" - }, - "dependencies": { - "@nestjs/common": { - "version": "8.2.6", - "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-8.2.6.tgz", - "integrity": "sha512-flLYSXunxcKyjbYddrhwbc49uE705MxBt85rS3mHyhDbAIPSGGeZEqME44YyAzCg1NTfJSNe7ztmOce5kNkb9A==", - "requires": { - "axios": "0.24.0", - "iterare": "1.2.1", - "tslib": "2.3.1", - "uuid": "8.3.2" - }, - "dependencies": { - "tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" - } - } - }, - "@nestjs/core": { - "version": "8.2.6", - "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-8.2.6.tgz", - "integrity": "sha512-NwPcEIMmCsucs3QaDlQvkoU1FlFM2wm/WjaqLQhkSoIEmAR1gNtBo88f5io5cpMwCo1k5xYhqGlaSl6TfngwWQ==", - "requires": { - "@nuxtjs/opencollective": "0.3.2", - "fast-safe-stringify": "2.1.1", - "iterare": "1.2.1", - "object-hash": "2.2.0", - "path-to-regexp": "3.2.0", - "tslib": "2.3.1", - "uuid": "8.3.2" - }, - "dependencies": { - "tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" - } - } - }, - "ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "requires": { - "type-fest": "^0.21.3" - } - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "requires": { - "ansi-styles": "^4.1.0", - 
"supports-color": "^7.1.0" - } - }, - "chardet": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", - "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==" - }, - "cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==" - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" - }, - "external-editor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", - "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", - "requires": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" - } - }, - "fs-extra": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", - "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", - "requires": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "inquirer": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.0.tgz", - "integrity": "sha512-0crLweprevJ02tTuA6ThpoAERAGyVILC4sS74uib58Xf/zSr1/ZWtmm7D5CI+bSQEaA04f0K7idaHpQbSWgiVQ==", - "requires": { - "ansi-escapes": "^4.2.1", - "chalk": "^4.1.1", - "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", - "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.21", - "mute-stream": "0.0.8", - "ora": "^5.4.1", - "run-async": "^2.4.0", - "rxjs": "^7.2.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", - "through": "^2.3.6" - } - }, - "mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" - }, - "rxjs": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.2.tgz", - "integrity": "sha512-PwDt186XaL3QN5qXj/H9DGyHhP3/RYYgZZwqBv9Tv8rsAaiwFH1IsJJlcgD37J7UW5a6O67qX0KWKS3/pu0m4w==", - "requires": { - "tslib": "^2.1.0" - }, - "dependencies": { - "tslib": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" - } - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - }, - "tslib": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.0.3.tgz", - "integrity": "sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ==" - } - } - }, - "@textlint/ast-node-types": { - "version": "12.2.2", - "resolved": "https://registry.npmjs.org/@textlint/ast-node-types/-/ast-node-types-12.2.2.tgz", - "integrity": "sha512-VQAXUSGdmEajHXrMxeM9ZTS8UBJSVB0ghJFHpFfqYKlcDsjIqClSmTprY6521HoCoSLoUIGBxTC3jQyUMJFIWw==" - }, - "@textlint/markdown-to-ast": { - "version": "12.2.2", - "resolved": "https://registry.npmjs.org/@textlint/markdown-to-ast/-/markdown-to-ast-12.2.2.tgz", - "integrity": "sha512-OP0cnGCzt8Bbfhn8fO/arQSHBhmuXB4maSXH8REJAtKRpTADWOrbuxAOaI9mjQ7EMTDiml02RZ9MaELQAWAsqQ==", - "requires": { - "@textlint/ast-node-types": "^12.2.2", - "debug": "^4.3.4", - "mdast-util-gfm-autolink-literal": "^0.1.0", - "remark-footnotes": "^3.0.0", - "remark-frontmatter": "^3.0.0", - "remark-gfm": "^1.0.0", - "remark-parse": "^9.0.0", - "traverse": "^0.6.6", - "unified": "^9.2.2" - }, - "dependencies": { - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "requires": { - "ms": "2.1.2" - } - } - } - }, - "@types/glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", - "dev": true, - "requires": { - "@types/minimatch": "*", - "@types/node": "*" - } - }, - "@types/mdast": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", - "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", - "requires": { - "@types/unist": "*" - } - }, - "@types/minimatch": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", - "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", - "dev": true - }, - "@types/minimist": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz", - "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==" - }, - "@types/node": { - "version": "14.17.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.17.5.tgz", - "integrity": "sha512-bjqH2cX/O33jXT/UmReo2pM7DIJREPMnarixbQ57DOOzzFaI6D2+IcwaJQaJpv0M1E9TIhPCYVxrkcityLjlqA==", - "dev": true - }, - "@types/normalize-package-data": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", - "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==" - }, - "@types/sinonjs__fake-timers": { - 
"version": "8.1.1", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", - "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", - "dev": true - }, - "@types/sizzle": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.2.tgz", - "integrity": "sha512-7EJYyKTL7tFR8+gDbB6Wwz/arpGa0Mywk1TJbNzKzHtzbwVmY4HR9WqS5VV7dsBUKQmPNr192jHr/VpBluj/hg==", - "dev": true - }, - "@types/unist": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", - "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" - }, - "@types/yauzl": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", - "integrity": "sha512-8uALY5LTvSuHgloDVUvWP3pIauILm+8/0pDMokuDYIoNsOkSwd5AiHBTSEJjKTDcZr5z8UpgOWZkxBF4iJftoA==", - "dev": true, - "optional": true, - "requires": { - "@types/node": "*" - } - }, - "abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true - }, - "acorn": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", - "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", - "dev": true - }, - "acorn-jsx": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.0.1.tgz", - "integrity": "sha512-HJ7CfNHrfJLlNTzIEUTj43LNWGkqpRLxm3YjAlcD0ACydk9XynzYsCBHxut+iqt+1aBXkx9UP/w/ZqMr13XIzg==", - "dev": true - }, - "add-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/add-stream/-/add-stream-1.0.0.tgz", - "integrity": "sha512-qQLMr+8o0WC4FZGQTcJiKBVC59JylcPSrTtk6usvmIDFUOCKegapy1VHQwRbFMOFyb/inzUVqHs+eMYKDM1YeQ==" - }, - "aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dev": true, - "requires": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - } - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ajv-keywords": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.0.tgz", - "integrity": "sha512-aUjdRFISbuFOl0EIZc+9e4FfZp0bDZgAdOOf30bJmw8VM9v84SHyVyxDfbWxpGYbdZD/9XoKxfHVNmxPkhwyGw==", - "dev": true - }, - "anchor-markdown-header": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/anchor-markdown-header/-/anchor-markdown-header-0.6.0.tgz", - "integrity": "sha512-v7HJMtE1X7wTpNFseRhxsY/pivP4uAJbidVhPT+yhz4i/vV1+qx371IXuV9V7bN6KjFtheLJxqaSm0Y/8neJTA==", - "requires": { - "emoji-regex": "~10.1.0" - }, - "dependencies": { - "emoji-regex": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.1.0.tgz", - "integrity": 
"sha512-xAEnNCT3w2Tg6MA7ly6QqYJvEoY1tm9iIjJ3yMKK9JPlWuRHAMoe5iETwQnx3M9TVbFMfsrBgWKR+IsmswwNjg==" - } - } - }, - "ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", - "dev": true - }, - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "arch": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", - "dev": true - }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "requires": { - "sprintf-js": "~1.0.2" - } - }, - "array-find-index": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", - "integrity": "sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==", - "dev": true - }, - "array-ify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", - "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==" - }, - "array-includes": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.0.3.tgz", - "integrity": "sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0=", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.7.0" - } - }, - "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true - }, - "arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==" - }, - "asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", - "dev": true - }, - "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "dev": true, - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true - }, - "astral-regex": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "dev": true - }, - "async": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", - "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==", - "dev": true - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true - }, - "at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", - "dev": true - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true - }, - "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", - "dev": true - }, - "axios": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.24.0.tgz", - "integrity": "sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==", - "requires": { - "follow-redirects": "^1.14.4" - } - }, - "babel-code-frame": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", - "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", - "dev": true, - "requires": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - }, - "dependencies": { - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - } - } - }, - "bail": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", - "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==" - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" - }, - "base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" - }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "requires": { - "buffer": 
"^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "blob-util": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", - "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", - "dev": true - }, - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", - "dev": true - }, - "buffer-equal-constant-time": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=", - "dev": true - }, - "cachedir": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", - "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", - "dev": true - }, - "caller-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-0.1.0.tgz", - "integrity": "sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8=", - "dev": true, - "requires": { - "callsites": "^0.2.0" - } - }, - "callsites": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-0.2.0.tgz", - "integrity": "sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo=", - "dev": true - }, - "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" - }, - "camelcase-keys": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", - "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", - "requires": { - "camelcase": "^5.3.1", - "map-obj": "^4.0.0", - "quick-lru": "^4.0.1" - } - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "ccount": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", - "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==" - 
}, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - } - } - }, - "character-entities": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", - "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==" - }, - "character-entities-legacy": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", - "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==" - }, - "character-reference-invalid": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", - "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==" - }, - "chardet": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz", - "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=", - "dev": true - }, - "check-more-types": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", - "integrity": "sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA=", - "dev": true - }, - "ci-info": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.2.0.tgz", - "integrity": "sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A==", - "dev": true - }, - "circular-json": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/circular-json/-/circular-json-0.3.3.tgz", - "integrity": "sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A==", - "dev": true - }, - "clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "dev": true - }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "requires": { - "restore-cursor": "^3.1.0" - } - }, - "cli-spinners": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.6.1.tgz", - "integrity": "sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g==" - }, - "cli-table3": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.2.tgz", - "integrity": "sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw==", - "dev": true, - "requires": { - "@colors/colors": "1.5.0", - "string-width": "^4.2.0" - } - }, - "cli-truncate": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", - "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", - "dev": true, - "requires": { - "slice-ansi": "^3.0.0", - "string-width": "^4.2.0" - } - }, - "cli-width": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", - "dev": true - }, - "cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, - "clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=" - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "colorette": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.2.tgz", - "integrity": "sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w==", - "dev": true - }, - "combined-stream": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", - "integrity": "sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==", - "dev": true, - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", - "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", - "dev": true - }, - "common-tags": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", - "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", - "dev": true - }, - "compare-func": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz", - "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==", - "requires": { - "array-ify": "^1.0.0", - "dot-prop": "^5.1.0" - } - }, - "compare-versions": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/compare-versions/-/compare-versions-3.6.0.tgz", - "integrity": 
"sha512-W6Af2Iw1z4CB7q4uU4hv646dW9GQuBM+YpC0UvUCWSD8w90SJjp+ujJuXaEMtAXBtSqGfMPuFOVn4/+FlaqfBA==" - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "concurrently": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-6.5.1.tgz", - "integrity": "sha512-FlSwNpGjWQfRwPLXvJ/OgysbBxPkWpiVjy1042b0U7on7S7qwwMIILRj7WTN1mTgqa582bG6NFuScOoh6Zgdag==", - "requires": { - "chalk": "^4.1.0", - "date-fns": "^2.16.1", - "lodash": "^4.17.21", - "rxjs": "^6.6.3", - "spawn-command": "^0.0.2-1", - "supports-color": "^8.1.0", - "tree-kill": "^1.2.2", - "yargs": "^16.2.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "dependencies": { - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "rxjs": { - "version": "6.6.7", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", - "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", - "requires": { - "tslib": "^1.9.0" - } - }, - "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "consola": { - "version": "2.15.3", - "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", - "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" - }, - "console.table": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/console.table/-/console.table-0.10.0.tgz", - "integrity": "sha1-CRcCVYiHW+/XDPLv9L7yxuLXXQQ=", - "requires": { - "easy-table": "1.1.0" - } - }, - "contains-path": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", - "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", - "dev": true - }, - "conventional-changelog": { - "version": "3.1.25", - "resolved": "https://registry.npmjs.org/conventional-changelog/-/conventional-changelog-3.1.25.tgz", - "integrity": "sha512-ryhi3fd1mKf3fSjbLXOfK2D06YwKNic1nC9mWqybBHdObPd8KJ2vjaXZfYj1U23t+V8T8n0d7gwnc9XbIdFbyQ==", - "requires": { - "conventional-changelog-angular": "^5.0.12", - "conventional-changelog-atom": "^2.0.8", - "conventional-changelog-codemirror": "^2.0.8", - "conventional-changelog-conventionalcommits": "^4.5.0", - "conventional-changelog-core": "^4.2.1", - "conventional-changelog-ember": "^2.0.9", - "conventional-changelog-eslint": "^3.0.9", - "conventional-changelog-express": "^2.0.6", - "conventional-changelog-jquery": "^3.0.11", - "conventional-changelog-jshint": "^2.0.9", - "conventional-changelog-preset-loader": "^2.3.4" - } - }, - "conventional-changelog-angular": { - "version": "5.0.13", - "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz", - "integrity": "sha512-i/gipMxs7s8L/QeuavPF2hLnJgH6pEZAttySB6aiQLWcX3puWDL3ACVmvBhJGxnAy52Qc15ua26BufY6KpmrVA==", - "requires": { - "compare-func": "^2.0.0", - "q": "^1.5.1" - } - }, - "conventional-changelog-atom": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/conventional-changelog-atom/-/conventional-changelog-atom-2.0.8.tgz", - "integrity": "sha512-xo6v46icsFTK3bb7dY/8m2qvc8sZemRgdqLb/bjpBsH2UyOS8rKNTgcb5025Hri6IpANPApbXMg15QLb1LJpBw==", - "requires": { - "q": "^1.5.1" - } - }, - "conventional-changelog-cli": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/conventional-changelog-cli/-/conventional-changelog-cli-2.2.2.tgz", - "integrity": "sha512-8grMV5Jo8S0kP3yoMeJxV2P5R6VJOqK72IiSV9t/4H5r/HiRqEBQ83bYGuz4Yzfdj4bjaAEhZN/FFbsFXr5bOA==", - "requires": { - "add-stream": "^1.0.0", - "conventional-changelog": "^3.1.24", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "tempfile": "^3.0.0" - } - }, - "conventional-changelog-codemirror": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/conventional-changelog-codemirror/-/conventional-changelog-codemirror-2.0.8.tgz", - "integrity": "sha512-z5DAsn3uj1Vfp7po3gpt2Boc+Bdwmw2++ZHa5Ak9k0UKsYAO5mH1UBTN0qSCuJZREIhX6WU4E1p3IW2oRCNzQw==", - "requires": { - "q": "^1.5.1" - } - }, - "conventional-changelog-conventionalcommits": { - "version": "4.6.3", - "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-4.6.3.tgz", - "integrity": "sha512-LTTQV4fwOM4oLPad317V/QNQ1FY4Hju5qeBIM1uTHbrnCE+Eg4CdRZ3gO2pUeR+tzWdp80M2j3qFFEDWVqOV4g==", - "requires": { - "compare-func": "^2.0.0", - "lodash": "^4.17.15", - "q": "^1.5.1" - } - }, - "conventional-changelog-core": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/conventional-changelog-core/-/conventional-changelog-core-4.2.4.tgz", - "integrity": "sha512-gDVS+zVJHE2v4SLc6B0sLsPiloR0ygU7HaDW14aNJE1v4SlqJPILPl/aJC7YdtRE4CybBf8gDwObBvKha8Xlyg==", - "requires": { - "add-stream": "^1.0.0", - "conventional-changelog-writer": "^5.0.0", - "conventional-commits-parser": "^3.2.0", - "dateformat": "^3.0.0", - "get-pkg-repo": "^4.0.0", - "git-raw-commits": "^2.0.8", - "git-remote-origin-url": "^2.0.0", - "git-semver-tags": "^4.1.1", - "lodash": "^4.17.15", - "normalize-package-data": "^3.0.0", - "q": "^1.5.1", - "read-pkg": "^3.0.0", - "read-pkg-up": "^3.0.0", - "through2": 
"^4.0.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "requires": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - } - }, - "read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha512-YFzFrVvpC6frF1sz8psoHDBGF7fLPc+llq/8NB43oagqWkx8ar5zYtsTORtOjw9W2RHLpWP+zTWwBvf1bCmcSw==", - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" - } - }, - "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "requires": { - "lru-cache": "^6.0.0" - } - } - } - }, - "conventional-changelog-ember": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/conventional-changelog-ember/-/conventional-changelog-ember-2.0.9.tgz", - "integrity": "sha512-ulzIReoZEvZCBDhcNYfDIsLTHzYHc7awh+eI44ZtV5cx6LVxLlVtEmcO+2/kGIHGtw+qVabJYjdI5cJOQgXh1A==", - "requires": { - "q": "^1.5.1" - } - }, - "conventional-changelog-eslint": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/conventional-changelog-eslint/-/conventional-changelog-eslint-3.0.9.tgz", - "integrity": "sha512-6NpUCMgU8qmWmyAMSZO5NrRd7rTgErjrm4VASam2u5jrZS0n38V7Y9CzTtLT2qwz5xEChDR4BduoWIr8TfwvXA==", - "requires": { - "q": "^1.5.1" - } - }, - "conventional-changelog-express": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/conventional-changelog-express/-/conventional-changelog-express-2.0.6.tgz", - "integrity": "sha512-SDez2f3iVJw6V563O3pRtNwXtQaSmEfTCaTBPCqn0oG0mfkq0rX4hHBq5P7De2MncoRixrALj3u3oQsNK+Q0pQ==", - "requires": { - "q": "^1.5.1" - } - }, - "conventional-changelog-jquery": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/conventional-changelog-jquery/-/conventional-changelog-jquery-3.0.11.tgz", - "integrity": "sha512-x8AWz5/Td55F7+o/9LQ6cQIPwrCjfJQ5Zmfqi8thwUEKHstEn4kTIofXub7plf1xvFA2TqhZlq7fy5OmV6BOMw==", - "requires": { - "q": "^1.5.1" - } - }, - "conventional-changelog-jshint": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/conventional-changelog-jshint/-/conventional-changelog-jshint-2.0.9.tgz", - "integrity": "sha512-wMLdaIzq6TNnMHMy31hql02OEQ8nCQfExw1SE0hYL5KvU+JCTuPaDO+7JiogGT2gJAxiUGATdtYYfh+nT+6riA==", - "requires": { - "compare-func": "^2.0.0", - "q": "^1.5.1" - } - }, - "conventional-changelog-preset-loader": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-2.3.4.tgz", - "integrity": "sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g==" - }, - "conventional-changelog-writer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-5.0.1.tgz", - "integrity": 
"sha512-5WsuKUfxW7suLblAbFnxAcrvf6r+0b7GvNaWUwUIk0bXMnENP/PEieGKVUQrjPqwPT4o3EPAASBXiY6iHooLOQ==", - "requires": { - "conventional-commits-filter": "^2.0.7", - "dateformat": "^3.0.0", - "handlebars": "^4.7.7", - "json-stringify-safe": "^5.0.1", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "semver": "^6.0.0", - "split": "^1.0.0", - "through2": "^4.0.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" - } - } - }, - "conventional-commits-filter": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-2.0.7.tgz", - "integrity": "sha512-ASS9SamOP4TbCClsRHxIHXRfcGCnIoQqkvAzCSbZzTFLfcTqJVugB0agRgsEELsqaeWgsXv513eS116wnlSSPA==", - "requires": { - "lodash.ismatch": "^4.4.0", - "modify-values": "^1.0.0" - } - }, - "conventional-commits-parser": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.2.4.tgz", - "integrity": "sha512-nK7sAtfi+QXbxHCYfhpZsfRtaitZLIA6889kFIouLvz6repszQDgxBu7wf2WbU+Dco7sAnNCJYERCwt54WPC2Q==", - "requires": { - "is-text-path": "^1.0.1", - "JSONStream": "^1.0.4", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "split2": "^3.0.0", - "through2": "^4.0.0" - } - }, - "core-js": { - "version": "2.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.5.tgz", - "integrity": "sha512-klh/kDpwX8hryYL14M9w/xei6vrv6sE8gTHDG7/T/+SEovB/G4ejwcfE/CBzO6Edsu+OETZMZ3wcX/EjUkrl5A==", - "dev": true - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "cypress": { - "version": "9.7.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.7.0.tgz", - "integrity": "sha512-+1EE1nuuuwIt/N1KXRR2iWHU+OiIt7H28jJDyyI4tiUftId/DrXYEwoDa5+kH2pki1zxnA0r6HrUGHV5eLbF5Q==", - "dev": true, - "requires": { - "@cypress/request": "^2.88.10", - "@cypress/xvfb": "^1.2.4", - "@types/node": "^14.14.31", - "@types/sinonjs__fake-timers": "8.1.1", - "@types/sizzle": "^2.3.2", - "arch": "^2.2.0", - "blob-util": "^2.0.2", - "bluebird": "^3.7.2", - "buffer": "^5.6.0", - "cachedir": "^2.3.0", - "chalk": "^4.1.0", - "check-more-types": "^2.24.0", - "cli-cursor": "^3.1.0", - "cli-table3": "~0.6.1", - "commander": "^5.1.0", - "common-tags": "^1.8.0", - "dayjs": "^1.10.4", - "debug": "^4.3.2", - "enquirer": "^2.3.6", - "eventemitter2": "^6.4.3", - "execa": "4.1.0", - "executable": "^4.1.1", - "extract-zip": "2.0.1", - "figures": "^3.2.0", - "fs-extra": "^9.1.0", - "getos": "^3.2.1", - "is-ci": "^3.0.0", - "is-installed-globally": "~0.4.0", - "lazy-ass": "^1.6.0", - "listr2": "^3.8.3", - "lodash": "^4.17.21", - "log-symbols": "^4.0.0", - "minimist": "^1.2.6", - "ospath": "^1.2.2", - "pretty-bytes": "^5.6.0", - "proxy-from-env": "1.0.0", - "request-progress": "^3.0.0", - "semver": "^7.3.2", - "supports-color": "^8.1.1", - "tmp": "~0.2.1", - "untildify": 
"^4.0.0", - "yauzl": "^2.10.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "dependencies": { - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - }, - "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dev": true, - "requires": { - "rimraf": "^3.0.0" - } - } - } - }, - "dargs": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/dargs/-/dargs-7.0.0.tgz", - "integrity": "sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg==" - }, - "dashdash": { - "version": "1.14.1", - "resolved": 
"https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "date-fns": { - "version": "2.28.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.28.0.tgz", - "integrity": "sha512-8d35hViGYx/QH0icHYCeLmsLmMUheMmTyV9Fcm6gvNwdw31yXXH+O85sOBJ+OLnLQMKZowvpKb6FgMIQjcpvQw==" - }, - "dateformat": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", - "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==" - }, - "dayjs": { - "version": "1.10.6", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.6.tgz", - "integrity": "sha512-AztC/IOW4L1Q41A86phW5Thhcrco3xuAA+YX/BLpLWWjRcTj5TOt/QImBLmCKlrF7u7k47arTnOyL6GnbG8Hvw==", - "dev": true - }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "debug-log": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/debug-log/-/debug-log-1.0.1.tgz", - "integrity": "sha1-IwdjLUwEOCuN+KMvcLiVBG1SdF8=", - "dev": true - }, - "debuglog": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz", - "integrity": "sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw==", - "dev": true - }, - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==" - }, - "decamelize-keys": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz", - "integrity": "sha512-ocLWuYzRPoS9bfiSdDd3cxvrzovVMZnRDVEzAs+hWIVXGDbHxWMECij2OBuyB/An0FFW/nLuq6Kv1i/YC5Qfzg==", - "requires": { - "decamelize": "^1.1.0", - "map-obj": "^1.0.0" - }, - "dependencies": { - "map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==" - } - } - }, - "deep-is": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", - "dev": true - }, - "defaults": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", - "integrity": "sha1-xlYFHpgX2f8I7YgUd/P+QBnz730=", - "requires": { - "clone": "^1.0.2" - } - }, - "define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "dev": true, - "requires": { - "object-keys": "^1.0.12" - } - }, - "deglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/deglob/-/deglob-2.1.1.tgz", - "integrity": "sha512-2kjwuGGonL7gWE1XU4Fv79+vVzpoQCl0V+boMwWtOQJV2AGDabCwez++nB1Nli/8BabAfZQ/UuHPlp6AymKdWw==", - "dev": true, - "requires": { - "find-root": "^1.0.0", - "glob": "^7.0.5", - "ignore": "^3.0.9", - "pkg-config": "^1.1.0", - "run-parallel": "^1.1.2", - "uniq": "^1.0.1" - }, - "dependencies": { - "ignore": { - "version": 
"3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", - "dev": true - } - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true - }, - "detect-indent": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", - "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", - "dev": true - }, - "detect-newline": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", - "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", - "dev": true - }, - "dezalgo": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", - "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", - "dev": true, - "requires": { - "asap": "^2.0.0", - "wrappy": "1" - } - }, - "dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "requires": { - "path-type": "^4.0.0" - } - }, - "doctoc": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/doctoc/-/doctoc-2.2.1.tgz", - "integrity": "sha512-qNJ1gsuo7hH40vlXTVVrADm6pdg30bns/Mo7Nv1SxuXSM1bwF9b4xQ40a6EFT/L1cI+Yylbyi8MPI4G4y7XJzQ==", - "requires": { - "@textlint/markdown-to-ast": "^12.1.1", - "anchor-markdown-header": "^0.6.0", - "htmlparser2": "^7.2.0", - "minimist": "^1.2.6", - "underscore": "^1.13.2", - "update-section": "^0.3.3" - } - }, - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "dom-serializer": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", - "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", - "requires": { - "domelementtype": "^2.0.1", - "domhandler": "^4.2.0", - "entities": "^2.0.0" - }, - "dependencies": { - "entities": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", - "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" - } - } - }, - "domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" - }, - "domhandler": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", - "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", - "requires": { - "domelementtype": "^2.2.0" - } - }, - "domutils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", - "integrity": 
"sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", - "requires": { - "dom-serializer": "^1.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.2.0" - } - }, - "dot-prop": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", - "requires": { - "is-obj": "^2.0.0" - } - }, - "easy-table": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/easy-table/-/easy-table-1.1.0.tgz", - "integrity": "sha1-hvmrTBAvA3G3KXuSplHVgkvIy3M=", - "requires": { - "wcwidth": ">=1.0.1" - } - }, - "ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } - }, - "ecdsa-sig-formatter": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, - "enquirer": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", - "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", - "dev": true, - "requires": { - "ansi-colors": "^4.1.1" - } - }, - "entities": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz", - "integrity": "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==" - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", - "dev": true, - "requires": { - "es-to-primitive": "^1.2.0", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" - } - }, - "es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", - "dev": true, - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": 
"sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" - }, - "eslint": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.4.0.tgz", - "integrity": "sha512-UIpL91XGex3qtL6qwyCQJar2j3osKxK9e3ano3OcGEIRM4oWIpCkDg9x95AXEC2wMs7PnxzOkPZ2gq+tsMS9yg==", - "dev": true, - "requires": { - "ajv": "^6.5.0", - "babel-code-frame": "^6.26.0", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^3.1.0", - "doctrine": "^2.1.0", - "eslint-scope": "^4.0.0", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^4.0.0", - "esquery": "^1.0.1", - "esutils": "^2.0.2", - "file-entry-cache": "^2.0.0", - "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.7.0", - "ignore": "^4.0.2", - "imurmurhash": "^0.1.4", - "inquirer": "^5.2.0", - "is-resolvable": "^1.1.0", - "js-yaml": "^3.11.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.5", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", - "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", - "pluralize": "^7.0.0", - "progress": "^2.0.0", - "regexpp": "^2.0.0", - "require-uncached": "^1.0.3", - "semver": "^5.5.0", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", - "table": "^4.0.3", - "text-table": "^0.2.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", - "dev": true - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "eslint-config-standard": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-12.0.0.tgz", - "integrity": "sha512-COUz8FnXhqFitYj4DTqHzidjIL/t4mumGZto5c7DrBpvWoie+Sn3P4sLEzUGeYhRElWuFEf8K1S1EfvD1vixCQ==", - "dev": true - }, - "eslint-config-standard-jsx": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-6.0.2.tgz", - "integrity": "sha512-D+YWAoXw+2GIdbMBRAzWwr1ZtvnSf4n4yL0gKGg7ShUOGXkSOLerI17K4F6LdQMJPNMoWYqepzQD/fKY+tXNSg==", - "dev": true - }, - "eslint-import-resolver-node": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz", - "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==", - "dev": true, - "requires": { - "debug": "^2.6.9", - "resolve": "^1.5.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - } - } - }, - "eslint-module-utils": { - "version": "2.4.0", - 
"resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.4.0.tgz", - "integrity": "sha512-14tltLm38Eu3zS+mt0KvILC3q8jyIAH518MlG+HO0p+yK885Lb1UHTY/UgR91eOyGdmxAPb+OLoW4znqIT6Ndw==", - "dev": true, - "requires": { - "debug": "^2.6.8", - "pkg-dir": "^2.0.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - } - } - }, - "eslint-plugin-es": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-1.4.0.tgz", - "integrity": "sha512-XfFmgFdIUDgvaRAlaXUkxrRg5JSADoRC8IkKLc/cISeR3yHVMefFHQZpcyXXEUUPHfy5DwviBcrfqlyqEwlQVw==", - "dev": true, - "requires": { - "eslint-utils": "^1.3.0", - "regexpp": "^2.0.1" - } - }, - "eslint-plugin-import": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.14.0.tgz", - "integrity": "sha512-FpuRtniD/AY6sXByma2Wr0TXvXJ4nA/2/04VPlfpmUDPOpOY264x+ILiwnrk/k4RINgDAyFZByxqPUbSQ5YE7g==", - "dev": true, - "requires": { - "contains-path": "^0.1.0", - "debug": "^2.6.8", - "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.1", - "eslint-module-utils": "^2.2.0", - "has": "^1.0.1", - "lodash": "^4.17.4", - "minimatch": "^3.0.3", - "read-pkg-up": "^2.0.0", - "resolve": "^1.6.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", - "dev": true, - "requires": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - } - } - }, - "eslint-plugin-node": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-7.0.1.tgz", - "integrity": "sha512-lfVw3TEqThwq0j2Ba/Ckn2ABdwmL5dkOgAux1rvOk6CO7A6yGyPI2+zIxN6FyNkp1X1X/BSvKOceD6mBWSj4Yw==", - "dev": true, - "requires": { - "eslint-plugin-es": "^1.3.1", - "eslint-utils": "^1.3.1", - "ignore": "^4.0.2", - "minimatch": "^3.0.4", - "resolve": "^1.8.1", - "semver": "^5.5.0" - } - }, - "eslint-plugin-promise": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.0.1.tgz", - "integrity": "sha512-Si16O0+Hqz1gDHsys6RtFRrW7cCTB6P7p3OJmKp3Y3dxpQE2qwOA7d3xnV+0mBmrPoi0RBnxlCKvqu70te6wjg==", - "dev": true - }, - "eslint-plugin-react": { - "version": "7.11.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.11.1.tgz", - "integrity": "sha512-cVVyMadRyW7qsIUh3FHp3u6QHNhOgVrLQYdQEB1bPWBsgbNCHdFAeNMquBMCcZJu59eNthX053L70l7gRt4SCw==", - "dev": true, - "requires": { - "array-includes": "^3.0.3", - "doctrine": "^2.1.0", - "has": "^1.0.3", - "jsx-ast-utils": "^2.0.1", - "prop-types": "^15.6.2" - } - }, - 
"eslint-plugin-standard": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.0.tgz", - "integrity": "sha512-OwxJkR6TQiYMmt1EsNRMe5qG3GsbjlcOhbGUBY4LtavF9DsLaTcoR+j2Tdjqi23oUwKNUqX7qcn5fPStafMdlA==", - "dev": true - }, - "eslint-scope": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", - "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "eslint-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", - "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", - "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", - "dev": true - } - } - }, - "eslint-visitor-keys": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", - "integrity": "sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ==", - "dev": true - }, - "espree": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-4.1.0.tgz", - "integrity": "sha512-I5BycZW6FCVIub93TeVY1s7vjhP9CY6cXCznIRfiig7nRviKZYdRnj/sHEWC6A7WE9RDWOFq9+7OsWSYz8qv2w==", - "dev": true, - "requires": { - "acorn": "^6.0.2", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true - }, - "esquery": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", - "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", - "dev": true, - "requires": { - "estraverse": "^4.0.0" - } - }, - "esrecurse": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", - "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", - "dev": true, - "requires": { - "estraverse": "^4.1.0" - } - }, - "estraverse": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", - "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=", - "dev": true - }, - "esutils": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", - "dev": true - }, - "eventemitter2": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.4.tgz", - "integrity": "sha512-HLU3NDY6wARrLCEwyGKRBvuWYyvW6mHYv72SJJAH3iJN3a6eVUvkjFkcxah1bcTgGVBBrFdIopBJPhCQFMLyXw==", - "dev": true - }, - "execa": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", - "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", - "dev": true, - "requires": { - 
"cross-spawn": "^7.0.0", - "get-stream": "^5.0.0", - "human-signals": "^1.1.1", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.0", - "onetime": "^5.1.0", - "signal-exit": "^3.0.2", - "strip-final-newline": "^2.0.0" - }, - "dependencies": { - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "executable": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", - "dev": true, - "requires": { - "pify": "^2.2.0" - } - }, - "extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - }, - "external-editor": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", - "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==", - "dev": true, - "requires": { - "chardet": "^0.4.0", - "iconv-lite": "^0.4.17", - "tmp": "^0.0.33" - } - }, - "extract-zip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", - "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", - "dev": true, - "requires": { - "@types/yauzl": "^2.9.1", - "debug": "^4.1.1", - "get-stream": "^5.1.0", - "yauzl": "^2.10.0" - }, - "dependencies": { - "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - } - } - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "fast-glob": { - "version": "3.2.11", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", - "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - } - }, - "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", - "dev": true - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, - "fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" - }, - "fastq": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", - "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", - "dev": true, - "requires": { - "reusify": "^1.0.4" - } - }, - "fault": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", - "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", - "requires": { - "format": "^0.2.0" - } - }, - "fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", - "dev": true, - "requires": { - "pend": "~1.2.0" - } - }, - "figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "file-entry-cache": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-2.0.0.tgz", - "integrity": "sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E=", - "dev": true, - "requires": { - "flat-cache": "^1.2.1", - "object-assign": "^4.0.1" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "find-root": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", - "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==", - "dev": true - }, - "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "requires": { - "locate-path": "^2.0.0" - } - }, - "flat-cache": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.4.tgz", - "integrity": 
"sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg==", - "dev": true, - "requires": { - "circular-json": "^0.3.1", - "graceful-fs": "^4.1.2", - "rimraf": "~2.6.2", - "write": "^0.2.1" - } - }, - "follow-redirects": { - "version": "1.14.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz", - "integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==" - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "format": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", - "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==" - }, - "fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "dev": true, - "requires": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", - "dev": true - }, - "get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" - }, - "get-pkg-repo": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/get-pkg-repo/-/get-pkg-repo-4.2.1.tgz", - "integrity": "sha512-2+QbHjFRfGB74v/pYWjd5OhU3TDIC2Gv/YKUTk/tCvAz0pkn/Mz6P3uByuBimLOcPvN2jYdScl3xGFSrx0jEcA==", - "requires": { - "@hutson/parse-repository-url": "^3.0.0", - "hosted-git-info": "^4.0.0", - "through2": "^2.0.0", - "yargs": "^16.2.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", 
- "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "requires": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - } - } - }, - "get-stdin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", - "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", - "dev": true - }, - "get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "requires": { - "pump": "^3.0.0" - } - }, - "getos": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", - "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", - "dev": true, - "requires": { - "async": "^3.2.0" - } - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "git-hooks-list": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/git-hooks-list/-/git-hooks-list-1.0.3.tgz", - "integrity": "sha512-Y7wLWcrLUXwk2noSka166byGCvhMtDRpgHdzCno1UQv/n/Hegp++a2xBWJL1lJarnKD3SWaljD+0z1ztqxuKyQ==", - "dev": true - }, - "git-raw-commits": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.11.tgz", - "integrity": "sha512-VnctFhw+xfj8Va1xtfEqCUD2XDrbAPSJx+hSrE5K7fGdjZruW7XV+QOrN7LF/RJyvspRiD2I0asWsxFp0ya26A==", - "requires": { - "dargs": "^7.0.0", - "lodash": "^4.17.15", - "meow": "^8.0.0", - "split2": "^3.0.0", - "through2": "^4.0.0" - } - }, - "git-remote-origin-url": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/git-remote-origin-url/-/git-remote-origin-url-2.0.0.tgz", - "integrity": "sha512-eU+GGrZgccNJcsDH5LkXR3PB9M958hxc7sbA8DFJjrv9j4L2P/eZfKhM+QD6wyzpiv+b1BpK0XrYCxkovtjSLw==", - "requires": { - "gitconfiglocal": "^1.0.0", - "pify": "^2.3.0" - } - }, - "git-semver-tags": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/git-semver-tags/-/git-semver-tags-4.1.1.tgz", - "integrity": "sha512-OWyMt5zBe7xFs8vglMmhM9lRQzCWL3WjHtxNNfJTMngGym7pC1kh8sP6jevfydJ6LP3ZvGxfb6ABYgPUM0mtsA==", - "requires": { - "meow": "^8.0.0", - "semver": "^6.0.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" - } - } - }, - "gitconfiglocal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gitconfiglocal/-/gitconfiglocal-1.0.0.tgz", - "integrity": "sha512-spLUXeTAVHxDtKsJc8FkFVgFtMdEN9qPGpL23VfSHx4fP4+Ds097IXLvymbnDH8FnmxX5Nr9bPw3A+AQ6mWEaQ==", - "requires": { - "ini": "^1.3.2" - }, - "dependencies": { - "ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" - } - } - }, - "glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, - "global-dirs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", - "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", - "dev": true, - "requires": { - "ini": "2.0.0" - } - }, - "globals": { - "version": "11.11.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.11.0.tgz", - "integrity": "sha512-WHq43gS+6ufNOEqlrDBxVEbb8ntfXrfAUU2ZOpCxrBdGKW3gyv8mCxAfIBD0DroPKGrJ2eSsXsLtY9MPntsyTw==", - "dev": true - }, - "globby": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.0.tgz", - "integrity": "sha512-3LifW9M4joGZasyYPz2A1U74zbC/45fvpXUvO/9KbSa+VV0aGZarWkfdgKyR9sExNP0t0x0ss/UMJpNpcaTspw==", - "dev": true, - "requires": { - "@types/glob": "^7.1.1", - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.0.3", - "glob": "^7.1.3", - "ignore": "^5.1.1", - "merge2": "^1.2.3", - "slash": "^3.0.0" - }, - "dependencies": { - "ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", - "dev": true - } - } - }, - "graceful-fs": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", - "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==" - }, - "handlebars": { - "version": "4.7.7", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.7.tgz", - "integrity": "sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==", - "requires": { - "minimist": "^1.2.5", - "neo-async": "^2.6.0", - "source-map": "^0.6.1", - "uglify-js": "^3.1.4", - "wordwrap": "^1.0.0" - } - }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true - }, - "har-validator": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": 
"sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", - "dev": true, - "requires": { - "ajv": "^6.5.5", - "har-schema": "^2.0.0" - } - }, - "hard-rejection": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", - "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==" - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" - }, - "has-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", - "dev": true - }, - "hoek": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-5.0.4.tgz", - "integrity": "sha512-Alr4ZQgoMlnere5FZJsIyfIjORBqZll5POhDsF4q64dPuJR6rNxXdDxtHSQq8OXRurhmx+PWYEE8bXRROY8h0w==", - "dev": true - }, - "hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==" - }, - "htmlparser2": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-7.2.0.tgz", - "integrity": "sha512-H7MImA4MS6cw7nbyURtLPO1Tms7C5H602LRETv95z1MxO/7CP7rDVROehUYeYBUYEON94NXXDEPmZuq+hX4sog==", - "requires": { - "domelementtype": "^2.0.1", - "domhandler": "^4.2.2", - "domutils": "^2.8.0", - "entities": "^3.0.1" - } - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, - "human-signals": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", - "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", - "dev": true - }, - "iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - }, - "ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" - }, - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true - }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "ini": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", - "dev": true - }, - "inquirer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-5.2.0.tgz", - "integrity": "sha512-E9BmnJbAKLPGonz0HeWHtbKf+EeSP93paWO3ZYoUpq/aowXvYGjjCSuashhXPpzbArIjBbji39THkxTz9ZeEUQ==", - "dev": true, - "requires": { - "ansi-escapes": "^3.0.0", - "chalk": "^2.0.0", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^2.1.0", - "figures": "^2.0.0", - "lodash": "^4.3.0", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^5.5.2", - "string-width": "^2.1.0", - "strip-ansi": "^4.0.0", - "through": "^2.3.6" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", - "dev": true - }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "dev": true, - "requires": { - "restore-cursor": "^2.0.0" - } - }, - "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", - "dev": true, - "requires": { - "escape-string-regexp": "^1.0.5" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "is-alphabetical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", - "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==" - }, - "is-alphanumerical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", - "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", - "requires": { - "is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0" - } - }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" - }, - "is-buffer": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", - "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==" - }, - "is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", - "dev": true - }, - "is-ci": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz", - "integrity": "sha512-kDXyttuLeslKAHYL/K28F2YkM3x5jvFPEw3yXbRptXydjD9rpLEz+C5K5iutY9ZiUu6AP41JdvRQwF4Iqs4ZCQ==", - "dev": true, - "requires": { - "ci-info": "^3.1.1" - } - }, - "is-core-module": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", - "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", - "requires": { - "has": "^1.0.3" - } - }, - "is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", - "dev": true - }, - "is-decimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", - "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==" - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-hexadecimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", - "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==" - }, - "is-installed-globally": { - "version": 
"0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", - "dev": true, - "requires": { - "global-dirs": "^3.0.0", - "is-path-inside": "^3.0.2" - } - }, - "is-interactive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", - "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==" - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" - }, - "is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true - }, - "is-plain-obj": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==" - }, - "is-regex": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", - "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", - "dev": true, - "requires": { - "has": "^1.0.1" - } - }, - "is-resolvable": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", - "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==", - "dev": true - }, - "is-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", - "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", - "dev": true - }, - "is-symbol": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", - "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", - "dev": true, - "requires": { - "has-symbols": "^1.0.0" - } - }, - "is-text-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-1.0.1.tgz", - "integrity": "sha512-xFuJpne9oFz5qDaodwmmG08e3CawH/2ZV8Qqza1Ko7Sk8POWbkRdwIoAWVhqvq0XeUzANEhKo2n0IXUGBm7A/w==", - "requires": { - "text-extensions": "^1.0.0" - } - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "is-unicode-supported": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==" - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "isemail": { - "version": "3.2.0", - "resolved": 
"https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", - "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", - "dev": true, - "requires": { - "punycode": "2.x.x" - } - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "iterare": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/iterare/-/iterare-1.2.1.tgz", - "integrity": "sha512-RKYVTCjAnRthyJes037NX/IiqeidgN1xc3j1RjFfECFp28A1GVwK9nA+i0rJPaHqSZwygLzRnFlzUuHFoWWy+Q==" - }, - "joi": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/joi/-/joi-13.7.0.tgz", - "integrity": "sha512-xuY5VkHfeOYK3Hdi91ulocfuFopwgbSORmIwzcwHKESQhC7w1kD5jaVSPnqDxS2I8t3RZ9omCKAxNwXN5zG1/Q==", - "dev": true, - "requires": { - "hoek": "5.x.x", - "isemail": "3.x.x", - "topo": "3.x.x" - } - }, - "js-tokens": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", - "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=", - "dev": true - }, - "js-yaml": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", - "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", - "dev": true, - "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - } - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true - }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" - }, - "json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" - }, - "json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", - "dev": true - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" - }, - "jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": 
"sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "requires": { - "graceful-fs": "^4.1.6", - "universalify": "^2.0.0" - } - }, - "jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==" - }, - "JSONStream": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", - "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", - "requires": { - "jsonparse": "^1.2.0", - "through": ">=2.2.7 <3" - } - }, - "jsonwebtoken": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", - "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", - "dev": true, - "requires": { - "jws": "^3.2.2", - "lodash.includes": "^4.3.0", - "lodash.isboolean": "^3.0.3", - "lodash.isinteger": "^4.0.4", - "lodash.isnumber": "^3.0.3", - "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1", - "lodash.once": "^4.0.0", - "ms": "^2.1.1", - "semver": "^5.6.0" - } - }, - "jsprim": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", - "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.4.0", - "verror": "1.10.0" - } - }, - "jsx-ast-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.1.0.tgz", - "integrity": "sha512-yDGDG2DS4JcqhA6blsuYbtsT09xL8AoLuUR2Gb5exrw7UEM19sBcOTq+YBBhrNbl0PUC4R4LnFu+dHg2HKeVvA==", - "dev": true, - "requires": { - "array-includes": "^3.0.3" - } - }, - "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", - "dev": true, - "requires": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" - } - }, - "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", - "dev": true, - "requires": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" - } - }, - "kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" - }, - "lazy-ass": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", - "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", - "dev": true - }, - "levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", - "dev": true, - "requires": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - } - }, - "license-checker": { - "version": "25.0.1", - "resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz", - "integrity": "sha512-mET5AIwl7MR2IAKYYoVBBpV0OnkKQ1xGj2IMMeEFIs42QAkEVjRtFZGWmQ28WeU7MP779iAgOaOy93Mn44mn6g==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - 
"debug": "^3.1.0", - "mkdirp": "^0.5.1", - "nopt": "^4.0.1", - "read-installed": "~4.0.3", - "semver": "^5.5.0", - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0", - "spdx-satisfies": "^4.0.0", - "treeify": "^1.1.0" - } - }, - "lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" - }, - "listr2": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.10.0.tgz", - "integrity": "sha512-eP40ZHihu70sSmqFNbNy2NL1YwImmlMmPh9WO5sLmPDleurMHt3n+SwEWNu2kzKScexZnkyFtc1VI0z/TGlmpw==", - "dev": true, - "requires": { - "cli-truncate": "^2.1.0", - "colorette": "^1.2.2", - "log-update": "^4.0.0", - "p-map": "^4.0.0", - "rxjs": "^6.6.7", - "through": "^2.3.8", - "wrap-ansi": "^7.0.0" - }, - "dependencies": { - "rxjs": { - "version": "6.6.7", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", - "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", - "dev": true, - "requires": { - "tslib": "^1.9.0" - } - } - } - }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" - } - } - }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "lodash.includes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", - "integrity": "sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=", - "dev": true - }, - "lodash.isboolean": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", - "integrity": "sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=", - "dev": true - }, - "lodash.isinteger": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", - "integrity": "sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=", - "dev": true - }, - "lodash.ismatch": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", - "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==" - }, - "lodash.isnumber": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", - "integrity": "sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=", - "dev": true - }, - "lodash.isplainobject": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=", - "dev": true - }, - "lodash.isstring": { - "version": "4.0.1", - "resolved": 
"https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=", - "dev": true - }, - "lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=", - "dev": true - }, - "log-symbols": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", - "requires": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", - "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "log-update": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", - "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", - "dev": true, - "requires": { - "ansi-escapes": "^4.3.0", - "cli-cursor": "^3.1.0", - "slice-ansi": "^4.0.0", - "wrap-ansi": "^6.2.0" - }, - "dependencies": { - "ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "requires": { - "type-fest": "^0.21.3" - } - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - 
} - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - } - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - }, - "wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - } - } - }, - "longest-streak": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", - "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==" - }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dev": true, - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "map-obj": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", - "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==" - }, - "markdown-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", - "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", - "requires": { - "repeat-string": "^1.0.0" - } - }, - "mdast-util-find-and-replace": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz", - "integrity": "sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==", - "requires": { - "escape-string-regexp": "^4.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" - }, - "dependencies": { - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": 
"sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" - } - } - }, - "mdast-util-footnote": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/mdast-util-footnote/-/mdast-util-footnote-0.1.7.tgz", - "integrity": "sha512-QxNdO8qSxqbO2e3m09KwDKfWiLgqyCurdWTQ198NpbZ2hxntdc+VKS4fDJCmNWbAroUdYnSthu+XbZ8ovh8C3w==", - "requires": { - "mdast-util-to-markdown": "^0.6.0", - "micromark": "~2.11.0" - } - }, - "mdast-util-from-markdown": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.5.tgz", - "integrity": "sha512-2hkTXtYYnr+NubD/g6KGBS/0mFmBcifAsI0yIWRiRo0PjVs6SSOSOdtzbp6kSGnShDN6G5aWZpKQ2lWRy27mWQ==", - "requires": { - "@types/mdast": "^3.0.0", - "mdast-util-to-string": "^2.0.0", - "micromark": "~2.11.0", - "parse-entities": "^2.0.0", - "unist-util-stringify-position": "^2.0.0" - } - }, - "mdast-util-frontmatter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-0.2.0.tgz", - "integrity": "sha512-FHKL4w4S5fdt1KjJCwB0178WJ0evnyyQr5kXTM3wrOVpytD0hrkvd+AOOjU9Td8onOejCkmZ+HQRT3CZ3coHHQ==", - "requires": { - "micromark-extension-frontmatter": "^0.2.0" - } - }, - "mdast-util-gfm": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz", - "integrity": "sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==", - "requires": { - "mdast-util-gfm-autolink-literal": "^0.1.0", - "mdast-util-gfm-strikethrough": "^0.2.0", - "mdast-util-gfm-table": "^0.1.0", - "mdast-util-gfm-task-list-item": "^0.1.0", - "mdast-util-to-markdown": "^0.6.1" - } - }, - "mdast-util-gfm-autolink-literal": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz", - "integrity": "sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==", - "requires": { - "ccount": "^1.0.0", - "mdast-util-find-and-replace": "^1.1.0", - "micromark": "^2.11.3" - } - }, - "mdast-util-gfm-strikethrough": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz", - "integrity": "sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==", - "requires": { - "mdast-util-to-markdown": "^0.6.0" - } - }, - "mdast-util-gfm-table": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz", - "integrity": "sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==", - "requires": { - "markdown-table": "^2.0.0", - "mdast-util-to-markdown": "~0.6.0" - } - }, - "mdast-util-gfm-task-list-item": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz", - "integrity": "sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==", - "requires": { - "mdast-util-to-markdown": "~0.6.0" - } - }, - "mdast-util-to-markdown": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz", - "integrity": "sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==", - "requires": { - "@types/unist": "^2.0.0", - "longest-streak": "^2.0.0", - 
"mdast-util-to-string": "^2.0.0", - "parse-entities": "^2.0.0", - "repeat-string": "^1.0.0", - "zwitch": "^1.0.0" - } - }, - "mdast-util-to-string": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", - "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==" - }, - "meow": { - "version": "8.1.2", - "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz", - "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==", - "requires": { - "@types/minimist": "^1.2.0", - "camelcase-keys": "^6.2.2", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "4.1.0", - "normalize-package-data": "^3.0.0", - "read-pkg-up": "^7.0.1", - "redent": "^3.0.0", - "trim-newlines": "^3.0.0", - "type-fest": "^0.18.0", - "yargs-parser": "^20.2.3" - }, - "dependencies": { - "find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "requires": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - } - }, - "hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "requires": { - "p-locate": "^4.1.0" - } - }, - "normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "requires": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - } - }, - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "requires": { - "p-limit": "^2.2.0" - } - }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" - }, - "parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "requires": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - } - }, - "path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" - }, - "read-pkg": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", - "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", - "requires": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^2.5.0", - "parse-json": "^5.0.0", - "type-fest": "^0.6.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==" - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - }, - "type-fest": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", - "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==" - } - } - }, - "read-pkg-up": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", - "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", - "requires": { - "find-up": "^4.1.0", - "read-pkg": "^5.2.0", - "type-fest": "^0.8.1" - }, - "dependencies": { - "type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==" - } - } - }, - "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "type-fest": { - "version": "0.18.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", - "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==" - } - } - }, - "merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true - }, - "micromark": { - "version": "2.11.4", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", - "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", - "requires": { - "debug": "^4.0.0", - "parse-entities": "^2.0.0" - }, - 
"dependencies": { - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "requires": { - "ms": "2.1.2" - } - } - } - }, - "micromark-extension-footnote": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/micromark-extension-footnote/-/micromark-extension-footnote-0.3.2.tgz", - "integrity": "sha512-gr/BeIxbIWQoUm02cIfK7mdMZ/fbroRpLsck4kvFtjbzP4yi+OPVbnukTc/zy0i7spC2xYE/dbX1Sur8BEDJsQ==", - "requires": { - "micromark": "~2.11.0" - } - }, - "micromark-extension-frontmatter": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-0.2.2.tgz", - "integrity": "sha512-q6nPLFCMTLtfsctAuS0Xh4vaolxSFUWUWR6PZSrXXiRy+SANGllpcqdXFv2z07l0Xz/6Hl40hK0ffNCJPH2n1A==", - "requires": { - "fault": "^1.0.0" - } - }, - "micromark-extension-gfm": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz", - "integrity": "sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==", - "requires": { - "micromark": "~2.11.0", - "micromark-extension-gfm-autolink-literal": "~0.5.0", - "micromark-extension-gfm-strikethrough": "~0.6.5", - "micromark-extension-gfm-table": "~0.4.0", - "micromark-extension-gfm-tagfilter": "~0.3.0", - "micromark-extension-gfm-task-list-item": "~0.3.0" - } - }, - "micromark-extension-gfm-autolink-literal": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz", - "integrity": "sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==", - "requires": { - "micromark": "~2.11.3" - } - }, - "micromark-extension-gfm-strikethrough": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz", - "integrity": "sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==", - "requires": { - "micromark": "~2.11.0" - } - }, - "micromark-extension-gfm-table": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz", - "integrity": "sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==", - "requires": { - "micromark": "~2.11.0" - } - }, - "micromark-extension-gfm-tagfilter": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", - "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==" - }, - "micromark-extension-gfm-task-list-item": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz", - "integrity": "sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==", - "requires": { - "micromark": "~2.11.0" - } - }, - "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "requires": { - 
"braces": "^3.0.2", - "picomatch": "^2.3.1" - } - }, - "mime-db": { - "version": "1.38.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.38.0.tgz", - "integrity": "sha512-bqVioMFFzc2awcdJZIzR3HjZFX20QhilVS7hytkKrv7xFAn8bM1gzc/FOX2awLISvWe0PV8ptFKcon+wZ5qYkg==", - "dev": true - }, - "mime-types": { - "version": "2.1.22", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.22.tgz", - "integrity": "sha512-aGl6TZGnhm/li6F7yx82bJiBZwgiEa4Hf6CNr8YO+r5UHr53tSTYZb102zyU50DOWWKeOv0uQLRL0/9EiKWCog==", - "dev": true, - "requires": { - "mime-db": "~1.38.0" - } - }, - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - }, - "min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==" - }, - "minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" - }, - "minimist-options": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", - "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", - "requires": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0", - "kind-of": "^6.0.3" - }, - "dependencies": { - "is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==" - } - } - }, - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } - }, - "modify-values": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz", - "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==" - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", - "dev": true - }, - "natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", - "dev": true - }, - "neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": 
"sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "nopt": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", - "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", - "dev": true, - "requires": { - "abbrev": "1", - "osenv": "^0.1.4" - } - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "npm-normalize-package-bin": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", - "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==", - "dev": true - }, - "npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "requires": { - "path-key": "^3.0.0" - }, - "dependencies": { - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - } - } - }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true - }, - "object-hash": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", - "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==" - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "requires": { - "wrappy": "1" - } - }, - "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "requires": { - "mimic-fn": "^2.1.0" - }, - "dependencies": { - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" - } - } - }, - "optionator": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", - "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", - "dev": true, - "requires": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.4", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "wordwrap": "~1.0.0" - } - }, - "ora": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", - "requires": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "ory-prettier-styles": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/ory-prettier-styles/-/ory-prettier-styles-1.3.0.tgz", - 
"integrity": "sha512-Vfn0G6CyLaadwcCamwe1SQCf37ZQfBDgMrhRI70dE/2fbE3Q43/xu7K5c32I5FGt/EliroWty5yBjmdkj0eWug==", - "dev": true - }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", - "dev": true - }, - "os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" - }, - "osenv": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", - "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", - "dev": true, - "requires": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.0" - } - }, - "ospath": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", - "integrity": "sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs=", - "dev": true - }, - "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "requires": { - "p-try": "^1.0.0" - } - }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "requires": { - "p-limit": "^1.1.0" - } - }, - "p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "dev": true, - "requires": { - "aggregate-error": "^3.0.0" - } - }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" - }, - "parse-entities": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", - "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", - "requires": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, - "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" - }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true - }, - "path-parse": { - "version": "1.0.7", - "resolved": 
"https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - }, - "path-to-regexp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-3.2.0.tgz", - "integrity": "sha512-jczvQbCUS7XmS7o+y1aEO9OBVFeZBQ1MDSEqmO7xSoPgOPoowY/SxLpZ6Vh97/8qHZOteiCKb7gkG9gA2ZUxJA==" - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true - }, - "pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true - }, - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" - }, - "pkg-conf": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-2.1.0.tgz", - "integrity": "sha1-ISZRTKbyq/69FoWW3xi6V4Z/AFg=", - "dev": true, - "requires": { - "find-up": "^2.0.0", - "load-json-file": "^4.0.0" - } - }, - "pkg-config": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/pkg-config/-/pkg-config-1.1.1.tgz", - "integrity": "sha1-VX7yLXPaPIg3EHdmxS6tq94pj+Q=", - "dev": true, - "requires": { - "debug-log": "^1.0.0", - "find-root": "^1.0.0", - "xtend": "^4.0.1" - } - }, - "pkg-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", - "dev": true, - "requires": { - "find-up": "^2.1.0" - } - }, - "pluralize": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-7.0.0.tgz", - "integrity": "sha512-ARhBOdzS3e41FbkW/XWrTEtukqqLoK5+Z/4UeDaLuSW+39JPeFgs4gCGqsrJHVZX0fUrx//4OF0K1CUGwlIFow==", - "dev": true - }, - "prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", - "dev": true - }, - "prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", - "dev": true - }, - "prettier-plugin-packagejson": { - "version": "2.2.18", - "resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.2.18.tgz", - "integrity": "sha512-iBjQ3IY6IayFrQHhXvg+YvKprPUUiIJ04Vr9+EbeQPfwGajznArIqrN33c5bi4JcIvmLHGROIMOm9aYakJj/CA==", - "dev": true, - "requires": { - "sort-package-json": "1.57.0" - } - }, - "pretty-bytes": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", - "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", - "dev": true - }, - "process-nextick-args": { - "version": 
"2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, - "progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true - }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, - "proxy-from-env": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", - "integrity": "sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==", - "dev": true - }, - "psl": { - "version": "1.1.31", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz", - "integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw==", - "dev": true - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true - }, - "q": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==" - }, - "qs": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", - "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", - "dev": true - }, - "quick-lru": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", - "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==" - }, - "react-is": { - "version": "16.8.6", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.8.6.tgz", - "integrity": "sha512-aUk3bHfZ2bRSVFFbbeVS4i+lNPZr3/WM5jT2J5omUVV1zzcs1nAaf3l51ctA5FFvCRbhrH0bdAsRRQddFJZPtA==", - "dev": true - }, - "read-installed": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/read-installed/-/read-installed-4.0.3.tgz", - "integrity": "sha512-O03wg/IYuV/VtnK2h/KXEt9VIbMUFbk3ERG0Iu4FhLZw0EP0T9znqrYDGn6ncbEsXUFaUjiVAWXHzxwt3lhRPQ==", - "dev": true, - "requires": { - "debuglog": "^1.0.1", - "graceful-fs": "^4.1.2", - "read-package-json": "^2.0.0", - "readdir-scoped-modules": "^1.0.0", - "semver": "2 || 3 || 4 || 5", - "slide": "~1.1.3", - "util-extend": "^1.0.1" - } - }, - "read-package-json": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.2.tgz", - "integrity": "sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==", - "dev": true, - "requires": { - 
"glob": "^7.1.1", - "json-parse-even-better-errors": "^2.3.0", - "normalize-package-data": "^2.0.0", - "npm-normalize-package-bin": "^1.0.0" - } - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, - "dependencies": { - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "requires": { - "pify": "^3.0.0" - } - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" - } - } - }, - "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", - "dev": true, - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^2.0.0" - }, - "dependencies": { - "load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "strip-bom": "^3.0.0" - } - }, - "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dev": true, - "requires": { - "error-ex": "^1.2.0" - } - }, - "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "dev": true, - "requires": { - "pify": "^2.0.0" - } - }, - "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", - "dev": true, - "requires": { - "load-json-file": "^2.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" - } - } - } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "readdir-scoped-modules": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", - "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", - "dev": true, - "requires": { - "debuglog": "^1.0.1", - "dezalgo": "^1.0.0", - "graceful-fs": "^4.1.2", - "once": "^1.3.0" - } - }, - "redent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", - "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", - "requires": { - "indent-string": "^4.0.0", - "strip-indent": "^3.0.0" - } - }, - "reflect-metadata": { - "version": "0.1.13", - "resolved": 
"https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz", - "integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==" - }, - "regexpp": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", - "dev": true - }, - "remark-footnotes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-3.0.0.tgz", - "integrity": "sha512-ZssAvH9FjGYlJ/PBVKdSmfyPc3Cz4rTWgZLI4iE/SX8Nt5l3o3oEjv3wwG5VD7xOjktzdwp5coac+kJV9l4jgg==", - "requires": { - "mdast-util-footnote": "^0.1.0", - "micromark-extension-footnote": "^0.3.0" - } - }, - "remark-frontmatter": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-3.0.0.tgz", - "integrity": "sha512-mSuDd3svCHs+2PyO29h7iijIZx4plX0fheacJcAoYAASfgzgVIcXGYSq9GFyYocFLftQs8IOmmkgtOovs6d4oA==", - "requires": { - "mdast-util-frontmatter": "^0.2.0", - "micromark-extension-frontmatter": "^0.2.0" - } - }, - "remark-gfm": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", - "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", - "requires": { - "mdast-util-gfm": "^0.1.0", - "micromark-extension-gfm": "^0.3.0" - } - }, - "remark-parse": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-9.0.0.tgz", - "integrity": "sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==", - "requires": { - "mdast-util-from-markdown": "^0.8.0" - } - }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==" - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", - "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "dev": true - } - } - }, - "request-progress": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", - "integrity": "sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4=", - "dev": true, - "requires": { - "throttleit": "^1.0.0" - } - }, - "require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" - }, - "require-uncached": { - 
"version": "1.0.3", - "resolved": "https://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz", - "integrity": "sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM=", - "dev": true, - "requires": { - "caller-path": "^0.1.0", - "resolve-from": "^1.0.0" - } - }, - "resolve": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz", - "integrity": "sha512-3sUr9aq5OfSg2S9pNtPA9hL1FVEAjvfOC4leW0SNf/mpnaakz2a9femSd6LqAww2RaFctwyf1lCqnTHuF1rxDg==", - "requires": { - "path-parse": "^1.0.6" - } - }, - "resolve-from": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-1.0.1.tgz", - "integrity": "sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY=", - "dev": true - }, - "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "requires": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - } - }, - "reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true - }, - "rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "run-async": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", - "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==" - }, - "run-parallel": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.1.9.tgz", - "integrity": "sha512-DEqnSRTDw/Tc3FXf49zedI638Z9onwUotBMiUFKmrO2sdFKIbXamXGQ3Axd4qgphxKB4kw/qP1w5kTxnfU1B9Q==", - "dev": true - }, - "rx": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/rx/-/rx-4.1.0.tgz", - "integrity": "sha1-pfE/957zt0D+MKqAP7CfmIBdR4I=", - "dev": true - }, - "rxjs": { - "version": "5.5.12", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-5.5.12.tgz", - "integrity": "sha512-xx2itnL5sBbqeeiVgNPVuQQ1nC8Jp2WfNJhXWHmElW9YmrpS9UVnNzhP3EH3HFqexO5Tlp8GhYY+WEcqcVMvGw==", - "dev": true, - "requires": { - "symbol-observable": "1.0.1" - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==" - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true - }, - "slice-ansi": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", - "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - } - } - }, - "slide": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", - "integrity": "sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==", - "dev": true - }, - "sort-object-keys": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/sort-object-keys/-/sort-object-keys-1.1.3.tgz", - "integrity": "sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg==", - "dev": true - }, - "sort-package-json": { - "version": "1.57.0", - "resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-1.57.0.tgz", - "integrity": "sha512-FYsjYn2dHTRb41wqnv+uEqCUvBpK3jZcTp9rbz2qDTmel7Pmdtf+i2rLaaPMRZeSVM60V3Se31GyWFpmKs4Q5Q==", - "dev": true, - "requires": { - "detect-indent": "^6.0.0", - "detect-newline": "3.1.0", - "git-hooks-list": "1.0.3", - "globby": "10.0.0", - "is-plain-obj": "2.1.0", - "sort-object-keys": "^1.1.3" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - }, - "spawn-command": { - "version": "0.0.2-1", - "resolved": "https://registry.npmjs.org/spawn-command/-/spawn-command-0.0.2-1.tgz", - "integrity": "sha1-YvXpRmmBwbeW3Fkpk34RycaSG9A=" - }, - "spdx-compare": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/spdx-compare/-/spdx-compare-1.0.0.tgz", - "integrity": "sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==", - "dev": true, - "requires": { - "array-find-index": "^1.0.2", - "spdx-expression-parse": "^3.0.0", - "spdx-ranges": 
"^2.0.0" - } - }, - "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", - "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==" - }, - "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", - "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-license-ids": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz", - "integrity": "sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA==" - }, - "spdx-ranges": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.1.1.tgz", - "integrity": "sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==", - "dev": true - }, - "spdx-satisfies": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-4.0.1.tgz", - "integrity": "sha512-WVzZ/cXAzoNmjCWiEluEA3BjHp5tiUmmhn9MK+X0tBbR9sOqtC6UQwmgCNrAIZvNlMuBUYAaHYfb2oqlF9SwKA==", - "dev": true, - "requires": { - "spdx-compare": "^1.0.0", - "spdx-expression-parse": "^3.0.0", - "spdx-ranges": "^2.0.0" - } - }, - "split": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", - "requires": { - "through": "2" - } - }, - "split2": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", - "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", - "requires": { - "readable-stream": "^3.0.0" - } - }, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - }, - "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", - "dev": true, - "requires": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - } - }, - "standard": { - "version": "12.0.1", - "resolved": "https://registry.npmjs.org/standard/-/standard-12.0.1.tgz", - "integrity": "sha512-UqdHjh87OG2gUrNCSM4QRLF5n9h3TFPwrCNyVlkqu31Hej0L/rc8hzKqVvkb2W3x0WMq7PzZdkLfEcBhVOR6lg==", - "dev": true, - "requires": { - "eslint": "~5.4.0", - "eslint-config-standard": "12.0.0", - "eslint-config-standard-jsx": "6.0.2", - "eslint-plugin-import": "~2.14.0", - "eslint-plugin-node": "~7.0.1", - "eslint-plugin-promise": "~4.0.0", - "eslint-plugin-react": "~7.11.1", - 
"eslint-plugin-standard": "~4.0.0", - "standard-engine": "~9.0.0" - } - }, - "standard-engine": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-9.0.0.tgz", - "integrity": "sha512-ZfNfCWZ2Xq67VNvKMPiVMKHnMdvxYzvZkf1AH8/cw2NLDBm5LRsxMqvEJpsjLI/dUosZ3Z1d6JlHDp5rAvvk2w==", - "dev": true, - "requires": { - "deglob": "^2.1.0", - "get-stdin": "^6.0.0", - "minimist": "^1.1.0", - "pkg-conf": "^2.0.0" - } - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } - }, - "string-width": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", - "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "requires": { - "ansi-regex": "^5.0.0" - } - } - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" - }, - "strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true - }, - "strip-indent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", - "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", - "requires": { - "min-indent": "^1.0.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "requires": { - "has-flag": "^3.0.0" - } - }, - "symbol-observable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.0.1.tgz", - "integrity": "sha1-g0D8RwLDEi310iKI+IKD9RPT/dQ=", - "dev": true - }, - "table": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/table/-/table-4.0.3.tgz", - "integrity": "sha512-S7rnFITmBH1EnyKcvxBh1LjYeQMmnZtCXSEbHcH6S0NoKit24ZuFO/T1vDcLdYsLQkM188PVVhQmzKIuThNkKg==", 
- "dev": true, - "requires": { - "ajv": "^6.0.1", - "ajv-keywords": "^3.0.0", - "chalk": "^2.1.0", - "lodash": "^4.17.4", - "slice-ansi": "1.0.0", - "string-width": "^2.1.1" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true - }, - "slice-ansi": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-1.0.0.tgz", - "integrity": "sha512-POqxBK6Lb3q6s047D/XsDVNPnF9Dl8JSaqe9h9lURl0OdNqy/ujDrOiIHtsqXMGbWWTIomRzAMaTyawAU//Reg==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0" - } - }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "temp-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz", - "integrity": "sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==" - }, - "tempfile": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tempfile/-/tempfile-3.0.0.tgz", - "integrity": "sha512-uNFCg478XovRi85iD42egu+eSFUmmka750Jy7L5tfHI5hQKKtbPnxaSaXAbBqCDYrw3wx4tXjKwci4/QmsZJxw==", - "requires": { - "temp-dir": "^2.0.0", - "uuid": "^3.3.2" - }, - "dependencies": { - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - } - } - }, - "text-extensions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", - "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==" - }, - "text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", - "dev": true - }, - "throttleit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", - "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", - "dev": true - }, - "through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" - }, - "through2": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", - "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "requires": { - "readable-stream": "3" - } - }, - "tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": 
"sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", - "requires": { - "os-tmpdir": "~1.0.2" - } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, - "topo": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/topo/-/topo-3.0.3.tgz", - "integrity": "sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ==", - "dev": true, - "requires": { - "hoek": "6.x.x" - }, - "dependencies": { - "hoek": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-6.1.3.tgz", - "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==", - "dev": true - } - } - }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "dev": true, - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - } - } - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" - }, - "traverse": { - "version": "0.6.7", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.7.tgz", - "integrity": "sha512-/y956gpUo9ZNCb99YjxG7OaslxZWHfCHAUUfshwqOXmxUIvqLjVO581BT+gM59+QV9tFe6/CGG53tsA1Y7RSdg==" - }, - "tree-kill": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", - "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==" - }, - "treeify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/treeify/-/treeify-1.1.0.tgz", - "integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==", - "dev": true - }, - "trim-newlines": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", - "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==" - }, - "trough": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", - "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==" - }, - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true - }, - "type-check": { - "version": "0.3.2", - "resolved": 
"https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", - "dev": true, - "requires": { - "prelude-ls": "~1.1.2" - } - }, - "type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" - }, - "uglify-js": { - "version": "3.17.4", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.17.4.tgz", - "integrity": "sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==", - "optional": true - }, - "underscore": { - "version": "1.13.6", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz", - "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" - }, - "unified": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz", - "integrity": "sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ==", - "requires": { - "bail": "^1.0.0", - "extend": "^3.0.0", - "is-buffer": "^2.0.0", - "is-plain-obj": "^2.0.0", - "trough": "^1.0.0", - "vfile": "^4.0.0" - } - }, - "uniq": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=", - "dev": true - }, - "unist-util-is": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", - "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==" - }, - "unist-util-stringify-position": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", - "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", - "requires": { - "@types/unist": "^2.0.2" - } - }, - "unist-util-visit-parents": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", - "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", - "requires": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0" - } - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" - }, - "untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "dev": true - }, - "update-section": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/update-section/-/update-section-0.3.3.tgz", - "integrity": "sha512-BpRZMZpgXLuTiKeiu7kK0nIPwGdyrqrs6EDSaXtjD/aQ2T+qVo9a5hRC3HN3iJjCMxNT/VxoLGQ7E/OzE5ucnw==" - }, - "uri-js": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "util-extend": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz", - "integrity": "sha512-mLs5zAK+ctllYBj+iAQvlDCwoxU/WDOUaJkcFudeiAX6OajC6BKXJUa9a+tbtkC11dz2Ufb7h0lyvIOVn4LADA==", - "dev": true - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - }, - "validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, - "vfile": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", - "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", - "requires": { - "@types/unist": "^2.0.0", - "is-buffer": "^2.0.0", - "unist-util-stringify-position": "^2.0.0", - "vfile-message": "^2.0.0" - } - }, - "vfile-message": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", - "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", - "requires": { - "@types/unist": "^2.0.0", - "unist-util-stringify-position": "^2.0.0" - } - }, - "wait-on": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-3.2.0.tgz", - "integrity": "sha512-QUGNKlKLDyY6W/qHdxaRlXUAgLPe+3mLL/tRByHpRNcHs/c7dZXbu+OnJWGNux6tU1WFh/Z8aEwvbuzSAu79Zg==", - "dev": true, - "requires": { - "core-js": "^2.5.7", - "joi": "^13.0.0", - "minimist": "^1.2.0", - "request": "^2.88.0", - "rx": "^4.1.0" - } - }, - "wcwidth": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", - "integrity": "sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g=", - "requires": { - "defaults": "^1.0.3" - } - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" - }, - "wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - 
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "requires": { - "ansi-regex": "^5.0.0" - } - } - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - }, - "write": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/write/-/write-0.2.1.tgz", - "integrity": "sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c=", - "dev": true, - "requires": { - "mkdirp": "^0.5.1" - } - }, - "xtend": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" - }, - "y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "requires": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - } - }, - "yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==" - }, - "yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, - "zwitch": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", - "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==" } } } diff --git a/package.json b/package.json index 7a6a8e55250..06ff1afa531 100644 --- a/package.json +++ b/package.json @@ -11,20 +11,23 @@ }, "prettier": "ory-prettier-styles", "dependencies": { - "@openapitools/openapi-generator-cli": "^2.1.18", - "conventional-changelog-cli": "~2.2.2", - "doctoc": "^2.2.1" + "@openapitools/openapi-generator-cli": "2.25.2" }, "devDependencies": { - "cypress": "^9.7.0", - "dayjs": "^1.10.6", - "jsonwebtoken": "^8.5.1", - "license-checker": "^25.0.1", + "cypress": "9.7.0", + "dayjs": "1.11.18", + "doctoc": "^2.2.1", + "jsonwebtoken": "8.5.1", + "license-checker": "25.0.1", "ory-prettier-styles": "1.3.0", - "prettier": "2.7.1", - "prettier-plugin-packagejson": "^2.2.18", - "standard": "^12.0.1", - "uuid": "^8.3.2", - "wait-on": "^3.2.0" + "prettier": "3.6.2", + "prettier-plugin-packagejson": "2.5.19", + "standard": "17.1.2", + "uuid": "8.3.2", + "wait-on": "9.0.1" + }, + "overrides": { + "axios": ">=1.12.0", + "glob": ">=11.1.0" } } diff --git a/persistence/definitions.go b/persistence/definitions.go index 88e6c444662..5cdf5c4ee49 100644 --- a/persistence/definitions.go +++ b/persistence/definitions.go @@ -6,30 +6,28 @@ package persistence import ( "context" - "github.com/gobuffalo/pop/v6" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2/trust" - "github.com/ory/hydra/x" - "github.com/ory/x/popx" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" + "github.com/ory/x/networkx" ) type ( Persister interface { - consent.Manager + consent.ObfuscatedSubjectManager + consent.LoginManager + consent.LogoutManager client.Manager x.FositeStorer - jwk.Manager trust.GrantManager - MigrationStatus(ctx context.Context) (popx.MigrationStatuses, error) - MigrateDown(context.Context, int) error - MigrateUp(context.Context) error - PrepareMigration(context.Context) error Connection(context.Context) *pop.Connection - Ping() error + Transaction(context.Context, func(ctx context.Context, c *pop.Connection) error) error + Ping(context.Context) error + DetermineNetwork(ctx context.Context) (*networkx.Network, error) + x.Networker } Provider interface { Persister() Persister diff --git a/persistence/sql/migratest/assertion_helpers.go b/persistence/sql/migratest/assertion_helpers.go deleted file mode 100644 index faa32ff9b9f..00000000000 --- a/persistence/sql/migratest/assertion_helpers.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package migratest - -import ( - "testing" - "time" - - "github.com/gofrs/uuid" - "github.com/instana/testify/require" - - "github.com/ory/hydra/flow" - testhelpersuuid "github.com/ory/hydra/internal/testhelpers/uuid" - "github.com/ory/x/sqlxx" -) - -func fixturizeFlow(t *testing.T, f *flow.Flow) { - testhelpersuuid.AssertUUID(t, &f.NID) - f.NID = uuid.Nil - require.NotZero(t, f.ClientID) - f.ClientID = "" - require.NotNil(t, f.Client) - f.Client = nil - recently := 
time.Now().Add(-time.Minute) - require.Greater(t, time.Time(f.LoginInitializedAt).UnixNano(), recently.UnixNano()) - f.LoginInitializedAt = sqlxx.NullTime{} - require.True(t, f.RequestedAt.After(recently)) - f.RequestedAt = time.Time{} - require.True(t, time.Time(f.LoginAuthenticatedAt).After(recently)) - f.LoginAuthenticatedAt = sqlxx.NullTime{} - f.ConsentHandledAt = sqlxx.NullTime{} -} diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0001.json b/persistence/sql/migratest/fixtures/hydra_client/client-0001.json index 52c544377e7..ab2abe3cb72 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0001.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0001_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0001", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "", - "LegacyClientID": "client-0001", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0001", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0001", "Owner": "owner-0001", - "PKDeprecated": 1, "PolicyURI": "http://policy/0001", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -92,6 +103,11 @@ "Secret": "secret-0001", "SecretExpiresAt": 0, "SectorIdentifierURI": "", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "", "TermsOfServiceURI": "http://tos/0001", "TokenEndpointAuthMethod": "none", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0002.json b/persistence/sql/migratest/fixtures/hydra_client/client-0002.json index de81d74bda6..cabd794188b 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0002.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0002_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0002", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "", - "LegacyClientID": "client-0002", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0002", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0002", "Owner": "owner-0002", - "PKDeprecated": 2, "PolicyURI": "http://policy/0002", 
"PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -92,6 +103,11 @@ "Secret": "secret-0002", "SecretExpiresAt": 0, "SectorIdentifierURI": "", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "", "TermsOfServiceURI": "http://tos/0002", "TokenEndpointAuthMethod": "none", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0003.json b/persistence/sql/migratest/fixtures/hydra_client/client-0003.json index cd9a90bdaa9..2b2abbddb1d 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0003.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0003_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0003", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "", - "LegacyClientID": "client-0003", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0003", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0003", "Owner": "owner-0003", - "PKDeprecated": 3, "PolicyURI": "http://policy/0003", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -92,6 +103,11 @@ "Secret": "secret-0003", "SecretExpiresAt": 0, "SectorIdentifierURI": "", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "", "TermsOfServiceURI": "http://tos/0003", "TokenEndpointAuthMethod": "none", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0004.json b/persistence/sql/migratest/fixtures/hydra_client/client-0004.json index 158ad3f866c..6e8168cd547 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0004.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0004_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0004", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0004", - "LegacyClientID": "client-0004", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0004", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0004", "Owner": "owner-0004", - "PKDeprecated": 4, "PolicyURI": "http://policy/0004", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -94,6 
+105,11 @@ "Secret": "secret-0004", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0004", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "", "TermsOfServiceURI": "http://tos/0004", "TokenEndpointAuthMethod": "none", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0005.json b/persistence/sql/migratest/fixtures/hydra_client/client-0005.json index 798a586bc03..95a12e2de6b 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0005.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0005_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0005", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0005", - "LegacyClientID": "client-0005", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0005", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0005", "Owner": "owner-0005", - "PKDeprecated": 5, "PolicyURI": "http://policy/0005", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -94,6 +105,11 @@ "Secret": "secret-0005", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0005", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "", "TermsOfServiceURI": "http://tos/0005", "TokenEndpointAuthMethod": "token_auth-0005", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0006.json b/persistence/sql/migratest/fixtures/hydra_client/client-0006.json index 59911f9a747..b95628a9cd1 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0006.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0006_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0006", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0006", - "LegacyClientID": "client-0006", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0006", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0006", "Owner": "owner-0006", - "PKDeprecated": 6, "PolicyURI": "http://policy/0006", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ 
-94,6 +105,11 @@ "Secret": "secret-0006", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0006", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0006", "TermsOfServiceURI": "http://tos/0006", "TokenEndpointAuthMethod": "token_auth-0006", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0007.json b/persistence/sql/migratest/fixtures/hydra_client/client-0007.json index f8cccf2f5b5..bd274e5f843 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0007.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [], "Audience": [], "BackChannelLogoutSessionRequired": false, @@ -13,12 +14,11 @@ "GrantTypes": [ "grant-0007_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0007", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0007", - "LegacyClientID": "client-0007", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -36,6 +36,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -71,10 +83,9 @@ }, "LogoURI": "http://logo/0007", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0007", "Owner": "owner-0007", - "PKDeprecated": 7, "PolicyURI": "http://policy/0007", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -94,6 +105,11 @@ "Secret": "secret-0007", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0007", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0007", "TermsOfServiceURI": "http://tos/0007", "TokenEndpointAuthMethod": "token_auth-0007", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0008.json b/persistence/sql/migratest/fixtures/hydra_client/client-0008.json index 518af6a985e..8f069fe472e 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0008.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0008_1" ], @@ -15,12 +16,11 @@ "GrantTypes": [ "grant-0008_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0008", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0008", - "LegacyClientID": "client-0008", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -38,6 +38,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -73,10 +85,9 @@ }, "LogoURI": "http://logo/0008", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0008", "Owner": "owner-0008", - "PKDeprecated": 8, "PolicyURI": "http://policy/0008", "PostLogoutRedirectURIs": [], "RedirectURIs": 
[ @@ -96,6 +107,11 @@ "Secret": "secret-0008", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0008", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0008", "TermsOfServiceURI": "http://tos/0008", "TokenEndpointAuthMethod": "token_auth-0008", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0009.json b/persistence/sql/migratest/fixtures/hydra_client/client-0009.json index 9da29f7ca17..2e85ab549b7 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0009.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0009_1" ], @@ -15,12 +16,11 @@ "GrantTypes": [ "grant-0009_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0009", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0009", - "LegacyClientID": "client-0009", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -38,6 +38,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -73,10 +85,9 @@ }, "LogoURI": "http://logo/0009", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0009", "Owner": "owner-0009", - "PKDeprecated": 9, "PolicyURI": "http://policy/0009", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -96,6 +107,11 @@ "Secret": "secret-0009", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0009", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0009", "TermsOfServiceURI": "http://tos/0009", "TokenEndpointAuthMethod": "token_auth-0009", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0010.json b/persistence/sql/migratest/fixtures/hydra_client/client-0010.json index 728a66c2bda..f060a6f6337 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0010.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0010_1" ], @@ -15,12 +16,11 @@ "GrantTypes": [ "grant-0010_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0010", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0010", - "LegacyClientID": "client-0010", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -38,6 +38,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -73,10 +85,9 @@ }, "LogoURI": "http://logo/0010", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0010", "Owner": "owner-0010", - "PKDeprecated": 10, "PolicyURI": "http://policy/0010", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -96,6 +107,11 @@ "Secret": 
"secret-0010", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0010", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0010", "TermsOfServiceURI": "http://tos/0010", "TokenEndpointAuthMethod": "token_auth-0010", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0011.json b/persistence/sql/migratest/fixtures/hydra_client/client-0011.json index 3335f98fa13..44ec40aea43 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0011.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0011_1" ], @@ -17,12 +18,11 @@ "GrantTypes": [ "grant-0011_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0011", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0011", - "LegacyClientID": "client-0011", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -40,6 +40,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -75,10 +87,9 @@ }, "LogoURI": "http://logo/0011", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0011", "Owner": "owner-0011", - "PKDeprecated": 11, "PolicyURI": "http://policy/0011", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -98,6 +109,11 @@ "Secret": "secret-0011", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0011", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0011", "TermsOfServiceURI": "http://tos/0011", "TokenEndpointAuthMethod": "token_auth-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0012.json b/persistence/sql/migratest/fixtures/hydra_client/client-0012.json index d5c7523feb5..fbd6313598f 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0012.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0012.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0012_1" ], @@ -11,18 +12,17 @@ "Contacts": [ "contact-0012_1" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:20Z", "FrontChannelLogoutSessionRequired": false, "FrontChannelLogoutURI": "", "GrantTypes": [ "grant-0012_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0012", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0012", - "LegacyClientID": "client-0012", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -40,6 +40,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -75,10 +87,9 @@ }, "LogoURI": "http://logo/0012", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0012", 
"Owner": "owner-0012", - "PKDeprecated": 12, "PolicyURI": "http://policy/0012", "PostLogoutRedirectURIs": [], "RedirectURIs": [ @@ -98,10 +109,15 @@ "Secret": "secret-0012", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0012", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0012", "TermsOfServiceURI": "http://tos/0012", "TokenEndpointAuthMethod": "token_auth-0012", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:20Z", "UserinfoSignedResponseAlg": "u_alg-0012" } diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0013.json b/persistence/sql/migratest/fixtures/hydra_client/client-0013.json index a4a8438a9d3..2148a8ec4f6 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0013.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0013.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0013_1" ], @@ -11,18 +12,17 @@ "Contacts": [ "contact-0013_1" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:20Z", "FrontChannelLogoutSessionRequired": true, "FrontChannelLogoutURI": "http://front_logout/0013", "GrantTypes": [ "grant-0013_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0013", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0013", - "LegacyClientID": "client-0013", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -40,6 +40,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -75,10 +87,9 @@ }, "LogoURI": "http://logo/0013", "Metadata": {}, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0013", "Owner": "owner-0013", - "PKDeprecated": 13, "PolicyURI": "http://policy/0013", "PostLogoutRedirectURIs": [ "http://post_redirect/0013_1" @@ -100,10 +111,15 @@ "Secret": "secret-0013", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0013", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0013", "TermsOfServiceURI": "http://tos/0013", "TokenEndpointAuthMethod": "token_auth-0013", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:20Z", "UserinfoSignedResponseAlg": "u_alg-0013" } diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0014.json b/persistence/sql/migratest/fixtures/hydra_client/client-0014.json index 28aec74413a..94003d09e18 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0014.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0014_1" ], @@ -11,18 +12,17 @@ "Contacts": [ "contact-0014_1" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:21Z", "FrontChannelLogoutSessionRequired": true, "FrontChannelLogoutURI": "http://front_logout/0014", "GrantTypes": [ "grant-0014_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0014", "JSONWebKeys": { "JSONWebKeySet": null }, 
"JSONWebKeysURI": "http://jwks/0014", - "LegacyClientID": "client-0014", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -40,6 +40,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -77,10 +89,9 @@ "Metadata": { "migration": "0014" }, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0014", "Owner": "owner-0014", - "PKDeprecated": 14, "PolicyURI": "http://policy/0014", "PostLogoutRedirectURIs": [ "http://post_redirect/0014_1" @@ -102,10 +113,15 @@ "Secret": "secret-0014", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0014", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0014", "TermsOfServiceURI": "http://tos/0014", "TokenEndpointAuthMethod": "token_auth-0014", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:21Z", "UserinfoSignedResponseAlg": "u_alg-0014" } diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-0015.json b/persistence/sql/migratest/fixtures/hydra_client/client-0015.json index 029feb8eb30..5f28970a4d4 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-0015.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-0015.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/0015_1" ], @@ -11,18 +12,17 @@ "Contacts": [ "contact-0015_1" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:21Z", "FrontChannelLogoutSessionRequired": true, "FrontChannelLogoutURI": "http://front_logout/0015", "GrantTypes": [ "grant-0015_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-0015", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/0015", - "LegacyClientID": "client-0015", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 151000000000, @@ -40,6 +40,18 @@ "Duration": 154000000000, "Valid": true }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 155000000000, "Valid": true @@ -77,10 +89,9 @@ "Metadata": { "migration": "0015" }, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 0015", "Owner": "owner-0015", - "PKDeprecated": 15, "PolicyURI": "http://policy/0015", "PostLogoutRedirectURIs": [ "http://post_redirect/0015_1" @@ -102,10 +113,15 @@ "Secret": "secret-0015", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/0015", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-0015", "TermsOfServiceURI": "http://tos/0015", "TokenEndpointAuthMethod": "token_auth-0015", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:21Z", "UserinfoSignedResponseAlg": "u_alg-0015" } diff --git 
a/persistence/sql/migratest/fixtures/hydra_client/client-20.json b/persistence/sql/migratest/fixtures/hydra_client/client-20.json index ab1579564c5..3ef897475cd 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-20.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-20.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/20_1" ], @@ -11,18 +12,17 @@ "Contacts": [ "contact-20_1" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:23Z", "FrontChannelLogoutSessionRequired": true, "FrontChannelLogoutURI": "http://front_logout/20", "GrantTypes": [ "grant-20_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-20", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/20", - "LegacyClientID": "client-20", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -40,6 +40,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -77,10 +89,9 @@ "Metadata": { "migration": "20" }, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 20", "Owner": "owner-20", - "PKDeprecated": 0, "PolicyURI": "http://policy/20", "PostLogoutRedirectURIs": [ "http://post_redirect/20_1" @@ -102,10 +113,15 @@ "Secret": "secret-20", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/20", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-20", "TermsOfServiceURI": "http://tos/20", "TokenEndpointAuthMethod": "token_auth-20", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:23Z", "UserinfoSignedResponseAlg": "u_alg-20" } diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-2005.json b/persistence/sql/migratest/fixtures/hydra_client/client-2005.json index 475204dc723..ffad8036db3 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-2005.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-2005.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/2005_1" ], @@ -11,18 +12,17 @@ "Contacts": [ "contact-2005_1" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:22Z", "FrontChannelLogoutSessionRequired": true, "FrontChannelLogoutURI": "http://front_logout/2005", "GrantTypes": [ "grant-2005_1" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-2005", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/2005", - "LegacyClientID": "client-2005", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -40,6 +40,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -77,10 +89,9 @@ "Metadata": { "migration": "2005" }, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": 
"24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 2005", "Owner": "owner-2005", - "PKDeprecated": 2005, "PolicyURI": "http://policy/2005", "PostLogoutRedirectURIs": [ "http://post_redirect/2005_1" @@ -102,10 +113,15 @@ "Secret": "secret-2005", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/2005", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-2005", "TermsOfServiceURI": "http://tos/2005", "TokenEndpointAuthMethod": "token_auth-2005", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:22Z", "UserinfoSignedResponseAlg": "u_alg-2005" } diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-21.json b/persistence/sql/migratest/fixtures/hydra_client/client-21.json index 55e565b54db..bb97eae4d17 100644 --- a/persistence/sql/migratest/fixtures/hydra_client/client-21.json +++ b/persistence/sql/migratest/fixtures/hydra_client/client-21.json @@ -1,4 +1,5 @@ { + "AccessTokenStrategy": "", "AllowedCORSOrigins": [ "http://cors/21_1", "http://cors/21_2" @@ -14,19 +15,18 @@ "contact-21_1", "contact-21_2" ], - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:23Z", "FrontChannelLogoutSessionRequired": true, "FrontChannelLogoutURI": "http://front_logout/21", "GrantTypes": [ "grant-21_1", "grant-21_2" ], - "ID": "00000000-0000-0000-0000-000000000000", + "ID": "client-21", "JSONWebKeys": { "JSONWebKeySet": null }, "JSONWebKeysURI": "http://jwks/21", - "LegacyClientID": "client-21", "Lifespans": { "AuthorizationCodeGrantAccessTokenLifespan": { "Duration": 0, @@ -44,6 +44,18 @@ "Duration": 0, "Valid": false }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, "ImplicitGrantAccessTokenLifespan": { "Duration": 0, "Valid": false @@ -81,10 +93,9 @@ "Metadata": { "migration": "21" }, - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Name": "Client 21", "Owner": "owner-21", - "PKDeprecated": 0, "PolicyURI": "http://policy/21", "PostLogoutRedirectURIs": [ "http://post_redirect/21_1", @@ -110,10 +121,15 @@ "Secret": "secret-21", "SecretExpiresAt": 0, "SectorIdentifierURI": "http://sector_id/21", + "SkipConsent": false, + "SkipLogoutConsent": { + "Bool": false, + "Valid": false + }, "SubjectType": "subject-21", "TermsOfServiceURI": "http://tos/21", "TokenEndpointAuthMethod": "token_auth-21", "TokenEndpointAuthSigningAlgorithm": "", - "UpdatedAt": "0001-01-01T00:00:00Z", + "UpdatedAt": "2022-02-15T22:20:23Z", "UserinfoSignedResponseAlg": "u_alg-21" } diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-22.json b/persistence/sql/migratest/fixtures/hydra_client/client-22.json new file mode 100644 index 00000000000..6030e003246 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_client/client-22.json @@ -0,0 +1,135 @@ +{ + "AccessTokenStrategy": "", + "AllowedCORSOrigins": [ + "http://cors/22_1", + "http://cors/22_2" + ], + "Audience": [ + "autdience-22_1", + "autdience-22_2" + ], + "BackChannelLogoutSessionRequired": true, + "BackChannelLogoutURI": "http://back_logout/22", + "ClientURI": "http://client/22", + "Contacts": [ + "contact-22_1", + "contact-22_2" + ], + "CreatedAt": "2022-02-15T22:20:23Z", + "FrontChannelLogoutSessionRequired": true, + 
"FrontChannelLogoutURI": "http://front_logout/22", + "GrantTypes": [ + "grant-22_1", + "grant-22_2" + ], + "ID": "client-22", + "JSONWebKeys": { + "JSONWebKeySet": null + }, + "JSONWebKeysURI": "http://jwks/22", + "Lifespans": { + "AuthorizationCodeGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "AuthorizationCodeGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "AuthorizationCodeGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "ClientCredentialsGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "ImplicitGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "ImplicitGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "JwtBearerGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "PasswordGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "PasswordGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "RefreshTokenGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "RefreshTokenGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "RefreshTokenGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + } + }, + "LogoURI": "http://logo/22", + "Metadata": { + "migration": "22" + }, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "Name": "Client 22", + "Owner": "owner-22", + "PolicyURI": "http://policy/22", + "PostLogoutRedirectURIs": [ + "http://post_redirect/22_1", + "http://post_redirect/22_2" + ], + "RedirectURIs": [ + "http://redirect/22_1", + "http://redirect/22_2" + ], + "RegistrationAccessToken": "", + "RegistrationAccessTokenSignature": "", + "RegistrationClientURI": "", + "RequestObjectSigningAlgorithm": "r_alg-22", + "RequestURIs": [ + "http://request/22_1", + "http://request/22_2" + ], + "ResponseTypes": [ + "response-22_1", + "response-22_2" + ], + "Scope": "scope-22", + "Secret": "secret-22", + "SecretExpiresAt": 0, + "SectorIdentifierURI": "http://sector_id/22", + "SkipConsent": true, + "SkipLogoutConsent": { + "Bool": true, + "Valid": true + }, + "SubjectType": "subject-22", + "TermsOfServiceURI": "http://tos/22", + "TokenEndpointAuthMethod": "token_auth-22", + "TokenEndpointAuthSigningAlgorithm": "", + "UpdatedAt": "2022-02-15T22:20:23Z", + "UserinfoSignedResponseAlg": "u_alg-22" +} diff --git a/persistence/sql/migratest/fixtures/hydra_client/client-23.json b/persistence/sql/migratest/fixtures/hydra_client/client-23.json new file mode 100644 index 00000000000..ed9e225ea88 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_client/client-23.json @@ -0,0 +1,135 @@ +{ + "AccessTokenStrategy": "", + "AllowedCORSOrigins": [ + "http://cors/23_1", + "http://cors/23_2" + ], + "Audience": [ + "autdience-23_1", + "autdience-23_2" + ], + "BackChannelLogoutSessionRequired": true, + "BackChannelLogoutURI": "http://back_logout/23", + "ClientURI": "http://client/23", + "Contacts": [ + "contact-23_1", + "contact-23_2" + ], + "CreatedAt": "2023-02-15T23:20:23Z", + "FrontChannelLogoutSessionRequired": true, + "FrontChannelLogoutURI": "http://front_logout/23", + "GrantTypes": [ + "grant-23_1", + "grant-23_2" + ], + "ID": "client-23", + "JSONWebKeys": { + "JSONWebKeySet": null + }, + "JSONWebKeysURI": "http://jwks/23", + 
"Lifespans": { + "AuthorizationCodeGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "AuthorizationCodeGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "AuthorizationCodeGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "ClientCredentialsGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "DeviceAuthorizationGrantAccessTokenLifespan": { + "Duration": 3600, + "Valid": true + }, + "DeviceAuthorizationGrantIDTokenLifespan": { + "Duration": 3600, + "Valid": true + }, + "DeviceAuthorizationGrantRefreshTokenLifespan": { + "Duration": 3600, + "Valid": true + }, + "ImplicitGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "ImplicitGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "JwtBearerGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "PasswordGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "PasswordGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "RefreshTokenGrantAccessTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "RefreshTokenGrantIDTokenLifespan": { + "Duration": 0, + "Valid": false + }, + "RefreshTokenGrantRefreshTokenLifespan": { + "Duration": 0, + "Valid": false + } + }, + "LogoURI": "http://logo/23", + "Metadata": { + "migration": "23" + }, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "Name": "Client 23", + "Owner": "owner-23", + "PolicyURI": "http://policy/23", + "PostLogoutRedirectURIs": [ + "http://post_redirect/23_1", + "http://post_redirect/23_2" + ], + "RedirectURIs": [ + "http://redirect/23_1", + "http://redirect/23_2" + ], + "RegistrationAccessToken": "", + "RegistrationAccessTokenSignature": "", + "RegistrationClientURI": "", + "RequestObjectSigningAlgorithm": "r_alg-23", + "RequestURIs": [ + "http://request/23_1", + "http://request/23_2" + ], + "ResponseTypes": [ + "response-23_1", + "response-23_2" + ], + "Scope": "scope-23", + "Secret": "secret-23", + "SecretExpiresAt": 0, + "SectorIdentifierURI": "http://sector_id/23", + "SkipConsent": true, + "SkipLogoutConsent": { + "Bool": true, + "Valid": true + }, + "SubjectType": "subject-23", + "TermsOfServiceURI": "http://tos/23", + "TokenEndpointAuthMethod": "token_auth-23", + "TokenEndpointAuthSigningAlgorithm": "", + "UpdatedAt": "2023-02-15T23:20:23Z", + "UserinfoSignedResponseAlg": "u_alg-23" +} diff --git a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0002.json b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0002.json index 577c2a93ef5..84afbbe9cac 100644 --- a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0002.json @@ -3,6 +3,6 @@ "Set": "sid-0002", "KID": "kid-0002", "Version": 2, - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:21Z", "Key": "key-0002" } diff --git a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0003.json b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0003.json index ec21b32f6c1..3c2aedd1265 100644 --- a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0003.json @@ -3,6 +3,6 @@ "Set": "sid-0003", "KID": "kid-0003", "Version": 3, - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:21Z", "Key": "key-0003" } diff --git a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0004.json b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0004.json index faff2794a35..e3a8299cd28 100644 --- 
a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0004.json @@ -3,6 +3,6 @@ "Set": "sid-0004", "KID": "kid-0004", "Version": 4, - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:21Z", "Key": "key-0004" } diff --git a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0005.json b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0005.json index 9aed75d3f00..d20b946dd5c 100644 --- a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0005.json @@ -3,6 +3,6 @@ "Set": "sid-0005", "KID": "kid-0005", "Version": 4, - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:23Z", "Key": "key-0005" } diff --git a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0008.json b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0008.json index 8cc897f72ed..b84357d2389 100644 --- a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0008.json @@ -3,6 +3,6 @@ "Set": "sid-0008", "KID": "kid-0008", "Version": 2, - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:23Z", "Key": "key-0002" } diff --git a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0009.json b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0009.json index 9cba178f949..b857303a8b7 100644 --- a/persistence/sql/migratest/fixtures/hydra_jwk/kid-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_jwk/kid-0009.json @@ -3,6 +3,6 @@ "Set": "sid-0009", "KID": "kid-0009", "Version": 2, - "CreatedAt": "0001-01-01T00:00:00Z", + "CreatedAt": "2022-02-15T22:20:21Z", "Key": "key-0002" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0001.json index e0d857ce36c..f730be1e071 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0001.json @@ -1,13 +1,13 @@ { "ID": "sig-0001", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0001", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0001", "GrantedScope": "granted_scope-0001", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0002.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0002.json index 653f7ca5b18..df64c413030 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0002.json @@ -1,13 +1,13 @@ { "ID": "sig-0002", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0002", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0002", "GrantedScope": "granted_scope-0002", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0003.json index 56c5eabf419..a7e283bca1b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0003.json +++ 
b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0003.json @@ -1,13 +1,13 @@ { "ID": "sig-0003", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0003", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0003", "GrantedScope": "granted_scope-0003", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0004.json index 5eae151e448..16998b3daba 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0004.json @@ -1,13 +1,13 @@ { "ID": "sig-0004", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0004", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0004", "GrantedScope": "granted_scope-0004", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0005.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0005.json index 90ff903a389..94c8b09da3f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0005.json @@ -1,13 +1,13 @@ { "ID": "sig-0005", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0005", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0005", "GrantedScope": "granted_scope-0005", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0006.json index d4ec51f0029..dd40cf69613 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0006.json @@ -1,13 +1,13 @@ { "ID": "sig-0006", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0006", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0006", "GrantedScope": "granted_scope-0006", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0007.json index f07b8acdc49..6f094162f58 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0007.json @@ -1,13 +1,13 @@ { "ID": "sig-0007", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0007", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0007", "GrantedScope": "granted_scope-0007", 
"RequestedAudience": "requested_audience-0007", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0008.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0008.json index 637ce41ae43..b9ec4325c56 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0008.json @@ -1,13 +1,13 @@ { "ID": "sig-0008", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0008", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0008", "GrantedScope": "granted_scope-0008", "RequestedAudience": "requested_audience-0008", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0009.json index c8a7d675df2..84524888e21 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0009.json @@ -1,13 +1,13 @@ { "ID": "sig-0009", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0009", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0009", "GrantedScope": "granted_scope-0009", "RequestedAudience": "requested_audience-0009", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0010.json index 8f811a240c4..3eb52254da7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0010.json @@ -1,13 +1,13 @@ { "ID": "sig-0010", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0010", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0010", "GrantedScope": "granted_scope-0010", "RequestedAudience": "requested_audience-0010", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0011.json index d5b80241925..8c57b54fb62 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-0011.json @@ -1,13 +1,13 @@ { "ID": "sig-0011", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0011", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000-01.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000-01.json index 90a33971681..1530062d576 100644 --- 
a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000-01.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000-01.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000-01", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000.json b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000.json index a1a8516f246..7b4d42c798b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_access/sig-20201110104000.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0001.json index a642f6606bf..cef02af2d2d 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0001.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0001", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0001", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0002.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0002.json index 65d132ab888..e0d35f74b4f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0002.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0002", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0002", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0003.json index dfa9ae21e86..6945baea4e9 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0003.json @@ -1,7 +1,9 @@ { "ID": 
"auth_session-0003", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0003", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0004.json index 4411c2064a5..1c95263f021 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0004.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0004", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0004", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0005.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0005.json index cba9dcf6125..5cf6e14963e 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0005.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0005", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0005", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0006.json index 2081943455f..6a67ce7fb30 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0006.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0006", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0006", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0007.json index c701732aac3..cf31a89a48f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0007.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0007", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0007", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0008.json 
b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0008.json index b8fc116b728..98ebea1f106 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0008.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0008", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0008", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0009.json index 57cee5ecb1e..b17a1efdd3f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0009.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0009", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0009", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0010.json index 3e6b4d5fa10..03557f805dc 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0010.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0010", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0010", - "Remember": true + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0011.json index 1344d7158fb..cabcbd05385 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0011.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0011", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0011", - "Remember": false + "IdentityProviderSessionID": "", + "Remember": false, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0012.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0012.json index 876277e2440..5ccc05f6810 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0012.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0012.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0012", - "NID": "00000000-0000-0000-0000-000000000000", - 
"AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0012", - "Remember": false + "IdentityProviderSessionID": "", + "Remember": false, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0013.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0013.json index 404a1b4fe37..2867aa3ea7d 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0013.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0013.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0013", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0013", - "Remember": false + "IdentityProviderSessionID": "", + "Remember": false, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0014.json index 472bef86c13..8ea6ba8a855 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0014.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0014", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0014", - "Remember": false + "IdentityProviderSessionID": "", + "Remember": false, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0015.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0015.json index 1f82a76af6b..f4d2192cee0 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0015.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0015.json @@ -1,7 +1,9 @@ { "ID": "auth_session-0015", - "NID": "00000000-0000-0000-0000-000000000000", - "AuthenticatedAt": null, + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", "Subject": "subject-0015", - "Remember": false + "IdentityProviderSessionID": "", + "Remember": false, + "ExpiresAt": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0016.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0016.json new file mode 100644 index 00000000000..f99f634f3a9 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0016.json @@ -0,0 +1,9 @@ +{ + "ID": "auth_session-0016", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", + "Subject": "subject-0016", + "IdentityProviderSessionID": "", + "Remember": true, + "ExpiresAt": null +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0017.json b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0017.json new file mode 100644 index 00000000000..0025e9e77a2 --- /dev/null +++ 
b/persistence/sql/migratest/fixtures/hydra_oauth2_authentication_session/auth_session-0017.json @@ -0,0 +1,9 @@ +{ + "ID": "auth_session-0017", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "AuthenticatedAt": "2022-02-15T22:20:21Z", + "Subject": "subject-0017", + "IdentityProviderSessionID": "identity_provider_session_id-0017", + "Remember": true, + "ExpiresAt": null +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0001.json index e0d857ce36c..f730be1e071 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0001.json @@ -1,13 +1,13 @@ { "ID": "sig-0001", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0001", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0001", "GrantedScope": "granted_scope-0001", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0002.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0002.json index 653f7ca5b18..df64c413030 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0002.json @@ -1,13 +1,13 @@ { "ID": "sig-0002", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0002", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0002", "GrantedScope": "granted_scope-0002", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0003.json index 56c5eabf419..a7e283bca1b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0003.json @@ -1,13 +1,13 @@ { "ID": "sig-0003", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0003", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0003", "GrantedScope": "granted_scope-0003", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0004.json index 5eae151e448..16998b3daba 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0004.json @@ -1,13 +1,13 @@ { "ID": "sig-0004", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0004", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0004", "GrantedScope": "granted_scope-0004", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0005.json 
b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0005.json index 90ff903a389..94c8b09da3f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0005.json @@ -1,13 +1,13 @@ { "ID": "sig-0005", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0005", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0005", "GrantedScope": "granted_scope-0005", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0006.json index d4ec51f0029..dd40cf69613 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0006.json @@ -1,13 +1,13 @@ { "ID": "sig-0006", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0006", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0006", "GrantedScope": "granted_scope-0006", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0007.json index f07b8acdc49..6f094162f58 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0007.json @@ -1,13 +1,13 @@ { "ID": "sig-0007", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0007", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0007", "GrantedScope": "granted_scope-0007", "RequestedAudience": "requested_audience-0007", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0008.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0008.json index 637ce41ae43..b9ec4325c56 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0008.json @@ -1,13 +1,13 @@ { "ID": "sig-0008", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0008", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0008", "GrantedScope": "granted_scope-0008", "RequestedAudience": "requested_audience-0008", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0009.json index c8a7d675df2..84524888e21 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0009.json @@ -1,13 +1,13 @@ { "ID": "sig-0009", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0009", "ConsentChallenge": { "String": "challenge-0014", 
"Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0009", "GrantedScope": "granted_scope-0009", "RequestedAudience": "requested_audience-0009", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0010.json index 8f811a240c4..3eb52254da7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0010.json @@ -1,13 +1,13 @@ { "ID": "sig-0010", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0010", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0010", "GrantedScope": "granted_scope-0010", "RequestedAudience": "requested_audience-0010", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0011.json index d5b80241925..8c57b54fb62 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-0011.json @@ -1,13 +1,13 @@ { "ID": "sig-0011", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0011", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000-01.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000-01.json index 90a33971681..1530062d576 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000-01.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000-01.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000-01", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000.json b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000.json index a1a8516f246..7b4d42c798b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_code/sig-20201110104000.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": 
"requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_device_auth_codes/device-code-signature-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_device_auth_codes/device-code-signature-0001.json new file mode 100644 index 00000000000..959cc1d2193 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_device_auth_codes/device-code-signature-0001.json @@ -0,0 +1,21 @@ +{ + "ID": "device-code-signature-0001", + "UserCodeID": "user-code-signature-0001", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "Request": "request-id-0001", + "ConsentChallenge": { + "String": "challenge-0018", + "Valid": true + }, + "RequestedAt": "2025-05-16T12:24:00Z", + "Client": "client-21", + "Scopes": "[\"scope-0001_1\",\"scope-0001_2\"]", + "GrantedScope": "[\"granted_scope-0001_1\",\"granted_scope-0001_2\"]", + "RequestedAudience": "[\"requested_audience-0001_1\",\"requested_audience-0001_2\"]", + "GrantedAudience": "[\"granted_audience-0001_1\",\"granted_audience-0001_2\"]", + "Form": "{\"form_data\": \"0001\"}", + "Subject": "subject-0001", + "DeviceCodeActive": true, + "UserCodeState": 0, + "Session": "eyJzZXNzaW9uX2RhdGEiOiAiMDAwMSJ9" +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0001.json index e6c93405db7..2416e92039c 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0001.json @@ -1,62 +1,33 @@ { - "ID": "challenge-0001", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0001", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0001_1" ], - "RequestedAudience": [], - "LoginSkip": true, - "Subject": "subject-0001", - "OpenIDConnectContext": { + "s": "subject-0001", + "oc": { "display": "display-0001" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0001", - "SessionID": "", - "LoginVerifier": "verifier-0001", - "LoginCSRF": "csrf-0001", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 1, - "ACR": "acr-0001", - "AMR": [], - "ForceSubjectIdentifier": "", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0001", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0001", - "ConsentCSRF": "csrf-0001", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0001", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0001", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0001", + "cs": true, + "gs": [ "granted_scope-0001_1" ], - "GrantedAudience": [], - "ConsentRemember": true, - "ConsentRememberFor": 1, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 1, + "ch": null, + "cx": null, + "st": { "session_id_token-0001": "0001" }, - "SessionAccessToken": { + "sa": { "session_access_token-0001": "0001" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0002.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0002.json index 61c8b0e1e8f..b37a376e165 100644 --- 
a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0002.json @@ -1,62 +1,33 @@ { - "ID": "challenge-0002", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0002", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0002_1" ], - "RequestedAudience": [], - "LoginSkip": true, - "Subject": "subject-0002", - "OpenIDConnectContext": { + "s": "subject-0002", + "oc": { "display": "display-0002" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0002", - "SessionID": "", - "LoginVerifier": "verifier-0002", - "LoginCSRF": "csrf-0002", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 2, - "ACR": "acr-0002", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0002", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0002", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0002", - "ConsentCSRF": "csrf-0002", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0002", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0002", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0002", + "cs": true, + "gs": [ "granted_scope-0002_1" ], - "GrantedAudience": [], - "ConsentRemember": true, - "ConsentRememberFor": 2, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 2, + "ch": null, + "cx": null, + "st": { "session_id_token-0002": "0002" }, - "SessionAccessToken": { + "sa": { "session_access_token-0002": "0002" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0003.json index 3a0023de89d..d5e0c59d6dd 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0003.json @@ -1,62 +1,34 @@ { - "ID": "challenge-0003", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0003", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0003_1" ], - "RequestedAudience": [], - "LoginSkip": true, - "Subject": "subject-0003", - "OpenIDConnectContext": { + "s": "subject-0003", + "oc": { "display": "display-0003" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0003", - "SessionID": "auth_session-0003", - "LoginVerifier": "verifier-0003", - "LoginCSRF": "csrf-0003", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 3, - "ACR": "acr-0003", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0003", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0003", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0003", - "ConsentCSRF": "csrf-0003", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0003", + "si": "auth_session-0003", + "ia": 
"2022-02-15T22:20:21Z", + "a": "acr-0003", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0003", + "cs": true, + "gs": [ "granted_scope-0003_1" ], - "GrantedAudience": [], - "ConsentRemember": true, - "ConsentRememberFor": 3, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 3, + "ch": null, + "cx": null, + "st": { "session_id_token-0003": "0003" }, - "SessionAccessToken": { + "sa": { "session_access_token-0003": "0003" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0004.json index af2fa42e3af..1fb65a2b5fe 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0004.json @@ -1,66 +1,40 @@ { - "ID": "challenge-0004", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0004", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0004_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0004_1" ], - "LoginSkip": true, - "Subject": "subject-0004", - "OpenIDConnectContext": { + "s": "subject-0004", + "oc": { "display": "display-0004" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0004", - "SessionID": "auth_session-0004", - "LoginVerifier": "verifier-0004", - "LoginCSRF": "csrf-0004", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 4, - "ACR": "acr-0004", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0004", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0004", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0004", - "ConsentCSRF": "csrf-0004", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0004", + "si": "auth_session-0004", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0004", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0004", + "cs": true, + "gs": [ "granted_scope-0004_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0004_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 4, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 4, + "ch": null, + "cx": null, + "st": { "session_id_token-0004": "0004" }, - "SessionAccessToken": { + "sa": { "session_access_token-0004": "0004" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0005.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0005.json index 66e356b42c2..5c9158bf628 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0005.json @@ -1,66 +1,40 @@ { - "ID": "challenge-0005", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0005", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0005_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0005_1" ], - 
"LoginSkip": true, - "Subject": "subject-0005", - "OpenIDConnectContext": { + "s": "subject-0005", + "oc": { "display": "display-0005" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0005", - "SessionID": "auth_session-0005", - "LoginVerifier": "verifier-0005", - "LoginCSRF": "csrf-0005", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 5, - "ACR": "acr-0005", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0005", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0005", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0005", - "ConsentCSRF": "csrf-0005", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0005", + "si": "auth_session-0005", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0005", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0005", + "cs": true, + "gs": [ "granted_scope-0005_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0005_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 5, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 5, + "ch": null, + "cx": null, + "st": { "session_id_token-0005": "0005" }, - "SessionAccessToken": { + "sa": { "session_access_token-0005": "0005" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0006.json index c457b1caba8..5f9c085743c 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0006.json @@ -1,66 +1,40 @@ { - "ID": "challenge-0006", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0006", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0006_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0006_1" ], - "LoginSkip": true, - "Subject": "subject-0006", - "OpenIDConnectContext": { + "s": "subject-0006", + "oc": { "display": "display-0006" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0006", - "SessionID": "auth_session-0006", - "LoginVerifier": "verifier-0006", - "LoginCSRF": "csrf-0006", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 6, - "ACR": "acr-0006", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0006", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0006", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0006", - "ConsentCSRF": "csrf-0006", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0006", + "si": "auth_session-0006", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0006", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0006", + "cs": true, + "gs": [ "granted_scope-0006_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0006_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 6, - 
"ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 6, + "ch": null, + "cx": null, + "st": { "session_id_token-0006": "0006" }, - "SessionAccessToken": { + "sa": { "session_access_token-0006": "0006" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0007.json index 55f894ef216..d93f1726a2f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0007.json @@ -1,66 +1,40 @@ { - "ID": "challenge-0007", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0007", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0007_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0007_1" ], - "LoginSkip": true, - "Subject": "subject-0007", - "OpenIDConnectContext": { + "s": "subject-0007", + "oc": { "display": "display-0007" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0007", - "SessionID": "auth_session-0007", - "LoginVerifier": "verifier-0007", - "LoginCSRF": "csrf-0007", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 7, - "ACR": "acr-0007", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0007", - "Context": {}, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0007", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0007", - "ConsentCSRF": "csrf-0007", - "GrantedScope": [ + "ci": "client-0014", + "r": "http://request/0007", + "si": "auth_session-0007", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0007", + "ct": {}, + "la": null, + "dh": null, + "cc": "challenge-0007", + "cs": true, + "gs": [ "granted_scope-0007_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0007_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 7, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 7, + "ch": null, + "cx": null, + "st": { "session_id_token-0007": "0007" }, - "SessionAccessToken": { + "sa": { "session_access_token-0007": "0007" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0008.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0008.json index adef19a20fb..29e1b9bcb7e 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0008.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0008", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0008", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0008_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0008_1" ], - "LoginSkip": true, - "Subject": "subject-0008", - "OpenIDConnectContext": { + "s": "subject-0008", + "oc": { "display": "display-0008" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0008", - "SessionID": 
"auth_session-0008", - "LoginVerifier": "verifier-0008", - "LoginCSRF": "csrf-0008", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 8, - "ACR": "acr-0008", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0008", - "Context": { + "ci": "client-0014", + "r": "http://request/0008", + "si": "auth_session-0008", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0008", + "ct": { "context": "0008" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0008", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0008", - "ConsentCSRF": "csrf-0008", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0008", + "cs": true, + "gs": [ "granted_scope-0008_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0008_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 8, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 8, + "ch": null, + "cx": null, + "st": { "session_id_token-0008": "0008" }, - "SessionAccessToken": { + "sa": { "session_access_token-0008": "0008" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0009.json index 6ee8a6293cd..de13a1ebdc3 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0009.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0009", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0009", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0009_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0009_1" ], - "LoginSkip": true, - "Subject": "subject-0009", - "OpenIDConnectContext": { + "s": "subject-0009", + "oc": { "display": "display-0009" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0009", - "SessionID": "auth_session-0009", - "LoginVerifier": "verifier-0009", - "LoginCSRF": "csrf-0009", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 9, - "ACR": "acr-0009", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0009", - "Context": { + "ci": "client-0014", + "r": "http://request/0009", + "si": "auth_session-0009", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0009", + "ct": { "context": "0009" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0009", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0009", - "ConsentCSRF": "csrf-0009", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0009", + "cs": true, + "gs": [ "granted_scope-0009_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0009_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 9, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + 
"ce": true, + "cf": 9, + "ch": null, + "cx": null, + "st": { "session_id_token-0009": "0009" }, - "SessionAccessToken": { + "sa": { "session_access_token-0009": "0009" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0010.json index 92acf0ecbf6..88e0a942ac7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0010.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0010", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0010", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0010_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0010_1" ], - "LoginSkip": true, - "Subject": "subject-0010", - "OpenIDConnectContext": { + "s": "subject-0010", + "oc": { "display": "display-0010" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0010", - "SessionID": "auth_session-0010", - "LoginVerifier": "verifier-0010", - "LoginCSRF": "csrf-0010", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 10, - "ACR": "acr-0010", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0010", - "Context": { + "ci": "client-0014", + "r": "http://request/0010", + "si": "auth_session-0010", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0010", + "ct": { "context": "0010" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0010", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0010", - "ConsentCSRF": "csrf-0010", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0010", + "cs": true, + "gs": [ "granted_scope-0010_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0010_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 10, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 10, + "ch": null, + "cx": null, + "st": { "session_id_token-0010": "0010" }, - "SessionAccessToken": { + "sa": { "session_access_token-0010": "0010" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0011.json index 0ed7c88f2aa..a79b8d521d2 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0011.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0011", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0011", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0011_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0011_1" ], - "LoginSkip": true, - "Subject": "subject-0011", - "OpenIDConnectContext": { + "s": "subject-0011", + "oc": { "display": "display-0011" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0011", - "SessionID": "auth_session-0011", - "LoginVerifier": "verifier-0011", - "LoginCSRF": "csrf-0011", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, 
- "LoginRememberFor": 11, - "ACR": "acr-0011", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0011", - "Context": { + "ci": "client-0014", + "r": "http://request/0011", + "si": "auth_session-0011", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0011", + "ct": { "context": "0011" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0011", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0011", - "ConsentCSRF": "csrf-0011", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0011", + "cs": true, + "gs": [ "granted_scope-0011_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0011_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 11, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 11, + "ch": null, + "cx": null, + "st": { "session_id_token-0011": "0011" }, - "SessionAccessToken": { + "sa": { "session_access_token-0011": "0011" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0012.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0012.json index f8cc3c232f4..03fec5d243d 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0012.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0012.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0012", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0012", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0012_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0012_1" ], - "LoginSkip": true, - "Subject": "subject-0012", - "OpenIDConnectContext": { + "s": "subject-0012", + "oc": { "display": "display-0012" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0012", - "SessionID": "auth_session-0012", - "LoginVerifier": "verifier-0012", - "LoginCSRF": "csrf-0012", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 12, - "ACR": "acr-0012", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0012", - "Context": { + "ci": "client-0014", + "r": "http://request/0012", + "si": "auth_session-0012", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0012", + "ct": { "context": "0012" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0012", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0012", - "ConsentCSRF": "csrf-0012", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0012", + "cs": true, + "gs": [ "granted_scope-0012_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0012_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 12, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 12, + "ch": null, + "cx": null, + "st": { "session_id_token-0012": "0012" }, - "SessionAccessToken": { + "sa": { "session_access_token-0012": "0012" } } diff --git 
a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0013.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0013.json index b480e3813db..4bc236da1b6 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0013.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0013.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0013", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0013", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0013_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0013_1" ], - "LoginSkip": true, - "Subject": "subject-0013", - "OpenIDConnectContext": { + "s": "subject-0013", + "oc": { "display": "display-0013" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0013", - "SessionID": "auth_session-0013", - "LoginVerifier": "verifier-0013", - "LoginCSRF": "csrf-0013", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 13, - "ACR": "acr-0013", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0013", - "Context": { + "ci": "client-0014", + "r": "http://request/0013", + "si": "auth_session-0013", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0013", + "ct": { "context": "0013" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0013", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0013", - "ConsentCSRF": "csrf-0013", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0013", + "cs": true, + "gs": [ "granted_scope-0013_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0013_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 13, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 13, + "ch": null, + "cx": null, + "st": { "session_id_token-0013": "0013" }, - "SessionAccessToken": { + "sa": { "session_access_token-0013": "0013" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0014.json index 44e10ddac7d..c195bcafb08 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0014.json @@ -1,68 +1,42 @@ { - "ID": "challenge-0014", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0014", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0014_1" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0014_1" ], - "LoginSkip": true, - "Subject": "subject-0014", - "OpenIDConnectContext": { + "s": "subject-0014", + "oc": { "display": "display-0014" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0014", - "SessionID": "auth_session-0014", - "LoginVerifier": "verifier-0014", - "LoginCSRF": "csrf-0014", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 14, - "ACR": "acr-0014", - "AMR": [], - "ForceSubjectIdentifier": "force_subject_id-0014", - "Context": { + "ci": "client-0014", + "r": "http://request/0014", + 
"si": "auth_session-0014", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0014", + "ct": { "context": "0014" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0014", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0014", - "ConsentCSRF": "csrf-0014", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0014", + "cs": true, + "gs": [ "granted_scope-0014_1" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0014_1" ], - "ConsentRemember": true, - "ConsentRememberFor": 14, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 14, + "ch": "2022-02-15T22:20:21Z", + "cx": null, + "st": { "session_id_token-0014": "0014" }, - "SessionAccessToken": { + "sa": { "session_access_token-0014": "0014" } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0015.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0015.json index 67ab4b4ce1d..8b0fb41d19b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0015.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0015.json @@ -1,75 +1,50 @@ { - "ID": "challenge-0015", - "NID": "00000000-0000-0000-0000-000000000000", - "RequestedScope": [ + "i": "challenge-0015", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ "requested_scope-0015_1", "requested_scope-0015_2" ], - "RequestedAudience": [ + "ra": [ "requested_audience-0015_1", "requested_audience-0015_2" ], - "LoginSkip": true, - "Subject": "subject-0015", - "OpenIDConnectContext": { + "s": "subject-0015", + "oc": { "display": "display-0015" }, - "Client": null, - "ClientID": "", - "RequestURL": "http://request/0015", - "SessionID": "auth_session-0015", - "LoginVerifier": "verifier-0015", - "LoginCSRF": "csrf-0015", - "LoginInitializedAt": null, - "RequestedAt": "0001-01-01T00:00:00Z", - "State": 128, - "LoginRemember": true, - "LoginRememberFor": 15, - "ACR": "acr-0015", - "AMR": [ + "ci": "client-21", + "r": "http://request/0015", + "si": "auth_session-0015", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0015", + "am": [ "amr-0015-1", "amr-0015-2" ], - "ForceSubjectIdentifier": "force_subject_id-0015", - "Context": { + "ct": { "context": "0015" }, - "LoginWasUsed": true, - "LoginError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "LoginAuthenticatedAt": null, - "ConsentChallengeID": "challenge-0015", - "ConsentSkip": true, - "ConsentVerifier": "verifier-0015", - "ConsentCSRF": "csrf-0015", - "GrantedScope": [ + "la": null, + "dh": null, + "cc": "challenge-0015", + "cs": true, + "gs": [ "granted_scope-0015_1", "granted_scope-0015_2" ], - "GrantedAudience": [ + "ga": [ "granted_audience-0015_1", "granted_audience-0015_2" ], - "ConsentRemember": true, - "ConsentRememberFor": 15, - "ConsentHandledAt": null, - "ConsentWasHandled": true, - "ConsentError": { - "error": "", - "error_description": "", - "error_hint": "", - "status_code": 0, - "error_debug": "" - }, - "SessionIDToken": { + "ce": true, + "cf": 15, + "ch": "2022-02-15T22:20:21Z", + "cx": null, + "st": { "session_id_token-0015": "0015" }, - "SessionAccessToken": { + "sa": { "session_access_token-0015": "0015" } } diff --git 
a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0016.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0016.json new file mode 100644 index 00000000000..6a7a49cd79f --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0016.json @@ -0,0 +1,50 @@ +{ + "i": "challenge-0016", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ + "requested_scope-0016_1", + "requested_scope-0016_2" + ], + "ra": [ + "requested_audience-0016_1", + "requested_audience-0016_2" + ], + "s": "subject-0016", + "oc": { + "display": "display-0016" + }, + "ci": "client-21", + "r": "http://request/0016", + "si": "auth_session-0016", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0016", + "am": [ + "amr-0016-1", + "amr-0016-2" + ], + "ct": { + "context": "0016" + }, + "la": null, + "dh": null, + "cc": "challenge-0016", + "cs": true, + "gs": [ + "granted_scope-0016_1", + "granted_scope-0016_2" + ], + "ga": [ + "granted_audience-0016_1", + "granted_audience-0016_2" + ], + "ce": true, + "cf": 15, + "ch": "2022-02-15T22:20:21Z", + "cx": null, + "st": { + "session_id_token-0016": "0016" + }, + "sa": { + "session_access_token-0016": "0016" + } +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0017.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0017.json new file mode 100644 index 00000000000..22568a0b3b0 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0017.json @@ -0,0 +1,50 @@ +{ + "i": "challenge-0017", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ + "requested_scope-0016_1", + "requested_scope-0016_2" + ], + "ra": [ + "requested_audience-0016_1", + "requested_audience-0016_2" + ], + "s": "subject-0017", + "oc": { + "display": "display-0017" + }, + "ci": "client-21", + "r": "http://request/0017", + "si": "auth_session-0017", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0017", + "am": [ + "amr-0017-1", + "amr-0017-2" + ], + "ct": { + "context": "0017" + }, + "la": null, + "dh": null, + "cc": "challenge-0017", + "cs": true, + "gs": [ + "granted_scope-0016_1", + "granted_scope-0016_2" + ], + "ga": [ + "granted_audience-0016_1", + "granted_audience-0016_2" + ], + "ce": true, + "cf": 15, + "ch": "2022-02-15T22:20:21Z", + "cx": null, + "st": { + "session_id_token-0017": "0017" + }, + "sa": { + "session_access_token-0017": "0017" + } +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0018.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0018.json new file mode 100644 index 00000000000..bd97d3bf511 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0018.json @@ -0,0 +1,51 @@ +{ + "i": "challenge-0018", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ + "requested_scope-0018_1", + "requested_scope-0018_2" + ], + "ra": [ + "requested_audience-0018_1", + "requested_audience-0018_2" + ], + "s": "subject-0018", + "oc": { + "display": "display-0018" + }, + "ci": "client-21", + "r": "http://request/0018", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0018", + "am": [ + "amr-0018-1", + "amr-0018-2" + ], + "ct": { + "context": "0018" + }, + "la": null, + "di": "device-challenge-0018", + "dr": "device-request-id-0018", + "dh": null, + "cc": "challenge-0018", + "cs": true, + "gs": [ + "granted_scope-0018_1", + "granted_scope-0018_2" + ], + "ga": [ + "granted_audience-0018_1", + "granted_audience-0018_2" + ], + "ce": true, + "cf": 15, + "ch": "2025-05-16T12:24:00Z", + "cx": null, + "st": { + 
"session_id_token-0018": "0018" + }, + "sa": { + "session_access_token-0018": "0018" + } +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0019.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0019.json new file mode 100644 index 00000000000..4e1b5cdae36 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0019.json @@ -0,0 +1,37 @@ +{ + "i": "challenge-0019", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "rs": [ + "requested_scope-0019_1", + "requested_scope-0019_2" + ], + "s": "subject-0019", + "oc": { + "display": "display-0019" + }, + "ci": "client-21", + "r": "http://request/0019", + "ia": "2022-02-15T22:20:21Z", + "a": "acr-0019", + "ct": {}, + "la": null, + "di": "device-challenge-0019", + "dr": "device-request-id-0019", + "dh": null, + "cc": "challenge-0019", + "cs": true, + "gs": [ + "granted_scope-0019_1", + "granted_scope-0019_2" + ], + "ce": true, + "cf": 15, + "ch": "2025-05-16T12:24:00Z", + "cx": null, + "st": { + "session_id_token-0019": "0019" + }, + "sa": { + "session_access_token-0019": "0019" + } +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0020.json b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0020.json new file mode 100644 index 00000000000..7f73cd28764 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_flow/challenge-0020.json @@ -0,0 +1,14 @@ +{ + "i": "challenge-0020", + "n": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "oc": null, + "ia": "2025-12-02T11:05:31Z", + "ct": null, + "la": null, + "dh": null, + "cf": null, + "ch": null, + "cx": null, + "st": null, + "sa": null +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_jti_blacklist/sig-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_jti_blacklist/sig-0011.json index 8fa266f1740..43fb1a23ec7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_jti_blacklist/sig-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_jti_blacklist/sig-0011.json @@ -1,6 +1,6 @@ { "JTI": "", "ID": "sig-0011", - "Expiry": "0001-01-01T00:00:00Z", - "NID": "00000000-0000-0000-0000-000000000000" + "Expiry": "2022-02-15T22:20:22Z", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0009.json index 669ab25c23d..7681dc70e21 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0009.json @@ -4,5 +4,7 @@ "sid": "session_id-0009", "request_url": "http://request/0009", "rp_initiated": true, + "expires_at": null, + "requested_at": null, "client": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0010.json index 783139fc5a7..d1cb5f6aa61 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0010.json @@ -4,5 +4,7 @@ "sid": "session_id-0010", "request_url": "http://request/0010", "rp_initiated": true, + "expires_at": null, + "requested_at": null, "client": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0011.json 
b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0011.json index 2650d028e15..3c81d38cb47 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0011.json @@ -4,5 +4,7 @@ "sid": "session_id-0011", "request_url": "http://request/0011", "rp_initiated": true, + "expires_at": null, + "requested_at": null, "client": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0012.json b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0012.json index 8454181a885..67c839b88a0 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0012.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0012.json @@ -4,5 +4,7 @@ "sid": "session_id-0012", "request_url": "http://request/0012", "rp_initiated": true, + "expires_at": null, + "requested_at": null, "client": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0013.json b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0013.json index fdb8f04be53..f8b84db4b56 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0013.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0013.json @@ -4,5 +4,7 @@ "sid": "session_id-0013", "request_url": "http://request/0013", "rp_initiated": true, + "expires_at": null, + "requested_at": null, "client": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0014.json index 1f6e5ef98e9..c5194805b6a 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-0014.json @@ -4,5 +4,7 @@ "sid": "session_id-0014", "request_url": "http://request/0014", "rp_initiated": true, + "expires_at": null, + "requested_at": null, "client": null } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-20240916105610000001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-20240916105610000001.json new file mode 100644 index 00000000000..8b1841c9be6 --- /dev/null +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_logout_request/challenge-20240916105610000001.json @@ -0,0 +1,10 @@ +{ + "challenge": "challenge-20240916105610000001", + "subject": "subject-0014", + "sid": "session_id-0014", + "request_url": "http://request/0014", + "rp_initiated": true, + "expires_at": "2022-02-15T22:20:20Z", + "requested_at": "2022-02-15T22:20:20Z", + "client": null +} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0002_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0002_client-0014.json index ab7e745d86c..ec12c1e335e 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0002_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0002_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0002", "SubjectObfuscated": "subject_obfuscated-0002", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" 
} diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0003_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0003_client-0014.json index 13a5b74e09c..fe789f55654 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0003_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0003_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0003", "SubjectObfuscated": "subject_obfuscated-0003", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0004_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0004_client-0014.json index e4a97d806eb..28483077409 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0004_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0004_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0004", "SubjectObfuscated": "subject_obfuscated-0004", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0005_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0005_client-0014.json index 6cc8e44e230..6c91840d021 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0005_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0005_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0005", "SubjectObfuscated": "subject_obfuscated-0005", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0006_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0006_client-0014.json index 4db5a848732..0d99b6d4739 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0006_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0006_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0006", "SubjectObfuscated": "subject_obfuscated-0006", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0007_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0007_client-0014.json index 13ece010731..5b17dca31dd 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0007_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0007_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0007", "SubjectObfuscated": "subject_obfuscated-0007", - 
"NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0008_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0008_client-0014.json index 87657bdf804..5d6667d21bd 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0008_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0008_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0008", "SubjectObfuscated": "subject_obfuscated-0008", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0009_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0009_client-0014.json index 8ecd0640ace..3b1f12ede50 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0009_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0009_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0009", "SubjectObfuscated": "subject_obfuscated-0009", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0010_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0010_client-0014.json index 87390b24fd7..71b7a5d85bb 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0010_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0010_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0010", "SubjectObfuscated": "subject_obfuscated-0010", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0011_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0011_client-0014.json index 035f9a767fb..17d82e24495 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0011_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0011_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0011", "SubjectObfuscated": "subject_obfuscated-0011", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0012_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0012_client-0014.json index f0300ca3eb9..2263a85f868 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0012_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0012_client-0014.json @@ -2,5 +2,5 @@ "ClientID": 
"client-0014", "Subject": "subject-0012", "SubjectObfuscated": "subject_obfuscated-0012", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0013_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0013_client-0014.json index d0455c82fdc..688cf37f6e7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0013_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0013_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0013", "SubjectObfuscated": "subject_obfuscated-0013", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0014_client-0014.json b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0014_client-0014.json index 56ebd6966ea..215b7b19b78 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0014_client-0014.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_obfuscated_authentication_session/subject-0014_client-0014.json @@ -2,5 +2,5 @@ "ClientID": "client-0014", "Subject": "subject-0014", "SubjectObfuscated": "subject_obfuscated-0014", - "NID": "00000000-0000-0000-0000-000000000000" + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe" } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0001.json index e0d857ce36c..f730be1e071 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0001.json @@ -1,13 +1,13 @@ { "ID": "sig-0001", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0001", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0001", "GrantedScope": "granted_scope-0001", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0002.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0002.json index 653f7ca5b18..df64c413030 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0002.json @@ -1,13 +1,13 @@ { "ID": "sig-0002", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0002", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0002", "GrantedScope": "granted_scope-0002", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0003.json index 56c5eabf419..a7e283bca1b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0003.json @@ -1,13 +1,13 @@ { "ID": "sig-0003", - "NID": 
"00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0003", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0003", "GrantedScope": "granted_scope-0003", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0004.json index 5eae151e448..16998b3daba 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0004.json @@ -1,13 +1,13 @@ { "ID": "sig-0004", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0004", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0004", "GrantedScope": "granted_scope-0004", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0005.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0005.json index 90ff903a389..94c8b09da3f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0005.json @@ -1,13 +1,13 @@ { "ID": "sig-0005", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0005", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0005", "GrantedScope": "granted_scope-0005", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0006.json index d4ec51f0029..dd40cf69613 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0006.json @@ -1,13 +1,13 @@ { "ID": "sig-0006", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0006", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0006", "GrantedScope": "granted_scope-0006", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0007.json index f07b8acdc49..6f094162f58 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0007.json @@ -1,13 +1,13 @@ { "ID": "sig-0007", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0007", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0007", "GrantedScope": "granted_scope-0007", "RequestedAudience": "requested_audience-0007", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0008.json 
b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0008.json index 637ce41ae43..b9ec4325c56 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0008.json @@ -1,13 +1,13 @@ { "ID": "sig-0008", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0008", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0008", "GrantedScope": "granted_scope-0008", "RequestedAudience": "requested_audience-0008", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0009.json index c8a7d675df2..84524888e21 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0009.json @@ -1,13 +1,13 @@ { "ID": "sig-0009", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0009", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0009", "GrantedScope": "granted_scope-0009", "RequestedAudience": "requested_audience-0009", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0010.json index 8f811a240c4..3eb52254da7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0010.json @@ -1,13 +1,13 @@ { "ID": "sig-0010", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0010", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0010", "GrantedScope": "granted_scope-0010", "RequestedAudience": "requested_audience-0010", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0011.json index d5b80241925..8c57b54fb62 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-0011.json @@ -1,13 +1,13 @@ { "ID": "sig-0011", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0011", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000-01.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000-01.json index 90a33971681..1530062d576 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000-01.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000-01.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000-01", - "NID": 
"00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000.json b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000.json index a1a8516f246..7b4d42c798b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_oidc/sig-20201110104000.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0003.json index 56c5eabf419..a7e283bca1b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0003.json @@ -1,13 +1,13 @@ { "ID": "sig-0003", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0003", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0003", "GrantedScope": "granted_scope-0003", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0004.json index 5eae151e448..16998b3daba 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0004.json @@ -1,13 +1,13 @@ { "ID": "sig-0004", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0004", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0004", "GrantedScope": "granted_scope-0004", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0005.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0005.json index 90ff903a389..94c8b09da3f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0005.json @@ -1,13 +1,13 @@ { "ID": "sig-0005", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0005", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0005", "GrantedScope": "granted_scope-0005", "RequestedAudience": "", 
diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0006.json index d4ec51f0029..dd40cf69613 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0006.json @@ -1,13 +1,13 @@ { "ID": "sig-0006", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0006", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0006", "GrantedScope": "granted_scope-0006", "RequestedAudience": "", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0007.json index f07b8acdc49..6f094162f58 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0007.json @@ -1,13 +1,13 @@ { "ID": "sig-0007", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0007", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0007", "GrantedScope": "granted_scope-0007", "RequestedAudience": "requested_audience-0007", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0008.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0008.json index 637ce41ae43..b9ec4325c56 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0008.json @@ -1,13 +1,13 @@ { "ID": "sig-0008", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0008", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0008", "GrantedScope": "granted_scope-0008", "RequestedAudience": "requested_audience-0008", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0009.json index c8a7d675df2..84524888e21 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0009.json @@ -1,13 +1,13 @@ { "ID": "sig-0009", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0009", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0009", "GrantedScope": "granted_scope-0009", "RequestedAudience": "requested_audience-0009", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0010.json index 8f811a240c4..3eb52254da7 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0010.json @@ -1,13 +1,13 @@ { "ID": "sig-0010", - "NID": "00000000-0000-0000-0000-000000000000", + 
"NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0010", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0010", "GrantedScope": "granted_scope-0010", "RequestedAudience": "requested_audience-0010", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0011.json index d5b80241925..8c57b54fb62 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-0011.json @@ -1,13 +1,13 @@ { "ID": "sig-0011", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0011", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000-01.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000-01.json index 90a33971681..1530062d576 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000-01.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000-01.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000-01", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000.json b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000.json index a1a8516f246..7b4d42c798b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_pkce/sig-20201110104000.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0001.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0001.json index e0d857ce36c..c543dabd590 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0001.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0001.json @@ -1,13 +1,13 @@ { "ID": "sig-0001", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0001", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + 
"Client": "client-0014", "Scopes": "scope-0001", "GrantedScope": "granted_scope-0001", "RequestedAudience": "", @@ -16,5 +16,17 @@ "Subject": "", "Active": true, "Session": "c2Vzc2lvbi0wMDAx", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0002.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0002.json index 653f7ca5b18..3f269712c34 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0002.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0002.json @@ -1,13 +1,13 @@ { "ID": "sig-0002", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0002", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0002", "GrantedScope": "granted_scope-0002", "RequestedAudience": "", @@ -16,5 +16,17 @@ "Subject": "subject-0002", "Active": true, "Session": "c2Vzc2lvbi0wMDAy", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0003.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0003.json index 56c5eabf419..53937ce9119 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0003.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0003.json @@ -1,13 +1,13 @@ { "ID": "sig-0003", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0003", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0003", "GrantedScope": "granted_scope-0003", "RequestedAudience": "", @@ -16,5 +16,17 @@ "Subject": "subject-0003", "Active": true, "Session": "c2Vzc2lvbi0wMDAz", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0004.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0004.json index 5eae151e448..01a1695e51d 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0004.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0004.json @@ -1,13 +1,13 @@ { "ID": "sig-0004", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0004", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:21Z", + "Client": "client-0014", "Scopes": "scope-0004", "GrantedScope": "granted_scope-0004", "RequestedAudience": "", @@ -16,5 +16,17 @@ "Subject": "subject-0004", "Active": false, "Session": "c2Vzc2lvbi0wMDA0", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false 
+ }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0005.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0005.json index 90ff903a389..8df59e12e46 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0005.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0005.json @@ -1,13 +1,13 @@ { "ID": "sig-0005", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0005", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0005", "GrantedScope": "granted_scope-0005", "RequestedAudience": "", @@ -16,5 +16,17 @@ "Subject": "subject-0005", "Active": false, "Session": "c2Vzc2lvbi0wMDA1", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0006.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0006.json index d4ec51f0029..e87e173969b 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0006.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0006.json @@ -1,13 +1,13 @@ { "ID": "sig-0006", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0006", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0006", "GrantedScope": "granted_scope-0006", "RequestedAudience": "", @@ -16,5 +16,17 @@ "Subject": "subject-0006", "Active": false, "Session": "c2Vzc2lvbi0wMDA2", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0007.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0007.json index f07b8acdc49..b037f5f8f71 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0007.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0007.json @@ -1,13 +1,13 @@ { "ID": "sig-0007", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0007", "ConsentChallenge": { "String": "", "Valid": false }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0014", "Scopes": "scope-0007", "GrantedScope": "granted_scope-0007", "RequestedAudience": "requested_audience-0007", @@ -16,5 +16,17 @@ "Subject": "subject-0007", "Active": false, "Session": "c2Vzc2lvbi0wMDA3", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0008.json 
b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0008.json index 637ce41ae43..e422b50e1f1 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0008.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0008.json @@ -1,13 +1,13 @@ { "ID": "sig-0008", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0008", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0008", "GrantedScope": "granted_scope-0008", "RequestedAudience": "requested_audience-0008", @@ -16,5 +16,17 @@ "Subject": "subject-0008", "Active": false, "Session": "c2Vzc2lvbi0wMDA4", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0009.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0009.json index c8a7d675df2..6a43b026bcd 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0009.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0009.json @@ -1,13 +1,13 @@ { "ID": "sig-0009", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0009", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0009", "GrantedScope": "granted_scope-0009", "RequestedAudience": "requested_audience-0009", @@ -16,5 +16,17 @@ "Subject": "subject-0009", "Active": false, "Session": "c2Vzc2lvbi0wMDA5", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0010.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0010.json index 8f811a240c4..617f3feb77f 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0010.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0010.json @@ -1,13 +1,13 @@ { "ID": "sig-0010", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0010", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0010", "GrantedScope": "granted_scope-0010", "RequestedAudience": "requested_audience-0010", @@ -16,5 +16,17 @@ "Subject": "subject-0010", "Active": false, "Session": "c2Vzc2lvbi0wMDEw", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0011.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0011.json index d5b80241925..451de29f220 100644 --- 
a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0011.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-0011.json @@ -1,13 +1,13 @@ { "ID": "sig-0011", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-0011", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", @@ -16,5 +16,17 @@ "Subject": "subject-0011", "Active": false, "Session": "c2Vzc2lvbi0wMDEx", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000-01.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000-01.json index 90a33971681..f641de4a704 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000-01.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000-01.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000-01", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", @@ -16,5 +16,17 @@ "Subject": "subject-0011", "Active": false, "Session": "c2Vzc2lvbi0wMDEx", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000.json index a1a8516f246..aaedc9172a6 100644 --- a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000.json +++ b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20201110104000.json @@ -1,13 +1,13 @@ { "ID": "sig-20201110104000", - "NID": "00000000-0000-0000-0000-000000000000", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", "Request": "req-20201110104000", "ConsentChallenge": { "String": "challenge-0014", "Valid": true }, - "RequestedAt": "0001-01-01T00:00:00Z", - "Client": "", + "RequestedAt": "2022-02-15T22:20:22Z", + "Client": "client-0001", "Scopes": "scope-0011", "GrantedScope": "granted_scope-0011", "RequestedAudience": "requested_audience-0011", @@ -16,5 +16,17 @@ "Subject": "subject-0011", "Active": false, "Session": "c2Vzc2lvbi0wMDEx", - "Table": "" + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 0, + "Valid": false + } } diff --git a/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20250513132142.json b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20250513132142.json new file mode 100644 index 00000000000..819d1aa88b6 --- /dev/null +++ 
b/persistence/sql/migratest/fixtures/hydra_oauth2_refresh/sig-20250513132142.json @@ -0,0 +1,32 @@ +{ + "ID": "sig-20250513132142", + "NID": "24704dcb-0ab9-4bfa-a84c-405932ae53fe", + "Request": "req-20250513132142", + "ConsentChallenge": { + "String": "challenge-0014", + "Valid": true + }, + "RequestedAt": "2025-05-13T13:21:42Z", + "Client": "client-0014", + "Scopes": "scope", + "GrantedScope": "granted_scope", + "RequestedAudience": "requested_audience", + "GrantedAudience": "granted_audience", + "Form": "form_data", + "Subject": "subject-0014", + "Active": false, + "Session": "c2Vzc2lvbl9pZC0wMDE0", + "Table": "", + "FirstUsedAt": { + "Time": "0001-01-01T00:00:00Z", + "Valid": false + }, + "AccessTokenSignature": { + "String": "", + "Valid": false + }, + "UsedTimes": { + "Int32": 1, + "Valid": true + } +} diff --git a/persistence/sql/migratest/migration_test.go b/persistence/sql/migratest/migration_test.go index 6ca2cf1d543..ca0a96e7d01 100644 --- a/persistence/sql/migratest/migration_test.go +++ b/persistence/sql/migratest/migration_test.go @@ -4,43 +4,36 @@ package migratest import ( - "context" "encoding/json" "fmt" "os" "path/filepath" + "regexp" + "strings" + "sync" "testing" "time" - "github.com/ory/hydra/internal" - "github.com/ory/x/contextx" - "github.com/bradleyjkemp/cupaloy/v2" "github.com/fatih/structs" "github.com/gofrs/uuid" - "github.com/instana/testify/assert" "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" - "github.com/gobuffalo/pop/v6" - + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/flow" + testhelpersuuid "github.com/ory/hydra/v2/internal/testhelpers/uuid" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/pop/v6" + "github.com/ory/x/dbal" "github.com/ory/x/logrusx" "github.com/ory/x/networkx" - "github.com/ory/x/sqlxx" - - "github.com/ory/hydra/flow" - testhelpersuuid "github.com/ory/hydra/internal/testhelpers/uuid" - "github.com/ory/hydra/persistence/sql" "github.com/ory/x/popx" - "github.com/ory/x/sqlcon/dockertest" - - "github.com/stretchr/testify/require" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" ) func snapshotFor(paths ...string) *cupaloy.Config { @@ -52,252 +45,251 @@ func snapshotFor(paths ...string) *cupaloy.Config { ) } -func CompareWithFixture(t *testing.T, actual interface{}, prefix string, id string) { +func compareWithFixture(t *testing.T, actual interface{}, prefix string, id string) { s := snapshotFor("fixtures", prefix) actualJSON, err := json.MarshalIndent(actual, "", " ") - require.NoError(t, err) + require.NoErrorf(t, err, "%+v", actual) assert.NoError(t, s.SnapshotWithName(id, actualJSON)) } func TestMigrations(t *testing.T) { - //pop.Debug = true - connections := make(map[string]*pop.Connection, 1) + connections := make(map[string]*pop.Connection, 4) - if testing.Short() { - reg := 
internal.NewMockedRegistry(t, &contextx.Default{}) - require.NoError(t, reg.Persister().MigrateUp(context.Background())) - c := reg.Persister().Connection(context.Background()) + { + c, err := pop.NewConnection(&pop.ConnectionDetails{URL: dbal.NewSQLiteTestDatabase(t)}) + require.NoError(t, err) + require.NoError(t, c.Open()) connections["sqlite"] = c } if !testing.Short() { - dockertest.Parallel([]func(){ - func() { - connections["postgres"] = dockertest.ConnectToTestPostgreSQLPop(t) - }, - func() { - connections["mysql"] = dockertest.ConnectToTestMySQLPop(t) - }, - func() { - connections["cockroach"] = dockertest.ConnectToTestCockroachDBPop(t) - }, - }) + wg := sync.WaitGroup{} + for db, dsn := range map[string]string{ + "postgres": dockertest.RunTestPostgreSQL(t), + "mysql": dockertest.RunTestMySQL(t), + "cockroach": dockertest.RunTestCockroachDBWithVersion(t, "latest-v25.1"), + } { + wg.Add(1) + go func() { + defer wg.Done() + + dbName := "testdb" + strings.ReplaceAll(uuid.Must(uuid.NewV4()).String(), "-", "") + t.Logf("using %s database %q", db, dbName) + + require.EventuallyWithT(t, func(t *assert.CollectT) { + c, err := pop.NewConnection(&pop.ConnectionDetails{URL: dsn}) + require.NoError(t, err) + require.NoError(t, c.Open()) + require.NoError(t, c.RawQuery("CREATE DATABASE "+dbName).Exec()) + dsn = regexp.MustCompile(`/[a-z0-9]+\?`).ReplaceAllString(dsn, "/"+dbName+"?") + require.NoError(t, c.Close()) + + c, err = pop.NewConnection(&pop.ConnectionDetails{URL: dsn}) + require.NoError(t, err) + require.NoError(t, c.Open()) + connections[db] = c + }, 20*time.Second, 100*time.Millisecond) + t.Cleanup(func() { + connections[db].Close() //nolint:errcheck + }) + }() + } + wg.Wait() } - var test = func(db string, c *pop.Connection) func(t *testing.T) { - return func(t *testing.T) { - ctx := context.Background() - x.CleanSQLPop(t, c) - url := c.URL() - - // workaround for https://github.com/gobuffalo/pop/issues/538 - if db == "mysql" { - url = "mysql://" + url - } else if db == "sqlite" { - url = "sqlite://" + url - } + for db, c := range connections { + t.Run("database="+db, func(t *testing.T) { + t.Parallel() l := logrusx.New("", "", logrusx.ForceLevel(logrus.DebugLevel)) tm, err := popx.NewMigrationBox( - os.DirFS("../migrations"), - popx.NewMigrator(c, l, nil, 1*time.Minute), + sql.Migrations, + c, l, popx.WithTestdata(t, os.DirFS("./testdata"))) - //tm := popx.NewTestMigrator(t, c, os.DirFS("../migrations"), os.DirFS("./testdata"), l) require.NoError(t, err) - require.NoError(t, tm.Up(ctx)) + require.NoError(t, tm.Up(t.Context())) t.Run("suite=fixtures", func(t *testing.T) { t.Run("case=hydra_client", func(t *testing.T) { cs := []client.Client{} require.NoError(t, c.All(&cs)) - require.Equal(t, 18, len(cs)) + require.Len(t, cs, 20) for _, c := range cs { - require.False(t, c.CreatedAt.IsZero()) - require.False(t, c.UpdatedAt.IsZero()) - c.CreatedAt = time.Time{} // Some CreatedAt and UpdatedAt values are generated during migrations so we zero them in the fixtures - c.UpdatedAt = time.Time{} - testhelpersuuid.AssertUUID(t, &c.ID) - testhelpersuuid.AssertUUID(t, &c.NID) - c.ID = uuid.Nil - c.NID = uuid.Nil - CompareWithFixture(t, structs.Map(c), "hydra_client", c.LegacyClientID) + if s := time.Since(c.CreatedAt); s > 0 && s < 10*time.Minute { + // Some are backfilled with the current time + assert.WithinDuration(t, c.CreatedAt, c.UpdatedAt, 5*time.Second) + c.CreatedAt, c.UpdatedAt = time.Time{}, time.Time{} + } + compareWithFixture(t, structs.Map(c), "hydra_client", c.ID) 
} }) t.Run("case=hydra_jwk", func(t *testing.T) { js := []jwk.SQLData{} require.NoError(t, c.All(&js)) - require.Equal(t, 7, len(js)) + require.Len(t, js, 7) for _, j := range js { - testhelpersuuid.AssertUUID(t, &j.ID) - testhelpersuuid.AssertUUID(t, &j.NID) + testhelpersuuid.AssertUUID(t, j.ID) j.ID = uuid.Nil // Some IDs are generated at migration time so we zero them in the fixtures - j.NID = uuid.Nil - require.False(t, j.CreatedAt.IsZero()) - j.CreatedAt = time.Time{} - CompareWithFixture(t, j, "hydra_jwk", j.KID) + if s := time.Since(j.CreatedAt); s > 0 && s < 10*time.Minute { + // Some are backfilled with the current time + j.CreatedAt = time.Time{} + } + compareWithFixture(t, j, "hydra_jwk", j.KID) } }) - flows := []flow.Flow{} - require.NoError(t, c.All(&flows)) - require.Equal(t, 15, len(flows)) - t.Run("case=hydra_oauth2_flow", func(t *testing.T) { - for _, f := range flows { - fixturizeFlow(t, &f) - CompareWithFixture(t, f, "hydra_oauth2_flow", f.ID) + // we first load the "full" flows + fullFlows := []flow.Flow{} + require.NoError(t, c.Where("client_id IS NOT NULL").All(&fullFlows)) + require.Len(t, fullFlows, 19) + + for _, f := range fullFlows { + assert.NotNil(t, f.Client) + f.Client = nil // clients are loaded eagerly, nil them for snapshot comparison + compareWithFixture(t, f, "hydra_oauth2_flow", f.ID) + } + + // then the "reduced" flows + reducedFlows := []flow.Flow{} + require.NoError(t, c.Select("login_challenge", "nid", "requested_at").Where("client_id IS NULL").All(&reducedFlows)) + require.Len(t, reducedFlows, 1) + + for _, f := range reducedFlows { + compareWithFixture(t, f, "hydra_oauth2_flow", f.ID) } }) t.Run("case=hydra_oauth2_authentication_session", func(t *testing.T) { - ss := []consent.LoginSession{} - c.All(&ss) - require.Equal(t, 15, len(ss)) + ss := []flow.LoginSession{} + require.NoError(t, c.All(&ss)) + require.Len(t, ss, 17) for _, s := range ss { - testhelpersuuid.AssertUUID(t, &s.NID) - s.NID = uuid.Nil - s.AuthenticatedAt = sqlxx.NullTime(time.Time{}) - CompareWithFixture(t, s, "hydra_oauth2_authentication_session", s.ID) + compareWithFixture(t, s, "hydra_oauth2_authentication_session", s.ID) } }) t.Run("case=hydra_oauth2_obfuscated_authentication_session", func(t *testing.T) { ss := []consent.ForcedObfuscatedLoginSession{} - c.All(&ss) - require.Equal(t, 13, len(ss)) + require.NoError(t, c.All(&ss)) + require.Len(t, ss, 13) for _, s := range ss { - testhelpersuuid.AssertUUID(t, &s.NID) - s.NID = uuid.Nil - CompareWithFixture(t, s, "hydra_oauth2_obfuscated_authentication_session", fmt.Sprintf("%s_%s", s.Subject, s.ClientID)) + compareWithFixture(t, s, "hydra_oauth2_obfuscated_authentication_session", fmt.Sprintf("%s_%s", s.Subject, s.ClientID)) } }) t.Run("case=hydra_oauth2_logout_request", func(t *testing.T) { - lrs := []consent.LogoutRequest{} - c.All(&lrs) - require.Equal(t, 6, len(lrs)) + lrs := []flow.LogoutRequest{} + require.NoError(t, c.All(&lrs)) + require.Len(t, lrs, 7) for _, s := range lrs { - testhelpersuuid.AssertUUID(t, &s.NID) - s.NID = uuid.Nil - s.Client = nil - CompareWithFixture(t, s, "hydra_oauth2_logout_request", s.ID) + assert.NotNil(t, s.Client) + s.Client = nil // clients are loaded eagerly, nil them for snapshot comparison + compareWithFixture(t, s, "hydra_oauth2_logout_request", s.ID) } }) t.Run("case=hydra_oauth2_jti_blacklist", func(t *testing.T) { bjtis := []oauth2.BlacklistedJTI{} - c.All(&bjtis) - require.Equal(t, 1, len(bjtis)) + require.NoError(t, c.All(&bjtis)) + require.Len(t, bjtis, 1) for _, bjti := range 
bjtis { - testhelpersuuid.AssertUUID(t, &bjti.NID) - bjti.NID = uuid.Nil - bjti.Expiry = time.Time{} - CompareWithFixture(t, bjti, "hydra_oauth2_jti_blacklist", bjti.ID) + compareWithFixture(t, bjti, "hydra_oauth2_jti_blacklist", bjti.ID) } }) t.Run("case=hydra_oauth2_access", func(t *testing.T) { as := []sql.OAuth2RequestSQL{} - c.RawQuery("SELECT * FROM hydra_oauth2_access").All(&as) - require.Equal(t, 13, len(as)) + require.NoError(t, c.RawQuery("SELECT * FROM hydra_oauth2_access").All(&as)) + require.Len(t, as, 13) for _, a := range as { - testhelpersuuid.AssertUUID(t, &a.NID) - a.NID = uuid.Nil - require.False(t, a.RequestedAt.IsZero()) - a.RequestedAt = time.Time{} - require.NotZero(t, a.Client) - a.Client = "" - CompareWithFixture(t, a, "hydra_oauth2_access", a.ID) + compareWithFixture(t, a, "hydra_oauth2_access", a.ID) } }) t.Run("case=hydra_oauth2_refresh", func(t *testing.T) { - rs := []sql.OAuth2RequestSQL{} - c.RawQuery("SELECT * FROM hydra_oauth2_refresh").All(&rs) - require.Equal(t, 13, len(rs)) + rs := []sql.OAuth2RefreshTable{} + require.NoError(t, c.All(&rs)) + require.Len(t, rs, 14) for _, r := range rs { - testhelpersuuid.AssertUUID(t, &r.NID) - r.NID = uuid.Nil - require.False(t, r.RequestedAt.IsZero()) - r.RequestedAt = time.Time{} - require.NotZero(t, r.Client) - r.Client = "" - CompareWithFixture(t, r, "hydra_oauth2_refresh", r.ID) + compareWithFixture(t, r, "hydra_oauth2_refresh", r.ID) } }) t.Run("case=hydra_oauth2_code", func(t *testing.T) { cs := []sql.OAuth2RequestSQL{} - c.RawQuery("SELECT * FROM hydra_oauth2_code").All(&cs) - require.Equal(t, 13, len(cs)) + require.NoError(t, c.RawQuery("SELECT * FROM hydra_oauth2_code").All(&cs)) + require.Len(t, cs, 13) for _, c := range cs { - testhelpersuuid.AssertUUID(t, &c.NID) - c.NID = uuid.Nil - require.False(t, c.RequestedAt.IsZero()) - c.RequestedAt = time.Time{} - require.NotZero(t, c.Client) - c.Client = "" - CompareWithFixture(t, c, "hydra_oauth2_code", c.ID) + compareWithFixture(t, c, "hydra_oauth2_code", c.ID) } }) t.Run("case=hydra_oauth2_oidc", func(t *testing.T) { os := []sql.OAuth2RequestSQL{} - c.RawQuery("SELECT * FROM hydra_oauth2_oidc").All(&os) - require.Equal(t, 13, len(os)) + require.NoError(t, c.RawQuery("SELECT * FROM hydra_oauth2_oidc").All(&os)) + require.Len(t, os, 13) for _, o := range os { - testhelpersuuid.AssertUUID(t, &o.NID) - o.NID = uuid.Nil - require.False(t, o.RequestedAt.IsZero()) - o.RequestedAt = time.Time{} - require.NotZero(t, o.Client) - o.Client = "" - CompareWithFixture(t, o, "hydra_oauth2_oidc", o.ID) + compareWithFixture(t, o, "hydra_oauth2_oidc", o.ID) } }) t.Run("case=hydra_oauth2_pkce", func(t *testing.T) { ps := []sql.OAuth2RequestSQL{} - c.RawQuery("SELECT * FROM hydra_oauth2_pkce").All(&ps) - require.Equal(t, 11, len(ps)) + require.NoError(t, c.RawQuery("SELECT * FROM hydra_oauth2_pkce").All(&ps)) + require.Len(t, ps, 11) for _, p := range ps { - testhelpersuuid.AssertUUID(t, &p.NID) - p.NID = uuid.Nil - require.False(t, p.RequestedAt.IsZero()) - p.RequestedAt = time.Time{} - require.NotZero(t, p.Client) - p.Client = "" - CompareWithFixture(t, p, "hydra_oauth2_pkce", p.ID) + compareWithFixture(t, p, "hydra_oauth2_pkce", p.ID) + } + }) + + t.Run("case=hydra_oauth2_device_auth_codes", func(t *testing.T) { + rs := []sql.DeviceRequestSQL{} + require.NoError(t, c.All(&rs)) + require.Len(t, rs, 1) + + for _, r := range rs { + compareWithFixture(t, r, "hydra_oauth2_device_auth_codes", r.ID) } }) t.Run("case=networks", func(t *testing.T) { ns := []networkx.Network{} - 
c.RawQuery("SELECT * FROM networks").All(&ns) - require.Equal(t, 1, len(ns)) + require.NoError(t, c.RawQuery("SELECT * FROM networks").All(&ns)) + require.Len(t, ns, 1) for _, n := range ns { - testhelpersuuid.AssertUUID(t, &n.ID) + testhelpersuuid.AssertUUID(t, n.ID) require.NotZero(t, n.CreatedAt) require.NotZero(t, n.UpdatedAt) } }) }) - } - } - for db, c := range connections { - t.Run(fmt.Sprintf("database=%s", db), test(db, c)) - x.CleanSQLPop(t, c) - require.NoError(t, c.Close()) + t.Run("down", func(t *testing.T) { + status, err := tm.Status(t.Context()) + require.NoError(t, err) + + // there are no proper down migrations from v2 to v1 + var stepsDown int + for i := range status { + if status[len(status)-1-i].Version == "20220210000001000000" { + stepsDown = i + break + } + } + + assert.NoError(t, tm.Down(t.Context(), stepsDown)) + }) + }) } } diff --git a/persistence/sql/migratest/testdata/20150101000001_testdata.sql b/persistence/sql/migratest/testdata/20150101000001_testdata.sql index 996bb39c89c..9350d49b299 100644 --- a/persistence/sql/migratest/testdata/20150101000001_testdata.sql +++ b/persistence/sql/migratest/testdata/20150101000001_testdata.sql @@ -1 +1,2 @@ --- INSERT INTO networks (id, created_at, updated_at) VALUES ('24704dcb-0ab9-4bfa-a84c-405932ae53fe', '2013-10-07 08:23:19', '2013-10-07 08:23:19'); +-- set a constant nid for stable snapshots +UPDATE networks SET id = '24704dcb-0ab9-4bfa-a84c-405932ae53fe'; diff --git a/persistence/sql/migratest/testdata/20190100000012_testdata.sql b/persistence/sql/migratest/testdata/20190100000012_testdata.sql index 764e9030816..45929badf85 100644 --- a/persistence/sql/migratest/testdata/20190100000012_testdata.sql +++ b/persistence/sql/migratest/testdata/20190100000012_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_client (pk, id, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, audience, created_at, updated_at) VALUES -(0012, 'client-0012', 'Client 0012', 'secret-0012', 'http://redirect/0012_1', 'grant-0012_1', 'response-0012_1', 'scope-0012', 'owner-0012', 'http://policy/0012', 'http://tos/0012', 'http://client/0012', 'http://logo/0012', 'contact-0012_1', 0, 'http://sector_id/0012', '', 'http://jwks/0012', 'http://request/0012_1', 'token_auth-0012', 'r_alg-0012', 'u_alg-0012', 'subject-0012', 'http://cors/0012_1', 'autdience-0012_1', now(), now()); +(0012, 'client-0012', 'Client 0012', 'secret-0012', 'http://redirect/0012_1', 'grant-0012_1', 'response-0012_1', 'scope-0012', 'owner-0012', 'http://policy/0012', 'http://tos/0012', 'http://client/0012', 'http://logo/0012', 'contact-0012_1', 0, 'http://sector_id/0012', '', 'http://jwks/0012', 'http://request/0012_1', 'token_auth-0012', 'r_alg-0012', 'u_alg-0012', 'subject-0012', 'http://cors/0012_1', 'autdience-0012_1', '2022-02-15 22:20:20', '2022-02-15 22:20:20'); diff --git a/persistence/sql/migratest/testdata/20190100000013_testdata.sql b/persistence/sql/migratest/testdata/20190100000013_testdata.sql index ad5c613a9c8..82ab8e29920 100644 --- a/persistence/sql/migratest/testdata/20190100000013_testdata.sql +++ b/persistence/sql/migratest/testdata/20190100000013_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_client (pk, id, client_name, client_secret, redirect_uris, grant_types, response_types, 
scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required) VALUES -(0013, 'client-0013', 'Client 0013', 'secret-0013', 'http://redirect/0013_1', 'grant-0013_1', 'response-0013_1', 'scope-0013', 'owner-0013', 'http://policy/0013', 'http://tos/0013', 'http://client/0013', 'http://logo/0013', 'contact-0013_1', 0, 'http://sector_id/0013', '', 'http://jwks/0013', 'http://request/0013_1', 'token_auth-0013', 'r_alg-0013', 'u_alg-0013', 'subject-0013', 'http://cors/0013_1', 'autdience-0013_1', now(), now(), 'http://front_logout/0013', true, 'http://post_redirect/0013_1', 'http://back_logout/0013', true); +(0013, 'client-0013', 'Client 0013', 'secret-0013', 'http://redirect/0013_1', 'grant-0013_1', 'response-0013_1', 'scope-0013', 'owner-0013', 'http://policy/0013', 'http://tos/0013', 'http://client/0013', 'http://logo/0013', 'contact-0013_1', 0, 'http://sector_id/0013', '', 'http://jwks/0013', 'http://request/0013_1', 'token_auth-0013', 'r_alg-0013', 'u_alg-0013', 'subject-0013', 'http://cors/0013_1', 'autdience-0013_1', '2022-02-15 22:20:20', '2022-02-15 22:20:20', 'http://front_logout/0013', true, 'http://post_redirect/0013_1', 'http://back_logout/0013', true); diff --git a/persistence/sql/migratest/testdata/20190100000014_testdata.sql b/persistence/sql/migratest/testdata/20190100000014_testdata.sql index bbabe7a19f4..a6b97dc62da 100644 --- a/persistence/sql/migratest/testdata/20190100000014_testdata.sql +++ b/persistence/sql/migratest/testdata/20190100000014_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_client (pk, id, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata) VALUES -(0014, 'client-0014', 'Client 0014', 'secret-0014', 'http://redirect/0014_1', 'grant-0014_1', 'response-0014_1', 'scope-0014', 'owner-0014', 'http://policy/0014', 'http://tos/0014', 'http://client/0014', 'http://logo/0014', 'contact-0014_1', 0, 'http://sector_id/0014', '', 'http://jwks/0014', 'http://request/0014_1', 'token_auth-0014', 'r_alg-0014', 'u_alg-0014', 'subject-0014', 'http://cors/0014_1', 'autdience-0014_1', now(), now(), 'http://front_logout/0014', true, 'http://post_redirect/0014_1', 'http://back_logout/0014', true, '{"migration": "0014"}'); +(0014, 'client-0014', 'Client 0014', 'secret-0014', 'http://redirect/0014_1', 'grant-0014_1', 'response-0014_1', 'scope-0014', 'owner-0014', 'http://policy/0014', 'http://tos/0014', 'http://client/0014', 'http://logo/0014', 'contact-0014_1', 0, 'http://sector_id/0014', '', 'http://jwks/0014', 'http://request/0014_1', 'token_auth-0014', 'r_alg-0014', 'u_alg-0014', 'subject-0014', 'http://cors/0014_1', 'autdience-0014_1', '2022-02-15 22:20:21', '2022-02-15 22:20:21', 'http://front_logout/0014', true, 
'http://post_redirect/0014_1', 'http://back_logout/0014', true, '{"migration": "0014"}'); diff --git a/persistence/sql/migratest/testdata/20190200000002_testdata.sql b/persistence/sql/migratest/testdata/20190200000002_testdata.sql index cf569e2aabd..41220f53df1 100644 --- a/persistence/sql/migratest/testdata/20190200000002_testdata.sql +++ b/persistence/sql/migratest/testdata/20190200000002_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_jwk (sid, kid, version, keydata, created_at) VALUES -('sid-0002', 'kid-0002', 2, 'key-0002', now()); +('sid-0002', 'kid-0002', 2, 'key-0002', '2022-02-15 22:20:21'); diff --git a/persistence/sql/migratest/testdata/20190200000003_testdata.sql b/persistence/sql/migratest/testdata/20190200000003_testdata.sql index 0494a778858..c9d5d90403b 100644 --- a/persistence/sql/migratest/testdata/20190200000003_testdata.sql +++ b/persistence/sql/migratest/testdata/20190200000003_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_jwk (sid, kid, version, keydata, created_at) VALUES -('sid-0003', 'kid-0003', 3, 'key-0003', now()); +('sid-0003', 'kid-0003', 3, 'key-0003', '2022-02-15 22:20:21'); diff --git a/persistence/sql/migratest/testdata/20190200000004_testdata.sql b/persistence/sql/migratest/testdata/20190200000004_testdata.sql index add63a14743..73b375d86db 100644 --- a/persistence/sql/migratest/testdata/20190200000004_testdata.sql +++ b/persistence/sql/migratest/testdata/20190200000004_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_jwk (sid, kid, version, keydata, created_at, pk) VALUES -('sid-0004', 'kid-0004', 4, 'key-0004', now(), 4); +('sid-0004', 'kid-0004', 4, 'key-0004', '2022-02-15 22:20:21', 4); diff --git a/persistence/sql/migratest/testdata/20190300000003_testdata.sql b/persistence/sql/migratest/testdata/20190300000003_testdata.sql index 7913a014b50..c7e1ca34e8c 100644 --- a/persistence/sql/migratest/testdata/20190300000003_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000003_testdata.sql @@ -1,13 +1,13 @@ -- 20190300000001_testdata.sql (see 20190300000001_testdata.sql for an explanation) -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_consent_request (challenge, login_challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context) -SELECT 'challenge-0001', 'challenge-0001', 'verifier-0001', hydra_client.id, 'subject-0001', 'http://request/0001', true, 'requested_scope-0001_1', 'csrf-0001', now(), now(), '{"display": "display-0001"}' +SELECT 'challenge-0001', 'challenge-0001', 'verifier-0001', hydra_client.id, 'subject-0001', 'http://request/0001', true, 'requested_scope-0001_1', 'csrf-0001', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0001"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context) -SELECT 'challenge-0001', 'verifier-0001', hydra_client.id, 'subject-0001', 'http://request/0001', true, 'requested_scope-0001_1', 'csrf-0001', now(), now(), '{"display": "display-0001"}' +SELECT 'challenge-0001', 'verifier-0001', hydra_client.id, 'subject-0001', 'http://request/0001', true, 'requested_scope-0001_1', 'csrf-0001', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0001"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -15,29 +15,29 @@ LIMIT 1; INSERT INTO hydra_oauth2_authentication_session (id, 
authenticated_at, subject) VALUES -('auth_session-0001', now(), 'subject-0001'); +('auth_session-0001', '2022-02-15 22:20:21', 'subject-0001'); INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used) VALUES -('challenge-0001', 'granted_scope-0001_1', true, 0001, '{}', now(), '{"session_access_token-0001": "0001"}', '{"session_id_token-0001": "0001"}', now(), true); +('challenge-0001', 'granted_scope-0001_1', true, 0001, '{}', '2022-02-15 22:20:21', '{"session_access_token-0001": "0001"}', '{"session_id_token-0001": "0001"}', '2022-02-15 22:20:21', true); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used) VALUES -('challenge-0001', 'subject-0001', true, 0001, '{}', 'acr-0001', now(), now(), true); +('challenge-0001', 'subject-0001', true, 0001, '{}', 'acr-0001', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true); -- EOF 20190300000001_testdata.sql -- 20190300000002_testdata.sql (see 20190300000002_testdata.sql for an explanation) -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_consent_request (challenge, login_challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier) -SELECT 'challenge-0002', 'challenge-0002', 'verifier-0002', hydra_client.id, 'subject-0002', 'http://request/0002', true, 'requested_scope-0002_1', 'csrf-0002', now(), now(), '{"display": "display-0002"}', 'force_subject_id-0002' +SELECT 'challenge-0002', 'challenge-0002', 'verifier-0002', hydra_client.id, 'subject-0002', 'http://request/0002', true, 'requested_scope-0002_1', 'csrf-0002', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0002"}', 'force_subject_id-0002' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context) -SELECT 'challenge-0002', 'verifier-0002', hydra_client.id, 'subject-0002', 'http://request/0002', true, 'requested_scope-0002_1', 'csrf-0002', now(), now(), '{"display": "display-0002"}' +SELECT 'challenge-0002', 'verifier-0002', hydra_client.id, 'subject-0002', 'http://request/0002', true, 'requested_scope-0002_1', 'csrf-0002', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0002"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -45,17 +45,17 @@ LIMIT 1; INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0002', now(), 'subject-0002'); +('auth_session-0002', '2022-02-15 22:20:21', 'subject-0002'); INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used) VALUES -('challenge-0002', 'granted_scope-0002_1', true, 0002, '{}', now(), '{"session_access_token-0002": "0002"}', '{"session_id_token-0002": "0002"}', now(), true); +('challenge-0002', 'granted_scope-0002_1', true, 0002, '{}', '2022-02-15 22:20:21', '{"session_access_token-0002": "0002"}', '{"session_id_token-0002": "0002"}', '2022-02-15 22:20:21', true); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, 
authenticated_at, was_used, forced_subject_identifier) VALUES -('challenge-0002', 'subject-0002', true, 0002, '{}', 'acr-0002', now(), now(), true, 'force_subject_id-0002'); +('challenge-0002', 'subject-0002', true, 0002, '{}', 'acr-0002', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0002'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0002', 'subject_obfuscated-0002' @@ -68,17 +68,17 @@ LIMIT 1; INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0003', now(), 'subject-0003'); +('auth_session-0003', '2022-02-15 22:20:21', 'subject-0003'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, login_session_id) -SELECT 'challenge-0003', 'verifier-0003', hydra_client.id, 'subject-0003', 'http://request/0003', true, 'requested_scope-0003_1', 'csrf-0003', now(), now(), '{"display": "display-0003"}', 'auth_session-0003' +SELECT 'challenge-0003', 'verifier-0003', hydra_client.id, 'subject-0003', 'http://request/0003', true, 'requested_scope-0003_1', 'csrf-0003', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0003"}', 'auth_session-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge) -SELECT 'challenge-0003', 'verifier-0003', hydra_client.id, 'subject-0003', 'http://request/0003', true, 'requested_scope-0003_1', 'csrf-0003', now(), now(), '{"display": "display-0003"}', 'force_subject_id-0003', 'auth_session-0003', 'challenge-0003' +SELECT 'challenge-0003', 'verifier-0003', hydra_client.id, 'subject-0003', 'http://request/0003', true, 'requested_scope-0003_1', 'csrf-0003', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0003"}', 'force_subject_id-0003', 'auth_session-0003', 'challenge-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -86,12 +86,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used) VALUES -('challenge-0003', 'granted_scope-0003_1', true, 0003, '{}', now(), '{"session_access_token-0003": "0003"}', '{"session_id_token-0003": "0003"}', now(), true); +('challenge-0003', 'granted_scope-0003_1', true, 0003, '{}', '2022-02-15 22:20:21', '{"session_access_token-0003": "0003"}', '{"session_id_token-0003": "0003"}', '2022-02-15 22:20:21', true); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier) VALUES -('challenge-0003', 'subject-0003', true, 0003, '{}', 'acr-0003', now(), now(), true, 'force_subject_id-0003'); +('challenge-0003', 'subject-0003', true, 0003, '{}', 'acr-0003', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0003'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0003', 'subject_obfuscated-0003' diff --git 
a/persistence/sql/migratest/testdata/20190300000004_testdata.sql b/persistence/sql/migratest/testdata/20190300000004_testdata.sql index 47e143d0273..23384f5f4fd 100644 --- a/persistence/sql/migratest/testdata/20190300000004_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000004_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0004', now(), 'subject-0004'); +('auth_session-0004', '2022-02-15 22:20:21', 'subject-0004'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0004', 'verifier-0004', hydra_client.id, 'subject-0004', 'http://request/0004', true, 'requested_scope-0004_1', 'csrf-0004', now(), now(), '{"display": "display-0004"}', 'requested_audience-0004_1', 'auth_session-0004' +SELECT 'challenge-0004', 'verifier-0004', hydra_client.id, 'subject-0004', 'http://request/0004', true, 'requested_scope-0004_1', 'csrf-0004', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0004"}', 'requested_audience-0004_1', 'auth_session-0004' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience) -SELECT 'challenge-0004', 'verifier-0004', hydra_client.id, 'subject-0004', 'http://request/0004', true, 'requested_scope-0004_1', 'csrf-0004', now(), now(), '{"display": "display-0004"}', 'force_subject_id-0004', 'auth_session-0004', 'challenge-0004', 'requested_audience-0004_1' +SELECT 'challenge-0004', 'verifier-0004', hydra_client.id, 'subject-0004', 'http://request/0004', true, 'requested_scope-0004_1', 'csrf-0004', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0004"}', 'force_subject_id-0004', 'auth_session-0004', 'challenge-0004', 'requested_audience-0004_1' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0004', 'granted_scope-0004_1', true, 0004, '{}', now(), '{"session_access_token-0004": "0004"}', '{"session_id_token-0004": "0004"}', now(), true, 'granted_audience-0004_1'); +('challenge-0004', 'granted_scope-0004_1', true, 0004, '{}', '2022-02-15 22:20:21', '{"session_access_token-0004": "0004"}', '{"session_id_token-0004": "0004"}', '2022-02-15 22:20:21', true, 'granted_audience-0004_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier) VALUES -('challenge-0004', 'subject-0004', true, 0004, '{}', 'acr-0004', now(), now(), true, 'force_subject_id-0004'); +('challenge-0004', 'subject-0004', true, 0004, '{}', 'acr-0004', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0004'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0004', 'subject_obfuscated-0004' diff --git 
a/persistence/sql/migratest/testdata/20190300000005_testdata.sql b/persistence/sql/migratest/testdata/20190300000005_testdata.sql index e30101c2644..7e81c32ba30 100644 --- a/persistence/sql/migratest/testdata/20190300000005_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000005_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0005', now(), 'subject-0005'); +('auth_session-0005', '2022-02-15 22:20:21', 'subject-0005'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0005', 'verifier-0005', hydra_client.id, 'subject-0005', 'http://request/0005', true, 'requested_scope-0005_1', 'csrf-0005', now(), now(), '{"display": "display-0005"}', 'requested_audience-0005_1', 'auth_session-0005' +SELECT 'challenge-0005', 'verifier-0005', hydra_client.id, 'subject-0005', 'http://request/0005', true, 'requested_scope-0005_1', 'csrf-0005', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0005"}', 'requested_audience-0005_1', 'auth_session-0005' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience) -SELECT 'challenge-0005', 'verifier-0005', hydra_client.id, 'subject-0005', 'http://request/0005', true, 'requested_scope-0005_1', 'csrf-0005', now(), now(), '{"display": "display-0005"}', 'force_subject_id-0005', 'auth_session-0005', 'challenge-0005', 'requested_audience-0005_1' +SELECT 'challenge-0005', 'verifier-0005', hydra_client.id, 'subject-0005', 'http://request/0005', true, 'requested_scope-0005_1', 'csrf-0005', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0005"}', 'force_subject_id-0005', 'auth_session-0005', 'challenge-0005', 'requested_audience-0005_1' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0005', 'granted_scope-0005_1', true, 0005, '{}', now(), '{"session_access_token-0005": "0005"}', '{"session_id_token-0005": "0005"}', now(), true, 'granted_audience-0005_1'); +('challenge-0005', 'granted_scope-0005_1', true, 0005, '{}', '2022-02-15 22:20:21', '{"session_access_token-0005": "0005"}', '{"session_id_token-0005": "0005"}', '2022-02-15 22:20:21', true, 'granted_audience-0005_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier) VALUES -('challenge-0005', 'subject-0005', true, 0005, '{}', 'acr-0005', now(), now(), true, 'force_subject_id-0005'); +('challenge-0005', 'subject-0005', true, 0005, '{}', 'acr-0005', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0005'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0005', 'subject_obfuscated-0005' diff --git 
a/persistence/sql/migratest/testdata/20190300000006_testdata.sql b/persistence/sql/migratest/testdata/20190300000006_testdata.sql index 6d135c39ca8..437d56b4667 100644 --- a/persistence/sql/migratest/testdata/20190300000006_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000006_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0006', now(), 'subject-0006'); +('auth_session-0006', '2022-02-15 22:20:21', 'subject-0006'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0006', 'verifier-0006', hydra_client.id, 'subject-0006', 'http://request/0006', true, 'requested_scope-0006_1', 'csrf-0006', now(), now(), '{"display": "display-0006"}', 'requested_audience-0006_1', 'auth_session-0006' +SELECT 'challenge-0006', 'verifier-0006', hydra_client.id, 'subject-0006', 'http://request/0006', true, 'requested_scope-0006_1', 'csrf-0006', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0006"}', 'requested_audience-0006_1', 'auth_session-0006' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr) -SELECT 'challenge-0006', 'verifier-0006', hydra_client.id, 'subject-0006', 'http://request/0006', true, 'requested_scope-0006_1', 'csrf-0006', now(), now(), '{"display": "display-0006"}', 'force_subject_id-0006', 'auth_session-0006', 'challenge-0006', 'requested_audience-0006_1', 'acr-0006' +SELECT 'challenge-0006', 'verifier-0006', hydra_client.id, 'subject-0006', 'http://request/0006', true, 'requested_scope-0006_1', 'csrf-0006', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0006"}', 'force_subject_id-0006', 'auth_session-0006', 'challenge-0006', 'requested_audience-0006_1', 'acr-0006' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0006', 'granted_scope-0006_1', true, 0006, '{}', now(), '{"session_access_token-0006": "0006"}', '{"session_id_token-0006": "0006"}', now(), true, 'granted_audience-0006_1'); +('challenge-0006', 'granted_scope-0006_1', true, 0006, '{}', '2022-02-15 22:20:21', '{"session_access_token-0006": "0006"}', '{"session_id_token-0006": "0006"}', '2022-02-15 22:20:21', true, 'granted_audience-0006_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier) VALUES -('challenge-0006', 'subject-0006', true, 0006, '{}', 'acr-0006', now(), now(), true, 'force_subject_id-0006'); +('challenge-0006', 'subject-0006', true, 0006, '{}', 'acr-0006', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0006'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0006', 
'subject_obfuscated-0006' diff --git a/persistence/sql/migratest/testdata/20190300000007_testdata.sql b/persistence/sql/migratest/testdata/20190300000007_testdata.sql index 1276e198425..bc2505167cd 100644 --- a/persistence/sql/migratest/testdata/20190300000007_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000007_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0007', now(), 'subject-0007'); +('auth_session-0007', '2022-02-15 22:20:21', 'subject-0007'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0007', 'verifier-0007', hydra_client.id, 'subject-0007', 'http://request/0007', true, 'requested_scope-0007_1', 'csrf-0007', now(), now(), '{"display": "display-0007"}', 'requested_audience-0007_1', 'auth_session-0007' +SELECT 'challenge-0007', 'verifier-0007', hydra_client.id, 'subject-0007', 'http://request/0007', true, 'requested_scope-0007_1', 'csrf-0007', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0007"}', 'requested_audience-0007_1', 'auth_session-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr) -SELECT 'challenge-0007', 'verifier-0007', hydra_client.id, 'subject-0007', 'http://request/0007', true, 'requested_scope-0007_1', 'csrf-0007', now(), now(), '{"display": "display-0007"}', 'force_subject_id-0007', 'auth_session-0007', 'challenge-0007', 'requested_audience-0007_1', 'acr-0007' +SELECT 'challenge-0007', 'verifier-0007', hydra_client.id, 'subject-0007', 'http://request/0007', true, 'requested_scope-0007_1', 'csrf-0007', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0007"}', 'force_subject_id-0007', 'auth_session-0007', 'challenge-0007', 'requested_audience-0007_1', 'acr-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0007', 'granted_scope-0007_1', true, 0007, '{}', now(), '{"session_access_token-0007": "0007"}', '{"session_id_token-0007": "0007"}', now(), true, 'granted_audience-0007_1'); +('challenge-0007', 'granted_scope-0007_1', true, 0007, '{}', '2022-02-15 22:20:21', '{"session_access_token-0007": "0007"}', '{"session_id_token-0007": "0007"}', '2022-02-15 22:20:21', true, 'granted_audience-0007_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier) VALUES -('challenge-0007', 'subject-0007', true, 0007, '{}', 'acr-0007', now(), now(), true, 'force_subject_id-0007'); +('challenge-0007', 'subject-0007', true, 0007, '{}', 'acr-0007', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0007'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT 
hydra_client.id, 'subject-0007', 'subject_obfuscated-0007' diff --git a/persistence/sql/migratest/testdata/20190300000008_testdata.sql b/persistence/sql/migratest/testdata/20190300000008_testdata.sql index 0c3c78016a1..96de5d9d84d 100644 --- a/persistence/sql/migratest/testdata/20190300000008_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000008_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0008', now(), 'subject-0008'); +('auth_session-0008', '2022-02-15 22:20:21', 'subject-0008'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0008', 'verifier-0008', hydra_client.id, 'subject-0008', 'http://request/0008', true, 'requested_scope-0008_1', 'csrf-0008', now(), now(), '{"display": "display-0008"}', 'requested_audience-0008_1', 'auth_session-0008' +SELECT 'challenge-0008', 'verifier-0008', hydra_client.id, 'subject-0008', 'http://request/0008', true, 'requested_scope-0008_1', 'csrf-0008', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0008"}', 'requested_audience-0008_1', 'auth_session-0008' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0008', 'verifier-0008', hydra_client.id, 'subject-0008', 'http://request/0008', true, 'requested_scope-0008_1', 'csrf-0008', now(), now(), '{"display": "display-0008"}', 'force_subject_id-0008', 'auth_session-0008', 'challenge-0008', 'requested_audience-0008_1', 'acr-0008', '{"context": "0008"}' +SELECT 'challenge-0008', 'verifier-0008', hydra_client.id, 'subject-0008', 'http://request/0008', true, 'requested_scope-0008_1', 'csrf-0008', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0008"}', 'force_subject_id-0008', 'auth_session-0008', 'challenge-0008', 'requested_audience-0008_1', 'acr-0008', '{"context": "0008"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0008', 'granted_scope-0008_1', true, 0008, '{}', now(), '{"session_access_token-0008": "0008"}', '{"session_id_token-0008": "0008"}', now(), true, 'granted_audience-0008_1'); +('challenge-0008', 'granted_scope-0008_1', true, 0008, '{}', '2022-02-15 22:20:21', '{"session_access_token-0008": "0008"}', '{"session_id_token-0008": "0008"}', '2022-02-15 22:20:21', true, 'granted_audience-0008_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES -('challenge-0008', 'subject-0008', true, 0008, '{}', 'acr-0008', now(), now(), true, 'force_subject_id-0008', '{"context": "0008"}'); +('challenge-0008', 'subject-0008', true, 0008, '{}', 'acr-0008', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0008', 
'{"context": "0008"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0008', 'subject_obfuscated-0008' diff --git a/persistence/sql/migratest/testdata/20190300000009_testdata.sql b/persistence/sql/migratest/testdata/20190300000009_testdata.sql index e4dfb9eb748..19c958866e8 100644 --- a/persistence/sql/migratest/testdata/20190300000009_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000009_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0009', now(), 'subject-0009'); +('auth_session-0009', '2022-02-15 22:20:21', 'subject-0009'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0009', 'verifier-0009', hydra_client.id, 'subject-0009', 'http://request/0009', true, 'requested_scope-0009_1', 'csrf-0009', now(), now(), '{"display": "display-0009"}', 'requested_audience-0009_1', 'auth_session-0009' +SELECT 'challenge-0009', 'verifier-0009', hydra_client.id, 'subject-0009', 'http://request/0009', true, 'requested_scope-0009_1', 'csrf-0009', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0009"}', 'requested_audience-0009_1', 'auth_session-0009' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0009', 'verifier-0009', hydra_client.id, 'subject-0009', 'http://request/0009', true, 'requested_scope-0009_1', 'csrf-0009', now(), now(), '{"display": "display-0009"}', 'force_subject_id-0009', 'auth_session-0009', 'challenge-0009', 'requested_audience-0009_1', 'acr-0009', '{"context": "0009"}' +SELECT 'challenge-0009', 'verifier-0009', hydra_client.id, 'subject-0009', 'http://request/0009', true, 'requested_scope-0009_1', 'csrf-0009', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0009"}', 'force_subject_id-0009', 'auth_session-0009', 'challenge-0009', 'requested_audience-0009_1', 'acr-0009', '{"context": "0009"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0009', 'granted_scope-0009_1', true, 0009, '{}', now(), '{"session_access_token-0009": "0009"}', '{"session_id_token-0009": "0009"}', now(), true, 'granted_audience-0009_1'); +('challenge-0009', 'granted_scope-0009_1', true, 0009, '{}', '2022-02-15 22:20:21', '{"session_access_token-0009": "0009"}', '{"session_id_token-0009": "0009"}', '2022-02-15 22:20:21', true, 'granted_audience-0009_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES -('challenge-0009', 'subject-0009', true, 0009, '{}', 'acr-0009', now(), now(), true, 'force_subject_id-0009', '{"context": "0009"}'); 
+('challenge-0009', 'subject-0009', true, 0009, '{}', 'acr-0009', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0009', '{"context": "0009"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0009', 'subject_obfuscated-0009' diff --git a/persistence/sql/migratest/testdata/20190300000010_testdata.sql b/persistence/sql/migratest/testdata/20190300000010_testdata.sql index 74f897be5a1..720e0c6a8f7 100644 --- a/persistence/sql/migratest/testdata/20190300000010_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000010_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject) VALUES -('auth_session-0010', now(), 'subject-0010'); +('auth_session-0010', '2022-02-15 22:20:21', 'subject-0010'); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0010', 'verifier-0010', hydra_client.id, 'subject-0010', 'http://request/0010', true, 'requested_scope-0010_1', 'csrf-0010', now(), now(), '{"display": "display-0010"}', 'requested_audience-0010_1', 'auth_session-0010' +SELECT 'challenge-0010', 'verifier-0010', hydra_client.id, 'subject-0010', 'http://request/0010', true, 'requested_scope-0010_1', 'csrf-0010', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0010"}', 'requested_audience-0010_1', 'auth_session-0010' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0010', 'verifier-0010', hydra_client.id, 'subject-0010', 'http://request/0010', true, 'requested_scope-0010_1', 'csrf-0010', now(), now(), '{"display": "display-0010"}', 'force_subject_id-0010', 'auth_session-0010', 'challenge-0010', 'requested_audience-0010_1', 'acr-0010', '{"context": "0010"}' +SELECT 'challenge-0010', 'verifier-0010', hydra_client.id, 'subject-0010', 'http://request/0010', true, 'requested_scope-0010_1', 'csrf-0010', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0010"}', 'force_subject_id-0010', 'auth_session-0010', 'challenge-0010', 'requested_audience-0010_1', 'acr-0010', '{"context": "0010"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0010', 'granted_scope-0010_1', true, 0010, '{}', now(), '{"session_access_token-0010": "0010"}', '{"session_id_token-0010": "0010"}', now(), true, 'granted_audience-0010_1'); +('challenge-0010', 'granted_scope-0010_1', true, 0010, '{}', '2022-02-15 22:20:21', '{"session_access_token-0010": "0010"}', '{"session_id_token-0010": "0010"}', '2022-02-15 22:20:21', true, 'granted_audience-0010_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES 
-('challenge-0010', 'subject-0010', true, 0010, '{}', 'acr-0010', now(), now(), true, 'force_subject_id-0010', '{"context": "0010"}'); +('challenge-0010', 'subject-0010', true, 0010, '{}', 'acr-0010', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0010', '{"context": "0010"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0010', 'subject_obfuscated-0010' diff --git a/persistence/sql/migratest/testdata/20190300000011_testdata.sql b/persistence/sql/migratest/testdata/20190300000011_testdata.sql index 11825f2ca9e..c8cad962db2 100644 --- a/persistence/sql/migratest/testdata/20190300000011_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000011_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, remember) VALUES -('auth_session-0011', now(), 'subject-0011', false); +('auth_session-0011', '2022-02-15 22:20:21', 'subject-0011', false); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0011', 'verifier-0011', hydra_client.id, 'subject-0011', 'http://request/0011', true, 'requested_scope-0011_1', 'csrf-0011', now(), now(), '{"display": "display-0011"}', 'requested_audience-0011_1', 'auth_session-0011' +SELECT 'challenge-0011', 'verifier-0011', hydra_client.id, 'subject-0011', 'http://request/0011', true, 'requested_scope-0011_1', 'csrf-0011', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0011"}', 'requested_audience-0011_1', 'auth_session-0011' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0011', 'verifier-0011', hydra_client.id, 'subject-0011', 'http://request/0011', true, 'requested_scope-0011_1', 'csrf-0011', now(), now(), '{"display": "display-0011"}', 'force_subject_id-0011', 'auth_session-0011', 'challenge-0011', 'requested_audience-0011_1', 'acr-0011', '{"context": "0011"}' +SELECT 'challenge-0011', 'verifier-0011', hydra_client.id, 'subject-0011', 'http://request/0011', true, 'requested_scope-0011_1', 'csrf-0011', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0011"}', 'force_subject_id-0011', 'auth_session-0011', 'challenge-0011', 'requested_audience-0011_1', 'acr-0011', '{"context": "0011"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0011', 'granted_scope-0011_1', true, 0011, '{}', now(), '{"session_access_token-0011": "0011"}', '{"session_id_token-0011": "0011"}', now(), true, 'granted_audience-0011_1'); +('challenge-0011', 'granted_scope-0011_1', true, 0011, '{}', '2022-02-15 22:20:21', '{"session_access_token-0011": "0011"}', '{"session_id_token-0011": "0011"}', '2022-02-15 22:20:21', true, 'granted_audience-0011_1'); INSERT INTO 
hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES -('challenge-0011', 'subject-0011', true, 0011, '{}', 'acr-0011', now(), now(), true, 'force_subject_id-0011', '{"context": "0011"}'); +('challenge-0011', 'subject-0011', true, 0011, '{}', 'acr-0011', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0011', '{"context": "0011"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0011', 'subject_obfuscated-0011' diff --git a/persistence/sql/migratest/testdata/20190300000012_testdata.sql b/persistence/sql/migratest/testdata/20190300000012_testdata.sql index 8794855da69..70e59f2584b 100644 --- a/persistence/sql/migratest/testdata/20190300000012_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000012_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, remember) VALUES -('auth_session-0012', now(), 'subject-0012', false); +('auth_session-0012', '2022-02-15 22:20:21', 'subject-0012', false); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0012', 'verifier-0012', hydra_client.id, 'subject-0012', 'http://request/0012', true, 'requested_scope-0012_1', 'csrf-0012', now(), now(), '{"display": "display-0012"}', 'requested_audience-0012_1', 'auth_session-0012' +SELECT 'challenge-0012', 'verifier-0012', hydra_client.id, 'subject-0012', 'http://request/0012', true, 'requested_scope-0012_1', 'csrf-0012', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0012"}', 'requested_audience-0012_1', 'auth_session-0012' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0012', 'verifier-0012', hydra_client.id, 'subject-0012', 'http://request/0012', true, 'requested_scope-0012_1', 'csrf-0012', now(), now(), '{"display": "display-0012"}', 'force_subject_id-0012', 'auth_session-0012', 'challenge-0012', 'requested_audience-0012_1', 'acr-0012', '{"context": "0012"}' +SELECT 'challenge-0012', 'verifier-0012', hydra_client.id, 'subject-0012', 'http://request/0012', true, 'requested_scope-0012_1', 'csrf-0012', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0012"}', 'force_subject_id-0012', 'auth_session-0012', 'challenge-0012', 'requested_audience-0012_1', 'acr-0012', '{"context": "0012"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0012', 'granted_scope-0012_1', true, 0012, '{}', now(), '{"session_access_token-0012": "0012"}', '{"session_id_token-0012": "0012"}', now(), true, 'granted_audience-0012_1'); +('challenge-0012', 'granted_scope-0012_1', true, 0012, '{}', '2022-02-15 
22:20:21', '{"session_access_token-0012": "0012"}', '{"session_id_token-0012": "0012"}', '2022-02-15 22:20:21', true, 'granted_audience-0012_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES -('challenge-0012', 'subject-0012', true, 0012, '{}', 'acr-0012', now(), now(), true, 'force_subject_id-0012', '{"context": "0012"}'); +('challenge-0012', 'subject-0012', true, 0012, '{}', 'acr-0012', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0012', '{"context": "0012"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0012', 'subject_obfuscated-0012' diff --git a/persistence/sql/migratest/testdata/20190300000013_testdata.sql b/persistence/sql/migratest/testdata/20190300000013_testdata.sql index 32c003e1a34..b95ff4a8f08 100644 --- a/persistence/sql/migratest/testdata/20190300000013_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000013_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, remember) VALUES -('auth_session-0013', now(), 'subject-0013', false); +('auth_session-0013', '2022-02-15 22:20:21', 'subject-0013', false); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0013', 'verifier-0013', hydra_client.id, 'subject-0013', 'http://request/0013', true, 'requested_scope-0013_1', 'csrf-0013', now(), now(), '{"display": "display-0013"}', 'requested_audience-0013_1', 'auth_session-0013' +SELECT 'challenge-0013', 'verifier-0013', hydra_client.id, 'subject-0013', 'http://request/0013', true, 'requested_scope-0013_1', 'csrf-0013', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0013"}', 'requested_audience-0013_1', 'auth_session-0013' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0013', 'verifier-0013', hydra_client.id, 'subject-0013', 'http://request/0013', true, 'requested_scope-0013_1', 'csrf-0013', now(), now(), '{"display": "display-0013"}', 'force_subject_id-0013', 'auth_session-0013', 'challenge-0013', 'requested_audience-0013_1', 'acr-0013', '{"context": "0013"}' +SELECT 'challenge-0013', 'verifier-0013', hydra_client.id, 'subject-0013', 'http://request/0013', true, 'requested_scope-0013_1', 'csrf-0013', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0013"}', 'force_subject_id-0013', 'auth_session-0013', 'challenge-0013', 'requested_audience-0013_1', 'acr-0013', '{"context": "0013"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, authenticated_at, was_used, granted_at_audience) VALUES -('challenge-0013', 'granted_scope-0013_1', true, 0013, '{}', now(), '{"session_access_token-0013": "0013"}', 
'{"session_id_token-0013": "0013"}', now(), true, 'granted_audience-0013_1'); +('challenge-0013', 'granted_scope-0013_1', true, 0013, '{}', '2022-02-15 22:20:21', '{"session_access_token-0013": "0013"}', '{"session_id_token-0013": "0013"}', '2022-02-15 22:20:21', true, 'granted_audience-0013_1'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES -('challenge-0013', 'subject-0013', true, 0013, '{}', 'acr-0013', now(), now(), true, 'force_subject_id-0013', '{"context": "0013"}'); +('challenge-0013', 'subject-0013', true, 0013, '{}', 'acr-0013', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0013', '{"context": "0013"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0013', 'subject_obfuscated-0013' diff --git a/persistence/sql/migratest/testdata/20190300000014_testdata.sql b/persistence/sql/migratest/testdata/20190300000014_testdata.sql index 1c7ea9f9c11..0f1d5c0388b 100644 --- a/persistence/sql/migratest/testdata/20190300000014_testdata.sql +++ b/persistence/sql/migratest/testdata/20190300000014_testdata.sql @@ -1,17 +1,17 @@ INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, remember) VALUES -('auth_session-0014', now(), 'subject-0014', false); +('auth_session-0014', '2022-02-15 22:20:21', 'subject-0014', false); -- using the most lately added client as a foreign key INSERT INTO hydra_oauth2_authentication_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, requested_at_audience, login_session_id) -SELECT 'challenge-0014', 'verifier-0014', hydra_client.id, 'subject-0014', 'http://request/0014', true, 'requested_scope-0014_1', 'csrf-0014', now(), now(), '{"display": "display-0014"}', 'requested_audience-0014_1', 'auth_session-0014' +SELECT 'challenge-0014', 'verifier-0014', hydra_client.id, 'subject-0014', 'http://request/0014', true, 'requested_scope-0014_1', 'csrf-0014', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0014"}', 'requested_audience-0014_1', 'auth_session-0014' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_consent_request (challenge, verifier, client_id, subject, request_url, skip, requested_scope, csrf, authenticated_at, requested_at, oidc_context, forced_subject_identifier, login_session_id, login_challenge, requested_at_audience, acr, context) -SELECT 'challenge-0014', 'verifier-0014', hydra_client.id, 'subject-0014', 'http://request/0014', true, 'requested_scope-0014_1', 'csrf-0014', now(), now(), '{"display": "display-0014"}', 'force_subject_id-0014', 'auth_session-0014', 'challenge-0014', 'requested_audience-0014_1', 'acr-0014', '{"context": "0014"}' +SELECT 'challenge-0014', 'verifier-0014', hydra_client.id, 'subject-0014', 'http://request/0014', true, 'requested_scope-0014_1', 'csrf-0014', '2022-02-15 22:20:21', '2022-02-15 22:20:21', '{"display": "display-0014"}', 'force_subject_id-0014', 'auth_session-0014', 'challenge-0014', 'requested_audience-0014_1', 'acr-0014', '{"context": "0014"}' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; @@ -19,12 +19,12 @@ LIMIT 1; INSERT INTO hydra_oauth2_consent_request_handled (challenge, granted_scope, remember, remember_for, error, requested_at, session_access_token, session_id_token, 
authenticated_at, was_used, granted_at_audience, handled_at) VALUES -('challenge-0014', 'granted_scope-0014_1', true, 0014, '{}', now(), '{"session_access_token-0014": "0014"}', '{"session_id_token-0014": "0014"}', now(), true, 'granted_audience-0014_1', now()); +('challenge-0014', 'granted_scope-0014_1', true, 0014, '{}', '2022-02-15 22:20:21', '{"session_access_token-0014": "0014"}', '{"session_id_token-0014": "0014"}', '2022-02-15 22:20:21', true, 'granted_audience-0014_1', '2022-02-15 22:20:21'); INSERT INTO hydra_oauth2_authentication_request_handled (challenge, subject, remember, remember_for, error, acr, requested_at, authenticated_at, was_used, forced_subject_identifier, context) VALUES -('challenge-0014', 'subject-0014', true, 0014, '{}', 'acr-0014', now(), now(), true, 'force_subject_id-0014', '{"context": "0014"}'); +('challenge-0014', 'subject-0014', true, 0014, '{}', 'acr-0014', '2022-02-15 22:20:21', '2022-02-15 22:20:21', true, 'force_subject_id-0014', '{"context": "0014"}'); INSERT INTO hydra_oauth2_obfuscated_authentication_session (client_id, subject, subject_obfuscated) SELECT hydra_client.id, 'subject-0014', 'subject_obfuscated-0014' diff --git a/persistence/sql/migratest/testdata/20190400000001_testdata.sql b/persistence/sql/migratest/testdata/20190400000001_testdata.sql index e3522daebb3..ed084b1f8cc 100644 --- a/persistence/sql/migratest/testdata/20190400000001_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000001_testdata.sql @@ -1,23 +1,23 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data) -SELECT 'sig-0001', 'req-0001', now(), hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' +SELECT 'sig-0001', 'req-0001', '2022-02-15 22:20:21', hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data) -SELECT 'sig-0001', 'req-0001', now(), hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' +SELECT 'sig-0001', 'req-0001', '2022-02-15 22:20:21', hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data) -SELECT 'sig-0001', 'req-0001', now(), hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' +SELECT 'sig-0001', 'req-0001', '2022-02-15 22:20:21', hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data) -SELECT 'sig-0001', 'req-0001', now(), hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' +SELECT 'sig-0001', 'req-0001', '2022-02-15 22:20:21', hydra_client.id, 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000002_testdata.sql b/persistence/sql/migratest/testdata/20190400000002_testdata.sql index a92870e9524..fa671e88bbd 100644 --- 
a/persistence/sql/migratest/testdata/20190400000002_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000002_testdata.sql @@ -1,23 +1,23 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0002', 'req-0002', now(), hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' +SELECT 'sig-0002', 'req-0002', '2022-02-15 22:20:21', hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0002', 'req-0002', now(), hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' +SELECT 'sig-0002', 'req-0002', '2022-02-15 22:20:21', hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0002', 'req-0002', now(), hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' +SELECT 'sig-0002', 'req-0002', '2022-02-15 22:20:21', hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0002', 'req-0002', now(), hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' +SELECT 'sig-0002', 'req-0002', '2022-02-15 22:20:21', hydra_client.id, 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000003_testdata.sql b/persistence/sql/migratest/testdata/20190400000003_testdata.sql index f8b8c294d6e..6fba92a3320 100644 --- a/persistence/sql/migratest/testdata/20190400000003_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000003_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0003', 'req-0003', now(), hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' +SELECT 'sig-0003', 'req-0003', '2022-02-15 22:20:21', hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0003', 'req-0003', now(), hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' +SELECT 'sig-0003', 'req-0003', '2022-02-15 22:20:21', hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, 
session_data, subject) -SELECT 'sig-0003', 'req-0003', now(), hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' +SELECT 'sig-0003', 'req-0003', '2022-02-15 22:20:21', hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0003', 'req-0003', now(), hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' +SELECT 'sig-0003', 'req-0003', '2022-02-15 22:20:21', hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject) -SELECT 'sig-0003', 'req-0003', now(), hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' +SELECT 'sig-0003', 'req-0003', '2022-02-15 22:20:21', hydra_client.id, 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000004_testdata.sql b/persistence/sql/migratest/testdata/20190400000004_testdata.sql index b65dd9685d2..c6d3c65b464 100644 --- a/persistence/sql/migratest/testdata/20190400000004_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000004_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0004', 'req-0004', now(), hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false +SELECT 'sig-0004', 'req-0004', '2022-02-15 22:20:21', hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0004', 'req-0004', now(), hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false +SELECT 'sig-0004', 'req-0004', '2022-02-15 22:20:21', hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0004', 'req-0004', now(), hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false +SELECT 'sig-0004', 'req-0004', '2022-02-15 22:20:21', hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0004', 'req-0004', now(), hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false +SELECT 'sig-0004', 
'req-0004', '2022-02-15 22:20:21', hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0004', 'req-0004', now(), hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false +SELECT 'sig-0004', 'req-0004', '2022-02-15 22:20:21', hydra_client.id, 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000005_testdata.sql b/persistence/sql/migratest/testdata/20190400000005_testdata.sql index 2f02a8b9dc8..852557b4003 100644 --- a/persistence/sql/migratest/testdata/20190400000005_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000005_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0005', 'req-0005', now(), hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false +SELECT 'sig-0005', 'req-0005', '2022-02-15 22:20:22', hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0005', 'req-0005', now(), hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false +SELECT 'sig-0005', 'req-0005', '2022-02-15 22:20:22', hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0005', 'req-0005', now(), hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false +SELECT 'sig-0005', 'req-0005', '2022-02-15 22:20:22', hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0005', 'req-0005', now(), hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false +SELECT 'sig-0005', 'req-0005', '2022-02-15 22:20:22', hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0005', 'req-0005', now(), hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false +SELECT 'sig-0005', 'req-0005', '2022-02-15 22:20:22', hydra_client.id, 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', 
false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000006_testdata.sql b/persistence/sql/migratest/testdata/20190400000006_testdata.sql index d8c7466c5c3..acfbd22f533 100644 --- a/persistence/sql/migratest/testdata/20190400000006_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000006_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0006', 'req-0006', now(), hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false +SELECT 'sig-0006', 'req-0006', '2022-02-15 22:20:22', hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0006', 'req-0006', now(), hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false +SELECT 'sig-0006', 'req-0006', '2022-02-15 22:20:22', hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0006', 'req-0006', now(), hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false +SELECT 'sig-0006', 'req-0006', '2022-02-15 22:20:22', hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0006', 'req-0006', now(), hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false +SELECT 'sig-0006', 'req-0006', '2022-02-15 22:20:22', hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active) -SELECT 'sig-0006', 'req-0006', now(), hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false +SELECT 'sig-0006', 'req-0006', '2022-02-15 22:20:22', hydra_client.id, 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000007_testdata.sql b/persistence/sql/migratest/testdata/20190400000007_testdata.sql index 388af34781a..6e501d28c34 100644 --- a/persistence/sql/migratest/testdata/20190400000007_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000007_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience) -SELECT 'sig-0007', 'req-0007', now(), hydra_client.id, 'scope-0007', 
'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' +SELECT 'sig-0007', 'req-0007', '2022-02-15 22:20:22', hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience) -SELECT 'sig-0007', 'req-0007', now(), hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' +SELECT 'sig-0007', 'req-0007', '2022-02-15 22:20:22', hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience) -SELECT 'sig-0007', 'req-0007', now(), hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' +SELECT 'sig-0007', 'req-0007', '2022-02-15 22:20:22', hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience) -SELECT 'sig-0007', 'req-0007', now(), hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' +SELECT 'sig-0007', 'req-0007', '2022-02-15 22:20:22', hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience) -SELECT 'sig-0007', 'req-0007', now(), hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' +SELECT 'sig-0007', 'req-0007', '2022-02-15 22:20:22', hydra_client.id, 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007' FROM hydra_client ORDER BY hydra_client.pk DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000008_testdata.sql b/persistence/sql/migratest/testdata/20190400000008_testdata.sql index cafa2ecb087..a6c68c26ffc 100644 --- a/persistence/sql/migratest/testdata/20190400000008_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000008_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) 
-SELECT 'sig-0008', 'req-0008', now(), hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge +SELECT 'sig-0008', 'req-0008', '2022-02-15 22:20:22', hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0008', 'req-0008', now(), hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge +SELECT 'sig-0008', 'req-0008', '2022-02-15 22:20:22', hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0008', 'req-0008', now(), hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge +SELECT 'sig-0008', 'req-0008', '2022-02-15 22:20:22', hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0008', 'req-0008', now(), hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge +SELECT 'sig-0008', 'req-0008', '2022-02-15 22:20:22', hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0008', 'req-0008', now(), hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge +SELECT 'sig-0008', 'req-0008', '2022-02-15 22:20:22', hc.id, 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000009_testdata.sql 
b/persistence/sql/migratest/testdata/20190400000009_testdata.sql index 737a0c6b9a7..145eeefca0d 100644 --- a/persistence/sql/migratest/testdata/20190400000009_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000009_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0009', 'req-0009', now(), hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge +SELECT 'sig-0009', 'req-0009', '2022-02-15 22:20:22', hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0009', 'req-0009', now(), hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge +SELECT 'sig-0009', 'req-0009', '2022-02-15 22:20:22', hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0009', 'req-0009', now(), hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge +SELECT 'sig-0009', 'req-0009', '2022-02-15 22:20:22', hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0009', 'req-0009', now(), hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge +SELECT 'sig-0009', 'req-0009', '2022-02-15 22:20:22', hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0009', 'req-0009', now(), hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 
'granted_audience-0009', crh.challenge +SELECT 'sig-0009', 'req-0009', '2022-02-15 22:20:22', hc.id, 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000010_testdata.sql b/persistence/sql/migratest/testdata/20190400000010_testdata.sql index 55c68b32f1a..99eb095ef4d 100644 --- a/persistence/sql/migratest/testdata/20190400000010_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000010_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0010', 'req-0010', now(), hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge +SELECT 'sig-0010', 'req-0010', '2022-02-15 22:20:22', hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0010', 'req-0010', now(), hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge +SELECT 'sig-0010', 'req-0010', '2022-02-15 22:20:22', hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0010', 'req-0010', now(), hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge +SELECT 'sig-0010', 'req-0010', '2022-02-15 22:20:22', hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0010', 'req-0010', now(), hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge +SELECT 'sig-0010', 'req-0010', '2022-02-15 22:20:22', hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge FROM hydra_client hc, 
hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0010', 'req-0010', now(), hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge +SELECT 'sig-0010', 'req-0010', '2022-02-15 22:20:22', hc.id, 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20190400000011_testdata.sql b/persistence/sql/migratest/testdata/20190400000011_testdata.sql index 0be0ff9f7ab..5ca87135a06 100644 --- a/persistence/sql/migratest/testdata/20190400000011_testdata.sql +++ b/persistence/sql/migratest/testdata/20190400000011_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0011', 'req-0011', now(), hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-0011', 'req-0011', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0011', 'req-0011', now(), hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-0011', 'req-0011', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0011', 'req-0011', now(), hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-0011', 'req-0011', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0011', 
'req-0011', now(), hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-0011', 'req-0011', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-0011', 'req-0011', now(), hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-0011', 'req-0011', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; @@ -31,4 +31,4 @@ LIMIT 1; INSERT INTO hydra_oauth2_jti_blacklist (signature, expires_at) VALUES -('sig-0011', now()) +('sig-0011', '2022-02-15 22:20:22') diff --git a/persistence/sql/migratest/testdata/20200527215732_testdata.sql b/persistence/sql/migratest/testdata/20200527215732_testdata.sql index a98b0a2925f..8620448173a 100644 --- a/persistence/sql/migratest/testdata/20200527215732_testdata.sql +++ b/persistence/sql/migratest/testdata/20200527215732_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_client (pk, id, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg) VALUES -(2005, 'client-2005', 'Client 2005', 'secret-2005', 'http://redirect/2005_1', 'grant-2005_1', 'response-2005_1', 'scope-2005', 'owner-2005', 'http://policy/2005', 'http://tos/2005', 'http://client/2005', 'http://logo/2005', 'contact-2005_1', 0, 'http://sector_id/2005', '', 'http://jwks/2005', 'http://request/2005_1', 'token_auth-2005', 'r_alg-2005', 'u_alg-2005', 'subject-2005', 'http://cors/2005_1', 'autdience-2005_1', now(), now(), 'http://front_logout/2005', true, 'http://post_redirect/2005_1', 'http://back_logout/2005', true, '{"migration": "2005"}', ''); +(2005, 'client-2005', 'Client 2005', 'secret-2005', 'http://redirect/2005_1', 'grant-2005_1', 'response-2005_1', 'scope-2005', 'owner-2005', 'http://policy/2005', 'http://tos/2005', 'http://client/2005', 'http://logo/2005', 'contact-2005_1', 0, 'http://sector_id/2005', '', 'http://jwks/2005', 'http://request/2005_1', 'token_auth-2005', 'r_alg-2005', 'u_alg-2005', 'subject-2005', 'http://cors/2005_1', 'autdience-2005_1', '2022-02-15 22:20:22', '2022-02-15 22:20:22', 'http://front_logout/2005', true, 'http://post_redirect/2005_1', 'http://back_logout/2005', true, '{"migration": "2005"}', ''); diff --git 
a/persistence/sql/migratest/testdata/20201110104000_testdata.sql b/persistence/sql/migratest/testdata/20201110104000_testdata.sql index 27b9eb8a604..c35bff44838 100644 --- a/persistence/sql/migratest/testdata/20201110104000_testdata.sql +++ b/persistence/sql/migratest/testdata/20201110104000_testdata.sql @@ -1,29 +1,29 @@ INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, 
form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; @@ -31,31 +31,31 @@ LIMIT 1; -- insert another batch with different sig but same id INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000-01', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000-01', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000-01', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000-01', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 
crh.challenge +SELECT 'sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id) -SELECT 'sig-20201110104000-01', 'req-20201110104000', CURRENT_TIMESTAMP, hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge +SELECT 'sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', hc.id, 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', crh.challenge FROM hydra_client hc, hydra_oauth2_consent_request_handled crh ORDER BY hc.pk, crh.challenge DESC LIMIT 1; diff --git a/persistence/sql/migratest/testdata/20210928175900_testdata.sql b/persistence/sql/migratest/testdata/20210928175900_testdata.sql index 830dbc9cc52..bcb8eaa7ca1 100644 --- a/persistence/sql/migratest/testdata/20210928175900_testdata.sql +++ b/persistence/sql/migratest/testdata/20210928175900_testdata.sql @@ -74,8 +74,8 @@ VALUES 'subject-0015', 'http://cors/0015_1', 'autdience-0015_1', - CURRENT_TIMESTAMP, - CURRENT_TIMESTAMP, + '2022-02-15 22:20:21', + '2022-02-15 22:20:21', 'http://front_logout/0015', true, 'http://post_redirect/0015_1', diff --git a/persistence/sql/migratest/testdata/20211004110001_testdata.sql b/persistence/sql/migratest/testdata/20211004110001_testdata.sql index aa201af1908..11d8eca763d 100644 --- a/persistence/sql/migratest/testdata/20211004110001_testdata.sql +++ b/persistence/sql/migratest/testdata/20211004110001_testdata.sql @@ -63,8 +63,8 @@ VALUES 'subject-20', 'http://cors/20_1', 'autdience-20_1', - CURRENT_TIMESTAMP, - CURRENT_TIMESTAMP, + '2022-02-15 22:20:23', + '2022-02-15 22:20:23', 'http://front_logout/20', true, 'http://post_redirect/20_1', diff --git a/persistence/sql/migratest/testdata/20211011000001_testdata.sql b/persistence/sql/migratest/testdata/20211011000001_testdata.sql index 4480420a203..2c8a9a1da93 100644 --- a/persistence/sql/migratest/testdata/20211011000001_testdata.sql +++ b/persistence/sql/migratest/testdata/20211011000001_testdata.sql @@ -1,4 +1,4 @@ INSERT INTO hydra_jwk (sid, kid, version, keydata, created_at, pk) VALUES -('sid-0005', 'kid-0005', 4, 'key-0005', CURRENT_TIMESTAMP, '94075738-c3e7-41bc-a7a5-89aece609c39'); +('sid-0005', 'kid-0005', 4, 'key-0005', '2022-02-15 22:20:23', '94075738-c3e7-41bc-a7a5-89aece609c39'); diff --git a/persistence/sql/migratest/testdata/20211226155900_testdata.sql b/persistence/sql/migratest/testdata/20211226155900_testdata.sql index 66f43668392..c85643c09d9 100644 --- a/persistence/sql/migratest/testdata/20211226155900_testdata.sql +++ b/persistence/sql/migratest/testdata/20211226155900_testdata.sql @@ -1,4 +1,4 @@ -INSERT INTO hydra_jwk (pk, sid, kid, version, keydata, created_at) VALUES ('e18d8447-3ec2-42d9-a3ad-e7cca8aa81f0', 'sid-0008', 'kid-0008', 2, 'key-0002', CURRENT_TIMESTAMP); +INSERT INTO hydra_jwk (pk, sid, kid, version, keydata, created_at) VALUES ('e18d8447-3ec2-42d9-a3ad-e7cca8aa81f0', 'sid-0008', 'kid-0008', 2, 'key-0002', '2022-02-15 22:20:23'); 
INSERT INTO hydra_oauth2_trusted_jwt_bearer_issuer (id, issuer, subject, scope, key_set, key_id) VALUES ('30e51720-4a88-48ca-8243-de7d8f461674', 'some-issuer', 'some-subject', 'some-scope', 'sid-0008', 'kid-0008'); diff --git a/persistence/sql/migratest/testdata/20220210000001_testdata.cockroach.sql b/persistence/sql/migratest/testdata/20220210000001_testdata.cockroach.sql index 6a1e4a85060..6a794708029 100644 --- a/persistence/sql/migratest/testdata/20220210000001_testdata.cockroach.sql +++ b/persistence/sql/migratest/testdata/20220210000001_testdata.cockroach.sql @@ -1,144 +1,142 @@ -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0001', (SELECT id FROM networks LIMIT 1), 'Client 0001', 'secret-0001', 'http://redirect/0001_1', 'grant-0001_1', 'response-0001_1', 'scope-0001', 'owner-0001', 'http://policy/0001', 'http://tos/0001', 'http://client/0001', 'http://logo/0001', 'contact-0001_1', 0, '', '', '', '', 'none', '', '', '', '', 1, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '4db0bba3-ff30-4e6d-a7eb-a08a9b0d4d0f', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0002', (SELECT id FROM networks LIMIT 1), 'Client 0002', 'secret-0002', 'http://redirect/0002_1', 'grant-0002_1', 'response-0002_1', 'scope-0002', 'owner-0002', 'http://policy/0002', 'http://tos/0002', 'http://client/0002', 'http://logo/0002', 'contact-0002_1', 0, '', '', '', '', 'none', '', '', '', '', 2, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'f24d3183-8b4f-40fa-a1a0-c5dd2aa9fabb', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0003', (SELECT id FROM 
networks LIMIT 1), 'Client 0003', 'secret-0003', 'http://redirect/0003_1', 'grant-0003_1', 'response-0003_1', 'scope-0003', 'owner-0003', 'http://policy/0003', 'http://tos/0003', 'http://client/0003', 'http://logo/0003', 'contact-0003_1', 0, '', '', '', '', 'none', 'r_alg-0003', 'u_alg-0003', '', '', 3, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '15dc61a0-cff0-452f-aacf-860bc79dbfe1', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0004', (SELECT id FROM networks LIMIT 1), 'Client 0004', 'secret-0004', 'http://redirect/0004_1', 'grant-0004_1', 'response-0004_1', 'scope-0004', 'owner-0004', 'http://policy/0004', 'http://tos/0004', 'http://client/0004', 'http://logo/0004', 'contact-0004_1', 0, 'http://sector_id/0004', '', 'http://jwks/0004', 'http://request/0004_1', 'none', 'r_alg-0004', 'u_alg-0004', '', '', 4, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '277dd36c-1d88-4883-a9bb-c332e7dd390f', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0005', (SELECT id FROM networks LIMIT 1), 'Client 0005', 'secret-0005', 'http://redirect/0005_1', 'grant-0005_1', 'response-0005_1', 'scope-0005', 'owner-0005', 'http://policy/0005', 'http://tos/0005', 'http://client/0005', 'http://logo/0005', 'contact-0005_1', 0, 'http://sector_id/0005', '', 'http://jwks/0005', 'http://request/0005_1', 'token_auth-0005', 'r_alg-0005', 'u_alg-0005', '', '', 5, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '604510c7-390d-4751-95bf-3be3a3bfb065', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES 
('client-0006', (SELECT id FROM networks LIMIT 1), 'Client 0006', 'secret-0006', 'http://redirect/0006_1', 'grant-0006_1', 'response-0006_1', 'scope-0006', 'owner-0006', 'http://policy/0006', 'http://tos/0006', 'http://client/0006', 'http://logo/0006', 'contact-0006_1', 0, 'http://sector_id/0006', '', 'http://jwks/0006', 'http://request/0006_1', 'token_auth-0006', 'r_alg-0006', 'u_alg-0006', 'subject-0006', '', 6, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '44de31b8-38cd-4873-93c9-a0e532427f0a', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0007', (SELECT id FROM networks LIMIT 1), 'Client 0007', 'secret-0007', 'http://redirect/0007_1', 'grant-0007_1', 'response-0007_1', 'scope-0007', 'owner-0007', 'http://policy/0007', 'http://tos/0007', 'http://client/0007', 'http://logo/0007', 'contact-0007_1', 0, 'http://sector_id/0007', '', 'http://jwks/0007', 'http://request/0007_1', 'token_auth-0007', 'r_alg-0007', 'u_alg-0007', 'subject-0007', '', 7, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '704488ca-c654-4e39-9f32-702a48f7ad40', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0008', (SELECT id FROM networks LIMIT 1), 'Client 0008', 'secret-0008', 'http://redirect/0008_1', 'grant-0008_1', 'response-0008_1', 'scope-0008', 'owner-0008', 'http://policy/0008', 'http://tos/0008', 'http://client/0008', 'http://logo/0008', 'contact-0008_1', 0, 'http://sector_id/0008', '', 'http://jwks/0008', 'http://request/0008_1', 'token_auth-0008', 'r_alg-0008', 'u_alg-0008', 'subject-0008', 'http://cors/0008_1', 8, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'e64adc34-cdc7-4aeb-a1a0-d387af2d23e7', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, 
post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0009', (SELECT id FROM networks LIMIT 1), 'Client 0009', 'secret-0009', 'http://redirect/0009_1', 'grant-0009_1', 'response-0009_1', 'scope-0009', 'owner-0009', 'http://policy/0009', 'http://tos/0009', 'http://client/0009', 'http://logo/0009', 'contact-0009_1', 0, 'http://sector_id/0009', '', 'http://jwks/0009', 'http://request/0009_1', 'token_auth-0009', 'r_alg-0009', 'u_alg-0009', 'subject-0009', 'http://cors/0009_1', 9, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '99c1528a-4ac4-46e2-a6e4-e09d0306fd7b', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0010', (SELECT id FROM networks LIMIT 1), 'Client 0010', 'secret-0010', 'http://redirect/0010_1', 'grant-0010_1', 'response-0010_1', 'scope-0010', 'owner-0010', 'http://policy/0010', 'http://tos/0010', 'http://client/0010', 'http://logo/0010', 'contact-0010_1', 0, 'http://sector_id/0010', '', 'http://jwks/0010', 'http://request/0010_1', 'token_auth-0010', 'r_alg-0010', 'u_alg-0010', 'subject-0010', 'http://cors/0010_1', 10, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'ebda0ac9-0afd-4532-aec6-72c7bbd9e0dc', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0011', (SELECT id FROM networks LIMIT 1), 'Client 0011', 'secret-0011', 'http://redirect/0011_1', 'grant-0011_1', 'response-0011_1', 'scope-0011', 'owner-0011', 'http://policy/0011', 'http://tos/0011', 'http://client/0011', 'http://logo/0011', 'contact-0011_1', 0, 'http://sector_id/0011', '', 'http://jwks/0011', 'http://request/0011_1', 'token_auth-0011', 'r_alg-0011', 'u_alg-0011', 'subject-0011', 'http://cors/0011_1', 11, 'autdience-0011_1', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '93311fb4-f5a6-4510-95dc-06cd0cfaa03b', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, 
token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0012', (SELECT id FROM networks LIMIT 1), 'Client 0012', 'secret-0012', 'http://redirect/0012_1', 'grant-0012_1', 'response-0012_1', 'scope-0012', 'owner-0012', 'http://policy/0012', 'http://tos/0012', 'http://client/0012', 'http://logo/0012', 'contact-0012_1', 0, 'http://sector_id/0012', '', 'http://jwks/0012', 'http://request/0012_1', 'token_auth-0012', 'r_alg-0012', 'u_alg-0012', 'subject-0012', 'http://cors/0012_1', 12, 'autdience-0012_1', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'e7206da5-155b-4194-9536-caa54442a470', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0013', (SELECT id FROM networks LIMIT 1), 'Client 0013', 'secret-0013', 'http://redirect/0013_1', 'grant-0013_1', 'response-0013_1', 'scope-0013', 'owner-0013', 'http://policy/0013', 'http://tos/0013', 'http://client/0013', 'http://logo/0013', 'contact-0013_1', 0, 'http://sector_id/0013', '', 'http://jwks/0013', 'http://request/0013_1', 'token_auth-0013', 'r_alg-0013', 'u_alg-0013', 'subject-0013', 'http://cors/0013_1', 13, 'autdience-0013_1', '2022-02-15 22:20:20.969385', '2022-02-15 22:20:20.969385', 'http://front_logout/0013', true, 'http://post_redirect/0013_1', 'http://back_logout/0013', true, '{}', '', '0f309b7d-61d5-495f-bdfa-4836edcbb26d', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0014', (SELECT id FROM networks LIMIT 1), 'Client 0014', 'secret-0014', 'http://redirect/0014_1', 'grant-0014_1', 'response-0014_1', 'scope-0014', 'owner-0014', 'http://policy/0014', 'http://tos/0014', 'http://client/0014', 'http://logo/0014', 'contact-0014_1', 0, 'http://sector_id/0014', '', 'http://jwks/0014', 'http://request/0014_1', 'token_auth-0014', 'r_alg-0014', 'u_alg-0014', 'subject-0014', 'http://cors/0014_1', 14, 'autdience-0014_1', '2022-02-15 22:20:21.13937', '2022-02-15 22:20:21.13937', 
'http://front_logout/0014', true, 'http://post_redirect/0014_1', 'http://back_logout/0014', true, '{"migration": "0014"}', '', '928e8098-c3b5-499b-af29-938bb55462e9', ''); --- INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-2005', (SELECT id FROM networks LIMIT 1), 'Client 2005', 'secret-2005', 'http://redirect/2005_1', 'grant-2005_1', 'response-2005_1', 'scope-2005', 'owner-2005', 'http://policy/2005', 'http://tos/2005', 'http://client/2005', 'http://logo/2005', 'contact-2005_1', 0, 'http://sector_id/2005', '', 'http://jwks/2005', 'http://request/2005_1', 'token_auth-2005', 'r_alg-2005', 'u_alg-2005', 'subject-2005', 'http://cors/2005_1', 2005, 'autdience-2005_1', '2022-02-15 22:20:22.882985', '2022-02-15 22:20:22.882985', 'http://front_logout/2005', true, 'http://post_redirect/2005_1', 'http://back_logout/2005', true, '{"migration": "2005"}', '', 'afaabbc2-def3-4663-a6e0-6eb08efd6904', ''); --- INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-20', (SELECT id FROM networks LIMIT 1), 'Client 20', 'secret-20', 'http://redirect/20_1', 'grant-20_1', 'response-20_1', 'scope-20', 'owner-20', 'http://policy/20', 'http://tos/20', 'http://client/20', 'http://logo/20', 'contact-20_1', 0, 'http://sector_id/20', '', 'http://jwks/20', 'http://request/20_1', 'token_auth-20', 'r_alg-20', 'u_alg-20', 'subject-20', 'http://cors/20_1', 0, 'autdience-20_1', '2022-02-15 22:20:23.004598', '2022-02-15 22:20:23.004598', 'http://front_logout/20', true, 'http://post_redirect/20_1', 'http://back_logout/20', true, '{"migration": "20"}', '', '08f4a4b7-6601-4fd7-bb7f-29ec0681b86d', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0001', 
'24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0001', 'secret-0001', 'http://redirect/0001_1', 'grant-0001_1', 'response-0001_1', 'scope-0001', 'owner-0001', 'http://policy/0001', 'http://tos/0001', 'http://client/0001', 'http://logo/0001', 'contact-0001_1', 0, '', '', '', '', 'none', '', '', '', '', 1, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '4db0bba3-ff30-4e6d-a7eb-a08a9b0d4d0f', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0002', 'secret-0002', 'http://redirect/0002_1', 'grant-0002_1', 'response-0002_1', 'scope-0002', 'owner-0002', 'http://policy/0002', 'http://tos/0002', 'http://client/0002', 'http://logo/0002', 'contact-0002_1', 0, '', '', '', '', 'none', '', '', '', '', 2, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', 'f24d3183-8b4f-40fa-a1a0-c5dd2aa9fabb', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0003', 'secret-0003', 'http://redirect/0003_1', 'grant-0003_1', 'response-0003_1', 'scope-0003', 'owner-0003', 'http://policy/0003', 'http://tos/0003', 'http://client/0003', 'http://logo/0003', 'contact-0003_1', 0, '', '', '', '', 'none', 'r_alg-0003', 'u_alg-0003', '', '', 3, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '15dc61a0-cff0-452f-aacf-860bc79dbfe1', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0004', 'secret-0004', 'http://redirect/0004_1', 'grant-0004_1', 'response-0004_1', 'scope-0004', 'owner-0004', 'http://policy/0004', 
'http://tos/0004', 'http://client/0004', 'http://logo/0004', 'contact-0004_1', 0, 'http://sector_id/0004', '', 'http://jwks/0004', 'http://request/0004_1', 'none', 'r_alg-0004', 'u_alg-0004', '', '', 4, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '277dd36c-1d88-4883-a9bb-c332e7dd390f', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0005', 'secret-0005', 'http://redirect/0005_1', 'grant-0005_1', 'response-0005_1', 'scope-0005', 'owner-0005', 'http://policy/0005', 'http://tos/0005', 'http://client/0005', 'http://logo/0005', 'contact-0005_1', 0, 'http://sector_id/0005', '', 'http://jwks/0005', 'http://request/0005_1', 'token_auth-0005', 'r_alg-0005', 'u_alg-0005', '', '', 5, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '604510c7-390d-4751-95bf-3be3a3bfb065', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0006', 'secret-0006', 'http://redirect/0006_1', 'grant-0006_1', 'response-0006_1', 'scope-0006', 'owner-0006', 'http://policy/0006', 'http://tos/0006', 'http://client/0006', 'http://logo/0006', 'contact-0006_1', 0, 'http://sector_id/0006', '', 'http://jwks/0006', 'http://request/0006_1', 'token_auth-0006', 'r_alg-0006', 'u_alg-0006', 'subject-0006', '', 6, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '44de31b8-38cd-4873-93c9-a0e532427f0a', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0007', 'secret-0007', 'http://redirect/0007_1', 'grant-0007_1', 'response-0007_1', 
'scope-0007', 'owner-0007', 'http://policy/0007', 'http://tos/0007', 'http://client/0007', 'http://logo/0007', 'contact-0007_1', 0, 'http://sector_id/0007', '', 'http://jwks/0007', 'http://request/0007_1', 'token_auth-0007', 'r_alg-0007', 'u_alg-0007', 'subject-0007', '', 7, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '704488ca-c654-4e39-9f32-702a48f7ad40', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0008', 'secret-0008', 'http://redirect/0008_1', 'grant-0008_1', 'response-0008_1', 'scope-0008', 'owner-0008', 'http://policy/0008', 'http://tos/0008', 'http://client/0008', 'http://logo/0008', 'contact-0008_1', 0, 'http://sector_id/0008', '', 'http://jwks/0008', 'http://request/0008_1', 'token_auth-0008', 'r_alg-0008', 'u_alg-0008', 'subject-0008', 'http://cors/0008_1', 8, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', 'e64adc34-cdc7-4aeb-a1a0-d387af2d23e7', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0009', 'secret-0009', 'http://redirect/0009_1', 'grant-0009_1', 'response-0009_1', 'scope-0009', 'owner-0009', 'http://policy/0009', 'http://tos/0009', 'http://client/0009', 'http://logo/0009', 'contact-0009_1', 0, 'http://sector_id/0009', '', 'http://jwks/0009', 'http://request/0009_1', 'token_auth-0009', 'r_alg-0009', 'u_alg-0009', 'subject-0009', 'http://cors/0009_1', 9, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '99c1528a-4ac4-46e2-a6e4-e09d0306fd7b', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0010', 
'24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0010', 'secret-0010', 'http://redirect/0010_1', 'grant-0010_1', 'response-0010_1', 'scope-0010', 'owner-0010', 'http://policy/0010', 'http://tos/0010', 'http://client/0010', 'http://logo/0010', 'contact-0010_1', 0, 'http://sector_id/0010', '', 'http://jwks/0010', 'http://request/0010_1', 'token_auth-0010', 'r_alg-0010', 'u_alg-0010', 'subject-0010', 'http://cors/0010_1', 10, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', 'ebda0ac9-0afd-4532-aec6-72c7bbd9e0dc', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0011', 'secret-0011', 'http://redirect/0011_1', 'grant-0011_1', 'response-0011_1', 'scope-0011', 'owner-0011', 'http://policy/0011', 'http://tos/0011', 'http://client/0011', 'http://logo/0011', 'contact-0011_1', 0, 'http://sector_id/0011', '', 'http://jwks/0011', 'http://request/0011_1', 'token_auth-0011', 'r_alg-0011', 'u_alg-0011', 'subject-0011', 'http://cors/0011_1', 11, 'autdience-0011_1', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '93311fb4-f5a6-4510-95dc-06cd0cfaa03b', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0012', 'secret-0012', 'http://redirect/0012_1', 'grant-0012_1', 'response-0012_1', 'scope-0012', 'owner-0012', 'http://policy/0012', 'http://tos/0012', 'http://client/0012', 'http://logo/0012', 'contact-0012_1', 0, 'http://sector_id/0012', '', 'http://jwks/0012', 'http://request/0012_1', 'token_auth-0012', 'r_alg-0012', 'u_alg-0012', 'subject-0012', 'http://cors/0012_1', 12, 'autdience-0012_1', '2022-02-15 22:20:20', '2022-02-15 22:20:20', '', false, '', '', false, '{}', '', 'e7206da5-155b-4194-9536-caa54442a470', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, 
post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0013', 'secret-0013', 'http://redirect/0013_1', 'grant-0013_1', 'response-0013_1', 'scope-0013', 'owner-0013', 'http://policy/0013', 'http://tos/0013', 'http://client/0013', 'http://logo/0013', 'contact-0013_1', 0, 'http://sector_id/0013', '', 'http://jwks/0013', 'http://request/0013_1', 'token_auth-0013', 'r_alg-0013', 'u_alg-0013', 'subject-0013', 'http://cors/0013_1', 13, 'autdience-0013_1', '2022-02-15 22:20:20', '2022-02-15 22:20:20', 'http://front_logout/0013', true, 'http://post_redirect/0013_1', 'http://back_logout/0013', true, '{}', '', '0f309b7d-61d5-495f-bdfa-4836edcbb26d', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0014', 'secret-0014', 'http://redirect/0014_1', 'grant-0014_1', 'response-0014_1', 'scope-0014', 'owner-0014', 'http://policy/0014', 'http://tos/0014', 'http://client/0014', 'http://logo/0014', 'contact-0014_1', 0, 'http://sector_id/0014', '', 'http://jwks/0014', 'http://request/0014_1', 'token_auth-0014', 'r_alg-0014', 'u_alg-0014', 'subject-0014', 'http://cors/0014_1', 14, 'autdience-0014_1', '2022-02-15 22:20:21', '2022-02-15 22:20:21', 'http://front_logout/0014', true, 'http://post_redirect/0014_1', 'http://back_logout/0014', true, '{"migration": "0014"}', '', '928e8098-c3b5-499b-af29-938bb55462e9', ''); +-- INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-2005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 2005', 'secret-2005', 'http://redirect/2005_1', 'grant-2005_1', 'response-2005_1', 'scope-2005', 'owner-2005', 'http://policy/2005', 'http://tos/2005', 'http://client/2005', 'http://logo/2005', 'contact-2005_1', 0, 'http://sector_id/2005', '', 'http://jwks/2005', 'http://request/2005_1', 'token_auth-2005', 'r_alg-2005', 'u_alg-2005', 'subject-2005', 'http://cors/2005_1', 2005, 'autdience-2005_1', '2022-02-15 22:20:22.882985', '2022-02-15 22:20:22.882985', 'http://front_logout/2005', true, 'http://post_redirect/2005_1', 'http://back_logout/2005', true, '{"migration": "2005"}', '', 'afaabbc2-def3-4663-a6e0-6eb08efd6904', ''); 
+-- INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-20', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 20', 'secret-20', 'http://redirect/20_1', 'grant-20_1', 'response-20_1', 'scope-20', 'owner-20', 'http://policy/20', 'http://tos/20', 'http://client/20', 'http://logo/20', 'contact-20_1', 0, 'http://sector_id/20', '', 'http://jwks/20', 'http://request/20_1', 'token_auth-20', 'r_alg-20', 'u_alg-20', 'subject-20', 'http://cors/20_1', 0, 'autdience-20_1', '2022-02-15 22:20:23.004598', '2022-02-15 22:20:23.004598', 'http://front_logout/20', true, 'http://post_redirect/20_1', 'http://back_logout/20', true, '{"migration": "20"}', '', '08f4a4b7-6601-4fd7-bb7f-29ec0681b86d', ''); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0001', 'kid-0001', (SELECT id FROM networks LIMIT 1), 1, 'key-0001', '2022-02-15 22:20:21.166613', 1, '593ff81f-a21e-4d29-b276-bdce7b5a33df'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0002', 'kid-0002', (SELECT id FROM networks LIMIT 1), 2, 'key-0002', '2022-02-15 22:20:21.166613', 2, '609b0140-8d03-4d1b-aef1-91b86da5e6be'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0003', 'kid-0003', (SELECT id FROM networks LIMIT 1), 3, 'key-0003', '2022-02-15 22:20:21.195915', 3, 'edd5c644-c3fe-43ce-9e80-9acd1f310ecc'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0004', 'kid-0004', (SELECT id FROM networks LIMIT 1), 4, 'key-0004', '2022-02-15 22:20:21.202612', 4, 'a6e7382c-6e30-4553-a67f-4d9bf32118b4'); --- INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0005', 'kid-0005', (SELECT id FROM networks LIMIT 1), 4, 'key-0005', '2022-02-15 22:20:23.042296', 4, '94075738-c3e7-41bc-a7a5-89aece609c39'); --- INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0008', 'kid-0008', (SELECT id FROM networks LIMIT 1), 2, 'key-0002', '2022-02-15 22:20:23.197118', 5, 'e18d8447-3ec2-42d9-a3ad-e7cca8aa81f0'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0001', 'kid-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 1, 'key-0001', CURRENT_TIMESTAMP, 1, '593ff81f-a21e-4d29-b276-bdce7b5a33df'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0002', 'kid-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 2, 'key-0002', '2022-02-15 22:20:21', 2, '609b0140-8d03-4d1b-aef1-91b86da5e6be'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0003', 'kid-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 3, 'key-0003', '2022-02-15 22:20:21', 3, 'edd5c644-c3fe-43ce-9e80-9acd1f310ecc'); +INSERT INTO hydra_jwk (sid, kid, nid, version, 
keydata, created_at, pk_deprecated, pk) VALUES ('sid-0004', 'kid-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 4, 'key-0004', '2022-02-15 22:20:21', 4, 'a6e7382c-6e30-4553-a67f-4d9bf32118b4'); +-- INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0005', 'kid-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 4, 'key-0005', '2022-02-15 22:20:23.042296', 4, '94075738-c3e7-41bc-a7a5-89aece609c39'); +-- INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0008', 'kid-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 2, 'key-0002', '2022-02-15 22:20:23.197118', 5, 'e18d8447-3ec2-42d9-a3ad-e7cca8aa81f0'); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0001', CURRENT_TIMESTAMP, 'subject-0001', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0002', CURRENT_TIMESTAMP, 'subject-0002', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0003', CURRENT_TIMESTAMP, 'subject-0003', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0004', CURRENT_TIMESTAMP, 'subject-0004', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0005', CURRENT_TIMESTAMP, 'subject-0005', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0006', CURRENT_TIMESTAMP, 'subject-0006', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0007', CURRENT_TIMESTAMP, 'subject-0007', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0008', CURRENT_TIMESTAMP, 'subject-0008', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0009', CURRENT_TIMESTAMP, 'subject-0009', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0010', CURRENT_TIMESTAMP, 'subject-0010', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0011', CURRENT_TIMESTAMP, 'subject-0011', (SELECT id FROM networks LIMIT 1), false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0012', CURRENT_TIMESTAMP, 'subject-0012', (SELECT id FROM networks LIMIT 1), false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0013', CURRENT_TIMESTAMP, 'subject-0013', (SELECT id FROM networks LIMIT 1), false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0014', CURRENT_TIMESTAMP, 'subject-0014', (SELECT id FROM networks LIMIT 1), false); +INSERT INTO 
hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0001', '2022-02-15 22:20:21', 'subject-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0002', '2022-02-15 22:20:21', 'subject-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0003', '2022-02-15 22:20:21', 'subject-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0004', '2022-02-15 22:20:21', 'subject-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0005', '2022-02-15 22:20:21', 'subject-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0006', '2022-02-15 22:20:21', 'subject-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0007', '2022-02-15 22:20:21', 'subject-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0008', '2022-02-15 22:20:21', 'subject-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0009', '2022-02-15 22:20:21', 'subject-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0010', '2022-02-15 22:20:21', 'subject-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0011', '2022-02-15 22:20:21', 'subject-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0012', '2022-02-15 22:20:21', 'subject-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0013', '2022-02-15 22:20:21', 'subject-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0014', '2022-02-15 22:20:21', 'subject-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, 
consent_handled_at) VALUES ('challenge-0003', (SELECT id FROM networks LIMIT 1), 'requested_scope-0003_1', 'verifier-0003', 'csrf-0003', 'subject-0003', 'http://request/0003', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0003"}', 'auth_session-0003', '', CURRENT_TIMESTAMP, 128, true, 3, '{}', 'acr-0003', CURRENT_TIMESTAMP, true, 'force_subject_id-0003', '{}', '', 'challenge-0003', 'verifier-0003', true, 'csrf-0003', 'granted_scope-0003_1', true, 3, '{}', '{"session_access_token-0003": "0003"}', '{"session_id_token-0003": "0003"}', true, '', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0004', (SELECT id FROM networks LIMIT 1), 'requested_scope-0004_1', 'verifier-0004', 'csrf-0004', 'subject-0004', 'http://request/0004', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0004"}', 'auth_session-0004', 'requested_audience-0004_1', CURRENT_TIMESTAMP, 128, true, 4, '{}', 'acr-0004', CURRENT_TIMESTAMP, true, 'force_subject_id-0004', '{}', '', 'challenge-0004', 'verifier-0004', true, 'csrf-0004', 'granted_scope-0004_1', true, 4, '{}', '{"session_access_token-0004": "0004"}', '{"session_id_token-0004": "0004"}', true, 'granted_audience-0004_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0005', (SELECT id FROM networks LIMIT 1), 'requested_scope-0005_1', 'verifier-0005', 'csrf-0005', 'subject-0005', 'http://request/0005', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0005"}', 'auth_session-0005', 'requested_audience-0005_1', CURRENT_TIMESTAMP, 128, true, 5, '{}', 'acr-0005', CURRENT_TIMESTAMP, true, 'force_subject_id-0005', '{}', '', 'challenge-0005', 'verifier-0005', true, 'csrf-0005', 'granted_scope-0005_1', true, 5, '{}', '{"session_access_token-0005": "0005"}', '{"session_id_token-0005": "0005"}', true, 'granted_audience-0005_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, 
consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0006', (SELECT id FROM networks LIMIT 1), 'requested_scope-0006_1', 'verifier-0006', 'csrf-0006', 'subject-0006', 'http://request/0006', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0006"}', 'auth_session-0006', 'requested_audience-0006_1', CURRENT_TIMESTAMP, 128, true, 6, '{}', 'acr-0006', CURRENT_TIMESTAMP, true, 'force_subject_id-0006', '{}', '', 'challenge-0006', 'verifier-0006', true, 'csrf-0006', 'granted_scope-0006_1', true, 6, '{}', '{"session_access_token-0006": "0006"}', '{"session_id_token-0006": "0006"}', true, 'granted_audience-0006_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0001', (SELECT id FROM networks LIMIT 1), 'requested_scope-0001_1', 'verifier-0001', 'csrf-0001', 'subject-0001', 'http://request/0001', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0001"}', NULL, '', CURRENT_TIMESTAMP, 128, true, 1, '{}', 'acr-0001', CURRENT_TIMESTAMP, true, '', '{}', '', 'challenge-0001', 'verifier-0001', true, 'csrf-0001', 'granted_scope-0001_1', true, 1, '{}', '{"session_access_token-0001": "0001"}', '{"session_id_token-0001": "0001"}', true, '', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0002', (SELECT id FROM networks LIMIT 1), 'requested_scope-0002_1', 'verifier-0002', 'csrf-0002', 'subject-0002', 'http://request/0002', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0002"}', NULL, '', CURRENT_TIMESTAMP, 128, true, 2, '{}', 'acr-0002', CURRENT_TIMESTAMP, true, 'force_subject_id-0002', '{}', '', 'challenge-0002', 'verifier-0002', true, 'csrf-0002', 'granted_scope-0002_1', true, 2, '{}', '{"session_access_token-0002": "0002"}', '{"session_id_token-0002": "0002"}', true, '', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) 
VALUES ('challenge-0007', (SELECT id FROM networks LIMIT 1), 'requested_scope-0007_1', 'verifier-0007', 'csrf-0007', 'subject-0007', 'http://request/0007', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0007"}', 'auth_session-0007', 'requested_audience-0007_1', CURRENT_TIMESTAMP, 128, true, 7, '{}', 'acr-0007', CURRENT_TIMESTAMP, true, 'force_subject_id-0007', '{}', '', 'challenge-0007', 'verifier-0007', true, 'csrf-0007', 'granted_scope-0007_1', true, 7, '{}', '{"session_access_token-0007": "0007"}', '{"session_id_token-0007": "0007"}', true, 'granted_audience-0007_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0008', (SELECT id FROM networks LIMIT 1), 'requested_scope-0008_1', 'verifier-0008', 'csrf-0008', 'subject-0008', 'http://request/0008', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0008"}', 'auth_session-0008', 'requested_audience-0008_1', CURRENT_TIMESTAMP, 128, true, 8, '{}', 'acr-0008', CURRENT_TIMESTAMP, true, 'force_subject_id-0008', '{"context": "0008"}', '', 'challenge-0008', 'verifier-0008', true, 'csrf-0008', 'granted_scope-0008_1', true, 8, '{}', '{"session_access_token-0008": "0008"}', '{"session_id_token-0008": "0008"}', true, 'granted_audience-0008_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0009', (SELECT id FROM networks LIMIT 1), 'requested_scope-0009_1', 'verifier-0009', 'csrf-0009', 'subject-0009', 'http://request/0009', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0009"}', 'auth_session-0009', 'requested_audience-0009_1', CURRENT_TIMESTAMP, 128, true, 9, '{}', 'acr-0009', CURRENT_TIMESTAMP, true, 'force_subject_id-0009', '{"context": "0009"}', '', 'challenge-0009', 'verifier-0009', true, 'csrf-0009', 'granted_scope-0009_1', true, 9, '{}', '{"session_access_token-0009": "0009"}', '{"session_id_token-0009": "0009"}', true, 'granted_audience-0009_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, 
consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0010', (SELECT id FROM networks LIMIT 1), 'requested_scope-0010_1', 'verifier-0010', 'csrf-0010', 'subject-0010', 'http://request/0010', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0010"}', 'auth_session-0010', 'requested_audience-0010_1', CURRENT_TIMESTAMP, 128, true, 10, '{}', 'acr-0010', CURRENT_TIMESTAMP, true, 'force_subject_id-0010', '{"context": "0010"}', '', 'challenge-0010', 'verifier-0010', true, 'csrf-0010', 'granted_scope-0010_1', true, 10, '{}', '{"session_access_token-0010": "0010"}', '{"session_id_token-0010": "0010"}', true, 'granted_audience-0010_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0011', (SELECT id FROM networks LIMIT 1), 'requested_scope-0011_1', 'verifier-0011', 'csrf-0011', 'subject-0011', 'http://request/0011', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0011"}', 'auth_session-0011', 'requested_audience-0011_1', CURRENT_TIMESTAMP, 128, true, 11, '{}', 'acr-0011', CURRENT_TIMESTAMP, true, 'force_subject_id-0011', '{"context": "0011"}', '', 'challenge-0011', 'verifier-0011', true, 'csrf-0011', 'granted_scope-0011_1', true, 11, '{}', '{"session_access_token-0011": "0011"}', '{"session_id_token-0011": "0011"}', true, 'granted_audience-0011_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0012', (SELECT id FROM networks LIMIT 1), 'requested_scope-0012_1', 'verifier-0012', 'csrf-0012', 'subject-0012', 'http://request/0012', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0012"}', 'auth_session-0012', 'requested_audience-0012_1', CURRENT_TIMESTAMP, 128, true, 12, '{}', 'acr-0012', CURRENT_TIMESTAMP, true, 'force_subject_id-0012', '{"context": "0012"}', '', 'challenge-0012', 'verifier-0012', true, 'csrf-0012', 'granted_scope-0012_1', true, 12, '{}', '{"session_access_token-0012": "0012"}', '{"session_id_token-0012": "0012"}', true, 'granted_audience-0012_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, 
forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0013', (SELECT id FROM networks LIMIT 1), 'requested_scope-0013_1', 'verifier-0013', 'csrf-0013', 'subject-0013', 'http://request/0013', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0013"}', 'auth_session-0013', 'requested_audience-0013_1', CURRENT_TIMESTAMP, 128, true, 13, '{}', 'acr-0013', CURRENT_TIMESTAMP, true, 'force_subject_id-0013', '{"context": "0013"}', '', 'challenge-0013', 'verifier-0013', true, 'csrf-0013', 'granted_scope-0013_1', true, 13, '{}', '{"session_access_token-0013": "0013"}', '{"session_id_token-0013": "0013"}', true, 'granted_audience-0013_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0014', (SELECT id FROM networks LIMIT 1), 'requested_scope-0014_1', 'verifier-0014', 'csrf-0014', 'subject-0014', 'http://request/0014', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0014"}', 'auth_session-0014', 'requested_audience-0014_1', CURRENT_TIMESTAMP, 128, true, 14, '{}', 'acr-0014', CURRENT_TIMESTAMP, true, 'force_subject_id-0014', '{"context": "0014"}', '', 'challenge-0014', 'verifier-0014', true, 'csrf-0014', 'granted_scope-0014_1', true, 14, '{}', '{"session_access_token-0014": "0014"}', '{"session_id_token-0014": "0014"}', true, 'granted_audience-0014_1', CURRENT_TIMESTAMP); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0003_1', 'verifier-0003', 'csrf-0003', 'subject-0003', 'http://request/0003', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0003"}', 'auth_session-0003', '', '2022-02-15 22:20:21', 128, true, 3, '{}', 'acr-0003', '2022-02-15 22:20:21', true, 'force_subject_id-0003', '{}', '', 'challenge-0003', 'verifier-0003', true, 'csrf-0003', 'granted_scope-0003_1', true, 3, '{}', '{"session_access_token-0003": "0003"}', '{"session_id_token-0003": "0003"}', true, '', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, 
login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0004_1', 'verifier-0004', 'csrf-0004', 'subject-0004', 'http://request/0004', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0004"}', 'auth_session-0004', 'requested_audience-0004_1', '2022-02-15 22:20:21', 128, true, 4, '{}', 'acr-0004', '2022-02-15 22:20:21', true, 'force_subject_id-0004', '{}', '', 'challenge-0004', 'verifier-0004', true, 'csrf-0004', 'granted_scope-0004_1', true, 4, '{}', '{"session_access_token-0004": "0004"}', '{"session_id_token-0004": "0004"}', true, 'granted_audience-0004_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0005_1', 'verifier-0005', 'csrf-0005', 'subject-0005', 'http://request/0005', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0005"}', 'auth_session-0005', 'requested_audience-0005_1', '2022-02-15 22:20:21', 128, true, 5, '{}', 'acr-0005', '2022-02-15 22:20:21', true, 'force_subject_id-0005', '{}', '', 'challenge-0005', 'verifier-0005', true, 'csrf-0005', 'granted_scope-0005_1', true, 5, '{}', '{"session_access_token-0005": "0005"}', '{"session_id_token-0005": "0005"}', true, 'granted_audience-0005_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0006_1', 'verifier-0006', 'csrf-0006', 'subject-0006', 'http://request/0006', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0006"}', 'auth_session-0006', 'requested_audience-0006_1', '2022-02-15 22:20:21', 128, true, 6, '{}', 'acr-0006', '2022-02-15 22:20:21', true, 'force_subject_id-0006', '{}', '', 'challenge-0006', 'verifier-0006', true, 'csrf-0006', 'granted_scope-0006_1', true, 6, '{}', '{"session_access_token-0006": "0006"}', '{"session_id_token-0006": "0006"}', true, 'granted_audience-0006_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, 
login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0001_1', 'verifier-0001', 'csrf-0001', 'subject-0001', 'http://request/0001', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0001"}', NULL, '', '2022-02-15 22:20:21', 128, true, 1, '{}', 'acr-0001', '2022-02-15 22:20:21', true, '', '{}', '', 'challenge-0001', 'verifier-0001', true, 'csrf-0001', 'granted_scope-0001_1', true, 1, '{}', '{"session_access_token-0001": "0001"}', '{"session_id_token-0001": "0001"}', true, '', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0002_1', 'verifier-0002', 'csrf-0002', 'subject-0002', 'http://request/0002', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0002"}', NULL, '', '2022-02-15 22:20:21', 128, true, 2, '{}', 'acr-0002', '2022-02-15 22:20:21', true, 'force_subject_id-0002', '{}', '', 'challenge-0002', 'verifier-0002', true, 'csrf-0002', 'granted_scope-0002_1', true, 2, '{}', '{"session_access_token-0002": "0002"}', '{"session_id_token-0002": "0002"}', true, '', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0007_1', 'verifier-0007', 'csrf-0007', 'subject-0007', 'http://request/0007', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0007"}', 'auth_session-0007', 'requested_audience-0007_1', '2022-02-15 22:20:21', 128, true, 7, '{}', 'acr-0007', '2022-02-15 22:20:21', true, 'force_subject_id-0007', '{}', '', 'challenge-0007', 'verifier-0007', true, 'csrf-0007', 'granted_scope-0007_1', true, 7, '{}', '{"session_access_token-0007": "0007"}', '{"session_id_token-0007": "0007"}', true, 'granted_audience-0007_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, 
login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0008_1', 'verifier-0008', 'csrf-0008', 'subject-0008', 'http://request/0008', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0008"}', 'auth_session-0008', 'requested_audience-0008_1', '2022-02-15 22:20:21', 128, true, 8, '{}', 'acr-0008', '2022-02-15 22:20:21', true, 'force_subject_id-0008', '{"context": "0008"}', '', 'challenge-0008', 'verifier-0008', true, 'csrf-0008', 'granted_scope-0008_1', true, 8, '{}', '{"session_access_token-0008": "0008"}', '{"session_id_token-0008": "0008"}', true, 'granted_audience-0008_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0009_1', 'verifier-0009', 'csrf-0009', 'subject-0009', 'http://request/0009', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0009"}', 'auth_session-0009', 'requested_audience-0009_1', '2022-02-15 22:20:21', 128, true, 9, '{}', 'acr-0009', '2022-02-15 22:20:21', true, 'force_subject_id-0009', '{"context": "0009"}', '', 'challenge-0009', 'verifier-0009', true, 'csrf-0009', 'granted_scope-0009_1', true, 9, '{}', '{"session_access_token-0009": "0009"}', '{"session_id_token-0009": "0009"}', true, 'granted_audience-0009_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0010_1', 'verifier-0010', 'csrf-0010', 'subject-0010', 'http://request/0010', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0010"}', 'auth_session-0010', 'requested_audience-0010_1', '2022-02-15 22:20:21', 128, true, 10, '{}', 'acr-0010', '2022-02-15 22:20:21', true, 'force_subject_id-0010', '{"context": "0010"}', '', 'challenge-0010', 'verifier-0010', true, 'csrf-0010', 'granted_scope-0010_1', true, 10, '{}', 
'{"session_access_token-0010": "0010"}', '{"session_id_token-0010": "0010"}', true, 'granted_audience-0010_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0011_1', 'verifier-0011', 'csrf-0011', 'subject-0011', 'http://request/0011', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0011"}', 'auth_session-0011', 'requested_audience-0011_1', '2022-02-15 22:20:21', 128, true, 11, '{}', 'acr-0011', '2022-02-15 22:20:21', true, 'force_subject_id-0011', '{"context": "0011"}', '', 'challenge-0011', 'verifier-0011', true, 'csrf-0011', 'granted_scope-0011_1', true, 11, '{}', '{"session_access_token-0011": "0011"}', '{"session_id_token-0011": "0011"}', true, 'granted_audience-0011_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0012_1', 'verifier-0012', 'csrf-0012', 'subject-0012', 'http://request/0012', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0012"}', 'auth_session-0012', 'requested_audience-0012_1', '2022-02-15 22:20:21', 128, true, 12, '{}', 'acr-0012', '2022-02-15 22:20:21', true, 'force_subject_id-0012', '{"context": "0012"}', '', 'challenge-0012', 'verifier-0012', true, 'csrf-0012', 'granted_scope-0012_1', true, 12, '{}', '{"session_access_token-0012": "0012"}', '{"session_id_token-0012": "0012"}', true, 'granted_audience-0012_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0013_1', 'verifier-0013', 'csrf-0013', 'subject-0013', 'http://request/0013', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0013"}', 'auth_session-0013', 'requested_audience-0013_1', '2022-02-15 22:20:21', 128, true, 13, 
'{}', 'acr-0013', '2022-02-15 22:20:21', true, 'force_subject_id-0013', '{"context": "0013"}', '', 'challenge-0013', 'verifier-0013', true, 'csrf-0013', 'granted_scope-0013_1', true, 13, '{}', '{"session_access_token-0013": "0013"}', '{"session_id_token-0013": "0013"}', true, 'granted_audience-0013_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0014_1', 'verifier-0014', 'csrf-0014', 'subject-0014', 'http://request/0014', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0014"}', 'auth_session-0014', 'requested_audience-0014_1', '2022-02-15 22:20:21', 128, true, 14, '{}', 'acr-0014', '2022-02-15 22:20:21', true, 'force_subject_id-0014', '{"context": "0014"}', '', 'challenge-0014', 'verifier-0014', true, 'csrf-0014', 'granted_scope-0014_1', true, 14, '{}', '{"session_access_token-0014": "0014"}', '{"session_id_token-0014": "0014"}', true, 'granted_audience-0014_1', '2022-02-15 22:20:21'); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0002', 'client-0014', 'subject_obfuscated-0002', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0003', 'client-0014', 'subject_obfuscated-0003', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0004', 'client-0014', 'subject_obfuscated-0004', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0005', 'client-0014', 'subject_obfuscated-0005', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0006', 'client-0014', 'subject_obfuscated-0006', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0007', 'client-0014', 'subject_obfuscated-0007', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0008', 'client-0014', 'subject_obfuscated-0008', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0009', 'client-0014', 'subject_obfuscated-0009', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0010', 'client-0014', 'subject_obfuscated-0010', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) 
VALUES ('subject-0011', 'client-0014', 'subject_obfuscated-0011', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0012', 'client-0014', 'subject_obfuscated-0012', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0013', 'client-0014', 'subject_obfuscated-0013', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0014', 'client-0014', 'subject_obfuscated-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0002', 'client-0014', 'subject_obfuscated-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0003', 'client-0014', 'subject_obfuscated-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0004', 'client-0014', 'subject_obfuscated-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0005', 'client-0014', 'subject_obfuscated-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0006', 'client-0014', 'subject_obfuscated-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0007', 'client-0014', 'subject_obfuscated-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0008', 'client-0014', 'subject_obfuscated-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0009', 'client-0014', 'subject_obfuscated-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0010', 'client-0014', 'subject_obfuscated-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0011', 'client-0014', 'subject_obfuscated-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0012', 'client-0014', 'subject_obfuscated-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0013', 'client-0014', 'subject_obfuscated-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0014', 'client-0014', 'subject_obfuscated-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_logout_request (challenge, 
verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0009', 'verifier-0009', 'subject-0009', 'session_id-0009', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0009', 'http://post_logout/0009', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0010', 'verifier-0010', 'subject-0010', 'session_id-0010', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0010', 'http://post_logout/0010', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0011', 'verifier-0011', 'subject-0011', 'session_id-0011', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0011', 'http://post_logout/0011', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0012', 'verifier-0012', 'subject-0012', 'session_id-0012', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0012', 'http://post_logout/0012', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0013', 'verifier-0013', 'subject-0013', 'session_id-0013', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0013', 'http://post_logout/0013', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0014', 'verifier-0014', 'subject-0014', 'session_id-0014', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0014', 'http://post_logout/0014', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0009', 'verifier-0009', 'subject-0009', 'session_id-0009', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0009', 'http://post_logout/0009', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0010', 'verifier-0010', 'subject-0010', 'session_id-0010', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0010', 'http://post_logout/0010', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0011', 'verifier-0011', 'subject-0011', 'session_id-0011', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0011', 'http://post_logout/0011', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0012', 'verifier-0012', 'subject-0012', 'session_id-0012', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 
'http://request/0012', 'http://post_logout/0012', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0013', 'verifier-0013', 'subject-0013', 'session_id-0013', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0013', 'http://post_logout/0013', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0014', 'verifier-0014', 'subject-0014', 'session_id-0014', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0014', 'http://post_logout/0014', true, true, false, true); --- INSERT INTO hydra_oauth2_jti_blacklist (signature, expires_at, nid) VALUES ('sig-0011', '2022-02-15 22:20:22.816314', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, 
requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', 
'2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 
'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, 
granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, 
client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, 
granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 
22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', 
false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 
22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, 
granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 
'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT 
INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc 
(signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); - -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, 
'', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, 
subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, 
session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); diff --git a/persistence/sql/migratest/testdata/20220210000001_testdata.sqlite3.sql b/persistence/sql/migratest/testdata/20220210000001_testdata.sqlite3.sql index 4069c57a84c..3f5c9f456f7 100644 --- a/persistence/sql/migratest/testdata/20220210000001_testdata.sqlite3.sql +++ b/persistence/sql/migratest/testdata/20220210000001_testdata.sqlite3.sql @@ -1,145 +1,144 @@ -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0001', (SELECT id FROM networks LIMIT 1), 'Client 0001', 'secret-0001', 'http://redirect/0001_1', 'grant-0001_1', 'response-0001_1', 'scope-0001', 'owner-0001', 'http://policy/0001', 'http://tos/0001', 'http://client/0001', 'http://logo/0001', 'contact-0001_1', 0, '', '', '', '', 'none', '', '', '', '', 1, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '4db0bba3-ff30-4e6d-a7eb-a08a9b0d4d0f', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0002', (SELECT id FROM 
networks LIMIT 1), 'Client 0002', 'secret-0002', 'http://redirect/0002_1', 'grant-0002_1', 'response-0002_1', 'scope-0002', 'owner-0002', 'http://policy/0002', 'http://tos/0002', 'http://client/0002', 'http://logo/0002', 'contact-0002_1', 0, '', '', '', '', 'none', '', '', '', '', 2, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'f24d3183-8b4f-40fa-a1a0-c5dd2aa9fabb', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0003', (SELECT id FROM networks LIMIT 1), 'Client 0003', 'secret-0003', 'http://redirect/0003_1', 'grant-0003_1', 'response-0003_1', 'scope-0003', 'owner-0003', 'http://policy/0003', 'http://tos/0003', 'http://client/0003', 'http://logo/0003', 'contact-0003_1', 0, '', '', '', '', 'none', 'r_alg-0003', 'u_alg-0003', '', '', 3, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '15dc61a0-cff0-452f-aacf-860bc79dbfe1', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0004', (SELECT id FROM networks LIMIT 1), 'Client 0004', 'secret-0004', 'http://redirect/0004_1', 'grant-0004_1', 'response-0004_1', 'scope-0004', 'owner-0004', 'http://policy/0004', 'http://tos/0004', 'http://client/0004', 'http://logo/0004', 'contact-0004_1', 0, 'http://sector_id/0004', '', 'http://jwks/0004', 'http://request/0004_1', 'none', 'r_alg-0004', 'u_alg-0004', '', '', 4, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '277dd36c-1d88-4883-a9bb-c332e7dd390f', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0005', (SELECT id FROM networks LIMIT 1), 'Client 0005', 'secret-0005', 
'http://redirect/0005_1', 'grant-0005_1', 'response-0005_1', 'scope-0005', 'owner-0005', 'http://policy/0005', 'http://tos/0005', 'http://client/0005', 'http://logo/0005', 'contact-0005_1', 0, 'http://sector_id/0005', '', 'http://jwks/0005', 'http://request/0005_1', 'token_auth-0005', 'r_alg-0005', 'u_alg-0005', '', '', 5, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '604510c7-390d-4751-95bf-3be3a3bfb065', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0006', (SELECT id FROM networks LIMIT 1), 'Client 0006', 'secret-0006', 'http://redirect/0006_1', 'grant-0006_1', 'response-0006_1', 'scope-0006', 'owner-0006', 'http://policy/0006', 'http://tos/0006', 'http://client/0006', 'http://logo/0006', 'contact-0006_1', 0, 'http://sector_id/0006', '', 'http://jwks/0006', 'http://request/0006_1', 'token_auth-0006', 'r_alg-0006', 'u_alg-0006', 'subject-0006', '', 6, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '44de31b8-38cd-4873-93c9-a0e532427f0a', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0007', (SELECT id FROM networks LIMIT 1), 'Client 0007', 'secret-0007', 'http://redirect/0007_1', 'grant-0007_1', 'response-0007_1', 'scope-0007', 'owner-0007', 'http://policy/0007', 'http://tos/0007', 'http://client/0007', 'http://logo/0007', 'contact-0007_1', 0, 'http://sector_id/0007', '', 'http://jwks/0007', 'http://request/0007_1', 'token_auth-0007', 'r_alg-0007', 'u_alg-0007', 'subject-0007', '', 7, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '704488ca-c654-4e39-9f32-702a48f7ad40', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, 
registration_access_token_signature) VALUES ('client-0008', (SELECT id FROM networks LIMIT 1), 'Client 0008', 'secret-0008', 'http://redirect/0008_1', 'grant-0008_1', 'response-0008_1', 'scope-0008', 'owner-0008', 'http://policy/0008', 'http://tos/0008', 'http://client/0008', 'http://logo/0008', 'contact-0008_1', 0, 'http://sector_id/0008', '', 'http://jwks/0008', 'http://request/0008_1', 'token_auth-0008', 'r_alg-0008', 'u_alg-0008', 'subject-0008', 'http://cors/0008_1', 8, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'e64adc34-cdc7-4aeb-a1a0-d387af2d23e7', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0009', (SELECT id FROM networks LIMIT 1), 'Client 0009', 'secret-0009', 'http://redirect/0009_1', 'grant-0009_1', 'response-0009_1', 'scope-0009', 'owner-0009', 'http://policy/0009', 'http://tos/0009', 'http://client/0009', 'http://logo/0009', 'contact-0009_1', 0, 'http://sector_id/0009', '', 'http://jwks/0009', 'http://request/0009_1', 'token_auth-0009', 'r_alg-0009', 'u_alg-0009', 'subject-0009', 'http://cors/0009_1', 9, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '99c1528a-4ac4-46e2-a6e4-e09d0306fd7b', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0010', (SELECT id FROM networks LIMIT 1), 'Client 0010', 'secret-0010', 'http://redirect/0010_1', 'grant-0010_1', 'response-0010_1', 'scope-0010', 'owner-0010', 'http://policy/0010', 'http://tos/0010', 'http://client/0010', 'http://logo/0010', 'contact-0010_1', 0, 'http://sector_id/0010', '', 'http://jwks/0010', 'http://request/0010_1', 'token_auth-0010', 'r_alg-0010', 'u_alg-0010', 'subject-0010', 'http://cors/0010_1', 10, '', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'ebda0ac9-0afd-4532-aec6-72c7bbd9e0dc', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, 
frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0011', (SELECT id FROM networks LIMIT 1), 'Client 0011', 'secret-0011', 'http://redirect/0011_1', 'grant-0011_1', 'response-0011_1', 'scope-0011', 'owner-0011', 'http://policy/0011', 'http://tos/0011', 'http://client/0011', 'http://logo/0011', 'contact-0011_1', 0, 'http://sector_id/0011', '', 'http://jwks/0011', 'http://request/0011_1', 'token_auth-0011', 'r_alg-0011', 'u_alg-0011', 'subject-0011', 'http://cors/0011_1', 11, 'autdience-0011_1', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', '93311fb4-f5a6-4510-95dc-06cd0cfaa03b', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0012', (SELECT id FROM networks LIMIT 1), 'Client 0012', 'secret-0012', 'http://redirect/0012_1', 'grant-0012_1', 'response-0012_1', 'scope-0012', 'owner-0012', 'http://policy/0012', 'http://tos/0012', 'http://client/0012', 'http://logo/0012', 'contact-0012_1', 0, 'http://sector_id/0012', '', 'http://jwks/0012', 'http://request/0012_1', 'token_auth-0012', 'r_alg-0012', 'u_alg-0012', 'subject-0012', 'http://cors/0012_1', 12, 'autdience-0012_1', '2022-02-15 22:20:20.911379', '2022-02-15 22:20:20.911379', '', false, '', '', false, '{}', '', 'e7206da5-155b-4194-9536-caa54442a470', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0013', (SELECT id FROM networks LIMIT 1), 'Client 0013', 'secret-0013', 'http://redirect/0013_1', 'grant-0013_1', 'response-0013_1', 'scope-0013', 'owner-0013', 'http://policy/0013', 'http://tos/0013', 'http://client/0013', 'http://logo/0013', 'contact-0013_1', 0, 'http://sector_id/0013', '', 'http://jwks/0013', 'http://request/0013_1', 'token_auth-0013', 'r_alg-0013', 'u_alg-0013', 'subject-0013', 'http://cors/0013_1', 13, 'autdience-0013_1', '2022-02-15 22:20:20.969385', '2022-02-15 22:20:20.969385', 'http://front_logout/0013', true, 'http://post_redirect/0013_1', 'http://back_logout/0013', true, '{}', '', '0f309b7d-61d5-495f-bdfa-4836edcbb26d', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, 
scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0014', (SELECT id FROM networks LIMIT 1), 'Client 0014', 'secret-0014', 'http://redirect/0014_1', 'grant-0014_1', 'response-0014_1', 'scope-0014', 'owner-0014', 'http://policy/0014', 'http://tos/0014', 'http://client/0014', 'http://logo/0014', 'contact-0014_1', 0, 'http://sector_id/0014', '', 'http://jwks/0014', 'http://request/0014_1', 'token_auth-0014', 'r_alg-0014', 'u_alg-0014', 'subject-0014', 'http://cors/0014_1', 14, 'autdience-0014_1', '2022-02-15 22:20:21.13937', '2022-02-15 22:20:21.13937', 'http://front_logout/0014', true, 'http://post_redirect/0014_1', 'http://back_logout/0014', true, '{"migration": "0014"}', '', '928e8098-c3b5-499b-af29-938bb55462e9', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-2005', (SELECT id FROM networks LIMIT 1), 'Client 2005', 'secret-2005', 'http://redirect/2005_1', 'grant-2005_1', 'response-2005_1', 'scope-2005', 'owner-2005', 'http://policy/2005', 'http://tos/2005', 'http://client/2005', 'http://logo/2005', 'contact-2005_1', 0, 'http://sector_id/2005', '', 'http://jwks/2005', 'http://request/2005_1', 'token_auth-2005', 'r_alg-2005', 'u_alg-2005', 'subject-2005', 'http://cors/2005_1', 2005, 'autdience-2005_1', '2022-02-15 22:20:22.882985', '2022-02-15 22:20:22.882985', 'http://front_logout/2005', true, 'http://post_redirect/2005_1', 'http://back_logout/2005', true, '{"migration": "2005"}', '', 'afaabbc2-def3-4663-a6e0-6eb08efd6904', ''); -INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-20', (SELECT id FROM networks LIMIT 1), 'Client 20', 'secret-20', 'http://redirect/20_1', 'grant-20_1', 'response-20_1', 'scope-20', 'owner-20', 'http://policy/20', 'http://tos/20', 'http://client/20', 'http://logo/20', 'contact-20_1', 0, 'http://sector_id/20', '', 
'http://jwks/20', 'http://request/20_1', 'token_auth-20', 'r_alg-20', 'u_alg-20', 'subject-20', 'http://cors/20_1', 0, 'autdience-20_1', '2022-02-15 22:20:23.004598', '2022-02-15 22:20:23.004598', 'http://front_logout/20', true, 'http://post_redirect/20_1', 'http://back_logout/20', true, '{"migration": "20"}', '', '08f4a4b7-6601-4fd7-bb7f-29ec0681b86d', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0001', 'secret-0001', 'http://redirect/0001_1', 'grant-0001_1', 'response-0001_1', 'scope-0001', 'owner-0001', 'http://policy/0001', 'http://tos/0001', 'http://client/0001', 'http://logo/0001', 'contact-0001_1', 0, '', '', '', '', 'none', '', '', '', '', 1, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '4db0bba3-ff30-4e6d-a7eb-a08a9b0d4d0f', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0002', 'secret-0002', 'http://redirect/0002_1', 'grant-0002_1', 'response-0002_1', 'scope-0002', 'owner-0002', 'http://policy/0002', 'http://tos/0002', 'http://client/0002', 'http://logo/0002', 'contact-0002_1', 0, '', '', '', '', 'none', '', '', '', '', 2, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', 'f24d3183-8b4f-40fa-a1a0-c5dd2aa9fabb', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0003', 'secret-0003', 'http://redirect/0003_1', 'grant-0003_1', 'response-0003_1', 'scope-0003', 'owner-0003', 'http://policy/0003', 'http://tos/0003', 'http://client/0003', 'http://logo/0003', 'contact-0003_1', 0, '', '', '', '', 'none', 
'r_alg-0003', 'u_alg-0003', '', '', 3, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '15dc61a0-cff0-452f-aacf-860bc79dbfe1', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0004', 'secret-0004', 'http://redirect/0004_1', 'grant-0004_1', 'response-0004_1', 'scope-0004', 'owner-0004', 'http://policy/0004', 'http://tos/0004', 'http://client/0004', 'http://logo/0004', 'contact-0004_1', 0, 'http://sector_id/0004', '', 'http://jwks/0004', 'http://request/0004_1', 'none', 'r_alg-0004', 'u_alg-0004', '', '', 4, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '277dd36c-1d88-4883-a9bb-c332e7dd390f', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0005', 'secret-0005', 'http://redirect/0005_1', 'grant-0005_1', 'response-0005_1', 'scope-0005', 'owner-0005', 'http://policy/0005', 'http://tos/0005', 'http://client/0005', 'http://logo/0005', 'contact-0005_1', 0, 'http://sector_id/0005', '', 'http://jwks/0005', 'http://request/0005_1', 'token_auth-0005', 'r_alg-0005', 'u_alg-0005', '', '', 5, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '604510c7-390d-4751-95bf-3be3a3bfb065', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0006', 'secret-0006', 'http://redirect/0006_1', 'grant-0006_1', 'response-0006_1', 'scope-0006', 'owner-0006', 'http://policy/0006', 'http://tos/0006', 'http://client/0006', 'http://logo/0006', 'contact-0006_1', 0, 'http://sector_id/0006', '', 'http://jwks/0006', 
'http://request/0006_1', 'token_auth-0006', 'r_alg-0006', 'u_alg-0006', 'subject-0006', '', 6, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '44de31b8-38cd-4873-93c9-a0e532427f0a', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0007', 'secret-0007', 'http://redirect/0007_1', 'grant-0007_1', 'response-0007_1', 'scope-0007', 'owner-0007', 'http://policy/0007', 'http://tos/0007', 'http://client/0007', 'http://logo/0007', 'contact-0007_1', 0, 'http://sector_id/0007', '', 'http://jwks/0007', 'http://request/0007_1', 'token_auth-0007', 'r_alg-0007', 'u_alg-0007', 'subject-0007', '', 7, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '704488ca-c654-4e39-9f32-702a48f7ad40', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0008', 'secret-0008', 'http://redirect/0008_1', 'grant-0008_1', 'response-0008_1', 'scope-0008', 'owner-0008', 'http://policy/0008', 'http://tos/0008', 'http://client/0008', 'http://logo/0008', 'contact-0008_1', 0, 'http://sector_id/0008', '', 'http://jwks/0008', 'http://request/0008_1', 'token_auth-0008', 'r_alg-0008', 'u_alg-0008', 'subject-0008', 'http://cors/0008_1', 8, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', 'e64adc34-cdc7-4aeb-a1a0-d387af2d23e7', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0009', 'secret-0009', 'http://redirect/0009_1', 'grant-0009_1', 'response-0009_1', 'scope-0009', 'owner-0009', 'http://policy/0009', 'http://tos/0009', 
'http://client/0009', 'http://logo/0009', 'contact-0009_1', 0, 'http://sector_id/0009', '', 'http://jwks/0009', 'http://request/0009_1', 'token_auth-0009', 'r_alg-0009', 'u_alg-0009', 'subject-0009', 'http://cors/0009_1', 9, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '99c1528a-4ac4-46e2-a6e4-e09d0306fd7b', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0010', 'secret-0010', 'http://redirect/0010_1', 'grant-0010_1', 'response-0010_1', 'scope-0010', 'owner-0010', 'http://policy/0010', 'http://tos/0010', 'http://client/0010', 'http://logo/0010', 'contact-0010_1', 0, 'http://sector_id/0010', '', 'http://jwks/0010', 'http://request/0010_1', 'token_auth-0010', 'r_alg-0010', 'u_alg-0010', 'subject-0010', 'http://cors/0010_1', 10, '', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', 'ebda0ac9-0afd-4532-aec6-72c7bbd9e0dc', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0011', 'secret-0011', 'http://redirect/0011_1', 'grant-0011_1', 'response-0011_1', 'scope-0011', 'owner-0011', 'http://policy/0011', 'http://tos/0011', 'http://client/0011', 'http://logo/0011', 'contact-0011_1', 0, 'http://sector_id/0011', '', 'http://jwks/0011', 'http://request/0011_1', 'token_auth-0011', 'r_alg-0011', 'u_alg-0011', 'subject-0011', 'http://cors/0011_1', 11, 'autdience-0011_1', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, '', false, '', '', false, '{}', '', '93311fb4-f5a6-4510-95dc-06cd0cfaa03b', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 
'Client 0012', 'secret-0012', 'http://redirect/0012_1', 'grant-0012_1', 'response-0012_1', 'scope-0012', 'owner-0012', 'http://policy/0012', 'http://tos/0012', 'http://client/0012', 'http://logo/0012', 'contact-0012_1', 0, 'http://sector_id/0012', '', 'http://jwks/0012', 'http://request/0012_1', 'token_auth-0012', 'r_alg-0012', 'u_alg-0012', 'subject-0012', 'http://cors/0012_1', 12, 'autdience-0012_1', '2022-02-15 22:20:20', '2022-02-15 22:20:20', '', false, '', '', false, '{}', '', 'e7206da5-155b-4194-9536-caa54442a470', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0013', 'secret-0013', 'http://redirect/0013_1', 'grant-0013_1', 'response-0013_1', 'scope-0013', 'owner-0013', 'http://policy/0013', 'http://tos/0013', 'http://client/0013', 'http://logo/0013', 'contact-0013_1', 0, 'http://sector_id/0013', '', 'http://jwks/0013', 'http://request/0013_1', 'token_auth-0013', 'r_alg-0013', 'u_alg-0013', 'subject-0013', 'http://cors/0013_1', 13, 'autdience-0013_1', '2022-02-15 22:20:20', '2022-02-15 22:20:20', 'http://front_logout/0013', true, 'http://post_redirect/0013_1', 'http://back_logout/0013', true, '{}', '', '0f309b7d-61d5-495f-bdfa-4836edcbb26d', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 0014', 'secret-0014', 'http://redirect/0014_1', 'grant-0014_1', 'response-0014_1', 'scope-0014', 'owner-0014', 'http://policy/0014', 'http://tos/0014', 'http://client/0014', 'http://logo/0014', 'contact-0014_1', 0, 'http://sector_id/0014', '', 'http://jwks/0014', 'http://request/0014_1', 'token_auth-0014', 'r_alg-0014', 'u_alg-0014', 'subject-0014', 'http://cors/0014_1', 14, 'autdience-0014_1', '2022-02-15 22:20:21', '2022-02-15 22:20:21', 'http://front_logout/0014', true, 'http://post_redirect/0014_1', 'http://back_logout/0014', true, '{"migration": "0014"}', '', '928e8098-c3b5-499b-af29-938bb55462e9', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, 
subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-2005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 2005', 'secret-2005', 'http://redirect/2005_1', 'grant-2005_1', 'response-2005_1', 'scope-2005', 'owner-2005', 'http://policy/2005', 'http://tos/2005', 'http://client/2005', 'http://logo/2005', 'contact-2005_1', 0, 'http://sector_id/2005', '', 'http://jwks/2005', 'http://request/2005_1', 'token_auth-2005', 'r_alg-2005', 'u_alg-2005', 'subject-2005', 'http://cors/2005_1', 2005, 'autdience-2005_1', '2022-02-15 22:20:22', '2022-02-15 22:20:22', 'http://front_logout/2005', true, 'http://post_redirect/2005_1', 'http://back_logout/2005', true, '{"migration": "2005"}', '', 'afaabbc2-def3-4663-a6e0-6eb08efd6904', ''); +INSERT INTO hydra_client (id, nid, client_name, client_secret, redirect_uris, grant_types, response_types, scope, owner, policy_uri, tos_uri, client_uri, logo_uri, contacts, client_secret_expires_at, sector_identifier_uri, jwks, jwks_uri, request_uris, token_endpoint_auth_method, request_object_signing_alg, userinfo_signed_response_alg, subject_type, allowed_cors_origins, pk_deprecated, audience, created_at, updated_at, frontchannel_logout_uri, frontchannel_logout_session_required, post_logout_redirect_uris, backchannel_logout_uri, backchannel_logout_session_required, metadata, token_endpoint_auth_signing_alg, pk, registration_access_token_signature) VALUES ('client-20', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 20', 'secret-20', 'http://redirect/20_1', 'grant-20_1', 'response-20_1', 'scope-20', 'owner-20', 'http://policy/20', 'http://tos/20', 'http://client/20', 'http://logo/20', 'contact-20_1', 0, 'http://sector_id/20', '', 'http://jwks/20', 'http://request/20_1', 'token_auth-20', 'r_alg-20', 'u_alg-20', 'subject-20', 'http://cors/20_1', 0, 'autdience-20_1', '2022-02-15 22:20:23', '2022-02-15 22:20:23', 'http://front_logout/20', true, 'http://post_redirect/20_1', 'http://back_logout/20', true, '{"migration": "20"}', '', '08f4a4b7-6601-4fd7-bb7f-29ec0681b86d', ''); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0001', 'kid-0001', (SELECT id FROM networks LIMIT 1), 1, 'key-0001', '2022-02-15 22:20:21.166613', 1, '593ff81f-a21e-4d29-b276-bdce7b5a33df'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0002', 'kid-0002', (SELECT id FROM networks LIMIT 1), 2, 'key-0002', '2022-02-15 22:20:21.166613', 2, '609b0140-8d03-4d1b-aef1-91b86da5e6be'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0003', 'kid-0003', (SELECT id FROM networks LIMIT 1), 3, 'key-0003', '2022-02-15 22:20:21.195915', 3, 'edd5c644-c3fe-43ce-9e80-9acd1f310ecc'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0004', 'kid-0004', (SELECT id FROM networks LIMIT 1), 4, 'key-0004', '2022-02-15 22:20:21.202612', 4, 'a6e7382c-6e30-4553-a67f-4d9bf32118b4'); -INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0005', 'kid-0005', (SELECT id FROM networks LIMIT 1), 4, 'key-0005', '2022-02-15 22:20:23.042296', 4, '94075738-c3e7-41bc-a7a5-89aece609c39'); -INSERT INTO hydra_jwk (sid, 
kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0008', 'kid-0008', (SELECT id FROM networks LIMIT 1), 2, 'key-0002', '2022-02-15 22:20:23.197118', 5, 'e18d8447-3ec2-42d9-a3ad-e7cca8aa81f0'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0001', 'kid-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 1, 'key-0001', CURRENT_TIMESTAMP, 1, '593ff81f-a21e-4d29-b276-bdce7b5a33df'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0002', 'kid-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 2, 'key-0002', '2022-02-15 22:20:21', 2, '609b0140-8d03-4d1b-aef1-91b86da5e6be'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0003', 'kid-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 3, 'key-0003', '2022-02-15 22:20:21', 3, 'edd5c644-c3fe-43ce-9e80-9acd1f310ecc'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0004', 'kid-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 4, 'key-0004', '2022-02-15 22:20:21', 4, 'a6e7382c-6e30-4553-a67f-4d9bf32118b4'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0005', 'kid-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 4, 'key-0005', '2022-02-15 22:20:23', 5, '94075738-c3e7-41bc-a7a5-89aece609c39'); +INSERT INTO hydra_jwk (sid, kid, nid, version, keydata, created_at, pk_deprecated, pk) VALUES ('sid-0008', 'kid-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 2, 'key-0002', '2022-02-15 22:20:23', 6, 'e18d8447-3ec2-42d9-a3ad-e7cca8aa81f0'); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0003', (SELECT id FROM networks LIMIT 1), 'requested_scope-0003_1', 'verifier-0003', 'csrf-0003', 'subject-0003', 'http://request/0003', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0003"}', 'auth_session-0003', '', CURRENT_TIMESTAMP, 128, true, 3, '{}', 'acr-0003', CURRENT_TIMESTAMP, true, 'force_subject_id-0003', '{}', '', 'challenge-0003', 'verifier-0003', true, 'csrf-0003', 'granted_scope-0003_1', true, 3, '{}', '{"session_access_token-0003": "0003"}', '{"session_id_token-0003": "0003"}', true, '', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0004', (SELECT id FROM networks LIMIT 1), 'requested_scope-0004_1', 'verifier-0004', 
'csrf-0004', 'subject-0004', 'http://request/0004', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0004"}', 'auth_session-0004', 'requested_audience-0004_1', CURRENT_TIMESTAMP, 128, true, 4, '{}', 'acr-0004', CURRENT_TIMESTAMP, true, 'force_subject_id-0004', '{}', '', 'challenge-0004', 'verifier-0004', true, 'csrf-0004', 'granted_scope-0004_1', true, 4, '{}', '{"session_access_token-0004": "0004"}', '{"session_id_token-0004": "0004"}', true, 'granted_audience-0004_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0005', (SELECT id FROM networks LIMIT 1), 'requested_scope-0005_1', 'verifier-0005', 'csrf-0005', 'subject-0005', 'http://request/0005', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0005"}', 'auth_session-0005', 'requested_audience-0005_1', CURRENT_TIMESTAMP, 128, true, 5, '{}', 'acr-0005', CURRENT_TIMESTAMP, true, 'force_subject_id-0005', '{}', '', 'challenge-0005', 'verifier-0005', true, 'csrf-0005', 'granted_scope-0005_1', true, 5, '{}', '{"session_access_token-0005": "0005"}', '{"session_id_token-0005": "0005"}', true, 'granted_audience-0005_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0006', (SELECT id FROM networks LIMIT 1), 'requested_scope-0006_1', 'verifier-0006', 'csrf-0006', 'subject-0006', 'http://request/0006', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0006"}', 'auth_session-0006', 'requested_audience-0006_1', CURRENT_TIMESTAMP, 128, true, 6, '{}', 'acr-0006', CURRENT_TIMESTAMP, true, 'force_subject_id-0006', '{}', '', 'challenge-0006', 'verifier-0006', true, 'csrf-0006', 'granted_scope-0006_1', true, 6, '{}', '{"session_access_token-0006": "0006"}', '{"session_id_token-0006": "0006"}', true, 'granted_audience-0006_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES 
('challenge-0001', (SELECT id FROM networks LIMIT 1), 'requested_scope-0001_1', 'verifier-0001', 'csrf-0001', 'subject-0001', 'http://request/0001', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0001"}', NULL, '', CURRENT_TIMESTAMP, 128, true, 1, '{}', 'acr-0001', CURRENT_TIMESTAMP, true, '', '{}', '', 'challenge-0001', 'verifier-0001', true, 'csrf-0001', 'granted_scope-0001_1', true, 1, '{}', '{"session_access_token-0001": "0001"}', '{"session_id_token-0001": "0001"}', true, '', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0002', (SELECT id FROM networks LIMIT 1), 'requested_scope-0002_1', 'verifier-0002', 'csrf-0002', 'subject-0002', 'http://request/0002', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0002"}', NULL, '', CURRENT_TIMESTAMP, 128, true, 2, '{}', 'acr-0002', CURRENT_TIMESTAMP, true, 'force_subject_id-0002', '{}', '', 'challenge-0002', 'verifier-0002', true, 'csrf-0002', 'granted_scope-0002_1', true, 2, '{}', '{"session_access_token-0002": "0002"}', '{"session_id_token-0002": "0002"}', true, '', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0007', (SELECT id FROM networks LIMIT 1), 'requested_scope-0007_1', 'verifier-0007', 'csrf-0007', 'subject-0007', 'http://request/0007', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0007"}', 'auth_session-0007', 'requested_audience-0007_1', CURRENT_TIMESTAMP, 128, true, 7, '{}', 'acr-0007', CURRENT_TIMESTAMP, true, 'force_subject_id-0007', '{}', '', 'challenge-0007', 'verifier-0007', true, 'csrf-0007', 'granted_scope-0007_1', true, 7, '{}', '{"session_access_token-0007": "0007"}', '{"session_id_token-0007": "0007"}', true, 'granted_audience-0007_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0008', (SELECT id FROM networks LIMIT 1), 
'requested_scope-0008_1', 'verifier-0008', 'csrf-0008', 'subject-0008', 'http://request/0008', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0008"}', 'auth_session-0008', 'requested_audience-0008_1', CURRENT_TIMESTAMP, 128, true, 8, '{}', 'acr-0008', CURRENT_TIMESTAMP, true, 'force_subject_id-0008', '{"context": "0008"}', '', 'challenge-0008', 'verifier-0008', true, 'csrf-0008', 'granted_scope-0008_1', true, 8, '{}', '{"session_access_token-0008": "0008"}', '{"session_id_token-0008": "0008"}', true, 'granted_audience-0008_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0009', (SELECT id FROM networks LIMIT 1), 'requested_scope-0009_1', 'verifier-0009', 'csrf-0009', 'subject-0009', 'http://request/0009', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0009"}', 'auth_session-0009', 'requested_audience-0009_1', CURRENT_TIMESTAMP, 128, true, 9, '{}', 'acr-0009', CURRENT_TIMESTAMP, true, 'force_subject_id-0009', '{"context": "0009"}', '', 'challenge-0009', 'verifier-0009', true, 'csrf-0009', 'granted_scope-0009_1', true, 9, '{}', '{"session_access_token-0009": "0009"}', '{"session_id_token-0009": "0009"}', true, 'granted_audience-0009_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0010', (SELECT id FROM networks LIMIT 1), 'requested_scope-0010_1', 'verifier-0010', 'csrf-0010', 'subject-0010', 'http://request/0010', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0010"}', 'auth_session-0010', 'requested_audience-0010_1', CURRENT_TIMESTAMP, 128, true, 10, '{}', 'acr-0010', CURRENT_TIMESTAMP, true, 'force_subject_id-0010', '{"context": "0010"}', '', 'challenge-0010', 'verifier-0010', true, 'csrf-0010', 'granted_scope-0010_1', true, 10, '{}', '{"session_access_token-0010": "0010"}', '{"session_id_token-0010": "0010"}', true, 'granted_audience-0010_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, 
session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0011', (SELECT id FROM networks LIMIT 1), 'requested_scope-0011_1', 'verifier-0011', 'csrf-0011', 'subject-0011', 'http://request/0011', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0011"}', 'auth_session-0011', 'requested_audience-0011_1', CURRENT_TIMESTAMP, 128, true, 11, '{}', 'acr-0011', CURRENT_TIMESTAMP, true, 'force_subject_id-0011', '{"context": "0011"}', '', 'challenge-0011', 'verifier-0011', true, 'csrf-0011', 'granted_scope-0011_1', true, 11, '{}', '{"session_access_token-0011": "0011"}', '{"session_id_token-0011": "0011"}', true, 'granted_audience-0011_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0012', (SELECT id FROM networks LIMIT 1), 'requested_scope-0012_1', 'verifier-0012', 'csrf-0012', 'subject-0012', 'http://request/0012', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0012"}', 'auth_session-0012', 'requested_audience-0012_1', CURRENT_TIMESTAMP, 128, true, 12, '{}', 'acr-0012', CURRENT_TIMESTAMP, true, 'force_subject_id-0012', '{"context": "0012"}', '', 'challenge-0012', 'verifier-0012', true, 'csrf-0012', 'granted_scope-0012_1', true, 12, '{}', '{"session_access_token-0012": "0012"}', '{"session_id_token-0012": "0012"}', true, 'granted_audience-0012_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0013', (SELECT id FROM networks LIMIT 1), 'requested_scope-0013_1', 'verifier-0013', 'csrf-0013', 'subject-0013', 'http://request/0013', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0013"}', 'auth_session-0013', 'requested_audience-0013_1', CURRENT_TIMESTAMP, 128, true, 13, '{}', 'acr-0013', CURRENT_TIMESTAMP, true, 'force_subject_id-0013', '{"context": "0013"}', '', 'challenge-0013', 'verifier-0013', true, 'csrf-0013', 'granted_scope-0013_1', true, 13, '{}', '{"session_access_token-0013": "0013"}', '{"session_id_token-0013": "0013"}', true, 'granted_audience-0013_1', NULL); -INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, 
consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0014', (SELECT id FROM networks LIMIT 1), 'requested_scope-0014_1', 'verifier-0014', 'csrf-0014', 'subject-0014', 'http://request/0014', true, 'client-0014', CURRENT_TIMESTAMP, '{"display": "display-0014"}', 'auth_session-0014', 'requested_audience-0014_1', CURRENT_TIMESTAMP, 128, true, 14, '{}', 'acr-0014', CURRENT_TIMESTAMP, true, 'force_subject_id-0014', '{"context": "0014"}', '', 'challenge-0014', 'verifier-0014', true, 'csrf-0014', 'granted_scope-0014_1', true, 14, '{}', '{"session_access_token-0014": "0014"}', '{"session_id_token-0014": "0014"}', true, 'granted_audience-0014_1', CURRENT_TIMESTAMP); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0001', '2022-02-15 22:20:21', 'subject-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0002', '2022-02-15 22:20:21', 'subject-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0003', '2022-02-15 22:20:21', 'subject-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0004', '2022-02-15 22:20:21', 'subject-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0005', '2022-02-15 22:20:21', 'subject-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0006', '2022-02-15 22:20:21', 'subject-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0007', '2022-02-15 22:20:21', 'subject-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0008', '2022-02-15 22:20:21', 'subject-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0009', '2022-02-15 22:20:21', 'subject-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0010', '2022-02-15 22:20:21', 'subject-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', true); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0011', '2022-02-15 22:20:21', 'subject-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0012', '2022-02-15 22:20:21', 'subject-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0013', '2022-02-15 22:20:21', 
'subject-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); +INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0014', '2022-02-15 22:20:21', 'subject-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0001', CURRENT_TIMESTAMP, 'subject-0001', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0002', CURRENT_TIMESTAMP, 'subject-0002', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0003', CURRENT_TIMESTAMP, 'subject-0003', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0004', CURRENT_TIMESTAMP, 'subject-0004', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0005', CURRENT_TIMESTAMP, 'subject-0005', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0006', CURRENT_TIMESTAMP, 'subject-0006', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0007', CURRENT_TIMESTAMP, 'subject-0007', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0008', CURRENT_TIMESTAMP, 'subject-0008', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0009', CURRENT_TIMESTAMP, 'subject-0009', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0010', CURRENT_TIMESTAMP, 'subject-0010', (SELECT id FROM networks LIMIT 1), true); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0011', CURRENT_TIMESTAMP, 'subject-0011', (SELECT id FROM networks LIMIT 1), false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0012', CURRENT_TIMESTAMP, 'subject-0012', (SELECT id FROM networks LIMIT 1), false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0013', CURRENT_TIMESTAMP, 'subject-0013', (SELECT id FROM networks LIMIT 1), false); -INSERT INTO hydra_oauth2_authentication_session (id, authenticated_at, subject, nid, remember) VALUES ('auth_session-0014', CURRENT_TIMESTAMP, 'subject-0014', (SELECT id FROM networks LIMIT 1), false); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, 
granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0003_1', 'verifier-0003', 'csrf-0003', 'subject-0003', 'http://request/0003', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0003"}', 'auth_session-0003', '', '2022-02-15 22:20:21', 128, true, 3, '{}', 'acr-0003', '2022-02-15 22:20:21', true, 'force_subject_id-0003', '{}', '', 'challenge-0003', 'verifier-0003', true, 'csrf-0003', 'granted_scope-0003_1', true, 3, '{}', '{"session_access_token-0003": "0003"}', '{"session_id_token-0003": "0003"}', true, '', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0004_1', 'verifier-0004', 'csrf-0004', 'subject-0004', 'http://request/0004', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0004"}', 'auth_session-0004', 'requested_audience-0004_1', '2022-02-15 22:20:21', 128, true, 4, '{}', 'acr-0004', '2022-02-15 22:20:21', true, 'force_subject_id-0004', '{}', '', 'challenge-0004', 'verifier-0004', true, 'csrf-0004', 'granted_scope-0004_1', true, 4, '{}', '{"session_access_token-0004": "0004"}', '{"session_id_token-0004": "0004"}', true, 'granted_audience-0004_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0005_1', 'verifier-0005', 'csrf-0005', 'subject-0005', 'http://request/0005', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0005"}', 'auth_session-0005', 'requested_audience-0005_1', '2022-02-15 22:20:21', 128, true, 5, '{}', 'acr-0005', '2022-02-15 22:20:21', true, 'force_subject_id-0005', '{}', '', 'challenge-0005', 'verifier-0005', true, 'csrf-0005', 'granted_scope-0005_1', true, 5, '{}', '{"session_access_token-0005": "0005"}', '{"session_id_token-0005": "0005"}', true, 'granted_audience-0005_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, 
forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0006_1', 'verifier-0006', 'csrf-0006', 'subject-0006', 'http://request/0006', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0006"}', 'auth_session-0006', 'requested_audience-0006_1', '2022-02-15 22:20:21', 128, true, 6, '{}', 'acr-0006', '2022-02-15 22:20:21', true, 'force_subject_id-0006', '{}', '', 'challenge-0006', 'verifier-0006', true, 'csrf-0006', 'granted_scope-0006_1', true, 6, '{}', '{"session_access_token-0006": "0006"}', '{"session_id_token-0006": "0006"}', true, 'granted_audience-0006_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0001', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0001_1', 'verifier-0001', 'csrf-0001', 'subject-0001', 'http://request/0001', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0001"}', NULL, '', '2022-02-15 22:20:21', 128, true, 1, '{}', 'acr-0001', '2022-02-15 22:20:21', true, '', '{}', '', 'challenge-0001', 'verifier-0001', true, 'csrf-0001', 'granted_scope-0001_1', true, 1, '{}', '{"session_access_token-0001": "0001"}', '{"session_id_token-0001": "0001"}', true, '', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0002_1', 'verifier-0002', 'csrf-0002', 'subject-0002', 'http://request/0002', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0002"}', NULL, '', '2022-02-15 22:20:21', 128, true, 2, '{}', 'acr-0002', '2022-02-15 22:20:21', true, 'force_subject_id-0002', '{}', '', 'challenge-0002', 'verifier-0002', true, 'csrf-0002', 'granted_scope-0002_1', true, 2, '{}', '{"session_access_token-0002": "0002"}', '{"session_id_token-0002": "0002"}', true, '', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, 
forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0007_1', 'verifier-0007', 'csrf-0007', 'subject-0007', 'http://request/0007', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0007"}', 'auth_session-0007', 'requested_audience-0007_1', '2022-02-15 22:20:21', 128, true, 7, '{}', 'acr-0007', '2022-02-15 22:20:21', true, 'force_subject_id-0007', '{}', '', 'challenge-0007', 'verifier-0007', true, 'csrf-0007', 'granted_scope-0007_1', true, 7, '{}', '{"session_access_token-0007": "0007"}', '{"session_id_token-0007": "0007"}', true, 'granted_audience-0007_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0008_1', 'verifier-0008', 'csrf-0008', 'subject-0008', 'http://request/0008', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0008"}', 'auth_session-0008', 'requested_audience-0008_1', '2022-02-15 22:20:21', 128, true, 8, '{}', 'acr-0008', '2022-02-15 22:20:21', true, 'force_subject_id-0008', '{"context": "0008"}', '', 'challenge-0008', 'verifier-0008', true, 'csrf-0008', 'granted_scope-0008_1', true, 8, '{}', '{"session_access_token-0008": "0008"}', '{"session_id_token-0008": "0008"}', true, 'granted_audience-0008_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0009_1', 'verifier-0009', 'csrf-0009', 'subject-0009', 'http://request/0009', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0009"}', 'auth_session-0009', 'requested_audience-0009_1', '2022-02-15 22:20:21', 128, true, 9, '{}', 'acr-0009', '2022-02-15 22:20:21', true, 'force_subject_id-0009', '{"context": "0009"}', '', 'challenge-0009', 'verifier-0009', true, 'csrf-0009', 'granted_scope-0009_1', true, 9, '{}', '{"session_access_token-0009": "0009"}', '{"session_id_token-0009": "0009"}', true, 'granted_audience-0009_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, 
oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0010_1', 'verifier-0010', 'csrf-0010', 'subject-0010', 'http://request/0010', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0010"}', 'auth_session-0010', 'requested_audience-0010_1', '2022-02-15 22:20:21', 128, true, 10, '{}', 'acr-0010', '2022-02-15 22:20:21', true, 'force_subject_id-0010', '{"context": "0010"}', '', 'challenge-0010', 'verifier-0010', true, 'csrf-0010', 'granted_scope-0010_1', true, 10, '{}', '{"session_access_token-0010": "0010"}', '{"session_id_token-0010": "0010"}', true, 'granted_audience-0010_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0011_1', 'verifier-0011', 'csrf-0011', 'subject-0011', 'http://request/0011', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0011"}', 'auth_session-0011', 'requested_audience-0011_1', '2022-02-15 22:20:21', 128, true, 11, '{}', 'acr-0011', '2022-02-15 22:20:21', true, 'force_subject_id-0011', '{"context": "0011"}', '', 'challenge-0011', 'verifier-0011', true, 'csrf-0011', 'granted_scope-0011_1', true, 11, '{}', '{"session_access_token-0011": "0011"}', '{"session_id_token-0011": "0011"}', true, 'granted_audience-0011_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0012_1', 'verifier-0012', 'csrf-0012', 'subject-0012', 'http://request/0012', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0012"}', 'auth_session-0012', 'requested_audience-0012_1', '2022-02-15 22:20:21', 128, true, 12, '{}', 'acr-0012', '2022-02-15 22:20:21', true, 'force_subject_id-0012', '{"context": "0012"}', '', 'challenge-0012', 'verifier-0012', true, 'csrf-0012', 'granted_scope-0012_1', true, 12, '{}', '{"session_access_token-0012": "0012"}', '{"session_id_token-0012": "0012"}', true, 
'granted_audience-0012_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0013_1', 'verifier-0013', 'csrf-0013', 'subject-0013', 'http://request/0013', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0013"}', 'auth_session-0013', 'requested_audience-0013_1', '2022-02-15 22:20:21', 128, true, 13, '{}', 'acr-0013', '2022-02-15 22:20:21', true, 'force_subject_id-0013', '{"context": "0013"}', '', 'challenge-0013', 'verifier-0013', true, 'csrf-0013', 'granted_scope-0013_1', true, 13, '{}', '{"session_access_token-0013": "0013"}', '{"session_id_token-0013": "0013"}', true, 'granted_audience-0013_1', NULL); +INSERT INTO hydra_oauth2_flow (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_verifier, consent_skip, consent_csrf, granted_scope, consent_remember, consent_remember_for, consent_error, session_access_token, session_id_token, consent_was_used, granted_at_audience, consent_handled_at) VALUES ('challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'requested_scope-0014_1', 'verifier-0014', 'csrf-0014', 'subject-0014', 'http://request/0014', true, 'client-0014', '2022-02-15 22:20:21', '{"display": "display-0014"}', 'auth_session-0014', 'requested_audience-0014_1', '2022-02-15 22:20:21', 128, true, 14, '{}', 'acr-0014', '2022-02-15 22:20:21', true, 'force_subject_id-0014', '{"context": "0014"}', '', 'challenge-0014', 'verifier-0014', true, 'csrf-0014', 'granted_scope-0014_1', true, 14, '{}', '{"session_access_token-0014": "0014"}', '{"session_id_token-0014": "0014"}', true, 'granted_audience-0014_1', '2022-02-15 22:20:21'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0002', 'client-0014', 'subject_obfuscated-0002', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0003', 'client-0014', 'subject_obfuscated-0003', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0004', 'client-0014', 'subject_obfuscated-0004', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0005', 'client-0014', 'subject_obfuscated-0005', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0006', 'client-0014', 
'subject_obfuscated-0006', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0007', 'client-0014', 'subject_obfuscated-0007', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0008', 'client-0014', 'subject_obfuscated-0008', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0009', 'client-0014', 'subject_obfuscated-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0010', 'client-0014', 'subject_obfuscated-0010', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0011', 'client-0014', 'subject_obfuscated-0011', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0012', 'client-0014', 'subject_obfuscated-0012', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0013', 'client-0014', 'subject_obfuscated-0013', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0014', 'client-0014', 'subject_obfuscated-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0002', 'client-0014', 'subject_obfuscated-0002', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0003', 'client-0014', 'subject_obfuscated-0003', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0004', 'client-0014', 'subject_obfuscated-0004', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0005', 'client-0014', 'subject_obfuscated-0005', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0006', 'client-0014', 'subject_obfuscated-0006', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0007', 'client-0014', 'subject_obfuscated-0007', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0008', 'client-0014', 'subject_obfuscated-0008', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0009', 'client-0014', 'subject_obfuscated-0009', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES 
('subject-0010', 'client-0014', 'subject_obfuscated-0010', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0011', 'client-0014', 'subject_obfuscated-0011', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0012', 'client-0014', 'subject_obfuscated-0012', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0013', 'client-0014', 'subject_obfuscated-0013', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_obfuscated_authentication_session (subject, client_id, subject_obfuscated, nid) VALUES ('subject-0014', 'client-0014', 'subject_obfuscated-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0009', 'verifier-0009', 'subject-0009', 'session_id-0009', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0009', 'http://post_logout/0009', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0010', 'verifier-0010', 'subject-0010', 'session_id-0010', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0010', 'http://post_logout/0010', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0011', 'verifier-0011', 'subject-0011', 'session_id-0011', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0011', 'http://post_logout/0011', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0012', 'verifier-0012', 'subject-0012', 'session_id-0012', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0012', 'http://post_logout/0012', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0013', 'verifier-0013', 'subject-0013', 'session_id-0013', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0013', 'http://post_logout/0013', true, true, false, true); +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0014', 'verifier-0014', 'subject-0014', 'session_id-0014', 'client-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0014', 'http://post_logout/0014', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0009', 'verifier-0009', 'subject-0009', 'session_id-0009', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0009', 'http://post_logout/0009', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request 
(challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0010', 'verifier-0010', 'subject-0010', 'session_id-0010', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0010', 'http://post_logout/0010', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0011', 'verifier-0011', 'subject-0011', 'session_id-0011', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0011', 'http://post_logout/0011', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0012', 'verifier-0012', 'subject-0012', 'session_id-0012', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0012', 'http://post_logout/0012', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0013', 'verifier-0013', 'subject-0013', 'session_id-0013', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0013', 'http://post_logout/0013', true, true, false, true); -INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, was_used, accepted, rejected, rp_initiated) VALUES ('challenge-0014', 'verifier-0014', 'subject-0014', 'session_id-0014', 'client-0014', (SELECT id FROM networks LIMIT 1), 'http://request/0014', 'http://post_logout/0014', true, true, false, true); +INSERT INTO hydra_oauth2_jti_blacklist (signature, expires_at, nid) VALUES ('sig-0011', '2022-02-15 22:20:22', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_jti_blacklist (signature, expires_at, nid) VALUES ('sig-0011', '2022-02-15 22:20:22.816314', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES 
('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, 
subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, 
requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_access (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 
'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 
'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 
22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, 
active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, 
challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT 
id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); 
-INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_code (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', 
false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 
'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0001', 'req-0001', '2022-02-15 22:20:21.825677', 'client-0014', 'scope-0001', 'granted_scope-0001', 'form_data-0001', 'session-0001', '', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0002', 'req-0002', '2022-02-15 22:20:21.890642', 'client-0014', 'scope-0002', 'granted_scope-0002', 'form_data-0002', 'session-0002', 'subject-0002', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 
'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_oidc (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); - -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21.966429', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, 
granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21.985991', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22.073402', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22.09772', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22.110219', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22.366046', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22.381518', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22.755881', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22.816314', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); 
-INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); -INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22.907631', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', (SELECT id FROM networks LIMIT 1)); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0003', 'req-0003', '2022-02-15 22:20:21', 'client-0014', 'scope-0003', 'granted_scope-0003', 'form_data-0003', 'session-0003', 'subject-0003', true, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0004', 'req-0004', '2022-02-15 22:20:21', 'client-0014', 'scope-0004', 'granted_scope-0004', 'form_data-0004', 'session-0004', 'subject-0004', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0005', 'req-0005', '2022-02-15 22:20:22', 'client-0014', 'scope-0005', 'granted_scope-0005', 'form_data-0005', 'session-0005', 'subject-0005', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0006', 'req-0006', '2022-02-15 22:20:22', 'client-0014', 'scope-0006', 'granted_scope-0006', 'form_data-0006', 'session-0006', 'subject-0006', false, '', '', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0007', 'req-0007', '2022-02-15 22:20:22', 'client-0014', 'scope-0007', 'granted_scope-0007', 'form_data-0007', 'session-0007', 'subject-0007', false, 'requested_audience-0007', 'granted_audience-0007', NULL, '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0008', 'req-0008', '2022-02-15 22:20:22', 'client-0001', 'scope-0008', 'granted_scope-0008', 'form_data-0008', 'session-0008', 'subject-0008', false, 'requested_audience-0008', 'granted_audience-0008', 
'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0009', 'req-0009', '2022-02-15 22:20:22', 'client-0001', 'scope-0009', 'granted_scope-0009', 'form_data-0009', 'session-0009', 'subject-0009', false, 'requested_audience-0009', 'granted_audience-0009', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0010', 'req-0010', '2022-02-15 22:20:22', 'client-0001', 'scope-0010', 'granted_scope-0010', 'form_data-0010', 'session-0010', 'subject-0010', false, 'requested_audience-0010', 'granted_audience-0010', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-0011', 'req-0011', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); +INSERT INTO hydra_oauth2_pkce (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, session_data, subject, active, requested_audience, granted_audience, challenge_id, nid) VALUES ('sig-20201110104000-01', 'req-20201110104000', '2022-02-15 22:20:22', 'client-0001', 'scope-0011', 'granted_scope-0011', 'form_data-0011', 'session-0011', 'subject-0011', false, 'requested_audience-0011', 'granted_audience-0011', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe'); diff --git a/persistence/sql/migratest/testdata/20220328111500_testdata.sql b/persistence/sql/migratest/testdata/20220328111500_testdata.sql index 8294364f55e..bdb3c3d717f 100644 --- a/persistence/sql/migratest/testdata/20220328111500_testdata.sql +++ b/persistence/sql/migratest/testdata/20220328111500_testdata.sql @@ -1,7 +1,7 @@ -INSERT INTO hydra_jwk (pk_deprecated, pk, sid, kid, nid, version, keydata, created_at) VALUES (9, '98565339-57c7-4bc0-bc3d-53171d60e832', 'sid-0009', 'kid-0009', (SELECT id FROM networks LIMIT 1), 2, 'key-0002', CURRENT_TIMESTAMP); +INSERT INTO hydra_jwk (pk_deprecated, pk, sid, kid, nid, version, keydata, created_at) VALUES (9, '98565339-57c7-4bc0-bc3d-53171d60e832', 'sid-0009', 'kid-0009', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 2, 'key-0002', '2022-02-15 22:20:21'); INSERT INTO hydra_oauth2_trusted_jwt_bearer_issuer (id, nid, issuer, subject, allow_any_subject, scope, key_set, key_id) -VALUES ('30e51720-4a88-48ca-8243-de7d8f461675', (SELECT id FROM networks LIMIT 1), 'some-issuer', 'some-subject', false, 'some-scope', 'sid-0009', 
'kid-0009'); +VALUES ('30e51720-4a88-48ca-8243-de7d8f461675', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'some-issuer', 'some-subject', false, 'some-scope', 'sid-0009', 'kid-0009'); INSERT INTO hydra_oauth2_trusted_jwt_bearer_issuer (id, nid, issuer, subject, allow_any_subject, scope, key_set, key_id) -VALUES ('30e51720-4a88-48ca-8243-de7d8f461676', (SELECT id FROM networks LIMIT 1), 'some-issuer', '', true, 'some-scope', 'sid-0009', 'kid-0009'); +VALUES ('30e51720-4a88-48ca-8243-de7d8f461676', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'some-issuer', '', true, 'some-scope', 'sid-0009', 'kid-0009'); diff --git a/persistence/sql/migratest/testdata/20220513000001_testdata.sql b/persistence/sql/migratest/testdata/20220513000001_testdata.sql index a8b21367bed..1187dc56742 100644 --- a/persistence/sql/migratest/testdata/20220513000001_testdata.sql +++ b/persistence/sql/migratest/testdata/20220513000001_testdata.sql @@ -38,7 +38,7 @@ INSERT INTO hydra_client ( registration_access_token_signature) VALUES ( 'client-21', - (SELECT id FROM networks LIMIT 1), + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 21', 'secret-21', '["http://redirect/21_1","http://redirect/21_2"]', @@ -63,8 +63,8 @@ VALUES ( '["http://cors/21_1","http://cors/21_2"]', 0, '["autdience-21_1","autdience-21_2"]', - '2022-02-15 22:20:23.004598', - '2022-02-15 22:20:23.004598', + '2022-02-15 22:20:23', + '2022-02-15 22:20:23', 'http://front_logout/21', true, '["http://post_redirect/21_1","http://post_redirect/21_2"]', @@ -84,8 +84,8 @@ INSERT INTO hydra_oauth2_authentication_session ( remember ) VALUES ( 'auth_session-0015', - (SELECT id FROM networks LIMIT 1), - CURRENT_TIMESTAMP, + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', + '2022-02-15 22:20:21', 'subject-0015', false ); @@ -130,7 +130,7 @@ INSERT INTO hydra_oauth2_flow ( consent_handled_at ) VALUES ( 'challenge-0015', - (SELECT id FROM networks LIMIT 1), + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', '["requested_scope-0015_1","requested_scope-0015_2"]', 'verifier-0015', 'csrf-0015', @@ -138,17 +138,17 @@ INSERT INTO hydra_oauth2_flow ( 'http://request/0015', true, 'client-21', - CURRENT_TIMESTAMP, + '2022-02-15 22:20:21', '{"display": "display-0015"}', 'auth_session-0015', '["requested_audience-0015_1","requested_audience-0015_2"]', - CURRENT_TIMESTAMP, + '2022-02-15 22:20:21', 128, true, 15, '{}', 'acr-0015', - CURRENT_TIMESTAMP, + '2022-02-15 22:20:21', true, 'force_subject_id-0015', '{"context": "0015"}', @@ -165,5 +165,5 @@ INSERT INTO hydra_oauth2_flow ( '{"session_id_token-0015": "0015"}', true, '["granted_audience-0015_1","granted_audience-0015_2"]', - CURRENT_TIMESTAMP + '2022-02-15 22:20:21' ); diff --git a/persistence/sql/migratest/testdata/20230313112801_testdata.sql b/persistence/sql/migratest/testdata/20230313112801_testdata.sql new file mode 100644 index 00000000000..771b500465d --- /dev/null +++ b/persistence/sql/migratest/testdata/20230313112801_testdata.sql @@ -0,0 +1,93 @@ +INSERT INTO hydra_oauth2_authentication_session ( + id, + nid, + authenticated_at, + subject, + remember +) VALUES ( + 'auth_session-0016', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', + '2022-02-15 22:20:21', + 'subject-0016', + true +); + +INSERT INTO hydra_oauth2_flow ( + login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, + 
login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_verifier, + consent_skip, + consent_csrf, + granted_scope, + consent_remember, + consent_remember_for, + consent_error, + session_access_token, + session_id_token, + consent_was_used, + granted_at_audience, + consent_handled_at, + login_extend_session_lifespan +) VALUES ( + 'challenge-0016', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', + '["requested_scope-0016_1","requested_scope-0016_2"]', + 'verifier-0016', + 'csrf-0016', + 'subject-0016', + 'http://request/0016', + true, + 'client-21', + '2022-02-15 22:20:21', + '{"display": "display-0016"}', + 'auth_session-0016', + '["requested_audience-0016_1","requested_audience-0016_2"]', + '2022-02-15 22:20:21', + 128, + true, + 15, + '{}', + 'acr-0016', + '2022-02-15 22:20:21', + true, + 'force_subject_id-0016', + '{"context": "0016"}', + '["amr-0016-1","amr-0016-2"]', + 'challenge-0016', + 'verifier-0016', + true, + 'csrf-0016', + '["granted_scope-0016_1","granted_scope-0016_2"]', + true, + 15, + '{}', + '{"session_access_token-0016": "0016"}', + '{"session_id_token-0016": "0016"}', + true, + '["granted_audience-0016_1","granted_audience-0016_2"]', + '2022-02-15 22:20:21', + true +); diff --git a/persistence/sql/migratest/testdata/20230809122501_testdata.sql b/persistence/sql/migratest/testdata/20230809122501_testdata.sql new file mode 100644 index 00000000000..a4ca9755174 --- /dev/null +++ b/persistence/sql/migratest/testdata/20230809122501_testdata.sql @@ -0,0 +1,97 @@ +INSERT INTO hydra_oauth2_authentication_session ( + id, + nid, + authenticated_at, + subject, + remember, + identity_provider_session_id +) VALUES ( + 'auth_session-0017', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', + '2022-02-15 22:20:21', + 'subject-0017', + true, + 'identity_provider_session_id-0017' +); + +INSERT INTO hydra_oauth2_flow ( + login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, + login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_verifier, + consent_skip, + consent_csrf, + granted_scope, + consent_remember, + consent_remember_for, + consent_error, + session_access_token, + session_id_token, + consent_was_used, + granted_at_audience, + consent_handled_at, + login_extend_session_lifespan, + identity_provider_session_id +) VALUES ( + 'challenge-0017', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', + '["requested_scope-0016_1","requested_scope-0016_2"]', + 'verifier-0017', + 'csrf-0017', + 'subject-0017', + 'http://request/0017', + true, + 'client-21', + '2022-02-15 22:20:21', + '{"display": "display-0017"}', + 'auth_session-0017', + '["requested_audience-0016_1","requested_audience-0016_2"]', + '2022-02-15 22:20:21', + 128, + true, + 15, + '{}', + 'acr-0017', + '2022-02-15 22:20:21', + true, + 'force_subject_id-0017', + '{"context": "0017"}', + '["amr-0017-1","amr-0017-2"]', + 'challenge-0017', + 'verifier-0017', + true, + 'csrf-0017', + '["granted_scope-0016_1","granted_scope-0016_2"]', + true, + 15, + '{}', + '{"session_access_token-0017": "0017"}', + '{"session_id_token-0017": "0017"}', + true, + '["granted_audience-0016_1","granted_audience-0016_2"]', + '2022-02-15 22:20:21', + true, + 'identity_provider_session_id-0017' +); diff --git 
a/persistence/sql/migratest/testdata/20240129174410_testdata.sql b/persistence/sql/migratest/testdata/20240129174410_testdata.sql new file mode 100644 index 00000000000..ed0dea93dec --- /dev/null +++ b/persistence/sql/migratest/testdata/20240129174410_testdata.sql @@ -0,0 +1,46 @@ +INSERT INTO hydra_client (id, + nid, + client_name, + client_secret, + redirect_uris, + grant_types, + response_types, + scope, + owner, + policy_uri, + tos_uri, + client_uri, + logo_uri, + contacts, + client_secret_expires_at, + sector_identifier_uri, + jwks, + jwks_uri, + request_uris, + token_endpoint_auth_method, + request_object_signing_alg, + userinfo_signed_response_alg, + subject_type, + allowed_cors_origins, + pk_deprecated, + audience, + created_at, + updated_at, + frontchannel_logout_uri, + frontchannel_logout_session_required, + post_logout_redirect_uris, + backchannel_logout_uri, + backchannel_logout_session_required, + metadata, + token_endpoint_auth_signing_alg, + pk, + registration_access_token_signature, + skip_consent, + skip_logout_consent) +VALUES ('client-22', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 22', 'secret-22', '["http://redirect/22_1","http://redirect/22_2"]', '["grant-22_1","grant-22_2"]', + '["response-22_1","response-22_2"]', 'scope-22', 'owner-22', 'http://policy/22', 'http://tos/22', 'http://client/22', 'http://logo/22', + '["contact-22_1","contact-22_2"]', 0, 'http://sector_id/22', '', 'http://jwks/22', '["http://request/22_1","http://request/22_2"]', + 'token_auth-22', 'r_alg-22', 'u_alg-22', 'subject-22', '["http://cors/22_1","http://cors/22_2"]', 0, '["autdience-22_1","autdience-22_2"]', + '2022-02-15 22:20:23', '2022-02-15 22:20:23', 'http://front_logout/22', true, '["http://post_redirect/22_1","http://post_redirect/22_2"]', + 'http://back_logout/22', true, '{"migration": "22"}', '', '2ca3b77b-ee29-4b63-aa07-6384e6c650fb', '', TRUE, TRUE ); diff --git a/persistence/sql/migratest/testdata/20240916105610_testdata.sql b/persistence/sql/migratest/testdata/20240916105610_testdata.sql new file mode 100644 index 00000000000..bbdce40d912 --- /dev/null +++ b/persistence/sql/migratest/testdata/20240916105610_testdata.sql @@ -0,0 +1,4 @@ +INSERT INTO hydra_oauth2_logout_request (challenge, verifier, subject, sid, client_id, nid, request_url, redir_url, + was_used, accepted, rejected, rp_initiated, expires_at, requested_at) +VALUES ('challenge-20240916105610000001', 'verifier-20240916105610000001', 'subject-0014', 'session_id-0014', 'client-0014', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'http://request/0014', 'http://post_logout/0014', true, true, false, true, '2022-02-15 22:20:20', '2022-02-15 22:20:20'); diff --git a/persistence/sql/migratest/testdata/20241609000001_testdata.sql b/persistence/sql/migratest/testdata/20241609000001_testdata.sql new file mode 100644 index 00000000000..9e9a5d7f2f5 --- /dev/null +++ b/persistence/sql/migratest/testdata/20241609000001_testdata.sql @@ -0,0 +1,106 @@ +INSERT INTO hydra_client (id, + nid, + client_name, + client_secret, + redirect_uris, + grant_types, + response_types, + scope, + owner, + policy_uri, + tos_uri, + client_uri, + logo_uri, + contacts, + client_secret_expires_at, + sector_identifier_uri, + jwks, + jwks_uri, + request_uris, + token_endpoint_auth_method, + request_object_signing_alg, + userinfo_signed_response_alg, + subject_type, + allowed_cors_origins, + pk_deprecated, + audience, + created_at, + updated_at, + frontchannel_logout_uri, + frontchannel_logout_session_required, + post_logout_redirect_uris, + 
backchannel_logout_uri, + backchannel_logout_session_required, + metadata, + token_endpoint_auth_signing_alg, + pk, + registration_access_token_signature, + skip_consent, + skip_logout_consent, + device_authorization_grant_id_token_lifespan, + device_authorization_grant_access_token_lifespan, + device_authorization_grant_refresh_token_lifespan) +VALUES ('client-23', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 'Client 23', 'secret-23', '["http://redirect/23_1","http://redirect/23_2"]', '["grant-23_1","grant-23_2"]', '["response-23_1","response-23_2"]', 'scope-23', 'owner-23', 'http://policy/23', 'http://tos/23', 'http://client/23', 'http://logo/23', '["contact-23_1","contact-23_2"]', 0, 'http://sector_id/23', '', 'http://jwks/23', '["http://request/23_1","http://request/23_2"]', 'token_auth-23', 'r_alg-23', 'u_alg-23', 'subject-23', '["http://cors/23_1","http://cors/23_2"]', 0, '["autdience-23_1","autdience-23_2"]', '2023-02-15 23:20:23', '2023-02-15 23:20:23', 'http://front_logout/23', true, '["http://post_redirect/23_1","http://post_redirect/23_2"]', 'http://back_logout/23', true, '{"migration": "23"}', '', '52f38352-7944-4ace-b55c-5aded28f4ba6', '', TRUE, TRUE, 3600, 3600, 3600); + + +INSERT INTO hydra_oauth2_flow (login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, + login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_verifier, + consent_skip, + consent_csrf, + granted_scope, + consent_remember, + consent_remember_for, + consent_error, + session_access_token, + session_id_token, + consent_was_used, + granted_at_audience, + consent_handled_at, + login_extend_session_lifespan, + device_challenge_id, + device_code_request_id, + device_verifier, + device_csrf, + device_was_used, + device_handled_at, + device_error) +VALUES ('challenge-0018', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', '["requested_scope-0018_1","requested_scope-0018_2"]', 'verifier-0018', 'csrf-0018', 'subject-0018', 'http://request/0018', true, 'client-21', '2022-02-15 22:20:21', '{"display": "display-0018"}', NULL, '["requested_audience-0018_1","requested_audience-0018_2"]', '2022-02-15 22:20:21', 128, true, 15, '{}', 'acr-0018', '2022-02-15 22:20:21', true, 'force_subject_id-0018', '{"context": "0018"}', '["amr-0018-1","amr-0018-2"]', 'challenge-0018', 'verifier-0018', true, 'csrf-0018', '["granted_scope-0018_1","granted_scope-0018_2"]', true, 15, '{}', '{"session_access_token-0018": "0018"}', '{"session_id_token-0018": "0018"}', true, '["granted_audience-0018_1","granted_audience-0018_2"]', '2025-05-16 12:24', true, 'device-challenge-0018', 'device-request-id-0018', 'device-verifier-0018', 'device-csrf-0018', true, '2025-05-16 12:24', '{}' ); + +INSERT INTO hydra_oauth2_device_auth_codes (device_code_signature, user_code_signature, request_id, requested_at, + client_id, scope, granted_scope, form_data, session_data, subject, + device_code_active, user_code_state, requested_audience, granted_audience, + challenge_id, expires_at, nid) +VALUES ('device-code-signature-0001', 'user-code-signature-0001', 'request-id-0001', '2025-05-16 12:24', + 'client-21', '["scope-0001_1","scope-0001_2"]', '["granted_scope-0001_1","granted_scope-0001_2"]', + '{"form_data": "0001"}', + '{"session_data": "0001"}', 'subject-0001', 
true, 0, + '["requested_audience-0001_1","requested_audience-0001_2"]', + '["granted_audience-0001_1","granted_audience-0001_2"]', 'challenge-0018', '2025-05-16 12:24', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe' + ); diff --git a/persistence/sql/migratest/testdata/20250513132142_testdata.sql b/persistence/sql/migratest/testdata/20250513132142_testdata.sql new file mode 100644 index 00000000000..4e14e9c31b3 --- /dev/null +++ b/persistence/sql/migratest/testdata/20250513132142_testdata.sql @@ -0,0 +1,6 @@ +INSERT INTO hydra_oauth2_refresh (signature, request_id, requested_at, client_id, scope, granted_scope, form_data, + session_data, subject, active, requested_audience, granted_audience, challenge_id, + nid, used_times) +VALUES ('sig-20250513132142', 'req-20250513132142', '2025-05-13 13:21:42', 'client-0014', 'scope', + 'granted_scope', 'form_data', 'session_id-0014', 'subject-0014', false, 'requested_audience', + 'granted_audience', 'challenge-0014', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', 1); diff --git a/persistence/sql/migratest/testdata/20251119112639_testdata.sql b/persistence/sql/migratest/testdata/20251119112639_testdata.sql new file mode 100644 index 00000000000..72251fbaa76 --- /dev/null +++ b/persistence/sql/migratest/testdata/20251119112639_testdata.sql @@ -0,0 +1,55 @@ +INSERT INTO hydra_oauth2_flow (login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, + login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_verifier, + consent_skip, + consent_csrf, + granted_scope, + consent_remember, + consent_remember_for, + consent_error, + session_access_token, + session_id_token, + consent_was_used, + granted_at_audience, + consent_handled_at, + login_extend_session_lifespan, + device_challenge_id, + device_code_request_id, + device_verifier, + device_csrf, + device_was_used, + device_handled_at, + device_error) +VALUES ('challenge-0019', + '24704dcb-0ab9-4bfa-a84c-405932ae53fe', '["requested_scope-0019_1","requested_scope-0019_2"]', 'verifier-0019', + 'csrf-0019', 'subject-0019', 'http://request/0019', true, 'client-21', '2022-02-15 22:20:21', + '{"display": "display-0019"}', NULL, NULL, + '2022-02-15 22:20:21', 128, true, 15, '{}', 'acr-0019', '2022-02-15 22:20:21', true, NULL, + '{}', NULL, 'challenge-0019', 'verifier-0019', true, 'csrf-0019', + '["granted_scope-0019_1","granted_scope-0019_2"]', true, 15, '{}', '{"session_access_token-0019": "0019"}', + '{"session_id_token-0019": "0019"}', true, NULL, + '2025-05-16 12:24', NULL, 'device-challenge-0019', 'device-request-id-0019', 'device-verifier-0019', + 'device-csrf-0019', true, '2025-05-16 12:24', '{}'); diff --git a/persistence/sql/migratest/testdata/20251202130532_testdata.sql b/persistence/sql/migratest/testdata/20251202130532_testdata.sql new file mode 100644 index 00000000000..a6d6e099fc9 --- /dev/null +++ b/persistence/sql/migratest/testdata/20251202130532_testdata.sql @@ -0,0 +1,5 @@ +INSERT +INTO hydra_oauth2_flow (login_challenge, + nid, + requested_at) +VALUES ('challenge-0020', '24704dcb-0ab9-4bfa-a84c-405932ae53fe', '2025-12-02 11:05:31'); diff --git a/persistence/sql/migrations/20150101000001000000_networks.postgres.up.sql b/persistence/sql/migrations/20150101000001000000_networks.postgres.up.sql index 
a9d7a408e6d..d4d928b5ce5 100644 --- a/persistence/sql/migrations/20150101000001000000_networks.postgres.up.sql +++ b/persistence/sql/migrations/20150101000001000000_networks.postgres.up.sql @@ -6,15 +6,5 @@ CREATE TABLE "networks" ( "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL ); - -INSERT INTO networks (id, created_at, updated_at) VALUES (uuid_in( - overlay( - overlay( - md5(random()::text || ':' || clock_timestamp()::text) - placing '4' - from 13 - ) - placing to_hex(floor(random()*(11-8+1) + 8)::int)::text - from 17 - )::cstring -), '2013-10-07 08:23:19', '2013-10-07 08:23:19'); +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +INSERT INTO networks (id, created_at, updated_at) VALUES (uuid_generate_v4(), '2013-10-07 08:23:19', '2013-10-07 08:23:19'); diff --git a/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.down.sql b/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.down.sql index cb9c48b3fd2..953b115b244 100644 --- a/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.down.sql +++ b/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.down.sql @@ -1 +1,3 @@ -ALTER TABLE hydra_client DROP CONSTRAINT "hydra_client_pkey", ADD CONSTRAINT "primary" PRIMARY KEY (pk_deprecated); +ALTER TABLE hydra_client + DROP COLUMN pk, + RENAME COLUMN pk_deprecated TO pk; diff --git a/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.up.sql b/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.up.sql index 7c3c83224b4..3b623582931 100644 --- a/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.up.sql +++ b/persistence/sql/migrations/20211004110001000000_change_client_primary_key.cockroach.up.sql @@ -1,2 +1,3 @@ -ALTER TABLE hydra_client RENAME pk TO pk_deprecated; -ALTER TABLE hydra_client ADD pk UUID NOT NULL DEFAULT gen_random_uuid(); +ALTER TABLE hydra_client + RENAME pk TO pk_deprecated, + ADD COLUMN pk UUID NOT NULL DEFAULT gen_random_uuid(); diff --git a/persistence/sql/migrations/20211004110001000000_change_client_primary_key.postgres.up.sql b/persistence/sql/migrations/20211004110001000000_change_client_primary_key.postgres.up.sql index 772ed6301e6..9f106daf7cf 100644 --- a/persistence/sql/migrations/20211004110001000000_change_client_primary_key.postgres.up.sql +++ b/persistence/sql/migrations/20211004110001000000_change_client_primary_key.postgres.up.sql @@ -1,16 +1,6 @@ ALTER TABLE hydra_client RENAME pk TO pk_deprecated; --- UUID generation based on https://stackoverflow.com/a/21327318/12723442 -ALTER TABLE hydra_client ADD COLUMN pk UUID DEFAULT uuid_in( - overlay( - overlay( - md5(random()::text || ':' || clock_timestamp()::text) - placing '4' - from 13 - ) - placing to_hex(floor(random()*(11-8+1) + 8)::int)::text - from 17 - )::cstring -); +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +ALTER TABLE hydra_client ADD COLUMN pk UUID DEFAULT uuid_generate_v4(); ALTER TABLE hydra_client ALTER pk DROP DEFAULT; ALTER TABLE hydra_client DROP CONSTRAINT hydra_client_pkey; ALTER TABLE hydra_client ADD PRIMARY KEY (pk); diff --git a/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.down.sql b/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.down.sql index 1d1517e0709..d0da84fa039 100644 ---
a/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.down.sql +++ b/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.down.sql @@ -1,2 +1,3 @@ -ALTER TABLE hydra_client DROP pk; -ALTER TABLE hydra_client RENAME pk_deprecated TO pk; +ALTER TABLE hydra_client + DROP CONSTRAINT "primary", + ADD CONSTRAINT "hydra_client_pkey" PRIMARY KEY (pk_deprecated); diff --git a/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.up.sql b/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.up.sql index fe7cc1aa68c..78efac3849d 100644 --- a/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.up.sql +++ b/persistence/sql/migrations/20211004110003000000_change_client_primary_key.cockroach.up.sql @@ -1,2 +1,3 @@ -ALTER TABLE hydra_client DROP CONSTRAINT "primary"; -ALTER TABLE hydra_client ADD CONSTRAINT "hydra_client_pkey" PRIMARY KEY (pk); +ALTER TABLE hydra_client + DROP CONSTRAINT "primary", + ADD CONSTRAINT "hydra_client_pkey" PRIMARY KEY (pk); diff --git a/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.down.sql b/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.down.sql index 144208f7a90..1fd6f78d400 100644 --- a/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.down.sql +++ b/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.down.sql @@ -1 +1,3 @@ -ALTER TABLE hydra_jwk DROP CONSTRAINT "hydra_jwk", ADD CONSTRAINT "primary" PRIMARY KEY (pk_deprecated); +ALTER TABLE hydra_jwk + DROP COLUMN pk, + RENAME pk_deprecated TO pk; diff --git a/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.up.sql b/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.up.sql index 8a9101d2acf..f2c57f03fff 100644 --- a/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.up.sql +++ b/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.cockroach.up.sql @@ -1,2 +1,3 @@ -ALTER TABLE hydra_jwk RENAME pk TO pk_deprecated; -ALTER TABLE hydra_jwk ADD pk UUID NOT NULL DEFAULT gen_random_uuid(); +ALTER TABLE hydra_jwk + RENAME pk TO pk_deprecated, + ADD COLUMN pk UUID NOT NULL DEFAULT gen_random_uuid(); diff --git a/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.postgres.up.sql b/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.postgres.up.sql index 1c33b74e3a6..b2b64554d34 100644 --- a/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.postgres.up.sql +++ b/persistence/sql/migrations/20211011000001000000_change_jwk_primary_key.postgres.up.sql @@ -1,16 +1,6 @@ ALTER TABLE hydra_jwk RENAME pk TO pk_deprecated; --- UUID generation based on https://stackoverflow.com/a/21327318/12723442 -ALTER TABLE hydra_jwk ADD COLUMN pk UUID DEFAULT uuid_in( - overlay( - overlay( - md5(random()::text || ':' || clock_timestamp()::text) - placing '4' - from 13 - ) - placing to_hex(floor(random()*(11-8+1) + 8)::int)::text - from 17 - )::cstring -); +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +ALTER TABLE hydra_jwk ADD COLUMN pk UUID DEFAULT uuid_generate_v4(); ALTER TABLE hydra_jwk ALTER pk DROP DEFAULT; ALTER TABLE hydra_jwk DROP CONSTRAINT hydra_jwk_pkey; ALTER TABLE hydra_jwk ADD PRIMARY KEY (pk); diff --git a/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.down.sql 
b/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.down.sql index 633b8d14463..e62eb59d758 100644 --- a/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.down.sql +++ b/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.down.sql @@ -1,2 +1,3 @@ -ALTER TABLE hydra_jwk DROP pk; -ALTER TABLE hydra_jwk RENAME pk_deprecated TO pk; +ALTER TABLE hydra_jwk + DROP CONSTRAINT "primary", + ADD CONSTRAINT "primary" PRIMARY KEY (pk_deprecated) diff --git a/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.up.sql b/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.up.sql index 41e84f5651c..942892acf99 100644 --- a/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.up.sql +++ b/persistence/sql/migrations/20211011000003000000_change_jwk_primary_key.cockroach.up.sql @@ -1,2 +1,3 @@ -ALTER TABLE hydra_jwk DROP CONSTRAINT "primary"; -ALTER TABLE hydra_jwk ADD CONSTRAINT "hydra_jwk_pkey" PRIMARY KEY (pk); +ALTER TABLE hydra_jwk + DROP CONSTRAINT "primary", + ADD CONSTRAINT "hydra_jwk_pkey" PRIMARY KEY (pk); diff --git a/persistence/sql/migrations/20220210000001000000_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000000_nid.cockroach.up.sql index 558b3e562a0..fe6ccdeb4af 100644 --- a/persistence/sql/migrations/20220210000001000000_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000000_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_client ALTER TABLE hydra_client ADD COLUMN "nid" UUID; ALTER TABLE hydra_client ADD CONSTRAINT "hydra_client_nid_fk_idx" FOREIGN KEY ("nid") REFERENCES "networks" ("id") ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000000_nid.down.sql b/persistence/sql/migrations/20220210000001000000_nid.down.sql index 7d21f197986..b3e25eb10d6 100644 --- a/persistence/sql/migrations/20220210000001000000_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000000_nid.down.sql @@ -1,3 +1,3 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- Down migrations from Hydra 2.x to 1.x are not available. diff --git a/persistence/sql/migrations/20220210000001000000_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000000_nid.mysql.up.sql index be60c1e04e5..da36a767f9d 100644 --- a/persistence/sql/migrations/20220210000001000000_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000000_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- Encode key_id in ascii as a workaround for the 3072-byte index entry size limit[1] -- This is a breaking change for MySQL key IDs with utf-8 symbols higher than 127 -- [1]: https://dev.mysql.com/doc/refman/8.0/en/innodb-limits.html diff --git a/persistence/sql/migrations/20220210000001000000_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000000_nid.postgres.up.sql index 0a5b640244b..24d3a060ad2 100644 --- a/persistence/sql/migrations/20220210000001000000_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000000_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_client ALTER TABLE hydra_client ADD COLUMN nid UUID; ALTER TABLE hydra_client ADD CONSTRAINT hydra_client_nid_fk_idx FOREIGN KEY (nid) REFERENCES networks (id) ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000000_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000000_nid.sqlite.up.sql index 7ef7791a371..baadfa684f2 100644 --- a/persistence/sql/migrations/20220210000001000000_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000000_nid.sqlite.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_jti_blacklist ALTER TABLE hydra_oauth2_jti_blacklist ADD COLUMN nid CHAR(36) NULL REFERENCES networks(id) ON DELETE CASCADE ON UPDATE RESTRICT; UPDATE hydra_oauth2_jti_blacklist SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000001_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000001_nid.cockroach.up.sql index 9106ffa8608..f925995d525 100644 --- a/persistence/sql/migrations/20220210000001000001_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000001_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_client SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000001_nid.down.sql b/persistence/sql/migrations/20220210000001000001_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000001_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000001_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000001_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000001_nid.mysql.up.sql index be0f5e2b9e9..154c2c6c981 100644 --- a/persistence/sql/migrations/20220210000001000001_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000001_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_client diff --git a/persistence/sql/migrations/20220210000001000001_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000001_nid.postgres.up.sql index 9106ffa8608..f925995d525 100644 --- a/persistence/sql/migrations/20220210000001000001_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000001_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_client SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000001_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000001_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000001_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000001_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000002_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000002_nid.cockroach.up.sql index 58efd3b0aee..e0c5b0d0772 100644 --- a/persistence/sql/migrations/20220210000001000002_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000002_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_client ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000002_nid.down.sql b/persistence/sql/migrations/20220210000001000002_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000002_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000002_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000002_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000002_nid.mysql.up.sql index 27d64e5ad23..72419854050 100644 --- a/persistence/sql/migrations/20220210000001000002_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000002_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_client SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_client MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000002_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000002_nid.postgres.up.sql index 58efd3b0aee..e0c5b0d0772 100644 --- a/persistence/sql/migrations/20220210000001000002_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000002_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_client ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000002_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000002_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000002_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000002_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000003_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000003_nid.cockroach.up.sql index a0f4097cb42..4ca0d54ccf5 100644 --- a/persistence/sql/migrations/20220210000001000003_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000003_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_client_id_key CASCADE; diff --git a/persistence/sql/migrations/20220210000001000003_nid.down.sql b/persistence/sql/migrations/20220210000001000003_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000003_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000003_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000003_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000003_nid.mysql.up.sql index cd14d541742..62bba5316f4 100644 --- a/persistence/sql/migrations/20220210000001000003_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000003_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_client_id_key ON hydra_client (id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000003_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000003_nid.postgres.up.sql index bf5b247ceac..244868eb906 100644 --- a/persistence/sql/migrations/20220210000001000003_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000003_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_client_idx_id_uq CASCADE; diff --git a/persistence/sql/migrations/20220210000001000003_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000003_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000003_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000003_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000004_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000004_nid.cockroach.up.sql index cd14d541742..62bba5316f4 100644 --- a/persistence/sql/migrations/20220210000001000004_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000004_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_client_id_key ON hydra_client (id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000004_nid.down.sql b/persistence/sql/migrations/20220210000001000004_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000004_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000004_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000004_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000004_nid.mysql.up.sql index 8a19b908673..5638822466c 100644 --- a/persistence/sql/migrations/20220210000001000004_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000004_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000004_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000004_nid.postgres.up.sql index d1fb21febc4..58d1cb3c2d3 100644 --- a/persistence/sql/migrations/20220210000001000004_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000004_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_client_idx_id_uq ON hydra_client (id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000004_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000004_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000004_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000004_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000005_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000005_nid.cockroach.up.sql index ef78769d3d5..feaca571ffa 100644 --- a/persistence/sql/migrations/20220210000001000005_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000005_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_access diff --git a/persistence/sql/migrations/20220210000001000005_nid.down.sql b/persistence/sql/migrations/20220210000001000005_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000005_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000005_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000005_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000005_nid.mysql.up.sql index 57b183c7445..d78c41b4d73 100644 --- a/persistence/sql/migrations/20220210000001000005_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000005_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_access SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_access MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000005_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000005_nid.postgres.up.sql index 40cd72b49ec..a81a524e3d8 100644 --- a/persistence/sql/migrations/20220210000001000005_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000005_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000005_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000005_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000005_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000005_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000006_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000006_nid.cockroach.up.sql index 18225de47c0..b29b5ea5a32 100644 --- a/persistence/sql/migrations/20220210000001000006_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000006_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_access SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000006_nid.down.sql b/persistence/sql/migrations/20220210000001000006_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000006_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000006_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000006_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000006_nid.mysql.up.sql index 9517d2034ee..617eb6b9ec2 100644 --- a/persistence/sql/migrations/20220210000001000006_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000006_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_access DROP FOREIGN KEY `hydra_oauth2_access_client_id_fk`; ALTER TABLE hydra_oauth2_access ADD CONSTRAINT `hydra_oauth2_access_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000006_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000006_nid.postgres.up.sql index 18225de47c0..b29b5ea5a32 100644 --- a/persistence/sql/migrations/20220210000001000006_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000006_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_access SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000006_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000006_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000006_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000006_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000007_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000007_nid.cockroach.up.sql index e847d439956..a6bb20fa5f4 100644 --- a/persistence/sql/migrations/20220210000001000007_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000007_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_access ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000007_nid.down.sql b/persistence/sql/migrations/20220210000001000007_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000007_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000007_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000007_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000007_nid.mysql.up.sql index a310e329bc5..2c84d7a1e37 100644 --- a/persistence/sql/migrations/20220210000001000007_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000007_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_access_requested_at_idx ON hydra_oauth2_access; DROP INDEX hydra_oauth2_access_request_id_idx ON hydra_oauth2_access; diff --git a/persistence/sql/migrations/20220210000001000007_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000007_nid.postgres.up.sql index e847d439956..a6bb20fa5f4 100644 --- a/persistence/sql/migrations/20220210000001000007_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000007_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_access ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000007_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000007_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000007_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000007_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000008_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000008_nid.cockroach.up.sql index 72dec51c645..1a3d12251b7 100644 --- a/persistence/sql/migrations/20220210000001000008_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000008_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_access ADD CONSTRAINT hydra_oauth2_access_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000008_nid.down.sql b/persistence/sql/migrations/20220210000001000008_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000008_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000008_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000008_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000008_nid.mysql.up.sql index 5e462e0b7cb..d4c5b07c6ec 100644 --- a/persistence/sql/migrations/20220210000001000008_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000008_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_access_requested_at_idx ON hydra_oauth2_access (requested_at, nid); CREATE INDEX hydra_oauth2_access_client_id_subject_nid_idx ON hydra_oauth2_access (client_id, subject, nid); diff --git a/persistence/sql/migrations/20220210000001000008_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000008_nid.postgres.up.sql index 72dec51c645..1a3d12251b7 100644 --- a/persistence/sql/migrations/20220210000001000008_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000008_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_access ADD CONSTRAINT hydra_oauth2_access_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000008_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000008_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000008_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000008_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000009_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000009_nid.cockroach.up.sql index 2b5881538b8..fbc5c6f424a 100644 --- a/persistence/sql/migrations/20220210000001000009_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000009_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_access_requested_at_idx; DROP INDEX hydra_oauth2_access_client_id_idx; diff --git a/persistence/sql/migrations/20220210000001000009_nid.down.sql b/persistence/sql/migrations/20220210000001000009_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000009_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000009_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000009_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000009_nid.mysql.up.sql index 2dc7182f297..0a1f5f072af 100644 --- a/persistence/sql/migrations/20220210000001000009_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000009_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000009_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000009_nid.postgres.up.sql index 2b5881538b8..fbc5c6f424a 100644 --- a/persistence/sql/migrations/20220210000001000009_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000009_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_access_requested_at_idx; DROP INDEX hydra_oauth2_access_client_id_idx; diff --git a/persistence/sql/migrations/20220210000001000009_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000009_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000009_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000009_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000010_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000010_nid.cockroach.up.sql index 8b4718a3fa4..6156b043abb 100644 --- a/persistence/sql/migrations/20220210000001000010_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000010_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_access_requested_at_idx ON hydra_oauth2_access (requested_at, nid); CREATE INDEX hydra_oauth2_access_client_id_idx ON hydra_oauth2_access (client_id, nid); diff --git a/persistence/sql/migrations/20220210000001000010_nid.down.sql b/persistence/sql/migrations/20220210000001000010_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000010_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000010_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000010_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000010_nid.mysql.up.sql index 2e9e8da8902..7311f73e89d 100644 --- a/persistence/sql/migrations/20220210000001000010_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000010_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_authentication_session SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_authentication_session MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000010_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000010_nid.postgres.up.sql index 8b4718a3fa4..6156b043abb 100644 --- a/persistence/sql/migrations/20220210000001000010_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000010_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_access_requested_at_idx ON hydra_oauth2_access (requested_at, nid); CREATE INDEX hydra_oauth2_access_client_id_idx ON hydra_oauth2_access (client_id, nid); diff --git a/persistence/sql/migrations/20220210000001000010_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000010_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000010_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000010_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000011_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000011_nid.cockroach.up.sql index 5f0804fb294..98cf88f97d0 100644 --- a/persistence/sql/migrations/20220210000001000011_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000011_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_authentication_session diff --git a/persistence/sql/migrations/20220210000001000011_nid.down.sql b/persistence/sql/migrations/20220210000001000011_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000011_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000011_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000011_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000011_nid.mysql.up.sql index a63b27bf125..24a44ff19e9 100644 --- a/persistence/sql/migrations/20220210000001000011_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000011_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_authentication_session_subject_nid_idx ON hydra_oauth2_authentication_session (subject ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000011_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000011_nid.postgres.up.sql index 617d1f3d85f..130e5d993da 100644 --- a/persistence/sql/migrations/20220210000001000011_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000011_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000011_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000011_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000011_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000011_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000012_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000012_nid.cockroach.up.sql index bd241aa700c..d9573f74f92 100644 --- a/persistence/sql/migrations/20220210000001000012_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000012_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_authentication_session SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000012_nid.down.sql b/persistence/sql/migrations/20220210000001000012_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000012_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000012_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000012_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000012_nid.mysql.up.sql index bf231889bb1..ee218439809 100644 --- a/persistence/sql/migrations/20220210000001000012_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000012_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000012_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000012_nid.postgres.up.sql index bd241aa700c..d9573f74f92 100644 --- a/persistence/sql/migrations/20220210000001000012_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000012_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_authentication_session SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000012_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000012_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000012_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000012_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000013_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000013_nid.cockroach.up.sql index a4ab62dc5fb..c6845214c2b 100644 --- a/persistence/sql/migrations/20220210000001000013_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000013_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_authentication_session ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000013_nid.down.sql b/persistence/sql/migrations/20220210000001000013_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000013_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000013_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000013_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000013_nid.mysql.up.sql index e5b9c22c97d..6ea0151c0e0 100644 --- a/persistence/sql/migrations/20220210000001000013_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000013_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_code SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_code MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000013_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000013_nid.postgres.up.sql index a4ab62dc5fb..c6845214c2b 100644 --- a/persistence/sql/migrations/20220210000001000013_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000013_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_authentication_session ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000013_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000013_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000013_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000013_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000014_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000014_nid.cockroach.up.sql index 1ad4416f165..3b40920611e 100644 --- a/persistence/sql/migrations/20220210000001000014_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000014_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_authentication_session_subject_idx; diff --git a/persistence/sql/migrations/20220210000001000014_nid.down.sql b/persistence/sql/migrations/20220210000001000014_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000014_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000014_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000014_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000014_nid.mysql.up.sql index 9ea0dc048ba..e9380c30aa6 100644 --- a/persistence/sql/migrations/20220210000001000014_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000014_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_code DROP FOREIGN KEY `hydra_oauth2_code_client_id_fk`; ALTER TABLE hydra_oauth2_code ADD CONSTRAINT `hydra_oauth2_code_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000014_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000014_nid.postgres.up.sql index d05d5ddc990..66b507f25f3 100644 --- a/persistence/sql/migrations/20220210000001000014_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000014_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_authentication_session_sub_idx; diff --git a/persistence/sql/migrations/20220210000001000014_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000014_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000014_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000014_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000015_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000015_nid.cockroach.up.sql index 572f1e0b5bc..8de497ab180 100644 --- a/persistence/sql/migrations/20220210000001000015_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000015_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_authentication_session_subject_idx ON hydra_oauth2_authentication_session (subject ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000015_nid.down.sql b/persistence/sql/migrations/20220210000001000015_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000015_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000015_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000015_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000015_nid.mysql.up.sql index 1a345517135..5983c52e0da 100644 --- a/persistence/sql/migrations/20220210000001000015_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000015_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_code_request_id_idx ON hydra_oauth2_code; diff --git a/persistence/sql/migrations/20220210000001000015_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000015_nid.postgres.up.sql index 9f7a5bfb632..e78167922bb 100644 --- a/persistence/sql/migrations/20220210000001000015_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000015_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_authentication_session_sub_idx ON hydra_oauth2_authentication_session (subject ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000015_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000015_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000015_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000015_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000016_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000016_nid.cockroach.up.sql index 365c84c98fc..df1ec2becd7 100644 --- a/persistence/sql/migrations/20220210000001000016_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000016_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_code diff --git a/persistence/sql/migrations/20220210000001000016_nid.down.sql b/persistence/sql/migrations/20220210000001000016_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000016_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000016_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000016_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000016_nid.mysql.up.sql index e456ef22eb3..5de674075d1 100644 --- a/persistence/sql/migrations/20220210000001000016_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000016_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_code_request_id_idx ON hydra_oauth2_code (request_id, nid); diff --git a/persistence/sql/migrations/20220210000001000016_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000016_nid.postgres.up.sql index ccca5bfe3cf..83b6a041258 100644 --- a/persistence/sql/migrations/20220210000001000016_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000016_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000016_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000016_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000016_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000016_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000017_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000017_nid.cockroach.up.sql index 9fa8cf89b0e..365c7fd03aa 100644 --- a/persistence/sql/migrations/20220210000001000017_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000017_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_code SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000017_nid.down.sql b/persistence/sql/migrations/20220210000001000017_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000017_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000017_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000017_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000017_nid.mysql.up.sql index 86f37caecb6..6316dfafca6 100644 --- a/persistence/sql/migrations/20220210000001000017_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000017_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_flow DROP FOREIGN KEY `hydra_oauth2_flow_client_id_fk`; ALTER TABLE hydra_oauth2_flow ADD CONSTRAINT `hydra_oauth2_flow_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000017_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000017_nid.postgres.up.sql index 9fa8cf89b0e..365c7fd03aa 100644 --- a/persistence/sql/migrations/20220210000001000017_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000017_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_code SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000017_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000017_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000017_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000017_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000018_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000018_nid.cockroach.up.sql index c1743eb19bc..99b16c9a18f 100644 --- a/persistence/sql/migrations/20220210000001000018_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000018_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_code ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000018_nid.down.sql b/persistence/sql/migrations/20220210000001000018_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000018_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000018_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000018_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000018_nid.mysql.up.sql index 364a741eb21..976ccab1212 100644 --- a/persistence/sql/migrations/20220210000001000018_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000018_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_flow SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_flow MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000018_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000018_nid.postgres.up.sql index c1743eb19bc..99b16c9a18f 100644 --- a/persistence/sql/migrations/20220210000001000018_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000018_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_code ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000018_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000018_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000018_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000018_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000019_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000019_nid.cockroach.up.sql index 4a2ce55526d..6682a66810c 100644 --- a/persistence/sql/migrations/20220210000001000019_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000019_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_code ADD CONSTRAINT hydra_oauth2_code_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000019_nid.down.sql b/persistence/sql/migrations/20220210000001000019_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000019_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000019_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000019_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000019_nid.mysql.up.sql index ab07216b31e..a3d3b104137 100644 --- a/persistence/sql/migrations/20220210000001000019_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000019_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_flow_client_id_subject_idx ON hydra_oauth2_flow; -- DROP INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow; diff --git a/persistence/sql/migrations/20220210000001000019_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000019_nid.postgres.up.sql index 4a2ce55526d..6682a66810c 100644 --- a/persistence/sql/migrations/20220210000001000019_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000019_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_code ADD CONSTRAINT hydra_oauth2_code_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000019_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000019_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000019_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000019_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000020_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000020_nid.cockroach.up.sql index f05c28f1bef..89e74603aa2 100644 --- a/persistence/sql/migrations/20220210000001000020_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000020_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_code_client_id_idx; DROP INDEX hydra_oauth2_code_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000020_nid.down.sql b/persistence/sql/migrations/20220210000001000020_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000020_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000020_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000020_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000020_nid.mysql.up.sql index c1c47236700..c69aed507f2 100644 --- a/persistence/sql/migrations/20220210000001000020_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000020_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_flow_client_id_subject_idx ON hydra_oauth2_flow (client_id ASC, nid ASC, subject ASC); -- CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow (login_session_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000020_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000020_nid.postgres.up.sql index f05c28f1bef..89e74603aa2 100644 --- a/persistence/sql/migrations/20220210000001000020_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000020_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_code_client_id_idx; DROP INDEX hydra_oauth2_code_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000020_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000020_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000020_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000020_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000021_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000021_nid.cockroach.up.sql index 618698374fd..440212b9125 100644 --- a/persistence/sql/migrations/20220210000001000021_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000021_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_code_client_id_idx ON hydra_oauth2_code (client_id, nid); CREATE INDEX hydra_oauth2_code_challenge_id_idx ON hydra_oauth2_code (challenge_id, nid); diff --git a/persistence/sql/migrations/20220210000001000021_nid.down.sql b/persistence/sql/migrations/20220210000001000021_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000021_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000021_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000021_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000021_nid.mysql.up.sql index dd5a79ba078..c33532a4467 100644 --- a/persistence/sql/migrations/20220210000001000021_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000021_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE `hydra_oauth2_jti_blacklist` ADD COLUMN `nid` char(36); ALTER TABLE `hydra_oauth2_jti_blacklist` ADD CONSTRAINT `hydra_oauth2_jti_blacklist_nid_fk_idx` FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000021_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000021_nid.postgres.up.sql index 618698374fd..440212b9125 100644 --- a/persistence/sql/migrations/20220210000001000021_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000021_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_code_client_id_idx ON hydra_oauth2_code (client_id, nid); CREATE INDEX hydra_oauth2_code_challenge_id_idx ON hydra_oauth2_code (challenge_id, nid); diff --git a/persistence/sql/migrations/20220210000001000021_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000021_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000021_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000021_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000022_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000022_nid.cockroach.up.sql index 948d254793e..ded23eeba20 100644 --- a/persistence/sql/migrations/20220210000001000022_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000022_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_flow diff --git a/persistence/sql/migrations/20220210000001000022_nid.down.sql b/persistence/sql/migrations/20220210000001000022_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000022_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000022_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000022_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000022_nid.mysql.up.sql index bb5b0a07d43..0f255dbbf43 100644 --- a/persistence/sql/migrations/20220210000001000022_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000022_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_jti_blacklist SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_jti_blacklist MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000022_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000022_nid.postgres.up.sql index 2bf7660ab21..eac4a52afa1 100644 --- a/persistence/sql/migrations/20220210000001000022_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000022_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000022_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000022_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000022_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000022_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000023_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000023_nid.cockroach.up.sql index cd52e0ccb50..51b48dcd53c 100644 --- a/persistence/sql/migrations/20220210000001000023_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000023_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_flow SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000023_nid.down.sql b/persistence/sql/migrations/20220210000001000023_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000023_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000023_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000023_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000023_nid.mysql.up.sql index 19fd4455c09..c360a66aca5 100644 --- a/persistence/sql/migrations/20220210000001000023_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000023_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_jti_blacklist_expiry ON hydra_oauth2_jti_blacklist; diff --git a/persistence/sql/migrations/20220210000001000023_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000023_nid.postgres.up.sql index cd52e0ccb50..51b48dcd53c 100644 --- a/persistence/sql/migrations/20220210000001000023_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000023_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_flow SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000023_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000023_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000023_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000023_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000024_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000024_nid.cockroach.up.sql index d0226f51930..e6b02615916 100644 --- a/persistence/sql/migrations/20220210000001000024_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000024_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_flow ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000024_nid.down.sql b/persistence/sql/migrations/20220210000001000024_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000024_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000024_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000024_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000024_nid.mysql.up.sql index 71b1fb90695..fbae99b7f49 100644 --- a/persistence/sql/migrations/20220210000001000024_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000024_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_jti_blacklist_expiry ON hydra_oauth2_jti_blacklist (expires_at ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000024_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000024_nid.postgres.up.sql index d0226f51930..e6b02615916 100644 --- a/persistence/sql/migrations/20220210000001000024_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000024_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_flow ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000024_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000024_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000024_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000024_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000025_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000025_nid.cockroach.up.sql index ba8b3b9d5a4..f7a2094bc41 100644 --- a/persistence/sql/migrations/20220210000001000025_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000025_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_flow ADD CONSTRAINT hydra_oauth2_flow_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000025_nid.down.sql b/persistence/sql/migrations/20220210000001000025_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000025_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000025_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000025_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000025_nid.mysql.up.sql index 7b95fd28914..9689c3fe660 100644 --- a/persistence/sql/migrations/20220210000001000025_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000025_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_jti_blacklist DROP PRIMARY KEY, ADD PRIMARY KEY (signature, nid); diff --git a/persistence/sql/migrations/20220210000001000025_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000025_nid.postgres.up.sql index ba8b3b9d5a4..f7a2094bc41 100644 --- a/persistence/sql/migrations/20220210000001000025_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000025_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_flow ADD CONSTRAINT hydra_oauth2_flow_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000025_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000025_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000025_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000025_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000026_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000026_nid.cockroach.up.sql index 36b177d5d22..52c170d9d98 100644 --- a/persistence/sql/migrations/20220210000001000026_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000026_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_flow_client_id_subject_idx; DROP INDEX hydra_oauth2_flow_cid_idx; diff --git a/persistence/sql/migrations/20220210000001000026_nid.down.sql b/persistence/sql/migrations/20220210000001000026_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000026_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000026_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000026_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000026_nid.mysql.up.sql index 3868ccad3ae..9b63a008f26 100644 --- a/persistence/sql/migrations/20220210000001000026_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000026_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000026_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000026_nid.postgres.up.sql index 36b177d5d22..52c170d9d98 100644 --- a/persistence/sql/migrations/20220210000001000026_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000026_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_flow_client_id_subject_idx; DROP INDEX hydra_oauth2_flow_cid_idx; diff --git a/persistence/sql/migrations/20220210000001000026_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000026_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000026_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000026_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000027_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000027_nid.cockroach.up.sql index 80967209331..9b9e4c23f9c 100644 --- a/persistence/sql/migrations/20220210000001000027_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000027_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_flow_client_id_subject_idx ON hydra_oauth2_flow (client_id ASC, nid ASC, subject ASC); CREATE INDEX hydra_oauth2_flow_cid_idx ON hydra_oauth2_flow (client_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000027_nid.down.sql b/persistence/sql/migrations/20220210000001000027_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000027_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000027_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000027_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000027_nid.mysql.up.sql index 51ef8609347..89fb40b3d8d 100644 --- a/persistence/sql/migrations/20220210000001000027_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000027_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_logout_request DROP FOREIGN KEY `hydra_oauth2_logout_request_client_id_fk`; ALTER TABLE hydra_oauth2_logout_request ADD CONSTRAINT `hydra_oauth2_logout_request_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000027_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000027_nid.postgres.up.sql index 80967209331..9b9e4c23f9c 100644 --- a/persistence/sql/migrations/20220210000001000027_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000027_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_flow_client_id_subject_idx ON hydra_oauth2_flow (client_id ASC, nid ASC, subject ASC); CREATE INDEX hydra_oauth2_flow_cid_idx ON hydra_oauth2_flow (client_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000027_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000027_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000027_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000027_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000028_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000028_nid.cockroach.up.sql index 18bc8b7d316..209a7e25986 100644 --- a/persistence/sql/migrations/20220210000001000028_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000028_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_jti_blacklist diff --git a/persistence/sql/migrations/20220210000001000028_nid.down.sql b/persistence/sql/migrations/20220210000001000028_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000028_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000028_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000028_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000028_nid.mysql.up.sql index ff54f5d85bd..93dbeabb657 100644 --- a/persistence/sql/migrations/20220210000001000028_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000028_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_logout_request SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_logout_request MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000028_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000028_nid.postgres.up.sql index 744aaad9c45..90bfa807129 100644 --- a/persistence/sql/migrations/20220210000001000028_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000028_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000028_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000028_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000028_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000028_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000029_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000029_nid.cockroach.up.sql index f0ce0ddc47b..022c8c65583 100644 --- a/persistence/sql/migrations/20220210000001000029_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000029_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_jti_blacklist SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000029_nid.down.sql b/persistence/sql/migrations/20220210000001000029_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000029_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000029_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000029_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000029_nid.mysql.up.sql index a43efb2ae1d..f03cfaa8a7d 100644 --- a/persistence/sql/migrations/20220210000001000029_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000029_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_obfuscated_authentication_session ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD COLUMN `nid` char(36); diff --git a/persistence/sql/migrations/20220210000001000029_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000029_nid.postgres.up.sql index 3a5b86faa13..f09b0a519e4 100644 --- a/persistence/sql/migrations/20220210000001000029_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000029_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE "hydra_oauth2_jti_blacklist" ADD COLUMN "nid" UUID; ALTER TABLE "hydra_oauth2_jti_blacklist" ADD CONSTRAINT "hydra_oauth2_jti_blacklist_nid_fk_idx" FOREIGN KEY ("nid") REFERENCES "networks" ("id") ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000029_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000029_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000029_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000029_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000030_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000030_nid.cockroach.up.sql index 00593fd3dd8..3621d4350cb 100644 --- a/persistence/sql/migrations/20220210000001000030_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000030_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_jti_blacklist ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000030_nid.down.sql b/persistence/sql/migrations/20220210000001000030_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000030_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000030_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000030_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000030_nid.mysql.up.sql index 4e37695d992..076670e61db 100644 --- a/persistence/sql/migrations/20220210000001000030_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000030_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session DROP FOREIGN KEY `hydra_oauth2_obfuscated_authentication_session_client_id_fk`; ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT `hydra_oauth2_obfuscated_authentication_session_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000030_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000030_nid.postgres.up.sql index f0ce0ddc47b..022c8c65583 100644 --- a/persistence/sql/migrations/20220210000001000030_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000030_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_jti_blacklist SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000030_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000030_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000030_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000030_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000031_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000031_nid.cockroach.up.sql index 3586debf386..341cd7c6fa5 100644 --- a/persistence/sql/migrations/20220210000001000031_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000031_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_jti_blacklist_expires_at_idx; diff --git a/persistence/sql/migrations/20220210000001000031_nid.down.sql b/persistence/sql/migrations/20220210000001000031_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000031_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000031_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000031_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000031_nid.mysql.up.sql index 7701efec3e6..787ba18bcd6 100644 --- a/persistence/sql/migrations/20220210000001000031_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000031_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_obfuscated_authentication_session SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_obfuscated_authentication_session MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000031_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000031_nid.postgres.up.sql index 00593fd3dd8..3621d4350cb 100644 --- a/persistence/sql/migrations/20220210000001000031_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000031_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_jti_blacklist ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000031_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000031_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000031_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000031_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000032_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000032_nid.cockroach.up.sql index 90a53e1b9e4..60e460bc859 100644 --- a/persistence/sql/migrations/20220210000001000032_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000032_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_jti_blacklist_expires_at_idx ON hydra_oauth2_jti_blacklist (expires_at ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000032_nid.down.sql b/persistence/sql/migrations/20220210000001000032_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000032_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000032_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000032_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000032_nid.mysql.up.sql index 71af924c06d..299cb4da876 100644 --- a/persistence/sql/migrations/20220210000001000032_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000032_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session DROP PRIMARY KEY, ADD PRIMARY KEY (subject, client_id, nid); diff --git a/persistence/sql/migrations/20220210000001000032_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000032_nid.postgres.up.sql index 2157fc49454..f2864c328c9 100644 --- a/persistence/sql/migrations/20220210000001000032_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000032_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_jti_blacklist_expiry; diff --git a/persistence/sql/migrations/20220210000001000032_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000032_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000032_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000032_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000033_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000033_nid.cockroach.up.sql index 43869bf26b6..ba15f2276e5 100644 --- a/persistence/sql/migrations/20220210000001000033_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000033_nid.cockroach.up.sql @@ -1,5 +1,6 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -ALTER TABLE hydra_oauth2_jti_blacklist DROP CONSTRAINT "primary"; -ALTER TABLE hydra_oauth2_jti_blacklist ADD CONSTRAINT hydra_oauth2_jti_blacklist_pkey PRIMARY KEY (signature ASC, nid ASC); +ALTER TABLE hydra_oauth2_jti_blacklist + DROP CONSTRAINT "primary", + ADD CONSTRAINT hydra_oauth2_jti_blacklist_pkey PRIMARY KEY (signature ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000033_nid.down.sql b/persistence/sql/migrations/20220210000001000033_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000033_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000033_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000033_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000033_nid.mysql.up.sql index 70453e41900..87c1cdeb6f7 100644 --- a/persistence/sql/migrations/20220210000001000033_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000033_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_oauth2_obfuscated_authentication_session_so_nid_idx ON hydra_oauth2_obfuscated_authentication_session (client_id ASC, subject_obfuscated ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000033_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000033_nid.postgres.up.sql index 21d01f49c1a..4fe95d9c9b3 100644 --- a/persistence/sql/migrations/20220210000001000033_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000033_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_jti_blacklist_expires_at_idx ON hydra_oauth2_jti_blacklist USING btree (expires_at ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000033_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000033_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000033_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000033_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000034_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000034_nid.cockroach.up.sql index e3426883e21..3dcdfe59704 100644 --- a/persistence/sql/migrations/20220210000001000034_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000034_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_logout_request diff --git a/persistence/sql/migrations/20220210000001000034_nid.down.sql b/persistence/sql/migrations/20220210000001000034_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000034_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000034_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000034_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000034_nid.mysql.up.sql index 6820cba4df4..431d5ccfde5 100644 --- a/persistence/sql/migrations/20220210000001000034_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000034_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000034_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000034_nid.postgres.up.sql index b254d4bbc30..9892cd1ba25 100644 --- a/persistence/sql/migrations/20220210000001000034_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000034_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_jti_blacklist DROP CONSTRAINT "hydra_oauth2_jti_blacklist_pkey"; ALTER TABLE hydra_oauth2_jti_blacklist ADD PRIMARY KEY (signature, nid); diff --git a/persistence/sql/migrations/20220210000001000034_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000034_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000034_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000034_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000035_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000035_nid.cockroach.up.sql index 28d10482c59..fed54baf47f 100644 --- a/persistence/sql/migrations/20220210000001000035_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000035_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_logout_request SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000035_nid.down.sql b/persistence/sql/migrations/20220210000001000035_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000035_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000035_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000035_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000035_nid.mysql.up.sql index 6a10fb85da9..60b18c96bbf 100644 --- a/persistence/sql/migrations/20220210000001000035_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000035_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_oidc SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_oidc MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000035_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000035_nid.postgres.up.sql index ac0665e9488..d5c38192251 100644 --- a/persistence/sql/migrations/20220210000001000035_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000035_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000035_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000035_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000035_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000035_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000036_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000036_nid.cockroach.up.sql index 4af05674780..daaee9d5aac 100644 --- a/persistence/sql/migrations/20220210000001000036_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000036_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_logout_request ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000036_nid.down.sql b/persistence/sql/migrations/20220210000001000036_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000036_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000036_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000036_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000036_nid.mysql.up.sql index 13557601e3c..1c083729e08 100644 --- a/persistence/sql/migrations/20220210000001000036_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000036_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_oidc DROP FOREIGN KEY `hydra_oauth2_oidc_client_id_fk`; ALTER TABLE hydra_oauth2_oidc ADD CONSTRAINT `hydra_oauth2_oidc_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000036_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000036_nid.postgres.up.sql index 28d10482c59..fed54baf47f 100644 --- a/persistence/sql/migrations/20220210000001000036_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000036_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_logout_request SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000036_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000036_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000036_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000036_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000037_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000037_nid.cockroach.up.sql index 9a5906eb79b..49b63579dbe 100644 --- a/persistence/sql/migrations/20220210000001000037_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000037_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_logout_request ADD CONSTRAINT hydra_oauth2_logout_request_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000037_nid.down.sql b/persistence/sql/migrations/20220210000001000037_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000037_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000037_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000037_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000037_nid.mysql.up.sql index 0a1e01b7839..5294f9673df 100644 --- a/persistence/sql/migrations/20220210000001000037_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000037_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_oidc_request_id_idx ON hydra_oauth2_oidc; diff --git a/persistence/sql/migrations/20220210000001000037_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000037_nid.postgres.up.sql index 4af05674780..daaee9d5aac 100644 --- a/persistence/sql/migrations/20220210000001000037_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000037_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_logout_request ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000037_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000037_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000037_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000037_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000038_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000038_nid.cockroach.up.sql index 825d3008764..1d85c7c58c1 100644 --- a/persistence/sql/migrations/20220210000001000038_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000038_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_logout_request_client_id_idx; diff --git a/persistence/sql/migrations/20220210000001000038_nid.down.sql b/persistence/sql/migrations/20220210000001000038_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000038_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000038_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000038_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000038_nid.mysql.up.sql index 8a27d062f54..d9c1da793ae 100644 --- a/persistence/sql/migrations/20220210000001000038_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000038_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_oidc_request_id_idx ON hydra_oauth2_oidc (request_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000038_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000038_nid.postgres.up.sql index 9a5906eb79b..49b63579dbe 100644 --- a/persistence/sql/migrations/20220210000001000038_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000038_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_logout_request ADD CONSTRAINT hydra_oauth2_logout_request_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000038_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000038_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000038_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000038_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000039_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000039_nid.cockroach.up.sql index c998b026318..942c3d2e130 100644 --- a/persistence/sql/migrations/20220210000001000039_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000039_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_logout_request_client_id_idx ON hydra_oauth2_logout_request (client_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000039_nid.down.sql b/persistence/sql/migrations/20220210000001000039_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000039_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000039_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000039_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000039_nid.mysql.up.sql index a940e4ea7a6..3cae208ecd7 100644 --- a/persistence/sql/migrations/20220210000001000039_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000039_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_pkce ALTER TABLE hydra_oauth2_pkce ADD COLUMN `nid` char(36); diff --git a/persistence/sql/migrations/20220210000001000039_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000039_nid.postgres.up.sql index 825d3008764..1d85c7c58c1 100644 --- a/persistence/sql/migrations/20220210000001000039_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000039_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_logout_request_client_id_idx; diff --git a/persistence/sql/migrations/20220210000001000039_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000039_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000039_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000039_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000040_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000040_nid.cockroach.up.sql index f95091fb208..f671439fbc2 100644 --- a/persistence/sql/migrations/20220210000001000040_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000040_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_obfuscated_authentication_session diff --git a/persistence/sql/migrations/20220210000001000040_nid.down.sql b/persistence/sql/migrations/20220210000001000040_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000040_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000040_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000040_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000040_nid.mysql.up.sql index 2f3205b8c2b..2020cc2e3fe 100644 --- a/persistence/sql/migrations/20220210000001000040_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000040_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_pkce SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_pkce MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000040_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000040_nid.postgres.up.sql index c998b026318..942c3d2e130 100644 --- a/persistence/sql/migrations/20220210000001000040_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000040_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_logout_request_client_id_idx ON hydra_oauth2_logout_request (client_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000040_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000040_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000040_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000040_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000041_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000041_nid.cockroach.up.sql index 317766933bd..736b83f8dbe 100644 --- a/persistence/sql/migrations/20220210000001000041_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000041_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_obfuscated_authentication_session SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000041_nid.down.sql b/persistence/sql/migrations/20220210000001000041_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000041_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000041_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000041_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000041_nid.mysql.up.sql index 08cb01eb059..247b6428444 100644 --- a/persistence/sql/migrations/20220210000001000041_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000041_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_pkce DROP FOREIGN KEY `hydra_oauth2_pkce_client_id_fk`; ALTER TABLE hydra_oauth2_pkce ADD CONSTRAINT `hydra_oauth2_pkce_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000041_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000041_nid.postgres.up.sql index da89ed761f2..d5d4c169bcf 100644 --- a/persistence/sql/migrations/20220210000001000041_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000041_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000041_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000041_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000041_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000041_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000042_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000042_nid.cockroach.up.sql index 5dccf012d44..9ec9cecb022 100644 --- a/persistence/sql/migrations/20220210000001000042_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000042_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000042_nid.down.sql b/persistence/sql/migrations/20220210000001000042_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000042_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000042_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000042_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000042_nid.mysql.up.sql index 154392f0f73..a4bf01ac18b 100644 --- a/persistence/sql/migrations/20220210000001000042_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000042_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- DROP INDEX hydra_oauth2_pkce_challenge_id_idx ON hydra_oauth2_pkce; DROP INDEX hydra_oauth2_pkce_request_id_idx ON hydra_oauth2_pkce; diff --git a/persistence/sql/migrations/20220210000001000042_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000042_nid.postgres.up.sql index 317766933bd..736b83f8dbe 100644 --- a/persistence/sql/migrations/20220210000001000042_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000042_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_obfuscated_authentication_session SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000042_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000042_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000042_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000042_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000043_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000043_nid.cockroach.up.sql index 7ef292d487a..66d51c5051e 100644 --- a/persistence/sql/migrations/20220210000001000043_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000043_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000043_nid.down.sql b/persistence/sql/migrations/20220210000001000043_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000043_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000043_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000043_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000043_nid.mysql.up.sql index 156b934d297..6a7bf185346 100644 --- a/persistence/sql/migrations/20220210000001000043_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000043_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- CREATE INDEX hydra_oauth2_pkce_challenge_id_idx ON hydra_oauth2_pkce (challenge_id ASC); CREATE INDEX hydra_oauth2_pkce_request_id_idx ON hydra_oauth2_pkce (request_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000043_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000043_nid.postgres.up.sql index 5dccf012d44..9ec9cecb022 100644 --- a/persistence/sql/migrations/20220210000001000043_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000043_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000043_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000043_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000043_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000043_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000044_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000044_nid.cockroach.up.sql index 5af9f652b7d..4eb66d5099c 100644 --- a/persistence/sql/migrations/20220210000001000044_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000044_nid.cockroach.up.sql @@ -1,5 +1,6 @@ -- Migration generated by the command below; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -ALTER TABLE hydra_oauth2_obfuscated_authentication_session DROP CONSTRAINT "primary"; -ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT "hydra_oauth2_obfuscated_authentication_session_pkey" PRIMARY KEY (subject ASC, client_id ASC, nid ASC); +ALTER TABLE hydra_oauth2_obfuscated_authentication_session + DROP CONSTRAINT "primary", + ADD CONSTRAINT "hydra_oauth2_obfuscated_authentication_session_pkey" PRIMARY KEY (subject ASC, client_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000044_nid.down.sql b/persistence/sql/migrations/20220210000001000044_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000044_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000044_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000044_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000044_nid.mysql.up.sql index c8d558d0dfe..b87a408e95d 100644 --- a/persistence/sql/migrations/20220210000001000044_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000044_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_refresh ALTER TABLE hydra_oauth2_refresh ADD COLUMN `nid` char(36); diff --git a/persistence/sql/migrations/20220210000001000044_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000044_nid.postgres.up.sql index 7ef292d487a..66d51c5051e 100644 --- a/persistence/sql/migrations/20220210000001000044_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000044_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000044_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000044_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000044_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000044_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000045_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000045_nid.cockroach.up.sql index 4695bd4ace6..579e1ef24d9 100644 --- a/persistence/sql/migrations/20220210000001000045_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000045_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_obfuscated_authentication_session_client_id_subject_obfuscated_idx; diff --git a/persistence/sql/migrations/20220210000001000045_nid.down.sql b/persistence/sql/migrations/20220210000001000045_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000045_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000045_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000045_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000045_nid.mysql.up.sql index 8d5b22d08bd..d92a1202baf 100644 --- a/persistence/sql/migrations/20220210000001000045_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000045_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_refresh SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_refresh MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000045_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000045_nid.postgres.up.sql index 13328aba126..b54897b0f3b 100644 --- a/persistence/sql/migrations/20220210000001000045_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000045_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_obfuscated_authentication_session DROP CONSTRAINT "hydra_oauth2_obfuscated_authentication_session_pkey"; ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD PRIMARY KEY (subject, client_id, nid); diff --git a/persistence/sql/migrations/20220210000001000045_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000045_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000045_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000045_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000046_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000046_nid.cockroach.up.sql index f5c48d0326a..e19f3da009f 100644 --- a/persistence/sql/migrations/20220210000001000046_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000046_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_oauth2_obfuscated_authentication_session_client_id_subject_obfuscated_idx ON hydra_oauth2_obfuscated_authentication_session (client_id ASC, subject_obfuscated ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000046_nid.down.sql b/persistence/sql/migrations/20220210000001000046_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000046_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000046_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000046_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000046_nid.mysql.up.sql index 7d46fc5c4f5..412b13ae926 100644 --- a/persistence/sql/migrations/20220210000001000046_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000046_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_refresh DROP FOREIGN KEY `hydra_oauth2_refresh_client_id_fk`; ALTER TABLE hydra_oauth2_refresh ADD CONSTRAINT `hydra_oauth2_refresh_client_id_fk` FOREIGN KEY (`client_id`, `nid`) REFERENCES `hydra_client` (`id`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000046_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000046_nid.postgres.up.sql index 2e4e7f8269e..1faad046fa1 100644 --- a/persistence/sql/migrations/20220210000001000046_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000046_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_obfuscated_authentication_session_so_idx; diff --git a/persistence/sql/migrations/20220210000001000046_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000046_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000046_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000046_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000047_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000047_nid.cockroach.up.sql index 8f6c32a4846..7ad7431016f 100644 --- a/persistence/sql/migrations/20220210000001000047_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000047_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_oidc diff --git a/persistence/sql/migrations/20220210000001000047_nid.down.sql b/persistence/sql/migrations/20220210000001000047_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000047_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000047_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000047_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000047_nid.mysql.up.sql index 171b5fecfdf..baf55b6a8db 100644 --- a/persistence/sql/migrations/20220210000001000047_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000047_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- DROP INDEX hydra_oauth2_refresh_challenge_id_idx ON hydra_oauth2_refresh; DROP INDEX hydra_oauth2_refresh_client_id_subject_idx ON hydra_oauth2_refresh; diff --git a/persistence/sql/migrations/20220210000001000047_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000047_nid.postgres.up.sql index b8c0ce10c35..f69e425be7d 100644 --- a/persistence/sql/migrations/20220210000001000047_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000047_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_oauth2_obfuscated_authentication_session_so_idx ON hydra_oauth2_obfuscated_authentication_session USING btree (client_id ASC, subject_obfuscated ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000047_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000047_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000047_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000047_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000048_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000048_nid.cockroach.up.sql index 2f05edb2a16..360ba568ce9 100644 --- a/persistence/sql/migrations/20220210000001000048_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000048_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_oidc SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000048_nid.down.sql b/persistence/sql/migrations/20220210000001000048_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000048_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000048_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000048_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000048_nid.mysql.up.sql index 8d1e5712ff4..fddf78b0aa9 100644 --- a/persistence/sql/migrations/20220210000001000048_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000048_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- CREATE INDEX hydra_oauth2_refresh_challenge_id_idx ON hydra_oauth2_refresh (challenge_id ASC); CREATE INDEX hydra_oauth2_refresh_client_id_subject_idx ON hydra_oauth2_refresh (client_id ASC, subject ASC); diff --git a/persistence/sql/migrations/20220210000001000048_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000048_nid.postgres.up.sql index e1697fd0d46..1bfb3cd43b8 100644 --- a/persistence/sql/migrations/20220210000001000048_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000048_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000048_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000048_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000048_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000048_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000049_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000049_nid.cockroach.up.sql index 7eb0bce737e..3b4e95a4c5a 100644 --- a/persistence/sql/migrations/20220210000001000049_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000049_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_oidc ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000049_nid.down.sql b/persistence/sql/migrations/20220210000001000049_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000049_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000049_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000049_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000049_nid.mysql.up.sql index 2711d27377e..2e98cbf473f 100644 --- a/persistence/sql/migrations/20220210000001000049_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000049_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_jwk SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_jwk MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000049_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000049_nid.postgres.up.sql index 2f05edb2a16..360ba568ce9 100644 --- a/persistence/sql/migrations/20220210000001000049_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000049_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_oidc SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000049_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000049_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000049_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000049_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000050_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000050_nid.cockroach.up.sql index 4b70deb5199..ea99c6237e4 100644 --- a/persistence/sql/migrations/20220210000001000050_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000050_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_oidc ADD CONSTRAINT hydra_oauth2_oidc_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000050_nid.down.sql b/persistence/sql/migrations/20220210000001000050_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000050_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000050_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000050_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000050_nid.mysql.up.sql index 9646df676b8..3d61d3a0a87 100644 --- a/persistence/sql/migrations/20220210000001000050_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000050_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_jwk_sid_kid_nid_key ON hydra_jwk (sid ASC, kid ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000050_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000050_nid.postgres.up.sql index 7eb0bce737e..3b4e95a4c5a 100644 --- a/persistence/sql/migrations/20220210000001000050_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000050_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_oidc ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000050_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000050_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000050_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000050_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000051_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000051_nid.cockroach.up.sql index 8998e8b4856..f9a21721558 100644 --- a/persistence/sql/migrations/20220210000001000051_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000051_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_oidc_client_id_idx; DROP INDEX hydra_oauth2_oidc_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000051_nid.down.sql b/persistence/sql/migrations/20220210000001000051_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000051_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000051_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000051_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000051_nid.mysql.up.sql index 67d0ee1e17c..fd33d972468 100644 --- a/persistence/sql/migrations/20220210000001000051_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000051_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_trusted_jwt_bearer_issuer ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD COLUMN `nid` char(36); diff --git a/persistence/sql/migrations/20220210000001000051_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000051_nid.postgres.up.sql index 4b70deb5199..ea99c6237e4 100644 --- a/persistence/sql/migrations/20220210000001000051_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000051_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_oidc ADD CONSTRAINT hydra_oauth2_oidc_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000051_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000051_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000051_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000051_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000052_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000052_nid.cockroach.up.sql index b3655d09e17..23037e51858 100644 --- a/persistence/sql/migrations/20220210000001000052_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000052_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_oidc_client_id_idx ON hydra_oauth2_oidc (client_id ASC, nid ASC); CREATE INDEX hydra_oauth2_oidc_challenge_id_idx ON hydra_oauth2_oidc (challenge_id ASC); diff --git a/persistence/sql/migrations/20220210000001000052_nid.down.sql b/persistence/sql/migrations/20220210000001000052_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000052_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000052_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000052_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000052_nid.mysql.up.sql index 21c9e017699..9c6b4af3ad6 100644 --- a/persistence/sql/migrations/20220210000001000052_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000052_nid.mysql.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx FOREIGN KEY (`nid`) REFERENCES `networks` (`id`) ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000052_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000052_nid.postgres.up.sql index 8998e8b4856..f9a21721558 100644 --- a/persistence/sql/migrations/20220210000001000052_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000052_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_oidc_client_id_idx; DROP INDEX hydra_oauth2_oidc_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000052_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000052_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000052_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000052_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000053_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000053_nid.cockroach.up.sql index 0a1ad2fb422..e7aba0a006b 100644 --- a/persistence/sql/migrations/20220210000001000053_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000053_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_pkce diff --git a/persistence/sql/migrations/20220210000001000053_nid.down.sql b/persistence/sql/migrations/20220210000001000053_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000053_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000053_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000053_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000053_nid.mysql.up.sql index 615e9441319..56c76b258bf 100644 --- a/persistence/sql/migrations/20220210000001000053_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000053_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_trusted_jwt_bearer_issuer SET nid = (SELECT id FROM networks LIMIT 1); ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer MODIFY `nid` char(36) NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000053_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000053_nid.postgres.up.sql index ac4b6c1a85c..17b7b2dd621 100644 --- a/persistence/sql/migrations/20220210000001000053_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000053_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_oidc_client_id_idx ON hydra_oauth2_oidc (client_id ASC, nid ASC); CREATE INDEX hydra_oauth2_oidc_challenge_id_idx ON hydra_oauth2_oidc (challenge_id ASC); diff --git a/persistence/sql/migrations/20220210000001000053_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000053_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000053_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000053_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000054_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000054_nid.cockroach.up.sql index 6901819bbd5..48ecde4e0bc 100644 --- a/persistence/sql/migrations/20220210000001000054_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000054_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_pkce SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000054_nid.down.sql b/persistence/sql/migrations/20220210000001000054_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000054_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000054_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000054_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000054_nid.mysql.up.sql index f764bfe76f8..a22dc35922b 100644 --- a/persistence/sql/migrations/20220210000001000054_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000054_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer DROP FOREIGN KEY `hydra_oauth2_trusted_jwt_bearer_issuer_ibfk_1`; ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT `hydra_oauth2_trusted_jwt_bearer_issuer_ibfk_1` FOREIGN KEY (`key_set`, `key_id`, `nid`) REFERENCES `hydra_jwk` (`sid`, `kid`, `nid`) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000054_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000054_nid.postgres.up.sql index 8cad7411df8..fbd0d29b70a 100644 --- a/persistence/sql/migrations/20220210000001000054_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000054_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_pkce ALTER TABLE hydra_oauth2_pkce ADD COLUMN "nid" UUID; diff --git a/persistence/sql/migrations/20220210000001000054_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000054_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000054_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000054_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000055_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000055_nid.cockroach.up.sql index 3e0e35674d8..eeac6745b5a 100644 --- a/persistence/sql/migrations/20220210000001000055_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000055_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_pkce ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000055_nid.down.sql b/persistence/sql/migrations/20220210000001000055_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000055_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000055_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000055_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000055_nid.mysql.up.sql index dd0b341015c..f921277f0b0 100644 --- a/persistence/sql/migrations/20220210000001000055_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000055_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX issuer ON hydra_oauth2_trusted_jwt_bearer_issuer; DROP INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx ON hydra_oauth2_trusted_jwt_bearer_issuer; diff --git a/persistence/sql/migrations/20220210000001000055_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000055_nid.postgres.up.sql index 6901819bbd5..48ecde4e0bc 100644 --- a/persistence/sql/migrations/20220210000001000055_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000055_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_pkce SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000055_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000055_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000055_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000055_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000056_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000056_nid.cockroach.up.sql index fda2af27cbc..425e5226eb3 100644 --- a/persistence/sql/migrations/20220210000001000056_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000056_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_pkce ADD CONSTRAINT hydra_oauth2_pkce_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000056_nid.down.sql b/persistence/sql/migrations/20220210000001000056_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000056_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000056_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000056_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000056_nid.mysql.up.sql index a6fa55e37c4..47c5e28aa6e 100644 --- a/persistence/sql/migrations/20220210000001000056_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000056_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX issuer ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer, subject, key_id, nid); CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (expires_at ASC); diff --git a/persistence/sql/migrations/20220210000001000056_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000056_nid.postgres.up.sql index 3e0e35674d8..eeac6745b5a 100644 --- a/persistence/sql/migrations/20220210000001000056_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000056_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_pkce ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000056_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000056_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000056_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000056_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000057_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000057_nid.cockroach.up.sql index e594560b437..a3642723a1f 100644 --- a/persistence/sql/migrations/20220210000001000057_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000057_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_pkce_client_id_idx; DROP INDEX hydra_oauth2_pkce_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000057_nid.down.sql b/persistence/sql/migrations/20220210000001000057_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000057_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000057_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000057_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000057_nid.mysql.up.sql index d225573590c..289c8fcbf32 100644 --- a/persistence/sql/migrations/20220210000001000057_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000057_nid.mysql.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_jwk_idx_id_uq ON hydra_jwk; DROP INDEX hydra_client_idx_id_uq ON hydra_client; diff --git a/persistence/sql/migrations/20220210000001000057_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000057_nid.postgres.up.sql index fda2af27cbc..425e5226eb3 100644 --- a/persistence/sql/migrations/20220210000001000057_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000057_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_pkce ADD CONSTRAINT hydra_oauth2_pkce_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000057_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000057_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000057_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000057_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000058_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000058_nid.cockroach.up.sql index 525a2beff16..8274311e95b 100644 --- a/persistence/sql/migrations/20220210000001000058_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000058_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_pkce_client_id_idx ON hydra_oauth2_pkce (client_id ASC, nid ASC); CREATE INDEX hydra_oauth2_pkce_challenge_id_idx ON hydra_oauth2_pkce (challenge_id ASC); diff --git a/persistence/sql/migrations/20220210000001000058_nid.down.sql b/persistence/sql/migrations/20220210000001000058_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000058_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000058_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000058_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000058_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000058_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000058_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000058_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000058_nid.postgres.up.sql index e594560b437..a3642723a1f 100644 --- a/persistence/sql/migrations/20220210000001000058_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000058_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_pkce_client_id_idx; DROP INDEX hydra_oauth2_pkce_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000058_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000058_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000058_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000058_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000059_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000059_nid.cockroach.up.sql index 94e21be87b7..18becfd7434 100644 --- a/persistence/sql/migrations/20220210000001000059_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000059_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_refresh diff --git a/persistence/sql/migrations/20220210000001000059_nid.down.sql b/persistence/sql/migrations/20220210000001000059_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000059_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000059_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000059_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000059_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000059_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000059_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000059_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000059_nid.postgres.up.sql index f1c6729d4b2..992308aa932 100644 --- a/persistence/sql/migrations/20220210000001000059_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000059_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_pkce_client_id_idx ON hydra_oauth2_pkce (client_id ASC, nid ASC); CREATE INDEX hydra_oauth2_pkce_challenge_id_idx ON hydra_oauth2_pkce (challenge_id ASC); diff --git a/persistence/sql/migrations/20220210000001000059_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000059_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000059_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000059_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000060_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000060_nid.cockroach.up.sql index c446303243d..09909cb3675 100644 --- a/persistence/sql/migrations/20220210000001000060_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000060_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_refresh SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000060_nid.down.sql b/persistence/sql/migrations/20220210000001000060_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000060_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000060_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000060_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000060_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000060_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000060_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000060_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000060_nid.postgres.up.sql index 879a3b0b556..ffb946bb752 100644 --- a/persistence/sql/migrations/20220210000001000060_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000060_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_refresh ALTER TABLE hydra_oauth2_refresh ADD COLUMN "nid" UUID; diff --git a/persistence/sql/migrations/20220210000001000060_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000060_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000060_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000060_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000061_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000061_nid.cockroach.up.sql index 54405b835f5..4c71498ad66 100644 --- a/persistence/sql/migrations/20220210000001000061_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000061_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_refresh ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000061_nid.down.sql b/persistence/sql/migrations/20220210000001000061_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000061_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000061_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000061_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000061_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000061_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000061_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000061_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000061_nid.postgres.up.sql index c446303243d..09909cb3675 100644 --- a/persistence/sql/migrations/20220210000001000061_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000061_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_refresh SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000061_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000061_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000061_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000061_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000062_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000062_nid.cockroach.up.sql index 259d82e6a94..a37f2b78780 100644 --- a/persistence/sql/migrations/20220210000001000062_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000062_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_refresh ADD CONSTRAINT hydra_oauth2_refresh_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000062_nid.down.sql b/persistence/sql/migrations/20220210000001000062_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000062_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000062_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000062_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000062_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000062_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000062_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000062_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000062_nid.postgres.up.sql index 54405b835f5..4c71498ad66 100644 --- a/persistence/sql/migrations/20220210000001000062_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000062_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_refresh ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000062_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000062_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000062_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000062_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000063_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000063_nid.cockroach.up.sql index b71a4e38889..b23b6d12cf4 100644 --- a/persistence/sql/migrations/20220210000001000063_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000063_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_refresh_client_id_idx; DROP INDEX hydra_oauth2_refresh_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000063_nid.down.sql b/persistence/sql/migrations/20220210000001000063_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000063_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000063_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000063_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000063_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000063_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000063_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000063_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000063_nid.postgres.up.sql index 259d82e6a94..a37f2b78780 100644 --- a/persistence/sql/migrations/20220210000001000063_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000063_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_refresh ADD CONSTRAINT hydra_oauth2_refresh_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000063_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000063_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000063_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000063_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000064_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000064_nid.cockroach.up.sql index afc90b3dc5e..d06f5c4eabe 100644 --- a/persistence/sql/migrations/20220210000001000064_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000064_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_refresh_client_id_idx ON hydra_oauth2_refresh (client_id ASC, nid ASC); CREATE INDEX hydra_oauth2_refresh_challenge_id_idx ON hydra_oauth2_refresh (challenge_id ASC); diff --git a/persistence/sql/migrations/20220210000001000064_nid.down.sql b/persistence/sql/migrations/20220210000001000064_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000064_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000064_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000064_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000064_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000064_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000064_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000064_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000064_nid.postgres.up.sql index b71a4e38889..b23b6d12cf4 100644 --- a/persistence/sql/migrations/20220210000001000064_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000064_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_refresh_client_id_idx; DROP INDEX hydra_oauth2_refresh_challenge_id_idx; diff --git a/persistence/sql/migrations/20220210000001000064_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000064_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000064_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000064_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000065_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000065_nid.cockroach.up.sql index ceda1453758..25336f1b468 100644 --- a/persistence/sql/migrations/20220210000001000065_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000065_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_jwk diff --git a/persistence/sql/migrations/20220210000001000065_nid.down.sql b/persistence/sql/migrations/20220210000001000065_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000065_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000065_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000065_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000065_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000065_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000065_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000065_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000065_nid.postgres.up.sql index afc90b3dc5e..d06f5c4eabe 100644 --- a/persistence/sql/migrations/20220210000001000065_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000065_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_refresh_client_id_idx ON hydra_oauth2_refresh (client_id ASC, nid ASC); CREATE INDEX hydra_oauth2_refresh_challenge_id_idx ON hydra_oauth2_refresh (challenge_id ASC); diff --git a/persistence/sql/migrations/20220210000001000065_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000065_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000065_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000065_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000066_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000066_nid.cockroach.up.sql index fa59ccba271..77c49909402 100644 --- a/persistence/sql/migrations/20220210000001000066_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000066_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_jwk SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000066_nid.down.sql b/persistence/sql/migrations/20220210000001000066_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000066_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000066_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000066_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000066_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000066_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000066_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000066_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000066_nid.postgres.up.sql index 8985669b2b3..336dd957610 100644 --- a/persistence/sql/migrations/20220210000001000066_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000066_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000066_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000066_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000066_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000066_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000067_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000067_nid.cockroach.up.sql index b3b05c8c977..593613790bd 100644 --- a/persistence/sql/migrations/20220210000001000067_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000067_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_jwk ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000067_nid.down.sql b/persistence/sql/migrations/20220210000001000067_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000067_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000067_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000067_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000067_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000067_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000067_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000067_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000067_nid.postgres.up.sql index fa59ccba271..77c49909402 100644 --- a/persistence/sql/migrations/20220210000001000067_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000067_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_jwk SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000067_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000067_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000067_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000067_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000068_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000068_nid.cockroach.up.sql index 55fbb156e70..4660531f180 100644 --- a/persistence/sql/migrations/20220210000001000068_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000068_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_jwk_sid_kid_key CASCADE; diff --git a/persistence/sql/migrations/20220210000001000068_nid.down.sql b/persistence/sql/migrations/20220210000001000068_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000068_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000068_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000068_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000068_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000068_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000068_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000068_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000068_nid.postgres.up.sql index b3b05c8c977..593613790bd 100644 --- a/persistence/sql/migrations/20220210000001000068_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000068_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_jwk ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000068_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000068_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000068_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000068_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000069_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000069_nid.cockroach.up.sql index b3c2418dc4b..18258e67752 100644 --- a/persistence/sql/migrations/20220210000001000069_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000069_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_jwk_sid_kid_nid_key ON hydra_jwk (sid ASC, kid ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000069_nid.down.sql b/persistence/sql/migrations/20220210000001000069_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000069_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000069_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000069_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000069_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000069_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000069_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000069_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000069_nid.postgres.up.sql index b3c2418dc4b..18258e67752 100644 --- a/persistence/sql/migrations/20220210000001000069_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000069_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_jwk_sid_kid_nid_key ON hydra_jwk (sid ASC, kid ASC, nid ASC); diff --git a/persistence/sql/migrations/20220210000001000069_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000069_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000069_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000069_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000070_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000070_nid.cockroach.up.sql index cdb47ac6efe..34158cb537c 100644 --- a/persistence/sql/migrations/20220210000001000070_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000070_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ -- hydra_oauth2_trusted_jwt_bearer_issuer diff --git a/persistence/sql/migrations/20220210000001000070_nid.down.sql b/persistence/sql/migrations/20220210000001000070_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000070_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000070_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000070_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000070_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000070_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000070_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000070_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000070_nid.postgres.up.sql index 854b37b1457..30f706aeeb5 100644 --- a/persistence/sql/migrations/20220210000001000070_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000070_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000070_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000070_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000070_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000070_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000071_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000071_nid.cockroach.up.sql index 0480d74e286..4cabdd8305c 100644 --- a/persistence/sql/migrations/20220210000001000071_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000071_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx FOREIGN KEY ("nid") REFERENCES "networks" ("id") ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000071_nid.down.sql b/persistence/sql/migrations/20220210000001000071_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000071_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000071_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000071_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000071_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000071_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000071_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000071_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000071_nid.postgres.up.sql index 0480d74e286..4cabdd8305c 100644 --- a/persistence/sql/migrations/20220210000001000071_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000071_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_nid_fk_idx FOREIGN KEY ("nid") REFERENCES "networks" ("id") ON UPDATE RESTRICT ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000071_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000071_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000071_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000071_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000072_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000072_nid.cockroach.up.sql index 0f09c67df20..48289041728 100644 --- a/persistence/sql/migrations/20220210000001000072_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000072_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT fk_key_set_ref_hydra_jwk FOREIGN KEY (key_set, key_id, nid) REFERENCES hydra_jwk(sid, kid, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000072_nid.down.sql b/persistence/sql/migrations/20220210000001000072_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000072_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000072_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000072_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000072_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000072_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000072_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000072_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000072_nid.postgres.up.sql index 0ca8726d608..d8d6251529b 100644 --- a/persistence/sql/migrations/20220210000001000072_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000072_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer DROP CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issue_issuer_subject_key_id_key; diff --git a/persistence/sql/migrations/20220210000001000072_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000072_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000072_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000072_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000073_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000073_nid.cockroach.up.sql index 2e3eb979536..bebee1f1043 100644 --- a/persistence/sql/migrations/20220210000001000073_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000073_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_trusted_jwt_bearer_issuer SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000073_nid.down.sql b/persistence/sql/migrations/20220210000001000073_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000073_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000073_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000073_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000073_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000073_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000073_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000073_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000073_nid.postgres.up.sql index e4f6033fbb6..db44ee8c5ef 100644 --- a/persistence/sql/migrations/20220210000001000073_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000073_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issue_issuer_subject_key_id_key UNIQUE (issuer, subject, key_id, nid); diff --git a/persistence/sql/migrations/20220210000001000073_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000073_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000073_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000073_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000074_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000074_nid.cockroach.up.sql index 2e30dbb4db0..f7de95cc385 100644 --- a/persistence/sql/migrations/20220210000001000074_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000074_nid.cockroach.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000074_nid.down.sql b/persistence/sql/migrations/20220210000001000074_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000074_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000074_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000074_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000074_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000074_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000074_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000074_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000074_nid.postgres.up.sql index 60a07f659dc..5449249956c 100644 --- a/persistence/sql/migrations/20220210000001000074_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000074_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer DROP CONSTRAINT IF EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_key_set_fkey; ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer DROP CONSTRAINT IF EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_key_set_key_id_fkey; diff --git a/persistence/sql/migrations/20220210000001000074_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000074_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000074_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000074_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000075_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000075_nid.cockroach.up.sql index c8c6233a1d4..70319ad4725 100644 --- a/persistence/sql/migrations/20220210000001000075_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000075_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ DROP INDEX hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key CASCADE; DROP INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx; diff --git a/persistence/sql/migrations/20220210000001000075_nid.down.sql b/persistence/sql/migrations/20220210000001000075_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000075_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000075_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000075_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000075_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000075_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000075_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000075_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000075_nid.postgres.up.sql index 4b9ba32c7a4..5a7d9bbf16b 100644 --- a/persistence/sql/migrations/20220210000001000075_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000075_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_key_set_fkey FOREIGN KEY (key_set, key_id, nid) REFERENCES hydra_jwk(sid, kid, nid) ON DELETE CASCADE; diff --git a/persistence/sql/migrations/20220210000001000075_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000075_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000075_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000075_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000076_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000076_nid.cockroach.up.sql index d9148e6af0a..78e6e766914 100644 --- a/persistence/sql/migrations/20220210000001000076_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000076_nid.cockroach.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE UNIQUE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer ASC, subject ASC, key_id ASC, nid ASC); CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (expires_at ASC); diff --git a/persistence/sql/migrations/20220210000001000076_nid.down.sql b/persistence/sql/migrations/20220210000001000076_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000076_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000076_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000076_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000076_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000076_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000076_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000076_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000076_nid.postgres.up.sql index 2e3eb979536..bebee1f1043 100644 --- a/persistence/sql/migrations/20220210000001000076_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000076_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ UPDATE hydra_oauth2_trusted_jwt_bearer_issuer SET nid = (SELECT id FROM networks LIMIT 1); diff --git a/persistence/sql/migrations/20220210000001000076_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000076_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000076_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000076_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000077_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000077_nid.cockroach.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000077_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000077_nid.cockroach.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000077_nid.down.sql b/persistence/sql/migrations/20220210000001000077_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000077_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000077_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000077_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000077_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000077_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000077_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000077_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000077_nid.postgres.up.sql index 2e30dbb4db0..f7de95cc385 100644 --- a/persistence/sql/migrations/20220210000001000077_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000077_nid.postgres.up.sql @@ -1,4 +1,4 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ALTER nid SET NOT NULL; diff --git a/persistence/sql/migrations/20220210000001000077_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000077_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000077_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000077_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000078_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000078_nid.cockroach.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000078_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000078_nid.cockroach.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000078_nid.down.sql b/persistence/sql/migrations/20220210000001000078_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000078_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000078_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. 
--- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000078_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000078_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000078_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000078_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000078_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000078_nid.postgres.up.sql index f96c01e87ea..5efb3a894a1 100644 --- a/persistence/sql/migrations/20220210000001000078_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000078_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000078_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000078_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000078_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000078_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000079_nid.cockroach.up.sql b/persistence/sql/migrations/20220210000001000079_nid.cockroach.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000079_nid.cockroach.up.sql +++ b/persistence/sql/migrations/20220210000001000079_nid.cockroach.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000079_nid.down.sql b/persistence/sql/migrations/20220210000001000079_nid.down.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000079_nid.down.sql +++ b/persistence/sql/migrations/20220210000001000079_nid.down.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. 
It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000079_nid.mysql.up.sql b/persistence/sql/migrations/20220210000001000079_nid.mysql.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000079_nid.mysql.up.sql +++ b/persistence/sql/migrations/20220210000001000079_nid.mysql.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220210000001000079_nid.postgres.up.sql b/persistence/sql/migrations/20220210000001000079_nid.postgres.up.sql index 2ec474ee239..00d3b5068b8 100644 --- a/persistence/sql/migrations/20220210000001000079_nid.postgres.up.sql +++ b/persistence/sql/migrations/20220210000001000079_nid.postgres.up.sql @@ -1,5 +1,5 @@ -- Migration generated by the command below; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (expires_at ASC); CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (id, nid); diff --git a/persistence/sql/migrations/20220210000001000079_nid.sqlite.up.sql b/persistence/sql/migrations/20220210000001000079_nid.sqlite.up.sql index d357e94c8f3..5d66b873588 100644 --- a/persistence/sql/migrations/20220210000001000079_nid.sqlite.up.sql +++ b/persistence/sql/migrations/20220210000001000079_nid.sqlite.up.sql @@ -1,2 +1,2 @@ --- This blank migration was generated to meet ory/x/popx validation criteria, see https://github.com/ory/x/pull/509; DO NOT EDIT. --- hydra:generate hydra migrate gen +-- This is a blank migration. It is generated to ensure that all dialects are represented in the migration files. 
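Taken together, the 20220210000001_nid fragments above (000074 through 000079) make hydra_oauth2_trusted_jwt_bearer_issuer network-aware: the old foreign keys and unique index are dropped, the JWK foreign key is re-created to include nid, nid is backfilled from the networks table and made mandatory, and the uniqueness and lookup indexes are re-created with nid in scope. A condensed, PostgreSQL/CockroachDB-flavored sketch of the combined effect, assembled from those fragments purely as a reading aid (it is not an additional migration file):

-- Fragment 000075: re-create the JWK foreign key with nid included.
ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer
    ADD CONSTRAINT hydra_oauth2_trusted_jwt_bearer_issuer_key_set_fkey
    FOREIGN KEY (key_set, key_id, nid) REFERENCES hydra_jwk (sid, kid, nid) ON DELETE CASCADE;

-- Fragment 000076: backfill nid for existing rows.
UPDATE hydra_oauth2_trusted_jwt_bearer_issuer SET nid = (SELECT id FROM networks LIMIT 1);

-- Fragment 000077: make nid mandatory.
ALTER TABLE hydra_oauth2_trusted_jwt_bearer_issuer ALTER nid SET NOT NULL;

-- Fragments 000076/000079: re-create the unique and lookup indexes scoped by nid.
CREATE UNIQUE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key
    ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer, subject, key_id, nid);
CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_expires_at_idx
    ON hydra_oauth2_trusted_jwt_bearer_issuer (expires_at);
CREATE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_idx
    ON hydra_oauth2_trusted_jwt_bearer_issuer (id, nid);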
+-- ./hydra migrate gen ./persistence/sql/src/20220210000001_nid/ ./persistence/sql/migrations/ diff --git a/persistence/sql/migrations/20220916000010000000_hydra_oauth2_flow.mysql.down.sql b/persistence/sql/migrations/20220916000010000000_hydra_oauth2_flow.mysql.down.sql new file mode 100644 index 00000000000..9f0a7068e37 --- /dev/null +++ b/persistence/sql/migrations/20220916000010000000_hydra_oauth2_flow.mysql.down.sql @@ -0,0 +1 @@ +DROP INDEX hydra_oauth2_flow_multi_query_idx ON hydra_oauth2_flow; diff --git a/persistence/sql/migrations/20221109000010000000_fix_foreign_key.mysql.down.sql b/persistence/sql/migrations/20221109000010000000_fix_foreign_key.mysql.down.sql index e9d3f3c24fe..64c9cc06cca 100644 --- a/persistence/sql/migrations/20221109000010000000_fix_foreign_key.mysql.down.sql +++ b/persistence/sql/migrations/20221109000010000000_fix_foreign_key.mysql.down.sql @@ -1 +1 @@ -ALTER TABLE ONLY hydra_oauth2_flow ALTER COLUMN login_session_id SET DEFAULT ''; +ALTER TABLE hydra_oauth2_flow ALTER COLUMN login_session_id SET DEFAULT ''; diff --git a/persistence/sql/migrations/20230220000000000000_access_token_strategy.down.sql b/persistence/sql/migrations/20230220000000000000_access_token_strategy.down.sql new file mode 100644 index 00000000000..7ecd6c1cd1d --- /dev/null +++ b/persistence/sql/migrations/20230220000000000000_access_token_strategy.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client DROP COLUMN access_token_strategy; diff --git a/persistence/sql/migrations/20230220000000000000_access_token_strategy.up.sql b/persistence/sql/migrations/20230220000000000000_access_token_strategy.up.sql new file mode 100644 index 00000000000..711c467f8b2 --- /dev/null +++ b/persistence/sql/migrations/20230220000000000000_access_token_strategy.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client ADD COLUMN access_token_strategy VARCHAR(10) NOT NULL DEFAULT ''; diff --git a/persistence/sql/migrations/20230228000010000001_client_add_skip_consent_column.down.sql b/persistence/sql/migrations/20230228000010000001_client_add_skip_consent_column.down.sql new file mode 100644 index 00000000000..950efade3b7 --- /dev/null +++ b/persistence/sql/migrations/20230228000010000001_client_add_skip_consent_column.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client DROP COLUMN skip_consent; diff --git a/persistence/sql/migrations/20230228000010000001_client_add_skip_consent_column.up.sql b/persistence/sql/migrations/20230228000010000001_client_add_skip_consent_column.up.sql new file mode 100644 index 00000000000..cfa0efe463f --- /dev/null +++ b/persistence/sql/migrations/20230228000010000001_client_add_skip_consent_column.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client ADD COLUMN skip_consent BOOLEAN NOT NULL DEFAULT false; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.cockroach.down.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.cockroach.down.sql new file mode 100644 index 00000000000..9d5c9db74db --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow DROP COLUMN login_extend_session_lifespan; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.cockroach.up.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.cockroach.up.sql new file mode 100644 index 00000000000..f19f41d875c --- /dev/null +++ 
b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow ADD login_extend_session_lifespan BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.mysql.down.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.mysql.down.sql new file mode 100644 index 00000000000..9d5c9db74db --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.mysql.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow DROP COLUMN login_extend_session_lifespan; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.mysql.up.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.mysql.up.sql new file mode 100644 index 00000000000..91c0f8716ed --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.mysql.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow ADD COLUMN login_extend_session_lifespan BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.postgres.down.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.postgres.down.sql new file mode 100644 index 00000000000..9d5c9db74db --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.postgres.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow DROP COLUMN login_extend_session_lifespan; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.postgres.up.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.postgres.up.sql new file mode 100644 index 00000000000..f19f41d875c --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.postgres.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow ADD login_extend_session_lifespan BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.sqlite.down.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.sqlite.down.sql new file mode 100644 index 00000000000..9d5c9db74db --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.sqlite.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow DROP COLUMN login_extend_session_lifespan; diff --git a/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.sqlite.up.sql b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.sqlite.up.sql new file mode 100644 index 00000000000..f19f41d875c --- /dev/null +++ b/persistence/sql/migrations/20230313112801000001_support_extend_session_lifespan.sqlite.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_flow ADD login_extend_session_lifespan BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.down.sql b/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.down.sql new file mode 100644 index 00000000000..cee58284abb --- /dev/null +++ b/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.down.sql @@ -0,0 +1 @@ +DROP INDEX hydra_oauth2_refresh_requested_at_idx; diff --git 
a/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.mysql.down.sql b/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.mysql.down.sql new file mode 100644 index 00000000000..ceb8c7ecd04 --- /dev/null +++ b/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.mysql.down.sql @@ -0,0 +1,2 @@ +-- cannot drop because it is used by a foreign key constraint +--DROP INDEX hydra_oauth2_refresh_requested_at_idx ON hydra_oauth2_refresh; diff --git a/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.up.sql b/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.up.sql new file mode 100644 index 00000000000..e0d1e273ed9 --- /dev/null +++ b/persistence/sql/migrations/20230512112801000001_refresh_requested_at_index.up.sql @@ -0,0 +1 @@ +CREATE INDEX hydra_oauth2_refresh_requested_at_idx ON hydra_oauth2_refresh (nid, requested_at); diff --git a/persistence/sql/migrations/20230606112801000001_remove_flow_indices.down.sql b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.down.sql new file mode 100644 index 00000000000..a391920ba8f --- /dev/null +++ b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.down.sql @@ -0,0 +1,12 @@ +CREATE UNIQUE INDEX hydra_oauth2_flow_login_verifier_idx ON hydra_oauth2_flow (login_verifier); +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_verifier_idx ON hydra_oauth2_flow (consent_verifier); + +CREATE INDEX hydra_oauth2_flow_multi_query_idx + ON hydra_oauth2_flow + ( + consent_error ASC, state ASC, subject ASC, + client_id ASC, consent_skip ASC, consent_remember + ASC, nid ASC + ); + +DROP INDEX hydra_oauth2_flow_previous_consents_idx; diff --git a/persistence/sql/migrations/20230606112801000001_remove_flow_indices.mysql.down.sql b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.mysql.down.sql new file mode 100644 index 00000000000..16d4e470dae --- /dev/null +++ b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.mysql.down.sql @@ -0,0 +1,12 @@ +CREATE UNIQUE INDEX hydra_oauth2_flow_login_verifier_idx ON hydra_oauth2_flow (login_verifier); +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_verifier_idx ON hydra_oauth2_flow (consent_verifier); + +CREATE INDEX hydra_oauth2_flow_multi_query_idx + ON hydra_oauth2_flow + ( + consent_error(2) ASC, state ASC, subject ASC, + client_id ASC, consent_skip ASC, consent_remember + ASC, nid ASC + ); + +DROP INDEX hydra_oauth2_flow_previous_consents_idx ON hydra_oauth2_flow; diff --git a/persistence/sql/migrations/20230606112801000001_remove_flow_indices.mysql.up.sql b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.mysql.up.sql new file mode 100644 index 00000000000..d7f86b61f94 --- /dev/null +++ b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.mysql.up.sql @@ -0,0 +1,6 @@ +DROP INDEX hydra_oauth2_flow_login_verifier_idx ON hydra_oauth2_flow; +DROP INDEX hydra_oauth2_flow_consent_verifier_idx ON hydra_oauth2_flow; +DROP INDEX hydra_oauth2_flow_multi_query_idx ON hydra_oauth2_flow; + +CREATE INDEX hydra_oauth2_flow_previous_consents_idx + ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, consent_error(2), consent_remember); diff --git a/persistence/sql/migrations/20230606112801000001_remove_flow_indices.up.sql b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.up.sql new file mode 100644 index 00000000000..d522d3482f5 --- /dev/null +++ 
b/persistence/sql/migrations/20230606112801000001_remove_flow_indices.up.sql @@ -0,0 +1,6 @@ +DROP INDEX hydra_oauth2_flow_login_verifier_idx; +DROP INDEX hydra_oauth2_flow_consent_verifier_idx; +DROP INDEX hydra_oauth2_flow_multi_query_idx; + +CREATE INDEX IF NOT EXISTS hydra_oauth2_flow_previous_consents_idx + ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, consent_error, consent_remember); diff --git a/persistence/sql/migrations/20230809122501000001_add_kratos_session_id.down.sql b/persistence/sql/migrations/20230809122501000001_add_kratos_session_id.down.sql new file mode 100644 index 00000000000..b5ab0899249 --- /dev/null +++ b/persistence/sql/migrations/20230809122501000001_add_kratos_session_id.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_flow DROP COLUMN identity_provider_session_id; +ALTER TABLE hydra_oauth2_authentication_session DROP COLUMN identity_provider_session_id; \ No newline at end of file diff --git a/persistence/sql/migrations/20230809122501000001_add_kratos_session_id.up.sql b/persistence/sql/migrations/20230809122501000001_add_kratos_session_id.up.sql new file mode 100644 index 00000000000..1d39c457105 --- /dev/null +++ b/persistence/sql/migrations/20230809122501000001_add_kratos_session_id.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_flow ADD COLUMN identity_provider_session_id VARCHAR(40); +ALTER TABLE hydra_oauth2_authentication_session ADD COLUMN identity_provider_session_id VARCHAR(40); \ No newline at end of file diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.cockroach.down.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.cockroach.down.sql new file mode 100644 index 00000000000..e0325012a57 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.cockroach.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client ALTER PRIMARY KEY USING COLUMNS (pk); diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.cockroach.up.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.cockroach.up.sql new file mode 100644 index 00000000000..5dedcc76bb3 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client ALTER PRIMARY KEY USING COLUMNS (id, nid); diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.down.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.down.sql new file mode 100644 index 00000000000..0835302054e --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.down.sql @@ -0,0 +1,7 @@ +UPDATE hydra_client SET pk = gen_random_uuid() WHERE pk IS NULL; + +ALTER TABLE hydra_client ALTER COLUMN pk SET NOT NULL; + +ALTER TABLE hydra_client DROP CONSTRAINT hydra_client_pkey; + +ALTER TABLE hydra_client ADD PRIMARY KEY (pk); diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.mysql.down.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.mysql.down.sql new file mode 100644 index 00000000000..0764ae08246 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.mysql.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_client MODIFY pk CHAR(36) NOT NULL; + +ALTER TABLE hydra_client DROP PRIMARY KEY, ADD PRIMARY KEY (pk); diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.mysql.up.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.mysql.up.sql new file mode 100644 index 
00000000000..9a951cefa36 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.mysql.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_client DROP PRIMARY KEY, ADD PRIMARY KEY (id, nid); + +ALTER TABLE hydra_client MODIFY pk CHAR(36); diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.sqlite.down.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.sqlite.down.sql new file mode 100644 index 00000000000..efe48deae6a --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.sqlite.down.sql @@ -0,0 +1,159 @@ +CREATE TABLE "_hydra_client_tmp" +( + id VARCHAR(255) NOT NULL, + client_name TEXT NOT NULL, + client_secret TEXT NOT NULL, + redirect_uris TEXT NOT NULL, + grant_types TEXT NOT NULL, + response_types TEXT NOT NULL, + scope TEXT NOT NULL, + owner TEXT NOT NULL, + policy_uri TEXT NOT NULL, + tos_uri TEXT NOT NULL, + client_uri TEXT NOT NULL, + logo_uri TEXT NOT NULL, + contacts TEXT NOT NULL, + client_secret_expires_at INTEGER NOT NULL DEFAULT 0, + sector_identifier_uri TEXT NOT NULL, + jwks TEXT NOT NULL, + jwks_uri TEXT NOT NULL, + request_uris TEXT NOT NULL, + token_endpoint_auth_method VARCHAR(25) NOT NULL DEFAULT '', + request_object_signing_alg VARCHAR(10) NOT NULL DEFAULT '', + userinfo_signed_response_alg VARCHAR(10) NOT NULL DEFAULT '', + subject_type VARCHAR(15) NOT NULL DEFAULT '', + allowed_cors_origins TEXT NOT NULL, + pk TEXT PRIMARY KEY NOT NULL, + pk_deprecated INTEGER NULL DEFAULT NULL, + audience TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + frontchannel_logout_uri TEXT NOT NULL DEFAULT '', + frontchannel_logout_session_required INTEGER NOT NULL DEFAULT false, + post_logout_redirect_uris TEXT NOT NULL DEFAULT '', + backchannel_logout_uri TEXT NOT NULL DEFAULT '', + backchannel_logout_session_required INTEGER NOT NULL DEFAULT false, + metadata TEXT NOT NULL DEFAULT '{}', + token_endpoint_auth_signing_alg VARCHAR(10) NOT NULL DEFAULT '', + registration_access_token_signature VARCHAR(128) NOT NULL DEFAULT '', + access_token_strategy VARCHAR(10) NOT NULL DEFAULT '', + authorization_code_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + authorization_code_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + authorization_code_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + client_credentials_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + implicit_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + implicit_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + jwt_bearer_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + password_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + password_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + skip_consent BOOLEAN NOT NULL DEFAULT false, + nid CHAR(36) NOT NULL +); +INSERT INTO "_hydra_client_tmp" (id, + client_name, + client_secret, + redirect_uris, + grant_types, + response_types, + scope, + owner, + policy_uri, + tos_uri, + client_uri, + logo_uri, + contacts, + client_secret_expires_at, + sector_identifier_uri, + jwks, + jwks_uri, + request_uris, + token_endpoint_auth_method, + request_object_signing_alg, + userinfo_signed_response_alg, + subject_type, + allowed_cors_origins, + pk, + pk_deprecated, + 
audience, + created_at, + updated_at, + frontchannel_logout_uri, + frontchannel_logout_session_required, + post_logout_redirect_uris, + backchannel_logout_uri, + backchannel_logout_session_required, + metadata, + token_endpoint_auth_signing_alg, + access_token_strategy, + registration_access_token_signature, + authorization_code_grant_access_token_lifespan, + authorization_code_grant_id_token_lifespan, + authorization_code_grant_refresh_token_lifespan, + client_credentials_grant_access_token_lifespan, + implicit_grant_access_token_lifespan, + implicit_grant_id_token_lifespan, + jwt_bearer_grant_access_token_lifespan, + password_grant_access_token_lifespan, + password_grant_refresh_token_lifespan, + refresh_token_grant_id_token_lifespan, + refresh_token_grant_access_token_lifespan, + refresh_token_grant_refresh_token_lifespan, + skip_consent, + nid) +SELECT id, + client_name, + client_secret, + redirect_uris, + grant_types, + response_types, + scope, + owner, + policy_uri, + tos_uri, + client_uri, + logo_uri, + contacts, + client_secret_expires_at, + sector_identifier_uri, + jwks, + jwks_uri, + request_uris, + token_endpoint_auth_method, + request_object_signing_alg, + userinfo_signed_response_alg, + subject_type, + allowed_cors_origins, + pk, + pk_deprecated, + audience, + created_at, + updated_at, + frontchannel_logout_uri, + frontchannel_logout_session_required, + post_logout_redirect_uris, + backchannel_logout_uri, + backchannel_logout_session_required, + metadata, + token_endpoint_auth_signing_alg, + access_token_strategy, + registration_access_token_signature, + authorization_code_grant_access_token_lifespan, + authorization_code_grant_id_token_lifespan, + authorization_code_grant_refresh_token_lifespan, + client_credentials_grant_access_token_lifespan, + implicit_grant_access_token_lifespan, + implicit_grant_id_token_lifespan, + jwt_bearer_grant_access_token_lifespan, + password_grant_access_token_lifespan, + password_grant_refresh_token_lifespan, + refresh_token_grant_id_token_lifespan, + refresh_token_grant_access_token_lifespan, + refresh_token_grant_refresh_token_lifespan, + skip_consent, + nid +FROM "hydra_client"; +DROP TABLE "hydra_client"; +ALTER TABLE "_hydra_client_tmp" RENAME TO "hydra_client"; diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.sqlite.up.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.sqlite.up.sql new file mode 100644 index 00000000000..4c348c2ba41 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.sqlite.up.sql @@ -0,0 +1,158 @@ +CREATE TABLE "_hydra_client_tmp" +( + id VARCHAR(255) NOT NULL, + client_name TEXT NOT NULL, + client_secret TEXT NOT NULL, + redirect_uris TEXT NOT NULL, + grant_types TEXT NOT NULL, + response_types TEXT NOT NULL, + scope TEXT NOT NULL, + owner TEXT NOT NULL, + policy_uri TEXT NOT NULL, + tos_uri TEXT NOT NULL, + client_uri TEXT NOT NULL, + logo_uri TEXT NOT NULL, + contacts TEXT NOT NULL, + client_secret_expires_at INTEGER NOT NULL DEFAULT 0, + sector_identifier_uri TEXT NOT NULL, + jwks TEXT NOT NULL, + jwks_uri TEXT NOT NULL, + request_uris TEXT NOT NULL, + token_endpoint_auth_method VARCHAR(25) NOT NULL DEFAULT '', + request_object_signing_alg VARCHAR(10) NOT NULL DEFAULT '', + userinfo_signed_response_alg VARCHAR(10) NOT NULL DEFAULT '', + subject_type VARCHAR(15) NOT NULL DEFAULT '', + allowed_cors_origins TEXT NOT NULL, + pk TEXT NULL, + pk_deprecated INTEGER NULL DEFAULT NULL, + audience TEXT NOT NULL, + created_at TIMESTAMP NOT NULL 
DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + frontchannel_logout_uri TEXT NOT NULL DEFAULT '', + frontchannel_logout_session_required INTEGER NOT NULL DEFAULT false, + post_logout_redirect_uris TEXT NOT NULL DEFAULT '', + backchannel_logout_uri TEXT NOT NULL DEFAULT '', + backchannel_logout_session_required INTEGER NOT NULL DEFAULT false, + metadata TEXT NOT NULL DEFAULT '{}', + token_endpoint_auth_signing_alg VARCHAR(10) NOT NULL DEFAULT '', + registration_access_token_signature VARCHAR(128) NOT NULL DEFAULT '', + access_token_strategy VARCHAR(10) NOT NULL DEFAULT '', + authorization_code_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + authorization_code_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + authorization_code_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + client_credentials_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + implicit_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + implicit_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + jwt_bearer_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + password_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + password_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + refresh_token_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL, + skip_consent BOOLEAN NOT NULL DEFAULT false, + nid CHAR(36) NOT NULL, + PRIMARY KEY (id, nid) +); +INSERT INTO "_hydra_client_tmp" (id, + client_name, + client_secret, + redirect_uris, + grant_types, + response_types, + scope, + owner, + policy_uri, + tos_uri, + client_uri, + logo_uri, + contacts, + client_secret_expires_at, + sector_identifier_uri, + jwks, + jwks_uri, + request_uris, + token_endpoint_auth_method, + request_object_signing_alg, + userinfo_signed_response_alg, + subject_type, + allowed_cors_origins, + pk, + pk_deprecated, + audience, + created_at, + updated_at, + frontchannel_logout_uri, + frontchannel_logout_session_required, + post_logout_redirect_uris, + backchannel_logout_uri, + backchannel_logout_session_required, + metadata, + token_endpoint_auth_signing_alg, + registration_access_token_signature, + authorization_code_grant_access_token_lifespan, + authorization_code_grant_id_token_lifespan, + authorization_code_grant_refresh_token_lifespan, + client_credentials_grant_access_token_lifespan, + implicit_grant_access_token_lifespan, + implicit_grant_id_token_lifespan, + jwt_bearer_grant_access_token_lifespan, + password_grant_access_token_lifespan, + password_grant_refresh_token_lifespan, + refresh_token_grant_id_token_lifespan, + refresh_token_grant_access_token_lifespan, + refresh_token_grant_refresh_token_lifespan, + skip_consent, + nid) +SELECT id, + client_name, + client_secret, + redirect_uris, + grant_types, + response_types, + scope, + owner, + policy_uri, + tos_uri, + client_uri, + logo_uri, + contacts, + client_secret_expires_at, + sector_identifier_uri, + jwks, + jwks_uri, + request_uris, + token_endpoint_auth_method, + request_object_signing_alg, + userinfo_signed_response_alg, + subject_type, + allowed_cors_origins, + pk, + pk_deprecated, + audience, + created_at, + updated_at, + frontchannel_logout_uri, + frontchannel_logout_session_required, + post_logout_redirect_uris, + backchannel_logout_uri, + backchannel_logout_session_required, + metadata, + token_endpoint_auth_signing_alg, + registration_access_token_signature, + 
authorization_code_grant_access_token_lifespan, + authorization_code_grant_id_token_lifespan, + authorization_code_grant_refresh_token_lifespan, + client_credentials_grant_access_token_lifespan, + implicit_grant_access_token_lifespan, + implicit_grant_id_token_lifespan, + jwt_bearer_grant_access_token_lifespan, + password_grant_access_token_lifespan, + password_grant_refresh_token_lifespan, + refresh_token_grant_id_token_lifespan, + refresh_token_grant_access_token_lifespan, + refresh_token_grant_refresh_token_lifespan, + skip_consent, + nid +FROM "hydra_client"; +DROP TABLE "hydra_client"; +ALTER TABLE "_hydra_client_tmp" RENAME TO "hydra_client"; diff --git a/persistence/sql/migrations/20230908104443000000_change_client_pk.up.sql b/persistence/sql/migrations/20230908104443000000_change_client_pk.up.sql new file mode 100644 index 00000000000..a22b8c6c305 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000000_change_client_pk.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE hydra_client DROP CONSTRAINT hydra_client_pkey; + +ALTER TABLE hydra_client ALTER COLUMN pk DROP NOT NULL; + +ALTER TABLE hydra_client ADD PRIMARY KEY (id, nid); diff --git a/persistence/sql/migrations/20230908104443000001_change_client_pk.cockroach.down.sql b/persistence/sql/migrations/20230908104443000001_change_client_pk.cockroach.down.sql new file mode 100644 index 00000000000..bfee76d222c --- /dev/null +++ b/persistence/sql/migrations/20230908104443000001_change_client_pk.cockroach.down.sql @@ -0,0 +1,3 @@ +UPDATE hydra_client SET pk = gen_random_uuid() WHERE pk IS NULL; + +ALTER TABLE hydra_client ALTER COLUMN pk SET NOT NULL; diff --git a/persistence/sql/migrations/20230908104443000001_change_client_pk.cockroach.up.sql b/persistence/sql/migrations/20230908104443000001_change_client_pk.cockroach.up.sql new file mode 100644 index 00000000000..d56f3d514cf --- /dev/null +++ b/persistence/sql/migrations/20230908104443000001_change_client_pk.cockroach.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client ALTER COLUMN pk DROP NOT NULL; diff --git a/persistence/sql/migrations/20230908104443000001_change_client_pk.down.sql b/persistence/sql/migrations/20230908104443000001_change_client_pk.down.sql new file mode 100644 index 00000000000..11c36dee1b6 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000001_change_client_pk.down.sql @@ -0,0 +1 @@ +-- only for crdb diff --git a/persistence/sql/migrations/20230908104443000001_change_client_pk.up.sql b/persistence/sql/migrations/20230908104443000001_change_client_pk.up.sql new file mode 100644 index 00000000000..11c36dee1b6 --- /dev/null +++ b/persistence/sql/migrations/20230908104443000001_change_client_pk.up.sql @@ -0,0 +1 @@ +-- only for crdb diff --git a/persistence/sql/migrations/20240104181300000001_jwk_index.down.sql b/persistence/sql/migrations/20240104181300000001_jwk_index.down.sql new file mode 100644 index 00000000000..2f6ee7479fe --- /dev/null +++ b/persistence/sql/migrations/20240104181300000001_jwk_index.down.sql @@ -0,0 +1,2 @@ +DROP INDEX hydra_jwk_nid_sid_created_at_idx; +DROP INDEX hydra_jwk_nid_sid_kid_created_at_idx; diff --git a/persistence/sql/migrations/20240104181300000001_jwk_index.mysql.down.sql b/persistence/sql/migrations/20240104181300000001_jwk_index.mysql.down.sql new file mode 100644 index 00000000000..79f73e980f8 --- /dev/null +++ b/persistence/sql/migrations/20240104181300000001_jwk_index.mysql.down.sql @@ -0,0 +1,3 @@ +DROP INDEX hydra_jwk_nid_sid_created_at_idx ON hydra_jwk; +-- can't drop this because of a foreign key constraint +-- 
DROP INDEX hydra_jwk_nid_sid_kid_created_at_idx ON hydra_jwk; diff --git a/persistence/sql/migrations/20240104181300000001_jwk_index.up.sql b/persistence/sql/migrations/20240104181300000001_jwk_index.up.sql new file mode 100644 index 00000000000..8ec5ed97302 --- /dev/null +++ b/persistence/sql/migrations/20240104181300000001_jwk_index.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX hydra_jwk_nid_sid_created_at_idx ON hydra_jwk (nid, sid, created_at); +CREATE INDEX hydra_jwk_nid_sid_kid_created_at_idx ON hydra_jwk (nid, sid, kid, created_at); diff --git a/persistence/sql/migrations/20240129174410000001_client_add_logout_skip_consent_column.down.sql b/persistence/sql/migrations/20240129174410000001_client_add_logout_skip_consent_column.down.sql new file mode 100644 index 00000000000..98258eecc43 --- /dev/null +++ b/persistence/sql/migrations/20240129174410000001_client_add_logout_skip_consent_column.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client DROP COLUMN skip_logout_consent; diff --git a/persistence/sql/migrations/20240129174410000001_client_add_logout_skip_consent_column.up.sql b/persistence/sql/migrations/20240129174410000001_client_add_logout_skip_consent_column.up.sql new file mode 100644 index 00000000000..96a8e3ec0c2 --- /dev/null +++ b/persistence/sql/migrations/20240129174410000001_client_add_logout_skip_consent_column.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_client ADD COLUMN skip_logout_consent BOOLEAN NULL; diff --git a/persistence/sql/migrations/20240403121110000001_add_expire_columns.down.sql b/persistence/sql/migrations/20240403121110000001_add_expire_columns.down.sql new file mode 100644 index 00000000000..539996e4efc --- /dev/null +++ b/persistence/sql/migrations/20240403121110000001_add_expire_columns.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE hydra_oauth2_oidc DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_access DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_refresh DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_code DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_pkce DROP COLUMN expires_at; diff --git a/persistence/sql/migrations/20240403121110000001_add_expire_columns.sqlite.down.sql b/persistence/sql/migrations/20240403121110000001_add_expire_columns.sqlite.down.sql new file mode 100644 index 00000000000..723235c21f9 --- /dev/null +++ b/persistence/sql/migrations/20240403121110000001_add_expire_columns.sqlite.down.sql @@ -0,0 +1,11 @@ +DROP INDEX hydra_oauth2_oidc_expires_at_idx; +DROP INDEX hydra_oauth2_access_expires_at_idx; +DROP INDEX hydra_oauth2_refresh_expires_at_idx; +DROP INDEX hydra_oauth2_code_expires_at_idx; +DROP INDEX hydra_oauth2_pkce_expires_at_idx; + +ALTER TABLE hydra_oauth2_oidc DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_access DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_refresh DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_code DROP COLUMN expires_at; +ALTER TABLE hydra_oauth2_pkce DROP COLUMN expires_at; diff --git a/persistence/sql/migrations/20240403121110000001_add_expire_columns.up.sql b/persistence/sql/migrations/20240403121110000001_add_expire_columns.up.sql new file mode 100644 index 00000000000..e3814953e96 --- /dev/null +++ b/persistence/sql/migrations/20240403121110000001_add_expire_columns.up.sql @@ -0,0 +1,11 @@ +ALTER TABLE hydra_oauth2_oidc ADD COLUMN expires_at TIMESTAMP NULL; +ALTER TABLE hydra_oauth2_access ADD COLUMN expires_at TIMESTAMP NULL; +ALTER TABLE hydra_oauth2_refresh ADD COLUMN expires_at TIMESTAMP NULL; +ALTER TABLE hydra_oauth2_code ADD COLUMN expires_at TIMESTAMP NULL; +ALTER TABLE hydra_oauth2_pkce 
ADD COLUMN expires_at TIMESTAMP NULL; + +CREATE INDEX hydra_oauth2_oidc_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_access_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_refresh_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_code_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_pkce_expires_at_idx ON hydra_oauth2_oidc (expires_at); diff --git a/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.down.sql b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.down.sql new file mode 100644 index 00000000000..89adb722a78 --- /dev/null +++ b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.down.sql @@ -0,0 +1,9 @@ +CREATE INDEX IF NOT EXISTS hydra_oauth2_access_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX IF NOT EXISTS hydra_oauth2_refresh_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX IF NOT EXISTS hydra_oauth2_code_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX IF NOT EXISTS hydra_oauth2_pkce_expires_at_idx ON hydra_oauth2_oidc (expires_at); + +DROP INDEX hydra_oauth2_access_expires_at_v2_idx; +DROP INDEX hydra_oauth2_refresh_expires_at_v2_idx; +DROP INDEX hydra_oauth2_code_expires_at_v2_idx; +DROP INDEX hydra_oauth2_pkce_expires_at_v2_idx; diff --git a/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.mysql.down.sql b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.mysql.down.sql new file mode 100644 index 00000000000..4e340d9ae92 --- /dev/null +++ b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.mysql.down.sql @@ -0,0 +1,9 @@ +CREATE INDEX hydra_oauth2_access_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_refresh_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_code_expires_at_idx ON hydra_oauth2_oidc (expires_at); +CREATE INDEX hydra_oauth2_pkce_expires_at_idx ON hydra_oauth2_oidc (expires_at); + +DROP INDEX hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access; +DROP INDEX hydra_oauth2_refresh_expires_at_v2_idx ON hydra_oauth2_refresh; +DROP INDEX hydra_oauth2_code_expires_at_v2_idx ON hydra_oauth2_code; +DROP INDEX hydra_oauth2_pkce_expires_at_v2_idx ON hydra_oauth2_pkce; diff --git a/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.mysql.up.sql b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.mysql.up.sql new file mode 100644 index 00000000000..71c9b49bf81 --- /dev/null +++ b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.mysql.up.sql @@ -0,0 +1,9 @@ +CREATE INDEX hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access (expires_at); +CREATE INDEX hydra_oauth2_refresh_expires_at_v2_idx ON hydra_oauth2_refresh (expires_at); +CREATE INDEX hydra_oauth2_code_expires_at_v2_idx ON hydra_oauth2_code (expires_at); +CREATE INDEX hydra_oauth2_pkce_expires_at_v2_idx ON hydra_oauth2_pkce (expires_at); + +DROP INDEX hydra_oauth2_access_expires_at_idx ON hydra_oauth2_oidc; +DROP INDEX hydra_oauth2_refresh_expires_at_idx ON hydra_oauth2_oidc; +DROP INDEX hydra_oauth2_code_expires_at_idx ON hydra_oauth2_oidc; +DROP INDEX hydra_oauth2_pkce_expires_at_idx ON hydra_oauth2_oidc; diff --git a/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.up.sql 
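A detail that is easy to misread in the hunks above: the 20240403121110000001_add_expire_columns up migration creates all five expires_at indexes on hydra_oauth2_oidc, including the ones named after the access, refresh, code, and pkce tables. The 20240612222110000001_add_oauth2_expires_at_indices migration whose body follows revisits this: it creates *_v2 indexes on the tables that actually hold the column and drops the four indexes whose names do not match the table they were created on. The pairing for one table, with statements exactly as they appear in that up migration (the other three tables follow the same pattern):

-- Create the replacement index on the owning table...
CREATE INDEX IF NOT EXISTS hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access (expires_at);
-- ...then drop the earlier index of the same name that was created on hydra_oauth2_oidc.
DROP INDEX hydra_oauth2_access_expires_at_idx;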
b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.up.sql new file mode 100644 index 00000000000..8928b89454c --- /dev/null +++ b/persistence/sql/migrations/20240612222110000001_add_oauth2_expires_at_indices.up.sql @@ -0,0 +1,9 @@ +CREATE INDEX IF NOT EXISTS hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access (expires_at); +CREATE INDEX IF NOT EXISTS hydra_oauth2_refresh_expires_at_v2_idx ON hydra_oauth2_refresh (expires_at); +CREATE INDEX IF NOT EXISTS hydra_oauth2_code_expires_at_v2_idx ON hydra_oauth2_code (expires_at); +CREATE INDEX IF NOT EXISTS hydra_oauth2_pkce_expires_at_v2_idx ON hydra_oauth2_pkce (expires_at); + +DROP INDEX hydra_oauth2_access_expires_at_idx; +DROP INDEX hydra_oauth2_refresh_expires_at_idx; +DROP INDEX hydra_oauth2_code_expires_at_idx; +DROP INDEX hydra_oauth2_pkce_expires_at_idx; diff --git a/persistence/sql/migrations/20240916105610000001_add_logout_request_timestamps.down.sql b/persistence/sql/migrations/20240916105610000001_add_logout_request_timestamps.down.sql new file mode 100644 index 00000000000..a435c6af599 --- /dev/null +++ b/persistence/sql/migrations/20240916105610000001_add_logout_request_timestamps.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_logout_request DROP expires_at; +ALTER TABLE hydra_oauth2_logout_request DROP requested_at; diff --git a/persistence/sql/migrations/20240916105610000001_add_logout_request_timestamps.up.sql b/persistence/sql/migrations/20240916105610000001_add_logout_request_timestamps.up.sql new file mode 100644 index 00000000000..8bcd8c3e789 --- /dev/null +++ b/persistence/sql/migrations/20240916105610000001_add_logout_request_timestamps.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_logout_request ADD expires_at timestamp NULL; +ALTER TABLE hydra_oauth2_logout_request ADD requested_at timestamp NULL; diff --git a/persistence/sql/migrations/20241012144910000001_unused_indices.down.sql b/persistence/sql/migrations/20241012144910000001_unused_indices.down.sql new file mode 100644 index 00000000000..16cb58bb35d --- /dev/null +++ b/persistence/sql/migrations/20241012144910000001_unused_indices.down.sql @@ -0,0 +1,14 @@ +-- CREATE INDEX IF NOT EXISTS hydra_oauth2_access_client_id_subject_idx ON hydra_oauth2_access (client_id ASC, subject ASC, nid ASC); +CREATE INDEX IF NOT EXISTS hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access (expires_at ASC); + +CREATE INDEX IF NOT EXISTS hydra_oauth2_refresh_client_id_subject_idx ON hydra_oauth2_refresh (client_id ASC, subject ASC); +CREATE INDEX IF NOT EXISTS hydra_oauth2_refresh_expires_at_v2_idx ON hydra_oauth2_refresh (expires_at ASC); + +CREATE INDEX IF NOT EXISTS hydra_oauth2_pkce_request_id_idx ON hydra_oauth2_pkce (request_id ASC, nid ASC); +CREATE INDEX IF NOT EXISTS hydra_oauth2_pkce_expires_at_v2_idx ON hydra_oauth2_pkce (expires_at ASC); + +CREATE INDEX IF NOT EXISTS hydra_oauth2_oidc_request_id_idx ON hydra_oauth2_oidc (request_id ASC, nid ASC); +CREATE INDEX IF NOT EXISTS hydra_oauth2_oidc_expires_at_idx ON hydra_oauth2_oidc (expires_at ASC); + +CREATE INDEX IF NOT EXISTS hydra_oauth2_code_request_id_idx ON hydra_oauth2_code (request_id ASC, nid ASC); +CREATE INDEX IF NOT EXISTS hydra_oauth2_code_expires_at_v2_idx ON hydra_oauth2_code (expires_at ASC); diff --git a/persistence/sql/migrations/20241012144910000001_unused_indices.mysql.down.sql b/persistence/sql/migrations/20241012144910000001_unused_indices.mysql.down.sql new file mode 100644 index 00000000000..92df4af8e15 --- /dev/null +++ 
b/persistence/sql/migrations/20241012144910000001_unused_indices.mysql.down.sql @@ -0,0 +1,14 @@ +-- CREATE INDEX IF NOT EXISTS hydra_oauth2_access_client_id_subject_idx ON hydra_oauth2_access (client_id ASC, subject ASC, nid ASC); +CREATE INDEX hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access (expires_at ASC); + +CREATE INDEX hydra_oauth2_refresh_client_id_subject_idx ON hydra_oauth2_refresh (client_id ASC, subject ASC); +CREATE INDEX hydra_oauth2_refresh_expires_at_v2_idx ON hydra_oauth2_refresh (expires_at ASC); + +CREATE INDEX hydra_oauth2_pkce_request_id_idx ON hydra_oauth2_pkce (request_id ASC, nid ASC); +CREATE INDEX hydra_oauth2_pkce_expires_at_v2_idx ON hydra_oauth2_pkce (expires_at ASC); + +CREATE INDEX hydra_oauth2_oidc_request_id_idx ON hydra_oauth2_oidc (request_id ASC, nid ASC); +CREATE INDEX hydra_oauth2_oidc_expires_at_idx ON hydra_oauth2_oidc (expires_at ASC); + +CREATE INDEX hydra_oauth2_code_request_id_idx ON hydra_oauth2_code (request_id ASC, nid ASC); +CREATE INDEX hydra_oauth2_code_expires_at_v2_idx ON hydra_oauth2_code (expires_at ASC); diff --git a/persistence/sql/migrations/20241012144910000001_unused_indices.mysql.up.sql b/persistence/sql/migrations/20241012144910000001_unused_indices.mysql.up.sql new file mode 100644 index 00000000000..83ae09f7edf --- /dev/null +++ b/persistence/sql/migrations/20241012144910000001_unused_indices.mysql.up.sql @@ -0,0 +1,14 @@ +-- DROP INDEX hydra_oauth2_access_client_id_subject_idx ON hydra_oauth2_access; +DROP INDEX hydra_oauth2_access_expires_at_v2_idx ON hydra_oauth2_access; -- janitor still uses requested_at index + +DROP INDEX hydra_oauth2_refresh_client_id_subject_idx ON hydra_oauth2_refresh; +DROP INDEX hydra_oauth2_refresh_expires_at_v2_idx ON hydra_oauth2_refresh; -- janitor still uses requested_at index + +DROP INDEX hydra_oauth2_pkce_request_id_idx ON hydra_oauth2_pkce; +DROP INDEX hydra_oauth2_pkce_expires_at_v2_idx ON hydra_oauth2_pkce; -- janitor still uses requested_at index + +DROP INDEX hydra_oauth2_oidc_request_id_idx ON hydra_oauth2_oidc; +DROP INDEX hydra_oauth2_oidc_expires_at_idx ON hydra_oauth2_oidc; -- janitor still uses requested_at index + +DROP INDEX hydra_oauth2_code_request_id_idx ON hydra_oauth2_code; +DROP INDEX hydra_oauth2_code_expires_at_v2_idx ON hydra_oauth2_code; -- janitor still uses requested_at index diff --git a/persistence/sql/migrations/20241012144910000001_unused_indices.up.sql b/persistence/sql/migrations/20241012144910000001_unused_indices.up.sql new file mode 100644 index 00000000000..ac2ba6ebf8b --- /dev/null +++ b/persistence/sql/migrations/20241012144910000001_unused_indices.up.sql @@ -0,0 +1,14 @@ +DROP INDEX IF EXISTS hydra_oauth2_access_client_id_subject_idx; +DROP INDEX IF EXISTS hydra_oauth2_access_expires_at_v2_idx; -- janitor still uses requested_at index + +DROP INDEX IF EXISTS hydra_oauth2_refresh_client_id_subject_idx; +DROP INDEX IF EXISTS hydra_oauth2_refresh_expires_at_v2_idx; -- janitor still uses requested_at index + +DROP INDEX IF EXISTS hydra_oauth2_pkce_request_id_idx; +DROP INDEX IF EXISTS hydra_oauth2_pkce_expires_at_v2_idx; -- janitor still uses requested_at index + +DROP INDEX IF EXISTS hydra_oauth2_oidc_request_id_idx; +DROP INDEX IF EXISTS hydra_oauth2_oidc_expires_at_idx; -- janitor still uses requested_at index + +DROP INDEX IF EXISTS hydra_oauth2_code_request_id_idx; +DROP INDEX IF EXISTS hydra_oauth2_code_expires_at_v2_idx; -- janitor still uses requested_at index diff --git 
a/persistence/sql/migrations/20241014121000000000_add_refresh_token_in_grace_period_flag.down.sql b/persistence/sql/migrations/20241014121000000000_add_refresh_token_in_grace_period_flag.down.sql new file mode 100644 index 00000000000..a30a127e902 --- /dev/null +++ b/persistence/sql/migrations/20241014121000000000_add_refresh_token_in_grace_period_flag.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_refresh DROP COLUMN first_used_at; diff --git a/persistence/sql/migrations/20241014121000000000_add_refresh_token_in_grace_period_flag.up.sql b/persistence/sql/migrations/20241014121000000000_add_refresh_token_in_grace_period_flag.up.sql new file mode 100644 index 00000000000..8ae823047f7 --- /dev/null +++ b/persistence/sql/migrations/20241014121000000000_add_refresh_token_in_grace_period_flag.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_refresh ADD first_used_at TIMESTAMP DEFAULT NULL; diff --git a/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.cockroach.autocommit.down.sql b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.cockroach.autocommit.down.sql new file mode 100644 index 00000000000..e49103705e1 --- /dev/null +++ b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.cockroach.autocommit.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_refresh DROP COLUMN IF EXISTS access_token_signature; diff --git a/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.cockroach.autocommit.up.sql b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.cockroach.autocommit.up.sql new file mode 100644 index 00000000000..da61fbd1ecd --- /dev/null +++ b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.cockroach.autocommit.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_refresh ADD COLUMN IF NOT EXISTS access_token_signature VARCHAR(255) DEFAULT NULL; diff --git a/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.down.sql b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.down.sql new file mode 100644 index 00000000000..46db0f98db5 --- /dev/null +++ b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_refresh DROP COLUMN access_token_signature; diff --git a/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.up.sql b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.up.sql new file mode 100644 index 00000000000..49ada2a8403 --- /dev/null +++ b/persistence/sql/migrations/20241129111700000000_add_refresh_token_access_token_link.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_refresh ADD COLUMN access_token_signature VARCHAR(255) DEFAULT NULL; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.autocommit.up.sql b/persistence/sql/migrations/20241609000001000000_device_flow.autocommit.up.sql new file mode 100644 index 00000000000..bdadde20fd0 --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.autocommit.up.sql @@ -0,0 +1,46 @@ +CREATE TABLE IF NOT EXISTS hydra_oauth2_device_auth_codes +( + device_code_signature VARCHAR(255) NOT NULL, + user_code_signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT NOW(), + client_id VARCHAR(255) NOT NULL, + scope VARCHAR(1024) NOT NULL, + granted_scope 
VARCHAR(1024) NOT NULL, + form_data VARCHAR(4096) NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + device_code_active BOOL NOT NULL DEFAULT true, + user_code_state SMALLINT NOT NULL DEFAULT 0, + requested_audience VARCHAR(1024) NOT NULL, + granted_audience VARCHAR(1024) NOT NULL, + challenge_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + nid UUID NOT NULL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + FOREIGN KEY (nid) REFERENCES networks (id) ON UPDATE RESTRICT ON DELETE CASCADE, + FOREIGN KEY (challenge_id) REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + PRIMARY KEY (device_code_signature, nid), + + INDEX hydra_oauth2_device_auth_codes_request_id_idx (request_id, nid), + INDEX hydra_oauth2_device_auth_codes_client_id_idx (client_id, nid), + INDEX hydra_oauth2_device_auth_codes_challenge_id_idx (challenge_id), + UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx (nid, user_code_signature) +); + +ALTER TABLE hydra_oauth2_flow + ADD COLUMN IF NOT EXISTS device_challenge_id VARCHAR(255) NULL, + ADD COLUMN IF NOT EXISTS device_code_request_id VARCHAR(255) NULL, + ADD COLUMN IF NOT EXISTS device_verifier VARCHAR(40) NULL, + ADD COLUMN IF NOT EXISTS device_csrf VARCHAR(40) NULL, + ADD COLUMN IF NOT EXISTS device_was_used BOOLEAN NULL, + ADD COLUMN IF NOT EXISTS device_handled_at TIMESTAMP NULL, + ADD COLUMN IF NOT EXISTS device_error VARCHAR(2048) NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); + +ALTER TABLE hydra_client + ADD COLUMN IF NOT EXISTS device_authorization_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS device_authorization_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS device_authorization_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.cockroach.down.sql b/persistence/sql/migrations/20241609000001000000_device_flow.cockroach.down.sql new file mode 100644 index 00000000000..60170d40a98 --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.cockroach.down.sql @@ -0,0 +1,15 @@ +DROP TABLE IF EXISTS hydra_oauth2_device_auth_codes; + +ALTER TABLE hydra_oauth2_flow + DROP COLUMN IF EXISTS device_challenge_id, + DROP COLUMN IF EXISTS device_code_request_id, + DROP COLUMN IF EXISTS device_verifier, + DROP COLUMN IF EXISTS device_csrf, + DROP COLUMN IF EXISTS device_was_used, + DROP COLUMN IF EXISTS device_handled_at, + DROP COLUMN IF EXISTS device_error; + +ALTER TABLE hydra_client + DROP COLUMN IF EXISTS device_authorization_grant_id_token_lifespan, + DROP COLUMN IF EXISTS device_authorization_grant_access_token_lifespan, + DROP COLUMN IF EXISTS device_authorization_grant_refresh_token_lifespan; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.down.sql b/persistence/sql/migrations/20241609000001000000_device_flow.down.sql new file mode 100644 index 00000000000..3e968e326d6 --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.down.sql @@ -0,0 +1,15 @@ +DROP TABLE hydra_oauth2_device_auth_codes; + +ALTER TABLE hydra_oauth2_flow + DROP COLUMN device_challenge_id, + DROP COLUMN device_code_request_id, + DROP COLUMN device_verifier, + DROP COLUMN device_csrf, + DROP COLUMN device_was_used, + DROP COLUMN device_handled_at, + DROP COLUMN device_error; + +ALTER TABLE hydra_client 
+ DROP COLUMN device_authorization_grant_id_token_lifespan, + DROP COLUMN device_authorization_grant_access_token_lifespan, + DROP COLUMN device_authorization_grant_refresh_token_lifespan; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.mysql.up.sql b/persistence/sql/migrations/20241609000001000000_device_flow.mysql.up.sql new file mode 100644 index 00000000000..89ae76d125f --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.mysql.up.sql @@ -0,0 +1,54 @@ +CREATE TABLE IF NOT EXISTS hydra_oauth2_device_auth_codes +( + device_code_signature VARCHAR(255) NOT NULL, + user_code_signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT NOW(), + client_id VARCHAR(255) NOT NULL, + scope VARCHAR(1024) NOT NULL, + granted_scope VARCHAR(1024) NOT NULL, + form_data VARCHAR(4096) NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + device_code_active BOOL NOT NULL DEFAULT true, + user_code_state SMALLINT NOT NULL DEFAULT 0, + requested_audience VARCHAR(1024) NOT NULL, + granted_audience VARCHAR(1024) NOT NULL, + challenge_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + nid CHAR(36) NOT NULL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + FOREIGN KEY (nid) REFERENCES networks (id) ON UPDATE RESTRICT ON DELETE CASCADE, + FOREIGN KEY (challenge_id) REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + PRIMARY KEY (device_code_signature, nid) +); + +CREATE INDEX hydra_oauth2_device_auth_codes_request_id_idx ON hydra_oauth2_device_auth_codes (request_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_client_id_idx ON hydra_oauth2_device_auth_codes (client_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_challenge_id_idx ON hydra_oauth2_device_auth_codes (challenge_id); +CREATE UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx ON hydra_oauth2_device_auth_codes (nid, user_code_signature); + +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_challenge_id VARCHAR(255) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_code_request_id VARCHAR(255) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_verifier VARCHAR(40) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_csrf VARCHAR(40) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_was_used BOOL NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_handled_at TIMESTAMP NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_error VARCHAR(2048) NULL; + +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); + +ALTER TABLE hydra_client + ADD COLUMN device_authorization_grant_id_token_lifespan BIGINT NULL DEFAULT NULL; +ALTER TABLE hydra_client + ADD COLUMN device_authorization_grant_access_token_lifespan BIGINT NULL DEFAULT NULL; +ALTER TABLE hydra_client + ADD COLUMN device_authorization_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.postgres.up.sql b/persistence/sql/migrations/20241609000001000000_device_flow.postgres.up.sql new file mode 100644 index 00000000000..a1f561327d0 --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.postgres.up.sql @@ -0,0 +1,46 @@ +CREATE TABLE IF NOT EXISTS hydra_oauth2_device_auth_codes +( + device_code_signature VARCHAR(255) NOT NULL, + user_code_signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + 
requested_at TIMESTAMP NOT NULL DEFAULT NOW(), + client_id VARCHAR(255) NOT NULL, + scope VARCHAR(1024) NOT NULL, + granted_scope VARCHAR(1024) NOT NULL, + form_data VARCHAR(4096) NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + device_code_active BOOL NOT NULL DEFAULT true, + user_code_state SMALLINT NOT NULL DEFAULT 0, + requested_audience VARCHAR(1024) NOT NULL, + granted_audience VARCHAR(1024) NOT NULL, + challenge_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + nid UUID NOT NULL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + FOREIGN KEY (nid) REFERENCES networks (id) ON UPDATE RESTRICT ON DELETE CASCADE, + FOREIGN KEY (challenge_id) REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + PRIMARY KEY (device_code_signature, nid) +); + +CREATE INDEX hydra_oauth2_device_auth_codes_request_id_idx ON hydra_oauth2_device_auth_codes (request_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_client_id_idx ON hydra_oauth2_device_auth_codes (client_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_challenge_id_idx ON hydra_oauth2_device_auth_codes (challenge_id); +CREATE UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx ON hydra_oauth2_device_auth_codes (nid, user_code_signature); + +ALTER TABLE hydra_oauth2_flow + ADD COLUMN IF NOT EXISTS device_challenge_id VARCHAR(255) NULL, + ADD COLUMN IF NOT EXISTS device_code_request_id VARCHAR(255) NULL, + ADD COLUMN IF NOT EXISTS device_verifier VARCHAR(40) NULL, + ADD COLUMN IF NOT EXISTS device_csrf VARCHAR(40) NULL, + ADD COLUMN IF NOT EXISTS device_was_used BOOLEAN NULL, + ADD COLUMN IF NOT EXISTS device_handled_at TIMESTAMP NULL, + ADD COLUMN IF NOT EXISTS device_error VARCHAR(2048) NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); + +ALTER TABLE hydra_client + ADD COLUMN IF NOT EXISTS device_authorization_grant_id_token_lifespan BIGINT NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS device_authorization_grant_access_token_lifespan BIGINT NULL DEFAULT NULL, + ADD COLUMN IF NOT EXISTS device_authorization_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.sqlite.down.sql b/persistence/sql/migrations/20241609000001000000_device_flow.sqlite.down.sql new file mode 100644 index 00000000000..0a78b68511d --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.sqlite.down.sql @@ -0,0 +1,15 @@ +DROP TABLE hydra_oauth2_device_auth_codes; + +DROP INDEX hydra_oauth2_flow_device_challenge_idx; + +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_challenge_id; +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_code_request_id; +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_verifier; +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_csrf; +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_was_used; +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_handled_at; +ALTER TABLE hydra_oauth2_flow DROP COLUMN device_error; + +ALTER TABLE hydra_client DROP COLUMN device_authorization_grant_id_token_lifespan; +ALTER TABLE hydra_client DROP COLUMN device_authorization_grant_access_token_lifespan; +ALTER TABLE hydra_client DROP COLUMN device_authorization_grant_refresh_token_lifespan; diff --git a/persistence/sql/migrations/20241609000001000000_device_flow.sqlite.up.sql b/persistence/sql/migrations/20241609000001000000_device_flow.sqlite.up.sql new file mode 100644 index 
00000000000..d6fe883f137 --- /dev/null +++ b/persistence/sql/migrations/20241609000001000000_device_flow.sqlite.up.sql @@ -0,0 +1,54 @@ +CREATE TABLE IF NOT EXISTS hydra_oauth2_device_auth_codes +( + device_code_signature VARCHAR(255) NOT NULL, + user_code_signature VARCHAR(255) NOT NULL, + request_id VARCHAR(40) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + client_id VARCHAR(255) NOT NULL, + scope VARCHAR(1024) NOT NULL, + granted_scope VARCHAR(1024) NOT NULL, + form_data VARCHAR(4096) NOT NULL, + session_data TEXT NOT NULL, + subject VARCHAR(255) NOT NULL DEFAULT '', + device_code_active BOOL NOT NULL DEFAULT true, + user_code_state SMALLINT NOT NULL DEFAULT 0, + requested_audience VARCHAR(1024) NOT NULL, + granted_audience VARCHAR(1024) NOT NULL, + challenge_id VARCHAR(40) NULL, + expires_at TIMESTAMP NULL, + nid UUID NOT NULL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + FOREIGN KEY (nid) REFERENCES networks (id) ON UPDATE RESTRICT ON DELETE CASCADE, + FOREIGN KEY (challenge_id) REFERENCES hydra_oauth2_flow (consent_challenge_id) ON DELETE CASCADE, + PRIMARY KEY (device_code_signature, nid) +); + +CREATE INDEX hydra_oauth2_device_auth_codes_request_id_idx ON hydra_oauth2_device_auth_codes (request_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_client_id_idx ON hydra_oauth2_device_auth_codes (client_id, nid); +CREATE INDEX hydra_oauth2_device_auth_codes_challenge_id_idx ON hydra_oauth2_device_auth_codes (challenge_id); +CREATE UNIQUE INDEX hydra_oauth2_device_auth_codes_user_code_signature_idx ON hydra_oauth2_device_auth_codes (nid, user_code_signature); + +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_challenge_id VARCHAR(255) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_code_request_id VARCHAR(255) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_verifier VARCHAR(40) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_csrf VARCHAR(40) NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_was_used BOOLEAN NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_handled_at TIMESTAMP NULL; +ALTER TABLE hydra_oauth2_flow + ADD COLUMN device_error VARCHAR(2048) NULL; + +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); + +ALTER TABLE hydra_client + ADD COLUMN device_authorization_grant_id_token_lifespan BIGINT NULL DEFAULT NULL; +ALTER TABLE hydra_client + ADD COLUMN device_authorization_grant_access_token_lifespan BIGINT NULL DEFAULT NULL; +ALTER TABLE hydra_client + ADD COLUMN device_authorization_grant_refresh_token_lifespan BIGINT NULL DEFAULT NULL; diff --git a/persistence/sql/migrations/20250513132142000000_refresh_used_times.cockroach.up.sql b/persistence/sql/migrations/20250513132142000000_refresh_used_times.cockroach.up.sql new file mode 100644 index 00000000000..de88b6fab65 --- /dev/null +++ b/persistence/sql/migrations/20250513132142000000_refresh_used_times.cockroach.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_refresh + ADD COLUMN used_times INT4 NULL; diff --git a/persistence/sql/migrations/20250513132142000000_refresh_used_times.down.sql b/persistence/sql/migrations/20250513132142000000_refresh_used_times.down.sql new file mode 100644 index 00000000000..7000d2cb4af --- /dev/null +++ b/persistence/sql/migrations/20250513132142000000_refresh_used_times.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_refresh + DROP COLUMN used_times; diff --git 
a/persistence/sql/migrations/20250513132142000000_refresh_used_times.up.sql b/persistence/sql/migrations/20250513132142000000_refresh_used_times.up.sql new file mode 100644 index 00000000000..67e561dbea3 --- /dev/null +++ b/persistence/sql/migrations/20250513132142000000_refresh_used_times.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_refresh + ADD COLUMN used_times INT NULL; diff --git a/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.autocommit.down.sql b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.autocommit.down.sql new file mode 100644 index 00000000000..c174430362e --- /dev/null +++ b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.autocommit.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx; diff --git a/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.autocommit.up.sql b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.autocommit.up.sql new file mode 100644 index 00000000000..ca5bdc2c994 --- /dev/null +++ b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.autocommit.up.sql @@ -0,0 +1,4 @@ +-- We must avoid having empty fields at the beginning of an index, to avoid queries degenerating into full table scans. +-- Apart from this consideration, the order of the fields in the index is not really important when the query uses them all. +-- To avoid performance pitfalls/surprises, we place Ory-controlled fields before user-controlled fields. +CREATE UNIQUE INDEX IF NOT EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (nid ASC, key_id ASC, issuer ASC, subject ASC); diff --git a/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.mysql.down.sql b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.mysql.down.sql new file mode 100644 index 00000000000..15c98401cc2 --- /dev/null +++ b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.mysql.down.sql @@ -0,0 +1,2 @@ +-- can't drop this because it replaces the auto-generated index for the foreign key constraint (yes, mysql is weird like that) +-- DROP INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx ON hydra_oauth2_trusted_jwt_bearer_issuer; diff --git a/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.mysql.up.sql b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.mysql.up.sql new file mode 100644 index 00000000000..78fcb081b67 --- /dev/null +++ b/persistence/sql/migrations/20250520000001000000_add_oauth2_trusted_jwt_bearer_issuer_index.mysql.up.sql @@ -0,0 +1,4 @@ +-- We must avoid having empty fields at the beginning of an index, to avoid queries degenerating into full table scans. +-- Apart from this consideration, the order of the fields in the index is not really important when the query uses them all. +-- To avoid performance pitfalls/surprises, we place Ory-controlled fields before user-controlled fields. 
+CREATE UNIQUE INDEX hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx ON hydra_oauth2_trusted_jwt_bearer_issuer (nid ASC, key_id ASC, issuer ASC, subject ASC); diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.cockroach.autocommit.down.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.cockroach.autocommit.down.sql new file mode 100644 index 00000000000..8630ef914cd --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.cockroach.autocommit.down.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX IF NOT EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer ASC, subject ASC, key_id ASC, nid ASC); + diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.cockroach.autocommit.up.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.cockroach.autocommit.up.sql new file mode 100644 index 00000000000..20e8ae6f587 --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.cockroach.autocommit.up.sql @@ -0,0 +1,2 @@ +DROP INDEX IF EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key; + diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.mysql.down.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.mysql.down.sql new file mode 100644 index 00000000000..964e97dd379 --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.mysql.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX issuer ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer, subject, key_id, nid); diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.mysql.up.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.mysql.up.sql new file mode 100644 index 00000000000..06fa6d358eb --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.mysql.up.sql @@ -0,0 +1,2 @@ +DROP INDEX issuer ON hydra_oauth2_trusted_jwt_bearer_issuer; + diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.postgres.down.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.postgres.down.sql new file mode 100644 index 00000000000..8630ef914cd --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.postgres.down.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX IF NOT EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer ASC, subject ASC, key_id ASC, nid ASC); + diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.postgres.up.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.postgres.up.sql new file mode 100644 index 00000000000..20e8ae6f587 --- /dev/null +++ 
b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.postgres.up.sql @@ -0,0 +1,2 @@ +DROP INDEX IF EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key; + diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.sqlite.down.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.sqlite.down.sql new file mode 100644 index 00000000000..3c7bb3dc088 --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.sqlite.down.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX IF NOT EXISTS hydra_oauth2_trusted_jwt_bearer_issuer_issuer_subject_key_id_key ON hydra_oauth2_trusted_jwt_bearer_issuer (issuer ASC, subject ASC, key_id ASC, nid ASC); diff --git a/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.sqlite.up.sql b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.sqlite.up.sql new file mode 100644 index 00000000000..55526bb2fed --- /dev/null +++ b/persistence/sql/migrations/20250523000001000000_rm_oauth2_trusted_jwt_bearer_issuer_unefficient_uq_idx.sqlite.up.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS hydra_oauth2_trusted_jwt_bearer_issuer.sqlite_autoindex_hydra_oauth2_trusted_jwt_bearer_issuer_1; diff --git a/persistence/sql/migrations/20250610132310000001_more_expire_columns.cockroach.autocommit.down.sql b/persistence/sql/migrations/20250610132310000001_more_expire_columns.cockroach.autocommit.down.sql new file mode 100644 index 00000000000..a916ad50a89 --- /dev/null +++ b/persistence/sql/migrations/20250610132310000001_more_expire_columns.cockroach.autocommit.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_authentication_session DROP COLUMN IF EXISTS expires_at; diff --git a/persistence/sql/migrations/20250610132310000001_more_expire_columns.cockroach.autocommit.up.sql b/persistence/sql/migrations/20250610132310000001_more_expire_columns.cockroach.autocommit.up.sql new file mode 100644 index 00000000000..479bd18c30a --- /dev/null +++ b/persistence/sql/migrations/20250610132310000001_more_expire_columns.cockroach.autocommit.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_authentication_session ADD COLUMN IF NOT EXISTS expires_at TIMESTAMP NULL; diff --git a/persistence/sql/migrations/20250610132310000001_more_expire_columns.down.sql b/persistence/sql/migrations/20250610132310000001_more_expire_columns.down.sql new file mode 100644 index 00000000000..47d74a9dc01 --- /dev/null +++ b/persistence/sql/migrations/20250610132310000001_more_expire_columns.down.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_authentication_session DROP COLUMN expires_at; diff --git a/persistence/sql/migrations/20250610132310000001_more_expire_columns.up.sql b/persistence/sql/migrations/20250610132310000001_more_expire_columns.up.sql new file mode 100644 index 00000000000..ade94f140c4 --- /dev/null +++ b/persistence/sql/migrations/20250610132310000001_more_expire_columns.up.sql @@ -0,0 +1 @@ +ALTER TABLE hydra_oauth2_authentication_session ADD COLUMN expires_at TIMESTAMP NULL; diff --git a/persistence/sql/migrations/20251030112809000000_flow_expires_at.cockroach.up.sql b/persistence/sql/migrations/20251030112809000000_flow_expires_at.cockroach.up.sql new file mode 100644 index 00000000000..dc8713dbad9 --- /dev/null +++ 
b/persistence/sql/migrations/20251030112809000000_flow_expires_at.cockroach.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_oauth2_flow + ADD COLUMN IF NOT EXISTS expires_at TIMESTAMP + AS (IF(consent_remember_for > 0, requested_at + INTERVAL '1 second' * consent_remember_for, NULL)) VIRTUAL; diff --git a/persistence/sql/migrations/20251030112809000000_flow_expires_at.down.sql b/persistence/sql/migrations/20251030112809000000_flow_expires_at.down.sql new file mode 100644 index 00000000000..4d496358227 --- /dev/null +++ b/persistence/sql/migrations/20251030112809000000_flow_expires_at.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE hydra_oauth2_flow + DROP COLUMN expires_at; diff --git a/persistence/sql/migrations/20251030112809000000_flow_expires_at.mysql.up.sql b/persistence/sql/migrations/20251030112809000000_flow_expires_at.mysql.up.sql new file mode 100644 index 00000000000..cac58639ace --- /dev/null +++ b/persistence/sql/migrations/20251030112809000000_flow_expires_at.mysql.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_oauth2_flow + ADD COLUMN expires_at TIMESTAMP + AS (IF(consent_remember_for > 0, DATE_ADD(requested_at, INTERVAL consent_remember_for SECOND), NULL)) VIRTUAL; diff --git a/persistence/sql/migrations/20251030112809000000_flow_expires_at.postgres.up.sql b/persistence/sql/migrations/20251030112809000000_flow_expires_at.postgres.up.sql new file mode 100644 index 00000000000..a2be14a250e --- /dev/null +++ b/persistence/sql/migrations/20251030112809000000_flow_expires_at.postgres.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_oauth2_flow + ADD COLUMN IF NOT EXISTS expires_at TIMESTAMP + GENERATED ALWAYS AS (CASE WHEN consent_remember_for > 0 THEN requested_at + (consent_remember_for * INTERVAL '1 second') END) STORED; -- postgres supports virtual columns only with version 18+, so we have to use a stored column instead diff --git a/persistence/sql/migrations/20251030112809000000_flow_expires_at.sqlite.up.sql b/persistence/sql/migrations/20251030112809000000_flow_expires_at.sqlite.up.sql new file mode 100644 index 00000000000..ffd75719827 --- /dev/null +++ b/persistence/sql/migrations/20251030112809000000_flow_expires_at.sqlite.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_oauth2_flow + ADD COLUMN expires_at TIMESTAMP + GENERATED ALWAYS AS (if(consent_remember_for > 0, datetime(requested_at, '+' || consent_remember_for || ' seconds'), NULL)) VIRTUAL; diff --git a/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.down.sql b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.down.sql new file mode 100644 index 00000000000..1bddd1d046a --- /dev/null +++ b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_oauth2_flow + ALTER COLUMN login_extend_session_lifespan SET NOT NULL, + ALTER COLUMN forced_subject_identifier SET NOT NULL; diff --git a/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.mysql.down.sql b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.mysql.down.sql new file mode 100644 index 00000000000..8e8b5d9f3bd --- /dev/null +++ b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.mysql.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE hydra_oauth2_flow + MODIFY COLUMN login_extend_session_lifespan tinyint(1) NOT NULL DEFAULT FALSE, + MODIFY COLUMN forced_subject_identifier varchar(255) NOT NULL DEFAULT ''; diff --git a/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.mysql.up.sql 
b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.mysql.up.sql new file mode 100644 index 00000000000..70714533ddd --- /dev/null +++ b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.mysql.up.sql @@ -0,0 +1,19 @@ +ALTER TABLE hydra_oauth2_flow + -- We need to drop these NOT NULL constraints, because the fields are actually not used anymore in the code, and therefore cannot be set by it. + -- Mysql has issues with two changes to the same column in one statement, therefore we need to use MODIFY COLUMN here to both drop the default and constraint. + MODIFY COLUMN forced_subject_identifier varchar(255) NULL, + MODIFY COLUMN login_extend_session_lifespan tinyint(1) NULL, + + ALTER COLUMN requested_at_audience DROP DEFAULT, + ALTER COLUMN amr DROP DEFAULT, + ALTER COLUMN granted_at_audience DROP DEFAULT, + ALTER COLUMN oidc_context DROP DEFAULT, + ALTER COLUMN context DROP DEFAULT, + ALTER COLUMN acr DROP DEFAULT, + ALTER COLUMN consent_skip DROP DEFAULT, + ALTER COLUMN consent_remember DROP DEFAULT, + ALTER COLUMN login_remember DROP DEFAULT, + ALTER COLUMN consent_was_used DROP DEFAULT, + ALTER COLUMN login_was_used DROP DEFAULT, + ALTER COLUMN session_id_token DROP DEFAULT, + ALTER COLUMN session_access_token DROP DEFAULT; diff --git a/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.sqlite.down.sql b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.sqlite.down.sql new file mode 100644 index 00000000000..261bcca3a12 --- /dev/null +++ b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.sqlite.down.sql @@ -0,0 +1,143 @@ +CREATE TABLE hydra_oauth2_flow_prev ( + login_challenge VARCHAR(40) NOT NULL PRIMARY KEY, + nid CHAR(36) NOT NULL, + requested_scope TEXT NOT NULL, + login_verifier VARCHAR(40) NOT NULL, + login_csrf VARCHAR(40) NOT NULL, + subject VARCHAR(255) NOT NULL, + request_url TEXT NOT NULL, + login_skip INTEGER NOT NULL, + client_id VARCHAR(255) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + oidc_context TEXT NOT NULL, + login_session_id VARCHAR(40) NULL REFERENCES hydra_oauth2_authentication_session (id) ON DELETE SET NULL, + requested_at_audience TEXT NULL DEFAULT '[]', + login_initialized_at TIMESTAMP NULL, + + state INTEGER NOT NULL, + + login_remember INTEGER NULL, + login_remember_for INTEGER NULL, + login_error TEXT NULL, + acr TEXT NULL, + login_authenticated_at TIMESTAMP NULL, + login_was_used INTEGER NULL, + forced_subject_identifier VARCHAR(255) NULL DEFAULT '', + context TEXT NULL DEFAULT '{}', + amr TEXT NULL DEFAULT '[]', + + consent_challenge_id VARCHAR(40) NULL, + consent_skip INTEGER NULL DEFAULT false, + consent_verifier VARCHAR(40) NULL, + consent_csrf VARCHAR(40) NULL, + + granted_scope TEXT NULL, + granted_at_audience TEXT NULL DEFAULT '', + consent_remember INTEGER NULL DEFAULT 0, + consent_remember_for INTEGER NULL, + consent_handled_at TIMESTAMP NULL, + consent_was_used INTEGER NOT NULL DEFAULT false, + consent_error TEXT NULL, + session_id_token TEXT NULL DEFAULT '{}', + session_access_token TEXT NULL DEFAULT '{}', + login_extend_session_lifespan BOOLEAN NULL DEFAULT FALSE, + identity_provider_session_id VARCHAR(40) NULL, + device_challenge_id VARCHAR(255) NULL, + device_code_request_id VARCHAR(255) NULL, + device_verifier VARCHAR(40) NULL, + device_csrf VARCHAR(40) NULL, + device_was_used BOOLEAN NULL, + device_handled_at TIMESTAMP NULL, + device_error VARCHAR(2048) NULL, + expires_at TIMESTAMP GENERATED ALWAYS AS (if(consent_remember_for > 0, 
datetime(requested_at, '+' || consent_remember_for || ' seconds'), NULL)) VIRTUAL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + CHECK ( + state = 128 OR + state = 129 OR + state = 1 OR + (state = 2 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL + )) OR + (state = 3 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL + )) OR + (state = 4 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL + )) OR + (state = 5 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL + )) OR + (state = 6 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL AND + + granted_scope IS NOT NULL AND + consent_remember IS NOT NULL AND + consent_remember_for IS NOT NULL AND + consent_error IS NOT NULL AND + session_access_token IS NOT NULL AND + session_id_token IS NOT NULL AND + consent_was_used IS NOT NULL + )) + ) +); + +INSERT INTO hydra_oauth2_flow_prev (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_skip, consent_verifier, consent_csrf, granted_scope, granted_at_audience, consent_remember, consent_remember_for, consent_handled_at, consent_was_used, consent_error, session_id_token, session_access_token, login_extend_session_lifespan, identity_provider_session_id, device_challenge_id, device_code_request_id, device_verifier, device_csrf, device_was_used, device_handled_at, device_error) +SELECT login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, login_initialized_at, state, login_remember, login_remember_for, login_error, acr, login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, consent_challenge_id, consent_skip, consent_verifier, consent_csrf, granted_scope, granted_at_audience, consent_remember, consent_remember_for, consent_handled_at, consent_was_used, consent_error, session_id_token, session_access_token, login_extend_session_lifespan, identity_provider_session_id, device_challenge_id, device_code_request_id, device_verifier, 
device_csrf, device_was_used, device_handled_at, device_error +FROM hydra_oauth2_flow; + +DROP TABLE hydra_oauth2_flow; + +ALTER TABLE hydra_oauth2_flow_prev RENAME TO hydra_oauth2_flow; + +CREATE INDEX hydra_oauth2_flow_client_id_idx ON hydra_oauth2_flow (client_id, nid); +CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow (login_session_id); +CREATE INDEX hydra_oauth2_flow_subject_idx ON hydra_oauth2_flow (subject, nid); +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_challenge_id_idx ON hydra_oauth2_flow (consent_challenge_id); +CREATE INDEX hydra_oauth2_flow_previous_consents_idx ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, consent_error, consent_remember); +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); diff --git a/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.sqlite.up.sql b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.sqlite.up.sql new file mode 100644 index 00000000000..d035e8c6de8 --- /dev/null +++ b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.sqlite.up.sql @@ -0,0 +1,198 @@ +CREATE TABLE hydra_oauth2_flow_next ( + login_challenge VARCHAR(40) NOT NULL PRIMARY KEY, + nid CHAR(36) NOT NULL, + requested_scope TEXT NOT NULL, + login_verifier VARCHAR(40) NOT NULL, + login_csrf VARCHAR(40) NOT NULL, + subject VARCHAR(255) NOT NULL, + request_url TEXT NOT NULL, + login_skip INTEGER NOT NULL, + client_id VARCHAR(255) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + oidc_context TEXT NOT NULL, + login_session_id VARCHAR(40) NULL REFERENCES hydra_oauth2_authentication_session (id) ON DELETE SET NULL, + requested_at_audience TEXT NULL, + login_initialized_at TIMESTAMP NULL, + + state INTEGER NOT NULL, + + login_remember INTEGER NULL, + login_remember_for INTEGER NULL, + login_error TEXT NULL, + acr TEXT NULL, + login_authenticated_at TIMESTAMP NULL, + login_was_used INTEGER NULL, + forced_subject_identifier VARCHAR(255) NULL, + context TEXT NULL, + amr TEXT NULL, + + consent_challenge_id VARCHAR(40) NULL, + consent_skip INTEGER NULL, + consent_verifier VARCHAR(40) NULL, + consent_csrf VARCHAR(40) NULL, + + granted_scope TEXT NULL, + granted_at_audience TEXT NULL, + consent_remember INTEGER NULL, + consent_remember_for INTEGER NULL, + consent_handled_at TIMESTAMP NULL, + consent_was_used INTEGER NOT NULL, + consent_error TEXT NULL, + session_id_token TEXT NULL, + session_access_token TEXT NULL, + login_extend_session_lifespan BOOLEAN NULL, + identity_provider_session_id VARCHAR(40) NULL, + device_challenge_id VARCHAR(255) NULL, + device_code_request_id VARCHAR(255) NULL, + device_verifier VARCHAR(40) NULL, + device_csrf VARCHAR(40) NULL, + device_was_used BOOLEAN NULL, + device_handled_at TIMESTAMP NULL, + device_error VARCHAR(2048) NULL, + expires_at TIMESTAMP GENERATED ALWAYS AS (if(consent_remember_for > 0, + datetime(requested_at, '+' || consent_remember_for || ' seconds'), + NULL)) VIRTUAL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + CHECK ( + state = 128 OR + state = 129 OR + state = 1 OR + (state = 2 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL + )) OR + (state = 3 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + 
login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL + )) OR + (state = 4 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL + )) OR + (state = 5 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL + )) OR + (state = 6 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL AND + granted_scope IS NOT NULL AND + consent_remember IS NOT NULL AND + consent_remember_for IS NOT NULL AND + consent_error IS NOT NULL AND + session_access_token IS NOT NULL AND + session_id_token IS NOT NULL AND + consent_was_used IS NOT NULL + )) + ) +); + +INSERT INTO hydra_oauth2_flow_next (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, + request_url, login_skip, client_id, requested_at, oidc_context, login_session_id, + requested_at_audience, login_initialized_at, state, login_remember, + login_remember_for, login_error, acr, login_authenticated_at, login_was_used, + forced_subject_identifier, context, amr, consent_challenge_id, consent_skip, + consent_verifier, consent_csrf, granted_scope, granted_at_audience, + consent_remember, consent_remember_for, consent_handled_at, consent_was_used, + consent_error, session_id_token, session_access_token, + login_extend_session_lifespan, identity_provider_session_id, device_challenge_id, + device_code_request_id, device_verifier, device_csrf, device_was_used, + device_handled_at, device_error) +SELECT login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, + login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_skip, + consent_verifier, + consent_csrf, + granted_scope, + granted_at_audience, + consent_remember, + consent_remember_for, + consent_handled_at, + consent_was_used, + consent_error, + session_id_token, + session_access_token, + login_extend_session_lifespan, + identity_provider_session_id, + device_challenge_id, + device_code_request_id, + device_verifier, + device_csrf, + device_was_used, + device_handled_at, + device_error +FROM hydra_oauth2_flow; + +DROP TABLE hydra_oauth2_flow; + +ALTER TABLE hydra_oauth2_flow_next + RENAME TO hydra_oauth2_flow; + +CREATE INDEX hydra_oauth2_flow_client_id_idx ON hydra_oauth2_flow (client_id, nid); +CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow (login_session_id); +CREATE INDEX hydra_oauth2_flow_subject_idx ON hydra_oauth2_flow (subject, nid); +CREATE UNIQUE INDEX 
hydra_oauth2_flow_consent_challenge_id_idx ON hydra_oauth2_flow (consent_challenge_id); +CREATE INDEX hydra_oauth2_flow_previous_consents_idx ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, + consent_error, consent_remember); +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); diff --git a/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.up.sql b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.up.sql new file mode 100644 index 00000000000..0fd1368193e --- /dev/null +++ b/persistence/sql/migrations/20251119112639000000_flow_drop_defaults.up.sql @@ -0,0 +1,20 @@ +ALTER TABLE hydra_oauth2_flow + -- We need to drop these NOT NULL constraints, because the fields are actually not used anymore in the code, and therefore cannot be set by it. + ALTER COLUMN login_extend_session_lifespan DROP NOT NULL, + ALTER COLUMN forced_subject_identifier DROP NOT NULL, + + ALTER COLUMN requested_at_audience DROP DEFAULT, + ALTER COLUMN forced_subject_identifier DROP DEFAULT, + ALTER COLUMN oidc_context DROP DEFAULT, + ALTER COLUMN context DROP DEFAULT, + ALTER COLUMN amr DROP DEFAULT, + ALTER COLUMN acr DROP DEFAULT, + ALTER COLUMN consent_skip DROP DEFAULT, + ALTER COLUMN granted_at_audience DROP DEFAULT, + ALTER COLUMN consent_remember DROP DEFAULT, + ALTER COLUMN login_remember DROP DEFAULT, + ALTER COLUMN consent_was_used DROP DEFAULT, + ALTER COLUMN login_was_used DROP DEFAULT, + ALTER COLUMN session_id_token DROP DEFAULT, + ALTER COLUMN session_access_token DROP DEFAULT, + ALTER COLUMN login_extend_session_lifespan DROP DEFAULT; diff --git a/persistence/sql/migrations/20251119112639000001_flow_drop_defaults.down.sql b/persistence/sql/migrations/20251119112639000001_flow_drop_defaults.down.sql new file mode 100644 index 00000000000..bc252671000 --- /dev/null +++ b/persistence/sql/migrations/20251119112639000001_flow_drop_defaults.down.sql @@ -0,0 +1,5 @@ +UPDATE hydra_oauth2_flow +SET login_extend_session_lifespan = COALESCE(login_extend_session_lifespan, FALSE), + forced_subject_identifier = COALESCE(forced_subject_identifier, '') +WHERE login_extend_session_lifespan IS NULL + OR forced_subject_identifier IS NULL; diff --git a/persistence/sql/migrations/20251119112639000001_flow_drop_defaults.up.sql b/persistence/sql/migrations/20251119112639000001_flow_drop_defaults.up.sql new file mode 100644 index 00000000000..002018aeb26 --- /dev/null +++ b/persistence/sql/migrations/20251119112639000001_flow_drop_defaults.up.sql @@ -0,0 +1 @@ +-- only down needed diff --git a/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.down.sql b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.down.sql new file mode 100644 index 00000000000..f99b7829ca4 --- /dev/null +++ b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.down.sql @@ -0,0 +1,17 @@ +ALTER TABLE hydra_oauth2_flow + ALTER COLUMN login_extend_session_lifespan SET DEFAULT FALSE, + ALTER COLUMN forced_subject_identifier SET DEFAULT '', + + ALTER COLUMN requested_at_audience SET DEFAULT '[]'::jsonb, + ALTER COLUMN oidc_context SET DEFAULT '{}'::jsonb, + ALTER COLUMN context SET DEFAULT '{}'::jsonb, + ALTER COLUMN amr SET DEFAULT '[]'::jsonb, + ALTER COLUMN acr SET DEFAULT '', + ALTER COLUMN consent_skip SET DEFAULT FALSE, + ALTER COLUMN granted_at_audience SET DEFAULT '[]'::jsonb, + ALTER COLUMN consent_remember SET DEFAULT FALSE, + ALTER COLUMN login_remember SET DEFAULT FALSE, + ALTER COLUMN consent_was_used SET 
DEFAULT FALSE, + ALTER COLUMN login_was_used SET DEFAULT FALSE, + ALTER COLUMN session_id_token SET DEFAULT '{}'::jsonb, + ALTER COLUMN session_access_token SET DEFAULT '{}'::jsonb; diff --git a/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.mysql.down.sql b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.mysql.down.sql new file mode 100644 index 00000000000..3f3d49f8a4e --- /dev/null +++ b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.mysql.down.sql @@ -0,0 +1,17 @@ +ALTER TABLE hydra_oauth2_flow + ALTER COLUMN forced_subject_identifier SET DEFAULT '', + ALTER COLUMN login_extend_session_lifespan SET DEFAULT FALSE, + + MODIFY COLUMN requested_at_audience JSON DEFAULT (_utf8mb4'[]'), + MODIFY COLUMN amr JSON DEFAULT (_utf8mb4'[]'), + MODIFY COLUMN granted_at_audience JSON DEFAULT (_utf8mb4'[]'), + MODIFY COLUMN oidc_context JSON NOT NULL DEFAULT (_utf8mb4'{}'), + MODIFY COLUMN context JSON NOT NULL DEFAULT (_utf8mb4'{}'), + MODIFY COLUMN acr TEXT NOT NULL DEFAULT (_utf8mb4''), + ALTER COLUMN consent_skip SET DEFAULT FALSE, + ALTER COLUMN consent_remember SET DEFAULT FALSE, + ALTER COLUMN login_remember SET DEFAULT FALSE, + ALTER COLUMN consent_was_used SET DEFAULT FALSE, + ALTER COLUMN login_was_used SET DEFAULT FALSE, + MODIFY COLUMN session_id_token JSON NOT NULL DEFAULT (_utf8mb4'{}'), + MODIFY COLUMN session_access_token JSON NOT NULL DEFAULT (_utf8mb4'{}'); diff --git a/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.sqlite.down.sql b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.sqlite.down.sql new file mode 100644 index 00000000000..8a789effe9c --- /dev/null +++ b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.sqlite.down.sql @@ -0,0 +1 @@ +-- nothing to do for sqlite diff --git a/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.sqlite.up.sql b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.sqlite.up.sql new file mode 100644 index 00000000000..8a789effe9c --- /dev/null +++ b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.sqlite.up.sql @@ -0,0 +1 @@ +-- nothing to do for sqlite diff --git a/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.up.sql b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.up.sql new file mode 100644 index 00000000000..002018aeb26 --- /dev/null +++ b/persistence/sql/migrations/20251119112639000002_flow_drop_defaults.up.sql @@ -0,0 +1 @@ +-- only down needed diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.cockroach.down.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.cockroach.down.sql new file mode 100644 index 00000000000..98a152a3c89 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.cockroach.down.sql @@ -0,0 +1,70 @@ +ALTER TABLE hydra_oauth2_flow + ADD CONSTRAINT 
check_state_state_state_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_consent_challenge_id_consent_verifier_consent_skip_consent_csrf_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_consent_challenge_id_consent_verifier_consent_skip_consent_csrf_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_consent_challenge_id_consent_verifier_consent_skip_consent_csrf_granted_scope_consent_remember_consent_remember_for_consent_error_session_access_token_session_id_token_consent_was_used + CHECK (((state = 128) OR (state = 129) OR (state = 1) OR ((state = 2) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL))) OR + ((state = 3) AND + ((login_remember IS NOT NULL) AND (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND (context IS NOT NULL) AND + (amr IS NOT NULL))) OR ((state = 4) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL) AND + (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND + (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL))) OR + ((state = 5) AND + ((login_remember IS NOT NULL) AND (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND (context IS NOT NULL) AND + (amr IS NOT NULL) AND (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL))) OR ((state = 6) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL) AND + (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND + (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL) AND + (granted_scope IS NOT NULL) AND + (consent_remember IS NOT NULL) AND + (consent_remember_for IS NOT NULL) AND + (consent_error IS NOT NULL) AND + (session_access_token IS NOT NULL) AND + (session_id_token IS NOT NULL) AND + (consent_was_used IS NOT NULL))))), + ALTER COLUMN requested_scope SET NOT NULL, + ALTER COLUMN login_csrf SET NOT NULL, + ALTER COLUMN subject SET NOT NULL, + ALTER COLUMN request_url SET NOT NULL, + ALTER COLUMN login_skip SET NOT NULL, + ALTER COLUMN client_id SET NOT NULL, + ALTER COLUMN oidc_context SET NOT NULL, + ALTER COLUMN context SET NOT NULL, + ALTER COLUMN state SET NOT NULL, + ALTER COLUMN login_verifier SET NOT NULL, + ALTER COLUMN login_remember SET NOT NULL, + ALTER COLUMN login_remember_for SET NOT NULL, + ALTER COLUMN acr SET NOT NULL, + ALTER COLUMN login_was_used SET NOT NULL, + ALTER COLUMN consent_skip SET NOT NULL, + ALTER COLUMN consent_remember SET NOT NULL, + ALTER COLUMN session_access_token SET NOT NULL, + ALTER COLUMN session_id_token SET NOT NULL, + ALTER COLUMN consent_was_used SET NOT NULL; diff --git 
a/persistence/sql/migrations/20251202130532000000_flow_drop_check.cockroach.up.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.cockroach.up.sql new file mode 100644 index 00000000000..8ccb8a8c294 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.cockroach.up.sql @@ -0,0 +1,21 @@ +ALTER TABLE hydra_oauth2_flow + DROP CONSTRAINT check_state_state_state_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_consent_challenge_id_consent_verifier_consent_skip_consent_csrf_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_consent_challenge_id_consent_verifier_consent_skip_consent_csrf_state_login_remember_login_remember_for_login_error_acr_login_was_used_context_amr_consent_challenge_id_consent_verifier_consent_skip_consent_csrf_granted_scope_consent_remember_consent_remember_for_consent_error_session_access_token_session_id_token_consent_was_used, + ALTER COLUMN requested_scope DROP NOT NULL, + ALTER COLUMN login_csrf DROP NOT NULL, + ALTER COLUMN subject DROP NOT NULL, + ALTER COLUMN request_url DROP NOT NULL, + ALTER COLUMN login_skip DROP NOT NULL, + ALTER COLUMN client_id DROP NOT NULL, + ALTER COLUMN oidc_context DROP NOT NULL, + ALTER COLUMN context DROP NOT NULL, + ALTER COLUMN state DROP NOT NULL, + ALTER COLUMN login_verifier DROP NOT NULL, + ALTER COLUMN login_remember DROP NOT NULL, + ALTER COLUMN login_remember_for DROP NOT NULL, + ALTER COLUMN acr DROP NOT NULL, + ALTER COLUMN login_was_used DROP NOT NULL, + ALTER COLUMN consent_skip DROP NOT NULL, + ALTER COLUMN consent_remember DROP NOT NULL, + ALTER COLUMN session_access_token DROP NOT NULL, + ALTER COLUMN session_id_token DROP NOT NULL, + ALTER COLUMN consent_was_used DROP NOT NULL; diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.mysql.down.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.mysql.down.sql new file mode 100644 index 00000000000..b11f4301d15 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.mysql.down.sql @@ -0,0 +1,73 @@ +-- this is not ideal, but required because of MySQL limitations regarding changing columns that are used in foreign key constraints +SET FOREIGN_KEY_CHECKS = 0; + +ALTER TABLE hydra_oauth2_flow + ADD CONSTRAINT hydra_oauth2_flow_chk CHECK (((state = 128) OR (state = 129) OR (state = 1) OR ((state = 2) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL))) OR + ((state = 3) AND + ((login_remember IS NOT NULL) AND (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND (context IS NOT NULL) AND + (amr IS NOT NULL))) OR ((state = 4) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL) AND + (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND + (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL))) OR + ((state = 5) AND + ((login_remember IS NOT NULL) AND (login_remember_for IS NOT NULL) AND + (login_error IS NOT 
NULL) AND (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND (context IS NOT NULL) AND + (amr IS NOT NULL) AND (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL))) OR ((state = 6) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL) AND + (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND + (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL) AND + (granted_scope IS NOT NULL) AND + (consent_remember IS NOT NULL) AND + (consent_remember_for IS NOT NULL) AND + (consent_error IS NOT NULL) AND + (session_access_token IS NOT NULL) AND + (session_id_token IS NOT NULL) AND + (consent_was_used IS NOT NULL))))), + + MODIFY COLUMN requested_scope json NOT NULL, + MODIFY COLUMN login_csrf VARCHAR (40) NOT NULL, + MODIFY COLUMN subject VARCHAR (255) NOT NULL, + MODIFY COLUMN request_url TEXT NOT NULL, + MODIFY COLUMN login_skip tinyint(1) NOT NULL, + MODIFY COLUMN client_id varchar(255) NOT NULL, + MODIFY COLUMN oidc_context json NOT NULL, + MODIFY COLUMN context json NOT NULL, + MODIFY COLUMN state SMALLINT NOT NULL, + MODIFY COLUMN acr TEXT NOT NULL, + MODIFY COLUMN consent_skip tinyint(1) NOT NULL, + MODIFY COLUMN consent_remember tinyint(1) NOT NULL, + MODIFY COLUMN login_remember tinyint(1) NOT NULL, + MODIFY COLUMN consent_was_used tinyint(1) NOT NULL, + MODIFY COLUMN login_was_used tinyint(1) NOT NULL, + MODIFY COLUMN session_id_token json NOT NULL, + MODIFY COLUMN session_access_token json NOT NULL; + +SET FOREIGN_KEY_CHECKS = 1; diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.mysql.up.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.mysql.up.sql new file mode 100644 index 00000000000..a5eab003174 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.mysql.up.sql @@ -0,0 +1,29 @@ +ALTER TABLE hydra_oauth2_flow + DROP CONSTRAINT hydra_oauth2_flow_chk, + MODIFY COLUMN requested_scope json NULL, + MODIFY COLUMN login_csrf varchar(40) NULL, + MODIFY COLUMN subject varchar(255) NULL, + MODIFY COLUMN request_url text DEFAULT NULL, + MODIFY COLUMN login_skip tinyint(1) NULL, + MODIFY COLUMN client_id varchar(255) NULL, + MODIFY COLUMN oidc_context json NULL, + MODIFY COLUMN context json NULL, + MODIFY COLUMN state smallint NULL, + MODIFY COLUMN login_verifier varchar(40) NULL, + MODIFY COLUMN login_remember tinyint(1) NULL, + MODIFY COLUMN login_remember_for int NULL, + MODIFY COLUMN acr text DEFAULT NULL, + MODIFY COLUMN login_was_used tinyint(1) NULL, + MODIFY COLUMN consent_skip tinyint(1) NULL, + MODIFY COLUMN consent_remember tinyint(1) NULL, + MODIFY COLUMN session_access_token json NULL, + MODIFY COLUMN session_id_token json NULL, + MODIFY COLUMN consent_was_used tinyint(1) NULL, + + -- Set default values to NULL for compatibility with strict SQL modes + MODIFY COLUMN login_session_id varchar(40) DEFAULT NULL, + MODIFY COLUMN login_error text DEFAULT NULL, + MODIFY COLUMN consent_error text DEFAULT NULL, + MODIFY COLUMN requested_at_audience json DEFAULT NULL, + MODIFY COLUMN amr json DEFAULT NULL, + MODIFY COLUMN granted_at_audience json DEFAULT NULL; diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.postgres.down.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.postgres.down.sql new 
file mode 100644 index 00000000000..7997ecc117f --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.postgres.down.sql @@ -0,0 +1,69 @@ +ALTER TABLE hydra_oauth2_flow + ADD CONSTRAINT hydra_oauth2_flow_check CHECK (((state = 128) OR (state = 129) OR (state = 1) OR ((state = 2) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL))) OR + ((state = 3) AND + ((login_remember IS NOT NULL) AND (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND (context IS NOT NULL) AND + (amr IS NOT NULL))) OR ((state = 4) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL) AND + (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND + (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL))) OR + ((state = 5) AND + ((login_remember IS NOT NULL) AND (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND (context IS NOT NULL) AND + (amr IS NOT NULL) AND (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL))) OR ((state = 6) AND + ((login_remember IS NOT NULL) AND + (login_remember_for IS NOT NULL) AND + (login_error IS NOT NULL) AND + (acr IS NOT NULL) AND + (login_was_used IS NOT NULL) AND + (context IS NOT NULL) AND + (amr IS NOT NULL) AND + (consent_challenge_id IS NOT NULL) AND + (consent_verifier IS NOT NULL) AND + (consent_skip IS NOT NULL) AND + (consent_csrf IS NOT NULL) AND + (granted_scope IS NOT NULL) AND + (consent_remember IS NOT NULL) AND + (consent_remember_for IS NOT NULL) AND + (consent_error IS NOT NULL) AND + (session_access_token IS NOT NULL) AND + (session_id_token IS NOT NULL) AND + (consent_was_used IS NOT NULL))))), + ALTER COLUMN requested_scope SET NOT NULL, + ALTER COLUMN login_csrf SET NOT NULL, + ALTER COLUMN subject SET NOT NULL, + ALTER COLUMN request_url SET NOT NULL, + ALTER COLUMN login_skip SET NOT NULL, + ALTER COLUMN client_id SET NOT NULL, + ALTER COLUMN oidc_context SET NOT NULL, + ALTER COLUMN context SET NOT NULL, + ALTER COLUMN state SET NOT NULL, + ALTER COLUMN login_verifier SET NOT NULL, + ALTER COLUMN login_remember SET NOT NULL, + ALTER COLUMN login_remember_for SET NOT NULL, + ALTER COLUMN acr SET NOT NULL, + ALTER COLUMN login_was_used SET NOT NULL, + ALTER COLUMN consent_skip SET NOT NULL, + ALTER COLUMN consent_remember SET NOT NULL, + ALTER COLUMN session_access_token SET NOT NULL, + ALTER COLUMN session_id_token SET NOT NULL, + ALTER COLUMN consent_was_used SET NOT NULL; diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.postgres.up.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.postgres.up.sql new file mode 100644 index 00000000000..3d311e3fc6c --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.postgres.up.sql @@ -0,0 +1,21 @@ +ALTER TABLE hydra_oauth2_flow + DROP CONSTRAINT hydra_oauth2_flow_check, + ALTER COLUMN requested_scope DROP NOT NULL, + ALTER COLUMN login_csrf DROP NOT NULL, + ALTER COLUMN subject DROP NOT NULL, + ALTER COLUMN request_url DROP NOT NULL, + ALTER 
COLUMN login_skip DROP NOT NULL, + ALTER COLUMN client_id DROP NOT NULL, + ALTER COLUMN oidc_context DROP NOT NULL, + ALTER COLUMN context DROP NOT NULL, + ALTER COLUMN state DROP NOT NULL, + ALTER COLUMN login_verifier DROP NOT NULL, + ALTER COLUMN login_remember DROP NOT NULL, + ALTER COLUMN login_remember_for DROP NOT NULL, + ALTER COLUMN acr DROP NOT NULL, + ALTER COLUMN login_was_used DROP NOT NULL, + ALTER COLUMN consent_skip DROP NOT NULL, + ALTER COLUMN consent_remember DROP NOT NULL, + ALTER COLUMN session_access_token DROP NOT NULL, + ALTER COLUMN session_id_token DROP NOT NULL, + ALTER COLUMN consent_was_used DROP NOT NULL; diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.sqlite.down.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.sqlite.down.sql new file mode 100644 index 00000000000..887caee6af7 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.sqlite.down.sql @@ -0,0 +1,198 @@ +CREATE TABLE hydra_oauth2_flow_prev ( + login_challenge VARCHAR(40) NOT NULL PRIMARY KEY, + nid CHAR(36) NOT NULL, + requested_scope TEXT NOT NULL, + login_verifier VARCHAR(40) NOT NULL, + login_csrf VARCHAR(40) NOT NULL, + subject VARCHAR(255) NOT NULL, + request_url TEXT NOT NULL, + login_skip INTEGER NOT NULL, + client_id VARCHAR(255) NOT NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + oidc_context TEXT NOT NULL, + login_session_id VARCHAR(40) NULL REFERENCES hydra_oauth2_authentication_session (id) ON DELETE SET NULL, + requested_at_audience TEXT NULL, + login_initialized_at TIMESTAMP NULL, + + state INTEGER NOT NULL, + + login_remember INTEGER NULL, + login_remember_for INTEGER NULL, + login_error TEXT NULL, + acr TEXT NULL, + login_authenticated_at TIMESTAMP NULL, + login_was_used INTEGER NULL, + forced_subject_identifier VARCHAR(255) NULL, + context TEXT NULL, + amr TEXT NULL, + + consent_challenge_id VARCHAR(40) NULL, + consent_skip INTEGER NULL, + consent_verifier VARCHAR(40) NULL, + consent_csrf VARCHAR(40) NULL, + + granted_scope TEXT NULL, + granted_at_audience TEXT NULL, + consent_remember INTEGER NULL, + consent_remember_for INTEGER NULL, + consent_handled_at TIMESTAMP NULL, + consent_was_used INTEGER NOT NULL, + consent_error TEXT NULL, + session_id_token TEXT NULL, + session_access_token TEXT NULL, + login_extend_session_lifespan BOOLEAN NULL, + identity_provider_session_id VARCHAR(40) NULL, + device_challenge_id VARCHAR(255) NULL, + device_code_request_id VARCHAR(255) NULL, + device_verifier VARCHAR(40) NULL, + device_csrf VARCHAR(40) NULL, + device_was_used BOOLEAN NULL, + device_handled_at TIMESTAMP NULL, + device_error VARCHAR(2048) NULL, + expires_at TIMESTAMP GENERATED ALWAYS AS (IF(consent_remember_for > 0, + datetime(requested_at, '+' || consent_remember_for || ' seconds'), + NULL)) VIRTUAL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE, + CHECK ( + state = 128 OR + state = 129 OR + state = 1 OR + (state = 2 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL + )) OR + (state = 3 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL + )) OR + (state = 4 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + 
login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL + )) OR + (state = 5 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL + )) OR + (state = 6 AND ( + login_remember IS NOT NULL AND + login_remember_for IS NOT NULL AND + login_error IS NOT NULL AND + acr IS NOT NULL AND + login_was_used IS NOT NULL AND + context IS NOT NULL AND + amr IS NOT NULL AND + consent_challenge_id IS NOT NULL AND + consent_verifier IS NOT NULL AND + consent_skip IS NOT NULL AND + consent_csrf IS NOT NULL AND + granted_scope IS NOT NULL AND + consent_remember IS NOT NULL AND + consent_remember_for IS NOT NULL AND + consent_error IS NOT NULL AND + session_access_token IS NOT NULL AND + session_id_token IS NOT NULL AND + consent_was_used IS NOT NULL + )) + ) +); + +INSERT +INTO hydra_oauth2_flow_prev (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, + login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, + login_initialized_at, state, login_remember, login_remember_for, login_error, acr, + login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, + consent_challenge_id, consent_skip, consent_verifier, consent_csrf, granted_scope, + granted_at_audience, consent_remember, consent_remember_for, consent_handled_at, + consent_was_used, consent_error, session_id_token, session_access_token, + login_extend_session_lifespan, identity_provider_session_id, device_challenge_id, + device_code_request_id, device_verifier, device_csrf, device_was_used, device_handled_at, + device_error) +SELECT login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, + login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_skip, + consent_verifier, + consent_csrf, + granted_scope, + granted_at_audience, + consent_remember, + consent_remember_for, + consent_handled_at, + consent_was_used, + consent_error, + session_id_token, + session_access_token, + login_extend_session_lifespan, + identity_provider_session_id, + device_challenge_id, + device_code_request_id, + device_verifier, + device_csrf, + device_was_used, + device_handled_at, + device_error +FROM hydra_oauth2_flow; + +DROP TABLE hydra_oauth2_flow; + +ALTER TABLE hydra_oauth2_flow_prev + RENAME TO hydra_oauth2_flow; + +CREATE INDEX hydra_oauth2_flow_client_id_idx ON hydra_oauth2_flow (client_id, nid); +CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow (login_session_id); +CREATE INDEX hydra_oauth2_flow_subject_idx ON hydra_oauth2_flow (subject, nid); +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_challenge_id_idx ON hydra_oauth2_flow (consent_challenge_id); +CREATE INDEX hydra_oauth2_flow_previous_consents_idx ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, + 
consent_error, consent_remember); +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); diff --git a/persistence/sql/migrations/20251202130532000000_flow_drop_check.sqlite.up.sql b/persistence/sql/migrations/20251202130532000000_flow_drop_check.sqlite.up.sql new file mode 100644 index 00000000000..6c5424041d4 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000000_flow_drop_check.sqlite.up.sql @@ -0,0 +1,129 @@ +CREATE TABLE hydra_oauth2_flow_next ( + login_challenge VARCHAR(40) NOT NULL PRIMARY KEY, + nid CHAR(36) NOT NULL, + requested_scope TEXT NULL, + login_verifier VARCHAR(40) NULL, + login_csrf VARCHAR(40) NULL, + subject VARCHAR(255) NULL, + request_url TEXT NULL, + login_skip INTEGER NULL, + client_id VARCHAR(255) NULL, + requested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + oidc_context TEXT NULL, + login_session_id VARCHAR(40) NULL REFERENCES hydra_oauth2_authentication_session (id) ON DELETE SET NULL, + requested_at_audience TEXT NULL, + login_initialized_at TIMESTAMP NULL, + + state INTEGER NULL, + + login_remember INTEGER NULL, + login_remember_for INTEGER NULL, + login_error TEXT NULL, + acr TEXT NULL, + login_authenticated_at TIMESTAMP NULL, + login_was_used INTEGER NULL, + forced_subject_identifier VARCHAR(255) NULL, + context TEXT NULL, + amr TEXT NULL, + + consent_challenge_id VARCHAR(40) NULL, + consent_skip INTEGER NULL, + consent_verifier VARCHAR(40) NULL, + consent_csrf VARCHAR(40) NULL, + + granted_scope TEXT NULL, + granted_at_audience TEXT NULL, + consent_remember INTEGER NULL, + consent_remember_for INTEGER NULL, + consent_handled_at TIMESTAMP NULL, + consent_was_used INTEGER NULL, + consent_error TEXT NULL, + session_id_token TEXT NULL, + session_access_token TEXT NULL, + login_extend_session_lifespan BOOLEAN NULL, + identity_provider_session_id VARCHAR(40) NULL, + device_challenge_id VARCHAR(255) NULL, + device_code_request_id VARCHAR(255) NULL, + device_verifier VARCHAR(40) NULL, + device_csrf VARCHAR(40) NULL, + device_was_used BOOLEAN NULL, + device_handled_at TIMESTAMP NULL, + device_error VARCHAR(2048) NULL, + expires_at TIMESTAMP GENERATED ALWAYS AS (IF(consent_remember_for > 0, + datetime(requested_at, '+' || consent_remember_for || ' seconds'), + NULL)) VIRTUAL, + + FOREIGN KEY (client_id, nid) REFERENCES hydra_client (id, nid) ON DELETE CASCADE +); + +INSERT +INTO hydra_oauth2_flow_next (login_challenge, nid, requested_scope, login_verifier, login_csrf, subject, request_url, + login_skip, client_id, requested_at, oidc_context, login_session_id, requested_at_audience, + login_initialized_at, state, login_remember, login_remember_for, login_error, acr, + login_authenticated_at, login_was_used, forced_subject_identifier, context, amr, + consent_challenge_id, consent_skip, consent_verifier, consent_csrf, granted_scope, + granted_at_audience, consent_remember, consent_remember_for, consent_handled_at, + consent_was_used, consent_error, session_id_token, session_access_token, + login_extend_session_lifespan, identity_provider_session_id, device_challenge_id, + device_code_request_id, device_verifier, device_csrf, device_was_used, device_handled_at, + device_error) +SELECT login_challenge, + nid, + requested_scope, + login_verifier, + login_csrf, + subject, + request_url, + login_skip, + client_id, + requested_at, + oidc_context, + login_session_id, + requested_at_audience, + login_initialized_at, + state, + login_remember, + login_remember_for, + login_error, + acr, + login_authenticated_at, 
+ login_was_used, + forced_subject_identifier, + context, + amr, + consent_challenge_id, + consent_skip, + consent_verifier, + consent_csrf, + granted_scope, + granted_at_audience, + consent_remember, + consent_remember_for, + consent_handled_at, + consent_was_used, + consent_error, + session_id_token, + session_access_token, + login_extend_session_lifespan, + identity_provider_session_id, + device_challenge_id, + device_code_request_id, + device_verifier, + device_csrf, + device_was_used, + device_handled_at, + device_error +FROM hydra_oauth2_flow; + +DROP TABLE hydra_oauth2_flow; + +ALTER TABLE hydra_oauth2_flow_next + RENAME TO hydra_oauth2_flow; + +CREATE INDEX hydra_oauth2_flow_client_id_idx ON hydra_oauth2_flow (client_id, nid); +CREATE INDEX hydra_oauth2_flow_login_session_id_idx ON hydra_oauth2_flow (login_session_id); +CREATE INDEX hydra_oauth2_flow_subject_idx ON hydra_oauth2_flow (subject, nid); +CREATE UNIQUE INDEX hydra_oauth2_flow_consent_challenge_id_idx ON hydra_oauth2_flow (consent_challenge_id); +CREATE INDEX hydra_oauth2_flow_previous_consents_idx ON hydra_oauth2_flow (subject, client_id, nid, consent_skip, + consent_error, consent_remember); +CREATE UNIQUE INDEX hydra_oauth2_flow_device_challenge_idx ON hydra_oauth2_flow (device_challenge_id); diff --git a/persistence/sql/migrations/20251202130532000001_flow_drop_check.down.sql b/persistence/sql/migrations/20251202130532000001_flow_drop_check.down.sql new file mode 100644 index 00000000000..4b47a3e99e0 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000001_flow_drop_check.down.sql @@ -0,0 +1,21 @@ +DELETE +FROM hydra_oauth2_flow +WHERE requested_scope IS NULL + OR login_csrf IS NULL + OR subject IS NULL + OR request_url IS NULL + OR login_skip IS NULL + OR client_id IS NULL + OR oidc_context IS NULL + OR context IS NULL + OR state IS NULL + OR login_verifier IS NULL + OR login_remember IS NULL + OR login_remember_for IS NULL + OR acr IS NULL + OR login_was_used IS NULL + OR consent_skip IS NULL + OR consent_remember IS NULL + OR session_access_token IS NULL + OR session_id_token IS NULL + OR consent_was_used IS NULL; diff --git a/persistence/sql/migrations/20251202130532000001_flow_drop_check.up.sql b/persistence/sql/migrations/20251202130532000001_flow_drop_check.up.sql new file mode 100644 index 00000000000..d4499a4cfe3 --- /dev/null +++ b/persistence/sql/migrations/20251202130532000001_flow_drop_check.up.sql @@ -0,0 +1 @@ +-- nothing to do here diff --git a/persistence/sql/persister.go b/persistence/sql/persister.go index 2f1b2adf3be..20c24d7d836 100644 --- a/persistence/sql/persister.go +++ b/persistence/sql/persister.go @@ -8,162 +8,126 @@ import ( "database/sql" "reflect" - "github.com/gobuffalo/pop/v6" - "github.com/gobuffalo/x/randx" "github.com/gofrs/uuid" - "github.com/pkg/errors" - "github.com/ory/fosite" - "github.com/ory/fosite/storage" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/persistence" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/driver/config" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/kratos" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/persistence" + 
"github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" "github.com/ory/x/contextx" - "github.com/ory/x/errorsx" "github.com/ory/x/logrusx" "github.com/ory/x/networkx" + "github.com/ory/x/otelx" "github.com/ory/x/popx" ) -var _ persistence.Persister = new(Persister) -var _ storage.Transactional = new(Persister) - var ( - ErrTransactionOpen = errors.New("There is already a transaction in this context.") - ErrNoTransactionOpen = errors.New("There is no transaction in this context.") + _ persistence.Persister = (*Persister)(nil) + _ fosite.Transactional = (*Persister)(nil) + _ fosite.ClientManager = (*Persister)(nil) + _ oauth2.AssertionJWTReader = (*Persister)(nil) + _ x.FositeStorer = (*Persister)(nil) ) +var ErrNoTransactionOpen = errors.New("There is no Transaction in this context.") + +type skipCommitContextKey int + +const skipCommitKey skipCommitContextKey = 0 + type ( Persister struct { - conn *pop.Connection - mb *popx.MigrationBox - mbs popx.MigrationStatuses - r Dependencies - config *config.DefaultProvider - l *logrusx.Logger - fallbackNID uuid.UUID - p *networkx.Manager + *BasePersister + r Dependencies + l *logrusx.Logger } Dependencies interface { ClientHasher() fosite.Hasher - KeyCipher() *jwk.AEAD + KeyCipher() *aead.AESGCM + FlowCipher() *aead.XChaCha20Poly1305 + Kratos() kratos.Client contextx.Provider x.RegistryLogger x.TracingProvider + config.Provider } -) - -func (p *Persister) BeginTX(ctx context.Context) (context.Context, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.BeginTX") - defer span.End() - - fallback := &pop.Connection{TX: &pop.Tx{}} - if popx.GetConnection(ctx, fallback).TX != fallback.TX { - return ctx, errorsx.WithStack(ErrTransactionOpen) + BasePersister struct { + c *pop.Connection + fallbackNID uuid.UUID + d baseDependencies } - - tx, err := p.conn.Store.TransactionContextOptions(ctx, &sql.TxOptions{ - Isolation: sql.LevelRepeatableRead, - ReadOnly: false, - }) - c := &pop.Connection{ - TX: tx, - Store: tx, - ID: randx.String(30), - Dialect: p.conn.Dialect, + baseDependencies interface { + x.RegistryLogger + x.TracingProvider + contextx.Provider + config.Provider + jwk.ManagerProvider } - return popx.WithTransaction(ctx, c), err -} - -func (p *Persister) Commit(ctx context.Context) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.Commit") - defer span.End() - - fallback := &pop.Connection{TX: &pop.Tx{}} - tx := popx.GetConnection(ctx, fallback) - if tx.TX == fallback.TX || tx.TX == nil { - return errorsx.WithStack(ErrNoTransactionOpen) + BasePersisterProvider interface { + BasePersister() *BasePersister } +) - return errorsx.WithStack(tx.TX.Commit()) -} - -func (p *Persister) Rollback(ctx context.Context) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.Rollback") - defer span.End() - - fallback := &pop.Connection{TX: &pop.Tx{}} - tx := popx.GetConnection(ctx, fallback) - if tx.TX == fallback.TX || tx.TX == nil { - return errorsx.WithStack(ErrNoTransactionOpen) +func NewPersister(base *BasePersister, r Dependencies) *Persister { + return &Persister{ + BasePersister: base, + r: r, + l: r.Logger(), } - - return errorsx.WithStack(tx.TX.Rollback()) } -func NewPersister(ctx context.Context, c *pop.Connection, r Dependencies, config *config.DefaultProvider, l *logrusx.Logger) (*Persister, error) { - mb, err := popx.NewMigrationBox(migrations, popx.NewMigrator(c, 
r.Logger(), r.Tracer(ctx), 0)) - if err != nil { - return nil, errorsx.WithStack(err) - } - - return &Persister{ - conn: c, - mb: mb, - r: r, - config: config, - l: l, - p: networkx.NewManager(c, r.Logger(), r.Tracer(ctx)), - }, nil +func NewBasePersister(c *pop.Connection, d baseDependencies) *BasePersister { + return &BasePersister{c: c, d: d} } -func (p *Persister) DetermineNetwork(ctx context.Context) (*networkx.Network, error) { - return p.p.Determine(ctx) +func (p *BasePersister) DetermineNetwork(ctx context.Context) (*networkx.Network, error) { + return networkx.Determine(p.Connection(ctx)) } -func (p Persister) WithFallbackNetworkID(nid uuid.UUID) persistence.Persister { +func (p BasePersister) WithFallbackNetworkID(nid uuid.UUID) *BasePersister { p.fallbackNID = nid return &p } -func (p *Persister) CreateWithNetwork(ctx context.Context, v interface{}) error { - n := p.NetworkID(ctx) - return p.Connection(ctx).Create(p.mustSetNetwork(n, v)) +func (p *BasePersister) CreateWithNetwork(ctx context.Context, v interface{}) error { + p.mustSetNetwork(ctx, v) + return p.Connection(ctx).Create(v) } -func (p *Persister) UpdateWithNetwork(ctx context.Context, v interface{}) (int64, error) { - n := p.NetworkID(ctx) - v = p.mustSetNetwork(n, v) +func (p *BasePersister) UpdateWithNetwork(ctx context.Context, v interface{}) (int64, error) { + p.mustSetNetwork(ctx, v) m := pop.NewModel(v, ctx) - var cs []string + cols := m.Columns() + cs := make([]string, 0, len(cols.Cols)) for _, t := range m.Columns().Cols { cs = append(cs, t.Name) } - return p.Connection(ctx).Where(m.IDField()+" = ? AND nid = ?", m.ID(), n).UpdateQuery(v, cs...) + return p.Connection(ctx).Where(m.IDField()+" = ? AND nid = ?", m.ID(), p.NetworkID(ctx)).UpdateQuery(v, cs...) } -func (p *Persister) NetworkID(ctx context.Context) uuid.UUID { - return p.r.Contextualizer().Network(ctx, p.fallbackNID) +func (p *BasePersister) NetworkID(ctx context.Context) uuid.UUID { + return p.d.Contextualizer().Network(ctx, p.fallbackNID) } -func (p *Persister) QueryWithNetwork(ctx context.Context) *pop.Query { +func (p *BasePersister) QueryWithNetwork(ctx context.Context) *pop.Query { return p.Connection(ctx).Where("nid = ?", p.NetworkID(ctx)) } -func (p *Persister) Connection(ctx context.Context) *pop.Connection { - return popx.GetConnection(ctx, p.conn) +func (p *BasePersister) Connection(ctx context.Context) *pop.Connection { + return popx.GetConnection(ctx, p.c) } -func (p *Persister) Ping() error { - type pinger interface{ Ping() error } - return p.conn.Store.(pinger).Ping() -} +func (p *BasePersister) Ping(ctx context.Context) error { return p.c.Store.SQLDB().PingContext(ctx) } -func (p *Persister) mustSetNetwork(nid uuid.UUID, v interface{}) interface{} { +func (p *BasePersister) mustSetNetwork(ctx context.Context, v interface{}) { rv := reflect.ValueOf(v) if rv.Kind() != reflect.Ptr || (rv.Kind() == reflect.Ptr && rv.Elem().Kind() != reflect.Struct) { @@ -173,10 +137,68 @@ func (p *Persister) mustSetNetwork(nid uuid.UUID, v interface{}) interface{} { if !nf.IsValid() || !nf.CanSet() { panic("v must have settable a field 'NID uuid.UUID'") } - nf.Set(reflect.ValueOf(nid)) - return v + nf.Set(reflect.ValueOf(p.NetworkID(ctx))) +} + +func (p *BasePersister) Transaction(ctx context.Context, f func(ctx context.Context, c *pop.Connection) error) error { + return popx.Transaction(ctx, p.c, f) +} + +// BeginTX implements Transactional. 
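The BeginTX implementation that follows is nesting-aware: when the incoming context already carries an open transaction, it only tags the context with skipCommitKey, so the matching Commit and Rollback become no-ops and the outermost caller stays in control of the transaction. A minimal caller sketch under that assumption (the helper name withTX and its callback shape are illustrative, not part of this change; it relies only on the methods defined in this file plus the standard context package):

    // withTX runs fn inside a storage transaction. If the caller already
    // opened one, BeginTX merely marks the context and the Commit/Rollback
    // calls below turn into no-ops.
    func withTX(ctx context.Context, p *BasePersister, fn func(ctx context.Context) error) (err error) {
        ctx, err = p.BeginTX(ctx)
        if err != nil {
            return err
        }
        if err = fn(ctx); err != nil {
            _ = p.Rollback(ctx) // best effort; also a no-op when BeginTX was skipped
            return err
        }
        return p.Commit(ctx)
    }
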
+func (p *BasePersister) BeginTX(ctx context.Context) (_ context.Context, err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.BeginTX") + defer otelx.End(span, &err) + + fallback := &pop.Connection{TX: &pop.Tx{}} + if popx.GetConnection(ctx, fallback).TX != fallback.TX { + return context.WithValue(ctx, skipCommitKey, true), nil // no-op + } + + tx, err := p.c.Store.TransactionContextOptions(ctx, &sql.TxOptions{ + Isolation: sql.LevelRepeatableRead, + ReadOnly: false, + }) + c := &pop.Connection{ + TX: tx, + Store: tx, + ID: uuid.Must(uuid.NewV4()).String(), + Dialect: p.c.Dialect, + } + return popx.WithTransaction(ctx, c), err } -func (p *Persister) transaction(ctx context.Context, f func(ctx context.Context, c *pop.Connection) error) error { - return popx.Transaction(ctx, p.conn, f) +// Commit implements Transactional. +func (p *BasePersister) Commit(ctx context.Context) (err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.Commit") + defer otelx.End(span, &err) + + if skip, ok := ctx.Value(skipCommitKey).(bool); ok && skip { + return nil // we skipped BeginTX, so we also skip Commit + } + + fallback := &pop.Connection{TX: &pop.Tx{}} + tx := popx.GetConnection(ctx, fallback) + if tx.TX == fallback.TX || tx.TX == nil { + return errors.WithStack(ErrNoTransactionOpen) + } + + return errors.WithStack(tx.TX.Commit()) +} + +// Rollback implements Transactional. +func (p *BasePersister) Rollback(ctx context.Context) (err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.Rollback") + defer otelx.End(span, &err) + + if skip, ok := ctx.Value(skipCommitKey).(bool); ok && skip { + return nil // we skipped BeginTX, so we also skip Rollback + } + + fallback := &pop.Connection{TX: &pop.Tx{}} + tx := popx.GetConnection(ctx, fallback) + if tx.TX == fallback.TX || tx.TX == nil { + return errors.WithStack(ErrNoTransactionOpen) + } + + return errors.WithStack(tx.TX.Rollback()) } diff --git a/persistence/sql/persister_authenticate.go b/persistence/sql/persister_authenticate.go new file mode 100644 index 00000000000..be95dff63cf --- /dev/null +++ b/persistence/sql/persister_authenticate.go @@ -0,0 +1,17 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sql + +import ( + "context" +) + +// Authenticate implements ResourceOwnerPasswordCredentialsGrantStorage. 
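The Authenticate implementation below delegates the resource owner password credentials check to Ory Kratos via the Kratos() dependency and returns the identity ID as the OAuth2 subject. A hedged caller sketch (subjectForPasswordGrant and its error wrapping are hypothetical; it assumes only the standard context and fmt packages):

    // subjectForPasswordGrant resolves the subject for a password grant by
    // letting the persister check the credentials against Kratos.
    func subjectForPasswordGrant(ctx context.Context, p *Persister, username, password string) (string, error) {
        subject, err := p.Authenticate(ctx, username, password)
        if err != nil {
            return "", fmt.Errorf("password grant rejected: %w", err)
        }
        return subject, nil
    }
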
+func (p *Persister) Authenticate(ctx context.Context, name, secret string) (subject string, err error) { + session, err := p.r.Kratos().Authenticate(ctx, name, secret) + if err != nil { + return "", err + } + return session.Identity.Id, nil +} diff --git a/persistence/sql/persister_client.go b/persistence/sql/persister_client.go index 9f82843dc53..24dd672aa86 100644 --- a/persistence/sql/persister_client.go +++ b/persistence/sql/persister_client.go @@ -7,36 +7,69 @@ import ( "context" "github.com/gofrs/uuid" - - "github.com/gobuffalo/pop/v6" - - "github.com/ory/x/errorsx" - - "github.com/ory/fosite" - "github.com/ory/hydra/client" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x/events" + "github.com/ory/pop/v6" + "github.com/ory/x/otelx" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" "github.com/ory/x/sqlcon" ) -func (p *Persister) GetConcreteClient(ctx context.Context, id string) (*client.Client, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetConcreteClient") - defer span.End() +// AuthenticateClient implements client.Manager. +func (p *Persister) AuthenticateClient(ctx context.Context, id string, secret []byte) (_ *client.Client, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.AuthenticateClient", + trace.WithAttributes(events.ClientID(id)), + ) + defer otelx.End(span, &err) - var cl client.Client - if err := p.QueryWithNetwork(ctx).Where("id = ?", id).First(&cl); err != nil { - return nil, sqlcon.HandleError(err) + c, err := p.GetConcreteClient(ctx, id) + if err != nil { + return nil, err } - return &cl, nil + + if err := p.r.ClientHasher().Compare(ctx, c.GetHashedSecret(), secret); err != nil { + return nil, err + } + + return c, nil } -func (p *Persister) GetClient(ctx context.Context, id string) (fosite.Client, error) { - return p.GetConcreteClient(ctx, id) +// CreateClient implements client.Storage. +func (p *Persister) CreateClient(ctx context.Context, c *client.Client) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateClient") + defer otelx.End(span, &err) + + h, err := p.r.ClientHasher().Hash(ctx, []byte(c.Secret)) + if err != nil { + return err + } + + c.Secret = string(h) + if c.ID == "" { + c.ID = uuid.Must(uuid.NewV4()).String() + } + if err := sqlcon.HandleError(p.CreateWithNetwork(ctx, c)); err != nil { + return err + } + + events.Trace(ctx, events.ClientCreated, + events.WithClientID(c.ID), + events.WithClientName(c.Name)) + + return nil } -func (p *Persister) UpdateClient(ctx context.Context, cl *client.Client) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateClient") - defer span.End() +// UpdateClient implements client.Storage. 
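CreateClient above hashes the plaintext secret before persisting it and assigns a UUID when the ID is empty, while the UpdateClient implementation that follows runs in a transaction, re-hashes a non-empty secret, and keeps the stored ID. A small rotation sketch under those assumptions (rotateClientSecret is illustrative and not part of this change):

    // rotateClientSecret loads a client and stores a freshly hashed secret.
    func rotateClientSecret(ctx context.Context, p *Persister, id, newSecret string) error {
        c, err := p.GetConcreteClient(ctx, id)
        if err != nil {
            return err
        }
        c.Secret = newSecret // UpdateClient hashes a non-empty secret before saving
        return p.UpdateClient(ctx, c)
    }
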
+func (p *Persister) UpdateClient(ctx context.Context, cl *client.Client) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateClient", + trace.WithAttributes(events.ClientID(cl.ID)), + ) + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { o, err := p.GetConcreteClient(ctx, cl.GetID()) if err != nil { return err @@ -47,15 +80,13 @@ func (p *Persister) UpdateClient(ctx context.Context, cl *client.Client) error { } else { h, err := p.r.ClientHasher().Hash(ctx, []byte(cl.Secret)) if err != nil { - return errorsx.WithStack(err) + return err } cl.Secret = string(h) } - // set the internal primary key - cl.ID = o.ID - // Set the legacy client ID - cl.LegacyClientID = o.LegacyClientID + // Ensure ID is the same + cl.ID = o.ID if err = cl.BeforeSave(c); err != nil { return sqlcon.HandleError(err) @@ -67,66 +98,49 @@ func (p *Persister) UpdateClient(ctx context.Context, cl *client.Client) error { } else if count == 0 { return sqlcon.HandleError(sqlcon.ErrNoRows) } + + events.Trace(ctx, events.ClientUpdated, + events.WithClientID(cl.ID), + events.WithClientName(cl.Name)) + return sqlcon.HandleError(err) }) } -func (p *Persister) Authenticate(ctx context.Context, id string, secret []byte) (*client.Client, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.Authenticate") - defer span.End() +// DeleteClient implements client.Storage. +func (p *Persister) DeleteClient(ctx context.Context, id string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteClient", + trace.WithAttributes(events.ClientID(id)), + ) + defer otelx.End(span, &err) c, err := p.GetConcreteClient(ctx, id) - if err != nil { - return nil, errorsx.WithStack(err) - } - - if err := p.r.ClientHasher().Compare(ctx, c.GetHashedSecret(), secret); err != nil { - return nil, errorsx.WithStack(err) - } - - return c, nil -} - -func (p *Persister) CreateClient(ctx context.Context, c *client.Client) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateClient") - defer span.End() - - h, err := p.r.ClientHasher().Hash(ctx, []byte(c.Secret)) if err != nil { return err } - c.Secret = string(h) - if c.ID == uuid.Nil { - c.ID = uuid.Must(uuid.NewV4()) - } - if c.LegacyClientID == "" { - c.LegacyClientID = c.ID.String() - } - return sqlcon.HandleError(p.CreateWithNetwork(ctx, c)) -} - -func (p *Persister) DeleteClient(ctx context.Context, id string) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteClient") - defer span.End() - - _, err := p.GetConcreteClient(ctx, id) - if err != nil { + if err := sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("id = ?", id).Delete(&client.Client{})); err != nil { return err } - return sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("id = ?", id).Delete(&client.Client{})) + events.Trace(ctx, events.ClientDeleted, + events.WithClientID(c.ID), + events.WithClientName(c.Name)) + + return nil } -func (p *Persister) GetClients(ctx context.Context, filters client.Filter) ([]client.Client, error) { +// GetClients implements client.Storage. 
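The GetClients implementation that follows replaces offset pagination with keyset pagination: the page position travels in filters.PageOpts and the second return value is the paginator describing where the next page starts. A minimal listing sketch using only the Filter fields referenced in this diff (listOwnedClients is hypothetical; it assumes the same keysetpagination_v2 import alias used in this file):

    // listOwnedClients fetches one page of clients for a given owner. The
    // returned paginator carries the next-page position; how it is fed back
    // into Filter.PageOpts depends on the keysetpagination_v2 API.
    func listOwnedClients(ctx context.Context, p *Persister, owner string) ([]client.Client, *keysetpagination.Paginator, error) {
        return p.GetClients(ctx, client.Filter{Owner: owner})
    }
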
+func (p *Persister) GetClients(ctx context.Context, filters client.Filter) (cs []client.Client, _ *keysetpagination.Paginator, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetClients") - defer span.End() + defer otelx.End(span, &err) - cs := make([]client.Client, 0) + paginator := keysetpagination.NewPaginator(append(filters.PageOpts, + keysetpagination.WithDefaultToken(keysetpagination.NewPageToken(keysetpagination.Column{Name: "id", Value: ""})), + )...) - query := p.QueryWithNetwork(ctx). - Paginate(filters.Offset/filters.Limit+1, filters.Limit). - Order("pk") + query := p.QueryWithNetwork(ctx).Scope( + keysetpagination.Paginate[client.Client](paginator)) if filters.Name != "" { query.Where("client_name = ?", filters.Name) @@ -134,17 +148,32 @@ func (p *Persister) GetClients(ctx context.Context, filters client.Filter) ([]cl if filters.Owner != "" { query.Where("owner = ?", filters.Owner) } + if len(filters.IDs) > 0 { + query.Where("id IN (?)", filters.IDs) + } if err := query.All(&cs); err != nil { - return nil, sqlcon.HandleError(err) + return nil, nil, sqlcon.HandleError(err) } - return cs, nil + cs, nextPage := keysetpagination.Result(cs, paginator) + return cs, nextPage, nil } -func (p *Persister) CountClients(ctx context.Context) (int, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CountClients") - defer span.End() +// GetConcreteClient implements client.Storage. +func (p *Persister) GetConcreteClient(ctx context.Context, id string) (c *client.Client, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetConcreteClient", + trace.WithAttributes(events.ClientID(id)), + ) + defer otelx.End(span, &err) + + var cl client.Client + if err := p.QueryWithNetwork(ctx).Where("id = ?", id).First(&cl); err != nil { + return nil, sqlcon.HandleError(err) + } + return &cl, nil +} - n, err := p.QueryWithNetwork(ctx).Count(&client.Client{}) - return n, sqlcon.HandleError(err) +// GetClient implements fosite.ClientManager. 
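GetConcreteClient above returns the full client row, while the GetClient implementation below exposes it behind the fosite.Client interface for the OAuth2 handlers. Code that needs concrete columns should call GetConcreteClient directly rather than down-cast, as in this illustrative sketch (clientOwner is hypothetical; it assumes the Owner field that the owner filter above queries):

    // clientOwner reads an admin-facing attribute that the fosite.Client
    // interface does not expose.
    func clientOwner(ctx context.Context, p *Persister, id string) (string, error) {
        c, err := p.GetConcreteClient(ctx, id)
        if err != nil {
            return "", err
        }
        return c.Owner, nil
    }
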
+func (p *Persister) GetClient(ctx context.Context, id string) (fosite.Client, error) { + return p.GetConcreteClient(ctx, id) } diff --git a/persistence/sql/persister_consent.go b/persistence/sql/persister_consent.go index a93f60a7705..87b50fc85d3 100644 --- a/persistence/sql/persister_consent.go +++ b/persistence/sql/persister_consent.go @@ -7,95 +7,116 @@ import ( "context" "database/sql" "fmt" - "strings" "time" - "github.com/gobuffalo/pop/v6" - + "github.com/pkg/errors" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" + "github.com/ory/x/otelx" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" + "github.com/ory/x/popx" + "github.com/ory/x/sqlcon" "github.com/ory/x/sqlxx" +) - "github.com/ory/x/errorsx" +var ( + _ consent.Manager = (*ConsentPersister)(nil) + _ consent.LoginManager = (*Persister)(nil) + _ consent.LogoutManager = (*Persister)(nil) + _ consent.ObfuscatedSubjectManager = (*Persister)(nil) +) - "github.com/pkg/errors" +type ConsentPersister struct { + *BasePersister +} - "github.com/ory/fosite" - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/flow" - "github.com/ory/hydra/x" - "github.com/ory/x/sqlcon" -) +func (p *ConsentPersister) RevokeSubjectConsentSession(ctx context.Context, user string) (err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeSubjectConsentSession") + defer otelx.End(span, &err) -var _ consent.Manager = &Persister{} + return p.Transaction(ctx, p.revokeConsentSession("consent_challenge_id IS NOT NULL AND subject = ?", user)) +} -func (p *Persister) RevokeSubjectConsentSession(ctx context.Context, user string) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeSubjectConsentSession") - defer span.End() +func (p *ConsentPersister) RevokeSubjectClientConsentSession(ctx context.Context, user, client string) (err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeSubjectClientConsentSession", trace.WithAttributes(attribute.String("client", client))) + defer otelx.End(span, &err) - return p.transaction(ctx, p.revokeConsentSession("consent_challenge_id IS NOT NULL AND subject = ?", user)) + return p.Transaction(ctx, p.revokeConsentSession("consent_challenge_id IS NOT NULL AND subject = ? AND client_id = ?", user, client)) } -func (p *Persister) RevokeSubjectClientConsentSession(ctx context.Context, user, client string) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeSubjectClientConsentSession") - defer span.End() +func (p *ConsentPersister) RevokeConsentSessionByID(ctx context.Context, consentRequestID string) (err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeConsentSessionByID", + trace.WithAttributes(attribute.String("consent_challenge_id", consentRequestID))) + defer otelx.End(span, &err) - return p.transaction(ctx, p.revokeConsentSession("consent_challenge_id IS NOT NULL AND subject = ? 
AND client_id = ?", user, client)) + return p.Transaction(ctx, p.revokeConsentSession("consent_challenge_id = ?", consentRequestID)) } -func (p *Persister) revokeConsentSession(whereStmt string, whereArgs ...interface{}) func(context.Context, *pop.Connection) error { +func (p *ConsentPersister) revokeConsentSession(whereStmt string, whereArgs ...interface{}) func(context.Context, *pop.Connection) error { return func(ctx context.Context, c *pop.Connection) error { fs := make([]*flow.Flow, 0) if err := p.QueryWithNetwork(ctx). Where(whereStmt, whereArgs...). Select("consent_challenge_id"). - All(&fs); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(x.ErrNotFound) - } - + All(&fs); errors.Is(err, sql.ErrNoRows) { + return errors.WithStack(x.ErrNotFound) + } else if err != nil { return sqlcon.HandleError(err) } - var count int + ids := make([]interface{}, 0, len(fs)) + nid := p.NetworkID(ctx) for _, f := range fs { - if err := p.RevokeAccessToken(ctx, f.ConsentChallengeID.String()); errors.Is(err, fosite.ErrNotFound) { - // do nothing - } else if err != nil { - return err - } - - if err := p.RevokeRefreshToken(ctx, f.ConsentChallengeID.String()); errors.Is(err, fosite.ErrNotFound) { - // do nothing - } else if err != nil { - return err - } - - localCount, err := c.RawQuery("DELETE FROM hydra_oauth2_flow WHERE consent_challenge_id = ? AND nid = ?", f.ConsentChallengeID, p.NetworkID(ctx)).ExecWithCount() - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(x.ErrNotFound) - } - return sqlcon.HandleError(err) - } - - // If there are no sessions to revoke we should return an error to indicate to the caller - // that the request failed. - count += localCount + ids = append(ids, f.ConsentRequestID.String()) } - if count == 0 { - return errorsx.WithStack(x.ErrNotFound) + if len(ids) == 0 { + return nil + } + + if err := p.QueryWithNetwork(ctx). + Where("nid = ?", nid). + Where("request_id IN (?)", ids...). + Delete(OAuth2RequestSQL{Table: sqlTableAccess}.TableName()); errors.Is(err, fosite.ErrNotFound) { + // do nothing + } else if err != nil { + return err + } + + if err := p.QueryWithNetwork(ctx). + Where("nid = ?", nid). + Where("request_id IN (?)", ids...). + Delete(OAuth2RefreshTable{}.TableName()); errors.Is(err, fosite.ErrNotFound) { + // do nothing + } else if err != nil { + return err + } + + if err := p.QueryWithNetwork(ctx). + Where("nid = ?", nid). + Where("consent_challenge_id IN (?)", ids...). 
+ Delete(new(flow.Flow)); errors.Is(err, sql.ErrNoRows) { + return errors.WithStack(x.ErrNotFound) + } else if err != nil { + return sqlcon.HandleError(err) } return nil } } -func (p *Persister) RevokeSubjectLoginSession(ctx context.Context, subject string) error { +func (p *Persister) RevokeSubjectLoginSession(ctx context.Context, subject string) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeSubjectLoginSession") - defer span.End() + defer otelx.End(span, &err) - err := p.QueryWithNetwork(ctx).Where("subject = ?", subject).Delete(&consent.LoginSession{}) + err = p.QueryWithNetwork(ctx).Where("subject = ?", subject).Delete(&flow.LoginSession{}) if err != nil { return sqlcon.HandleError(err) } @@ -104,17 +125,17 @@ func (p *Persister) RevokeSubjectLoginSession(ctx context.Context, subject strin // // count, _ := rows.RowsAffected() // if count == 0 { - // return errorsx.WithStack(x.ErrNotFound) + // return errors.WithStack(x.ErrNotFound) // } return nil } -func (p *Persister) CreateForcedObfuscatedLoginSession(ctx context.Context, session *consent.ForcedObfuscatedLoginSession) error { +func (p *Persister) CreateForcedObfuscatedLoginSession(ctx context.Context, session *consent.ForcedObfuscatedLoginSession) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateForcedObfuscatedLoginSession") - defer span.End() + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { nid := p.NetworkID(ctx) if err := c.RawQuery( "DELETE FROM hydra_oauth2_obfuscated_authentication_session WHERE nid = ? AND client_id = ? AND subject = ?", @@ -135,9 +156,9 @@ func (p *Persister) CreateForcedObfuscatedLoginSession(ctx context.Context, sess }) } -func (p *Persister) GetForcedObfuscatedLoginSession(ctx context.Context, client, obfuscated string) (*consent.ForcedObfuscatedLoginSession, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetForcedObfuscatedLoginSession") - defer span.End() +func (p *Persister) GetForcedObfuscatedLoginSession(ctx context.Context, client, obfuscated string) (_ *consent.ForcedObfuscatedLoginSession, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetForcedObfuscatedLoginSession", trace.WithAttributes(attribute.String("client", client))) + defer otelx.End(span, &err) var s consent.ForcedObfuscatedLoginSession @@ -147,7 +168,7 @@ func (p *Persister) GetForcedObfuscatedLoginSession(ctx context.Context, client, obfuscated, p.NetworkID(ctx), ).First(&s); errors.Is(err, sql.ErrNoRows) { - return nil, errorsx.WithStack(x.ErrNotFound) + return nil, errors.WithStack(x.ErrNotFound) } else if err != nil { return nil, sqlcon.HandleError(err) } @@ -155,442 +176,287 @@ func (p *Persister) GetForcedObfuscatedLoginSession(ctx context.Context, client, return &s, nil } -// CreateConsentRequest configures fields that are introduced or changed in the -// consent request. It doesn't touch fields that would be copied from the login -// request. -func (p *Persister) CreateConsentRequest(ctx context.Context, req *consent.OAuth2ConsentRequest) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateConsentRequest") - defer span.End() - - c, err := p.Connection(ctx).RawQuery(` -UPDATE hydra_oauth2_flow -SET - state = ?, - consent_challenge_id = ?, - consent_skip = ?, - consent_verifier = ?, - consent_csrf = ? 
-WHERE login_challenge = ? AND nid = ?; -`, - flow.FlowStateConsentInitialized, - sqlxx.NullString(req.ID), - req.Skip, - req.Verifier, - req.CSRF, - req.LoginChallenge.String(), - p.NetworkID(ctx), - ).ExecWithCount() - if err != nil { - return sqlcon.HandleError(err) - } - if c != 1 { - return errorsx.WithStack(x.ErrNotFound) - } - return nil -} - -func (p *Persister) GetFlowByConsentChallenge(ctx context.Context, challenge string) (*flow.Flow, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetFlowByConsentChallenge") - defer span.End() - - f := &flow.Flow{} +func (p *ConsentPersister) CreateConsentSession(ctx context.Context, f *flow.Flow) (err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateConsentSession") + defer otelx.End(span, &err) - if err := sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("consent_challenge_id = ?", challenge).First(f)); err != nil { - return nil, err + if f.NID != p.NetworkID(ctx) { + return errors.WithStack(sqlcon.ErrNoRows) } - - return f, nil + return sqlcon.HandleError(p.Connection(ctx).Create(f)) } -func (p *Persister) GetConsentRequest(ctx context.Context, challenge string) (*consent.OAuth2ConsentRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetConsentRequest") - defer span.End() +func (p *Persister) GetRememberedLoginSession(ctx context.Context, id string) (_ *flow.LoginSession, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetRememberedLoginSession") + defer otelx.End(span, &err) - f, err := p.GetFlowByConsentChallenge(ctx, challenge) - if err != nil { - if errors.Is(err, sqlcon.ErrNoRows) { - return nil, errorsx.WithStack(x.ErrNotFound) - } - return nil, err + var s flow.LoginSession + if err := p.QueryWithNetwork(ctx).Where("remember = TRUE").Find(&s, id); errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(x.ErrNotFound) + } else if err != nil { + return nil, sqlcon.HandleError(err) } - return f.GetConsentRequest(), nil -} - -func (p *Persister) CreateLoginRequest(ctx context.Context, req *consent.LoginRequest) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateLoginRequest") - defer span.End() - - f := flow.NewFlow(req) - return sqlcon.HandleError(p.CreateWithNetwork(ctx, f)) -} - -func (p *Persister) GetFlow(ctx context.Context, loginChallenge string) (*flow.Flow, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetFlow") - defer span.End() - - var f flow.Flow - return &f, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - if err := p.QueryWithNetwork(ctx).Where("login_challenge = ?", loginChallenge).First(&f); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(x.ErrNotFound) - } - return sqlcon.HandleError(err) - } - - return nil - }) -} - -func (p *Persister) GetLoginRequest(ctx context.Context, loginChallenge string) (*consent.LoginRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetLoginRequest") - defer span.End() - - var lr *consent.LoginRequest - return lr, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - var f flow.Flow - if err := p.QueryWithNetwork(ctx).Where("login_challenge = ?", loginChallenge).First(&f); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(x.ErrNotFound) - } - return sqlcon.HandleError(err) - } - lr = f.GetLoginRequest() - - return nil - }) + return &s, nil } -func (p 
*Persister) HandleConsentRequest(ctx context.Context, r *consent.AcceptOAuth2ConsentRequest) (*consent.OAuth2ConsentRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.HandleConsentRequest") - defer span.End() +// ConfirmLoginSession creates or updates the login session. The NID will be set to the network ID of the context. +func (p *Persister) ConfirmLoginSession(ctx context.Context, loginSession *flow.LoginSession) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ConfirmLoginSession") + defer otelx.End(span, &err) - f := &flow.Flow{} + loginSession.NID = p.NetworkID(ctx) + loginSession.AuthenticatedAt = sqlxx.NullTime(time.Time(loginSession.AuthenticatedAt).Truncate(time.Second)) + loginSession.ExpiresAt = sqlxx.NullTime(time.Now().Truncate(time.Second).Add(p.r.Config().GetAuthenticationSessionLifespan(ctx)).UTC()) - if err := sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("consent_challenge_id = ?", r.ID).First(f)); errors.Is(err, sqlcon.ErrNoRows) { - return nil, err + if p.Connection(ctx).Dialect.Name() == "mysql" { + // MySQL does not support UPSERT. + return p.mySQLConfirmLoginSession(ctx, loginSession) } - if err := f.HandleConsentRequest(r); err != nil { - return nil, errorsx.WithStack(err) + res, err := p.Connection(ctx).Store.NamedExecContext(ctx, ` +INSERT INTO hydra_oauth2_authentication_session (id, nid, authenticated_at, subject, remember, identity_provider_session_id, expires_at) +VALUES (:id, :nid, :authenticated_at, :subject, :remember, :identity_provider_session_id, :expires_at) +ON CONFLICT(id) DO +UPDATE SET + authenticated_at = :authenticated_at, + subject = :subject, + remember = :remember, + identity_provider_session_id = :identity_provider_session_id, + expires_at = :expires_at +WHERE hydra_oauth2_authentication_session.id = :id AND hydra_oauth2_authentication_session.nid = :nid +`, loginSession) + if err != nil { + return sqlcon.HandleError(err) } - - _, err := p.UpdateWithNetwork(ctx, f) + n, err := res.RowsAffected() if err != nil { - return nil, sqlcon.HandleError(err) + return sqlcon.HandleError(err) } - - return p.GetConsentRequest(ctx, r.ID) + if n == 0 { + return errors.WithStack(x.ErrNotFound) + } + return nil } -func (p *Persister) VerifyAndInvalidateConsentRequest(ctx context.Context, verifier string) (*consent.AcceptOAuth2ConsentRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.VerifyAndInvalidateConsentRequest") - defer span.End() +func (p *Persister) DeleteLoginSession(ctx context.Context, id string) (_ *flow.LoginSession, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteLoginSession") + defer otelx.End(span, &err) - var r consent.AcceptOAuth2ConsentRequest - return &r, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - var f flow.Flow - if err := p.QueryWithNetwork(ctx).Where("consent_verifier = ?", verifier).First(&f); err != nil { - return sqlcon.HandleError(err) - } + c := p.Connection(ctx) + if c.Dialect.Name() == "mysql" { + // MySQL does not support RETURNING. + return p.mySQLDeleteLoginSession(ctx, id) + } - if err := f.InvalidateConsentRequest(); err != nil { - return errorsx.WithStack(fosite.ErrInvalidRequest.WithDebug(err.Error())) - } + var session flow.LoginSession + columns := popx.DBColumns[flow.LoginSession](c.Dialect) + if err := p.Connection(ctx).RawQuery( + fmt.Sprintf(`DELETE FROM hydra_oauth2_authentication_session WHERE id = ? AND nid = ? 
RETURNING %s`, columns), + id, + p.NetworkID(ctx), + ).First(&session); err != nil { + return nil, sqlcon.HandleError(err) + } - r = *f.GetHandledConsentRequest() - _, err := p.UpdateWithNetwork(ctx, &f) - return err - }) + return &session, nil } -func (p *Persister) HandleLoginRequest(ctx context.Context, challenge string, r *consent.HandledLoginRequest) (lr *consent.LoginRequest, err error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.HandleLoginRequest") - defer span.End() +func (p *Persister) mySQLDeleteLoginSession(ctx context.Context, id string) (_ *flow.LoginSession, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.mySQLDeleteLoginSession") + defer otelx.End(span, &err) - return lr, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - f, err := p.GetFlow(ctx, challenge) - if err != nil { - return sqlcon.HandleError(err) - } - err = f.HandleLoginRequest(r) - if err != nil { + var session flow.LoginSession + if err := p.Connection(ctx).Transaction(func(tx *pop.Connection) error { + if err := tx.Where("id = ? AND nid = ?", id, p.NetworkID(ctx)).First(&session); err != nil { return err } - _, err = p.UpdateWithNetwork(ctx, f) - if err != nil { - return sqlcon.HandleError(err) - } + return tx.RawQuery( + `DELETE FROM hydra_oauth2_authentication_session WHERE id = ? AND nid = ?`, + id, p.NetworkID(ctx), + ).Exec() + }); err != nil { + return nil, sqlcon.HandleError(err) + } - lr, err = p.GetLoginRequest(ctx, challenge) - return sqlcon.HandleError(err) - }) + return &session, nil } -func (p *Persister) VerifyAndInvalidateLoginRequest(ctx context.Context, verifier string) (*consent.HandledLoginRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.VerifyAndInvalidateLoginRequest") - defer span.End() +func (p *ConsentPersister) FindGrantedAndRememberedConsentRequest(ctx context.Context, client, subject string) (_ *flow.Flow, err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FindGrantedAndRememberedConsentRequest") + defer otelx.End(span, &err) - var d consent.HandledLoginRequest - return &d, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - var f flow.Flow - if err := p.QueryWithNetwork(ctx).Where("login_verifier = ?", verifier).First(&f); err != nil { - return sqlcon.HandleError(err) - } + f := flow.Flow{} + conn := p.Connection(ctx) - if err := f.InvalidateLoginRequest(); err != nil { - return errorsx.WithStack(fosite.ErrInvalidRequest.WithDebug(err.Error())) - } + // apply index hint + tableName := applyTableNameWithIndexHint(conn, f.TableName(), "hydra_oauth2_flow_previous_consents_idx") - d = f.GetHandledLoginRequest() - _, err := p.UpdateWithNetwork(ctx, &f) - return sqlcon.HandleError(err) - }) -} + // prepare columns + cols := popx.DBColumns[flow.Flow](conn.Dialect) -func (p *Persister) GetRememberedLoginSession(ctx context.Context, id string) (*consent.LoginSession, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetRememberedLoginSession") - defer span.End() + // prepare sql statement + q := fmt.Sprintf(` +SELECT %s FROM %s +WHERE nid = ? +AND (state = ? OR state IS NULL) +AND subject = ? +AND client_id = ? 
+AND consent_skip = FALSE +AND consent_remember = TRUE +AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP) +ORDER BY requested_at DESC +LIMIT 1`, + cols, + tableName, + ) - var s consent.LoginSession + // query first record + err = conn.RawQuery(q, + p.NetworkID(ctx), + flow.FlowStateConsentUsed, + subject, + client, + ).First(&f) - if err := p.QueryWithNetwork(ctx).Where("remember = TRUE").Find(&s, id); errors.Is(err, sql.ErrNoRows) { - return nil, errorsx.WithStack(x.ErrNotFound) + // handle error + if errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(consent.ErrNoPreviousConsentFound) } else if err != nil { return nil, sqlcon.HandleError(err) } - return &s, nil + return &f, nil } -func (p *Persister) ConfirmLoginSession(ctx context.Context, id string, authenticatedAt time.Time, subject string, remember bool) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ConfirmLoginSession") - defer span.End() - - _, err := p.Connection(ctx).Where("id = ? AND nid = ?", id, p.NetworkID(ctx)).UpdateQuery(&consent.LoginSession{ - AuthenticatedAt: sqlxx.NullTime(authenticatedAt), - Subject: subject, - Remember: remember, - }, "authenticated_at", "subject", "remember") - return sqlcon.HandleError(err) -} - -func (p *Persister) CreateLoginSession(ctx context.Context, session *consent.LoginSession) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateLoginSession") - defer span.End() - - return sqlcon.HandleError(p.CreateWithNetwork(ctx, session)) -} - -func (p *Persister) DeleteLoginSession(ctx context.Context, id string) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteLoginSession") - defer span.End() - - count, err := p.Connection(ctx).RawQuery("DELETE FROM hydra_oauth2_authentication_session WHERE id=? AND nid = ?", id, p.NetworkID(ctx)).ExecWithCount() - if count == 0 { - return errorsx.WithStack(x.ErrNotFound) - } else { - return sqlcon.HandleError(err) +func applyTableNameWithIndexHint(conn *pop.Connection, table string, index string) string { + switch conn.Dialect.Name() { + case "cockroach": + return table + "@" + index + case "sqlite3": + return table + " INDEXED BY " + index + case "mysql": + return table + " USE INDEX(" + index + ")" + default: + return table } } -func (p *Persister) FindGrantedAndRememberedConsentRequests(ctx context.Context, client, subject string) ([]consent.AcceptOAuth2ConsentRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FindGrantedAndRememberedConsentRequests") - defer span.End() - - rs := make([]consent.AcceptOAuth2ConsentRequest, 0) - - return rs, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - f := &flow.Flow{} - - if err := c. - Where( - strings.TrimSpace(fmt.Sprintf(` -(state = %d OR state = %d) AND -subject = ? AND -client_id = ? AND -consent_skip=FALSE AND -consent_error='{}' AND -consent_remember=TRUE AND -nid = ?`, flow.FlowStateConsentUsed, flow.FlowStateConsentUnused, - )), - subject, client, p.NetworkID(ctx)). - Order("requested_at DESC"). - Limit(1). 
- First(f); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(consent.ErrNoPreviousConsentFound) - } - return sqlcon.HandleError(err) - } +func (p *ConsentPersister) FindSubjectsGrantedConsentRequests(ctx context.Context, subject string, pageOpts ...keysetpagination.Option) (_ []flow.Flow, _ *keysetpagination.Paginator, err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FindSubjectsGrantedConsentRequests") + defer otelx.End(span, &err) - var err error - rs, err = p.filterExpiredConsentRequests(ctx, []consent.AcceptOAuth2ConsentRequest{*f.GetHandledConsentRequest()}) - return err - }) -} - -func (p *Persister) FindSubjectsGrantedConsentRequests(ctx context.Context, subject string, limit, offset int) ([]consent.AcceptOAuth2ConsentRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FindSubjectsGrantedConsentRequests") - defer span.End() + paginator := keysetpagination.NewPaginator(append(pageOpts, + keysetpagination.WithDefaultToken(keysetpagination.NewPageToken(keysetpagination.Column{Name: "login_challenge", Value: ""})), + )...) var fs []flow.Flow - c := p.Connection(ctx) - - if err := c. - Where( - strings.TrimSpace(fmt.Sprintf(` -(state = %d OR state = %d) AND -subject = ? AND -consent_skip=FALSE AND -consent_error='{}' AND -nid = ?`, flow.FlowStateConsentUsed, flow.FlowStateConsentUnused, - )), - subject, p.NetworkID(ctx)). - Order("requested_at DESC"). - Paginate(offset/limit+1, limit). - All(&fs); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return nil, errorsx.WithStack(consent.ErrNoPreviousConsentFound) - } - return nil, sqlcon.HandleError(err) + err = p.QueryWithNetwork(ctx). + Where("(state IN (?, ?) OR state IS NULL)", flow.FlowStateConsentUsed, flow.FlowStateConsentUnused). + Where("subject = ?", subject). + Where("consent_skip = FALSE"). + Where("(expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)"). + Scope(keysetpagination.Paginate[flow.Flow](paginator)). + All(&fs) + if err != nil { + return nil, nil, sqlcon.HandleError(err) } - - var rs []consent.AcceptOAuth2ConsentRequest - for _, f := range fs { - rs = append(rs, *f.GetHandledConsentRequest()) + if len(fs) == 0 { + return nil, nil, errors.WithStack(consent.ErrNoPreviousConsentFound) } - return p.filterExpiredConsentRequests(ctx, rs) + fs, nextPage := keysetpagination.Result(fs, paginator) + return fs, nextPage, nil } -func (p *Persister) FindSubjectsSessionGrantedConsentRequests(ctx context.Context, subject, sid string, limit, offset int) ([]consent.AcceptOAuth2ConsentRequest, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FindSubjectsSessionGrantedConsentRequests") - defer span.End() +func (p *ConsentPersister) FindSubjectsSessionGrantedConsentRequests(ctx context.Context, subject, sid string, pageOpts ...keysetpagination.Option) (_ []flow.Flow, _ *keysetpagination.Paginator, err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FindSubjectsSessionGrantedConsentRequests", trace.WithAttributes(attribute.String("sid", sid))) + defer otelx.End(span, &err) - var fs []flow.Flow - c := p.Connection(ctx) + paginator := keysetpagination.NewPaginator(append(pageOpts, + keysetpagination.WithDefaultToken(keysetpagination.NewPageToken(keysetpagination.Column{Name: "login_challenge", Value: ""})), + )...) - if err := c. - Where( - strings.TrimSpace(fmt.Sprintf(` -(state = %d OR state = %d) AND -subject = ? AND -login_session_id = ? 
AND -consent_skip=FALSE AND -consent_error='{}' AND -nid = ?`, flow.FlowStateConsentUsed, flow.FlowStateConsentUnused, - )), - subject, sid, p.NetworkID(ctx)). - Order("requested_at DESC"). - Paginate(offset/limit+1, limit). - All(&fs); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return nil, errorsx.WithStack(consent.ErrNoPreviousConsentFound) - } - return nil, sqlcon.HandleError(err) + var fs []flow.Flow + err = p.QueryWithNetwork(ctx). + Where("(state IN (?, ?) OR state IS NULL)", flow.FlowStateConsentUsed, flow.FlowStateConsentUnused). + Where("subject = ?", subject). + Where("login_session_id = ?", sid). + Where("consent_skip = FALSE"). + Where("(expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)"). + Scope(keysetpagination.Paginate[flow.Flow](paginator)). + All(&fs) + if err != nil { + return nil, nil, sqlcon.HandleError(err) } - - var rs []consent.AcceptOAuth2ConsentRequest - for _, f := range fs { - rs = append(rs, *f.GetHandledConsentRequest()) + if len(fs) == 0 { + return nil, nil, errors.WithStack(consent.ErrNoPreviousConsentFound) } - return p.filterExpiredConsentRequests(ctx, rs) -} - -func (p *Persister) CountSubjectsGrantedConsentRequests(ctx context.Context, subject string) (int, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CountSubjectsGrantedConsentRequests") - defer span.End() - - n, err := p.Connection(ctx). - Where( - strings.TrimSpace(fmt.Sprintf(` -(state = %d OR state = %d) AND -subject = ? AND -consent_skip=FALSE AND -consent_error='{}' AND -nid = ?`, flow.FlowStateConsentUsed, flow.FlowStateConsentUnused, - )), - subject, p.NetworkID(ctx)). - Count(&flow.Flow{}) - return n, sqlcon.HandleError(err) -} - -func (p *Persister) filterExpiredConsentRequests(ctx context.Context, requests []consent.AcceptOAuth2ConsentRequest) ([]consent.AcceptOAuth2ConsentRequest, error) { - _, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.filterExpiredConsentRequests") - defer span.End() - - var result []consent.AcceptOAuth2ConsentRequest - for _, v := range requests { - if v.RememberFor > 0 && v.RequestedAt.Add(time.Duration(v.RememberFor)*time.Second).Before(time.Now().UTC()) { - continue - } - result = append(result, v) - } - if len(result) == 0 { - return nil, errorsx.WithStack(consent.ErrNoPreviousConsentFound) - } - return result, nil + fs, nextPage := keysetpagination.Result(fs, paginator) + return fs, nextPage, nil } -func (p *Persister) ListUserAuthenticatedClientsWithFrontChannelLogout(ctx context.Context, subject, sid string) ([]client.Client, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ListUserAuthenticatedClientsWithFrontChannelLogout") - defer span.End() +func (p *ConsentPersister) ListUserAuthenticatedClientsWithFrontChannelLogout(ctx context.Context, subject, sid string) (_ []client.Client, err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ListUserAuthenticatedClientsWithFrontChannelLogout") + defer otelx.End(span, &err) return p.listUserAuthenticatedClients(ctx, subject, sid, "front") } -func (p *Persister) ListUserAuthenticatedClientsWithBackChannelLogout(ctx context.Context, subject, sid string) ([]client.Client, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ListUserAuthenticatedClientsWithBackChannelLogout") - defer span.End() +func (p *ConsentPersister) ListUserAuthenticatedClientsWithBackChannelLogout(ctx context.Context, subject, sid string) (_ []client.Client, err error) { + ctx, span := 
p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ListUserAuthenticatedClientsWithBackChannelLogout") + defer otelx.End(span, &err) + return p.listUserAuthenticatedClients(ctx, subject, sid, "back") } -func (p *Persister) listUserAuthenticatedClients(ctx context.Context, subject, sid, channel string) ([]client.Client, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.listUserAuthenticatedClients") - defer span.End() +func (p *ConsentPersister) listUserAuthenticatedClients(ctx context.Context, subject, sid, channel string) (cs []client.Client, err error) { + ctx, span := p.d.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.listUserAuthenticatedClients", + trace.WithAttributes(attribute.String("sid", sid))) + defer otelx.End(span, &err) - var cs []client.Client - return cs, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - if err := c.RawQuery( - /* #nosec G201 - channel can either be "front" or "back" */ - fmt.Sprintf(` -SELECT DISTINCT c.* FROM hydra_client as c -JOIN hydra_oauth2_flow as f ON (c.id = f.client_id) + conn := p.Connection(ctx) + columns := popx.DBColumns[client.Client](&popx.AliasQuoter{Alias: "c", Quoter: conn.Dialect}) + + if err := conn.RawQuery( + /* #nosec G201 - channel can either be "front" or "back" */ + fmt.Sprintf(` +SELECT DISTINCT %s FROM hydra_client as c +JOIN hydra_oauth2_flow as f ON (c.id = f.client_id AND c.nid = f.nid) WHERE - f.subject=? AND - c.%schannel_logout_uri!='' AND + f.subject = ? AND + c.%schannel_logout_uri != '' AND c.%schannel_logout_uri IS NOT NULL AND f.login_session_id = ? AND f.nid = ? AND c.nid = ?`, - channel, - channel, - ), - subject, - sid, - p.NetworkID(ctx), - p.NetworkID(ctx), - ).All(&cs); err != nil { - return sqlcon.HandleError(err) - } + columns, + channel, + channel, + ), + subject, + sid, + p.NetworkID(ctx), + p.NetworkID(ctx), + ).All(&cs); err != nil { + return nil, sqlcon.HandleError(err) + } - return nil - }) + return cs, nil } -func (p *Persister) CreateLogoutRequest(ctx context.Context, request *consent.LogoutRequest) error { +func (p *Persister) CreateLogoutRequest(ctx context.Context, request *flow.LogoutRequest) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateLogoutRequest") - defer span.End() + defer otelx.End(span, &err) - return errorsx.WithStack(p.CreateWithNetwork(ctx, request)) + return errors.WithStack(p.CreateWithNetwork(ctx, request)) } -func (p *Persister) AcceptLogoutRequest(ctx context.Context, challenge string) (*consent.LogoutRequest, error) { +func (p *Persister) AcceptLogoutRequest(ctx context.Context, challenge string) (_ *flow.LogoutRequest, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.AcceptLogoutRequest") - defer span.End() + defer otelx.End(span, &err) if err := p.Connection(ctx).RawQuery("UPDATE hydra_oauth2_logout_request SET accepted=true, rejected=false WHERE challenge=? AND nid = ?", challenge, p.NetworkID(ctx)).Exec(); err != nil { return nil, sqlcon.HandleError(err) @@ -599,81 +465,74 @@ func (p *Persister) AcceptLogoutRequest(ctx context.Context, challenge string) ( return p.GetLogoutRequest(ctx, challenge) } -func (p *Persister) RejectLogoutRequest(ctx context.Context, challenge string) error { +func (p *Persister) RejectLogoutRequest(ctx context.Context, challenge string) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RejectLogoutRequest") - defer span.End() + defer otelx.End(span, &err) count, err := p.Connection(ctx). 
RawQuery("UPDATE hydra_oauth2_logout_request SET rejected=true, accepted=false WHERE challenge=? AND nid = ?", challenge, p.NetworkID(ctx)). ExecWithCount() if count == 0 { - return errorsx.WithStack(x.ErrNotFound) + return errors.WithStack(x.ErrNotFound) } else { - return errorsx.WithStack(err) + return errors.WithStack(err) } } -func (p *Persister) GetLogoutRequest(ctx context.Context, challenge string) (*consent.LogoutRequest, error) { +func (p *Persister) GetLogoutRequest(ctx context.Context, challenge string) (_ *flow.LogoutRequest, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetLogoutRequest") - defer span.End() + defer otelx.End(span, &err) - var lr consent.LogoutRequest + var lr flow.LogoutRequest return &lr, sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("challenge = ? AND rejected = FALSE", challenge).First(&lr)) } -func (p *Persister) VerifyAndInvalidateLogoutRequest(ctx context.Context, verifier string) (*consent.LogoutRequest, error) { +func (p *Persister) VerifyAndInvalidateLogoutRequest(ctx context.Context, verifier string) (_ *flow.LogoutRequest, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.VerifyAndInvalidateLogoutRequest") - defer span.End() + defer otelx.End(span, &err) + + var lr flow.LogoutRequest + if count, err := p.Connection(ctx).RawQuery(` +UPDATE hydra_oauth2_logout_request + SET was_used = TRUE +WHERE nid = ? + AND verifier = ? + AND accepted = TRUE + AND rejected = FALSE`, + p.NetworkID(ctx), + verifier, + ).ExecWithCount(); count == 0 && err == nil { + return nil, errors.WithStack(x.ErrNotFound) + } else if err != nil { + return nil, sqlcon.HandleError(err) + } - var lr consent.LogoutRequest - return &lr, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - if count, err := c.RawQuery( - "UPDATE hydra_oauth2_logout_request SET was_used=TRUE WHERE nid = ? AND verifier=? AND was_used=FALSE AND accepted=TRUE AND rejected=FALSE", - p.NetworkID(ctx), - verifier, - ).ExecWithCount(); count == 0 && err == nil { - return errorsx.WithStack(x.ErrNotFound) - } else if err != nil { - return sqlcon.HandleError(err) - } + err = sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("verifier = ?", verifier).First(&lr)) + if err != nil { + return nil, err + } - err := sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("verifier=?", verifier).First(&lr)) - if err != nil { - return err - } + if expiry := time.Time(lr.ExpiresAt); + // If the expiry is unset, we are in a legacy use case (allow logout). + // TODO: Remove this in the future. 
+ !expiry.IsZero() && expiry.Before(time.Now().UTC()) { + return nil, errors.WithStack(flow.ErrorLogoutFlowExpired) + } - return nil - }) + return &lr, nil } -func (p *Persister) FlushInactiveLoginConsentRequests(ctx context.Context, notAfter time.Time, limit int, batchSize int) error { +func (p *Persister) FlushInactiveLoginConsentRequests(ctx context.Context, notAfter time.Time, limit, batchSize int) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FlushInactiveLoginConsentRequests") - defer span.End() - - /* #nosec G201 table is static */ - var f flow.Flow + defer otelx.End(span, &err) // The value of notAfter should be the minimum between input parameter and request max expire based on its configured age - requestMaxExpire := time.Now().Add(-p.config.ConsentRequestMaxAge(ctx)) + requestMaxExpire := time.Now().Add(-p.r.Config().ConsentRequestMaxAge(ctx)) if requestMaxExpire.Before(notAfter) { notAfter = requestMaxExpire } - challenges := []string{} - queryFormat := ` - SELECT login_challenge - FROM hydra_oauth2_flow - WHERE ( - (state != ?) - OR (login_error IS NOT NULL AND login_error <> '{}' AND login_error <> '') - OR (consent_error IS NOT NULL AND consent_error <> '{}' AND consent_error <> '') - ) - AND requested_at < ? - AND nid = ? - ORDER BY login_challenge - LIMIT %[1]d - ` - + challenges := make([]string, 0, limit) // Select up to [limit] flows that can be safely deleted, i.e. flows that meet // the following criteria: // - flow.state is anything between FlowStateLoginInitialized and FlowStateConsentUnused (unhandled) @@ -681,21 +540,30 @@ func (p *Persister) FlushInactiveLoginConsentRequests(ctx context.Context, notAf // - flow.consent_error has valid error (consent rejected) // AND timed-out // - flow.requested_at < minimum of ttl.login_consent_request and notAfter - q := p.Connection(ctx).RawQuery(fmt.Sprintf(queryFormat, limit), flow.FlowStateConsentUsed, notAfter, p.NetworkID(ctx)) - - if err := q.All(&challenges); err == sql.ErrNoRows { - return errors.Wrap(fosite.ErrNotFound, "") + q := p.Connection(ctx).RawQuery(` + SELECT login_challenge + FROM hydra_oauth2_flow + WHERE ( + (state != ? AND state IS NOT NULL) + OR (login_error IS NOT NULL AND login_error <> '{}' AND login_error <> '') + OR (consent_error IS NOT NULL AND consent_error <> '{}' AND consent_error <> '') + ) + AND requested_at < ? + AND nid = ? + ORDER BY login_challenge + LIMIT ?`, + flow.FlowStateConsentUsed, notAfter, p.NetworkID(ctx), limit) + + if err := q.All(&challenges); err != nil { + return errors.WithStack(err) } // Delete in batch consent requests and their references in cascade for i := 0; i < len(challenges); i += batchSize { - j := i + batchSize - if j > len(challenges) { - j = len(challenges) - } + j := min(i+batchSize, len(challenges)) q := p.Connection(ctx).RawQuery( - fmt.Sprintf("DELETE FROM %s WHERE login_challenge in (?) AND nid = ?", (&f).TableName()), + "DELETE FROM hydra_oauth2_flow WHERE login_challenge in (?) AND nid = ?", challenges[i:j], p.NetworkID(ctx), ) @@ -707,3 +575,28 @@ func (p *Persister) FlushInactiveLoginConsentRequests(ctx context.Context, notAf return nil } + +func (p *Persister) mySQLConfirmLoginSession(ctx context.Context, session *flow.LoginSession) error { + return p.Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + err := sqlcon.HandleError(c.Create(session)) + if err == nil { + return nil + } + + if !errors.Is(err, sqlcon.ErrUniqueViolation) { + return err + } + + n, err := c. + Where("id = ? 
and nid = ?", session.ID, session.NID). + UpdateQuery(session, "authenticated_at", "subject", "identity_provider_session_id", "remember", "expires_at") + if err != nil { + return errors.WithStack(sqlcon.HandleError(err)) + } + if n == 0 { + return errors.WithStack(x.ErrNotFound) + } + + return nil + }) +} diff --git a/persistence/sql/persister_device.go b/persistence/sql/persister_device.go new file mode 100644 index 00000000000..fbcfc340c86 --- /dev/null +++ b/persistence/sql/persister_device.go @@ -0,0 +1,290 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sql + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net/url" + "strings" + "time" + + "github.com/gofrs/uuid" + "github.com/pkg/errors" + "github.com/tidwall/gjson" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/x/otelx" + "github.com/ory/x/sqlcon" + "github.com/ory/x/sqlxx" + "github.com/ory/x/stringsx" +) + +const ( + sqlTableDeviceAuthCodes tableName = "hydra_oauth2_device_auth_codes" +) + +type DeviceRequestSQL struct { + ID string `db:"device_code_signature"` + UserCodeID string `db:"user_code_signature"` + NID uuid.UUID `db:"nid"` + Request string `db:"request_id"` + ConsentChallenge sql.NullString `db:"challenge_id"` + RequestedAt time.Time `db:"requested_at"` + Client string `db:"client_id"` + Scopes string `db:"scope"` + GrantedScope string `db:"granted_scope"` + RequestedAudience string `db:"requested_audience"` + GrantedAudience string `db:"granted_audience"` + Form string `db:"form_data"` + Subject string `db:"subject"` + DeviceCodeActive bool `db:"device_code_active"` + UserCodeState fosite.UserCodeState `db:"user_code_state"` + Session []byte `db:"session_data"` + // InternalExpiresAt denormalizes the expiry from the session to additionally store it as a row. + InternalExpiresAt sqlxx.NullTime `db:"expires_at" json:"-"` +} + +func (r DeviceRequestSQL) TableName() string { + return string(sqlTableDeviceAuthCodes) +} + +func (r *DeviceRequestSQL) toRequest(ctx context.Context, session fosite.Session, p *Persister) (_ *fosite.DeviceRequest, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeviceRequestSQL.toRequest") + defer otelx.End(span, &err) + + sess := r.Session + if !gjson.ValidBytes(sess) { + var err error + sess, err = p.r.KeyCipher().Decrypt(ctx, string(sess), nil) + if err != nil { + return nil, errors.WithStack(err) + } + } + + if session != nil { + if err := json.Unmarshal(sess, session); err != nil { + return nil, errors.WithStack(err) + } + } else { + p.l.Debugf("Got an empty session in toRequest") + } + + c, err := p.GetClient(ctx, r.Client) + if err != nil { + return nil, err + } + + val, err := url.ParseQuery(r.Form) + if err != nil { + return nil, errors.WithStack(err) + } + + return &fosite.DeviceRequest{ + UserCodeState: r.UserCodeState, + Request: fosite.Request{ + ID: r.Request, + RequestedAt: r.RequestedAt, + // ExpiresAt does not need to be populated as we get the expiry time from the session. 
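		// Note: the remaining fields below are rehydrated from their flattened column
		// encodings: pipe-delimited scope/audience strings and the URL-encoded form data.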
+ Client: c, + RequestedScope: stringsx.Splitx(r.Scopes, "|"), + GrantedScope: stringsx.Splitx(r.GrantedScope, "|"), + RequestedAudience: stringsx.Splitx(r.RequestedAudience, "|"), + GrantedAudience: stringsx.Splitx(r.GrantedAudience, "|"), + Form: val, + Session: session, + }, + }, nil +} + +func (p *Persister) sqlDeviceSchemaFromRequest(ctx context.Context, deviceCodeSignature, userCodeSignature string, r fosite.DeviceRequester, expiresAt time.Time) (*DeviceRequestSQL, error) { + subject := "" + if r.GetSession() == nil { + p.l.Debugf("Got an empty session in sqlSchemaFromRequest") + } else { + subject = r.GetSession().GetSubject() + } + + session, err := json.Marshal(r.GetSession()) + if err != nil { + return nil, errors.WithStack(err) + } + + if p.r.Config().EncryptSessionData(ctx) { + ciphertext, err := p.r.KeyCipher().Encrypt(ctx, session, nil) + if err != nil { + return nil, errors.WithStack(err) + } + session = []byte(ciphertext) + } + + var challenge sql.NullString + rr, ok := r.GetSession().(*oauth2.Session) + if !ok && r.GetSession() != nil { + return nil, errors.Errorf("Expected request to be of type *Session, but got: %T", r.GetSession()) + } else if ok { + if len(rr.ConsentChallenge) > 0 { + challenge = sql.NullString{Valid: true, String: rr.ConsentChallenge} + } + } + + return &DeviceRequestSQL{ + Request: r.GetID(), + ConsentChallenge: challenge, + ID: deviceCodeSignature, + UserCodeID: userCodeSignature, + RequestedAt: r.GetRequestedAt(), + InternalExpiresAt: sqlxx.NullTime(expiresAt), + Client: r.GetClient().GetID(), + Scopes: strings.Join(r.GetRequestedScopes(), "|"), + GrantedScope: strings.Join(r.GetGrantedScopes(), "|"), + GrantedAudience: strings.Join(r.GetGrantedAudience(), "|"), + RequestedAudience: strings.Join(r.GetRequestedAudience(), "|"), + Form: r.GetRequestForm().Encode(), + Session: session, + Subject: subject, + DeviceCodeActive: true, + UserCodeState: r.GetUserCodeState(), + }, nil +} + +// CreateDeviceCodeSession creates a new device code session and stores it in the database. Implements DeviceAuthStorage. +func (p *Persister) CreateDeviceAuthSession(ctx context.Context, deviceCodeSignature, userCodeSignature string, requester fosite.DeviceRequester) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateDeviceCodeSession") + defer otelx.End(span, &err) + + req, err := p.sqlDeviceSchemaFromRequest(ctx, deviceCodeSignature, userCodeSignature, requester, requester.GetSession().GetExpiresAt(fosite.DeviceCode).UTC()) + if err != nil { + return err + } + + if err := sqlcon.HandleError(p.CreateWithNetwork(ctx, req)); errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) + } else if errors.Is(err, sqlcon.ErrUniqueViolation) { + return errors.Wrap(fosite.ErrExistingUserCodeSignature, err.Error()) + } else if err != nil { + return err + } + + return nil +} + +// GetDeviceCodeSession returns a device code session from the database. Implements DeviceAuthStorage. 
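A standalone sketch of the pipe-delimited encoding used above for the scope and audience columns: strings.Join on the write path and a Splitx-style split on the read path that yields no elements for an empty column. The helper names and values here are illustrative only, not part of the change set.

package main

import (
	"fmt"
	"strings"
)

// joinPipe mirrors how sqlDeviceSchemaFromRequest flattens a scope slice into one column.
func joinPipe(values []string) string { return strings.Join(values, "|") }

// splitPipe approximates the Splitx-style read path: an empty column yields an
// empty slice rather than a slice containing one empty string.
func splitPipe(column string) []string {
	if column == "" {
		return []string{}
	}
	return strings.Split(column, "|")
}

func main() {
	stored := joinPipe([]string{"openid", "offline_access"})
	fmt.Println(stored)             // openid|offline_access
	fmt.Println(splitPipe(stored))  // [openid offline_access]
	fmt.Println(len(splitPipe(""))) // 0
}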
+func (p *Persister) GetDeviceCodeSession(ctx context.Context, signature string, session fosite.Session) (_ fosite.DeviceRequester, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetDeviceCodeSession") + defer otelx.End(span, &err) + + r := DeviceRequestSQL{} + if err = p.QueryWithNetwork(ctx).Where("device_code_signature = ?", signature).First(&r); errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(fosite.ErrNotFound) + } else if err != nil { + return nil, sqlcon.HandleError(err) + } + + if !r.DeviceCodeActive { + fr, err := r.toRequest(ctx, session, p) + if err != nil { + return nil, err + } + return fr, errors.WithStack(fosite.ErrInactiveToken) + } + + return r.toRequest(ctx, session, p) +} + +// InvalidateDeviceCodeSession invalidates a device code session. Implements DeviceAuthStorage. +func (p *Persister) InvalidateDeviceCodeSession(ctx context.Context, signature string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.InvalidateDeviceCodeSession") + defer otelx.End(span, &err) + + /* #nosec G201 table is static */ + return sqlcon.HandleError( + p.QueryWithNetwork(ctx). + Where("device_code_signature = ?", signature). + Delete(DeviceRequestSQL{})) +} + +// GetUserCodeSession returns a user code session from the database. Implements FositeStorer. +func (p *Persister) GetUserCodeSession(ctx context.Context, signature string, session fosite.Session) (_ fosite.DeviceRequester, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetUserCodeSession") + defer otelx.End(span, &err) + + r := DeviceRequestSQL{} + if session == nil { + session = oauth2.NewSessionWithCustomClaims(ctx, p.r.Config(), "") + } + + if err = p.QueryWithNetwork(ctx).Where("user_code_signature = ?", signature).First(&r); errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(fosite.ErrNotFound) + } else if err != nil { + return nil, sqlcon.HandleError(err) + } + + fr, err := r.toRequest(ctx, session, p) + if err != nil { + return nil, err + } + + if r.UserCodeState != fosite.UserCodeUnused { + return fr, errors.WithStack(fosite.ErrInactiveToken) + } + + return fr, err +} + +// GetDeviceCodeSessionByRequestID returns a device code session from the database. Implements FositeStorer. +func (p *Persister) GetDeviceCodeSessionByRequestID(ctx context.Context, requestID string, session fosite.Session) (_ fosite.DeviceRequester, deviceCodeSignature string, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetDeviceCodeSessionByRequestID") + defer otelx.End(span, &err) + + r := DeviceRequestSQL{} + if err = p.QueryWithNetwork(ctx).Where("request_id = ?", requestID).First(&r); errors.Is(err, sql.ErrNoRows) { + return nil, "", errors.WithStack(fosite.ErrNotFound) + } else if err != nil { + return nil, "", sqlcon.HandleError(err) + } + + if !r.DeviceCodeActive { + fr, err := r.toRequest(ctx, session, p) + if err != nil { + return nil, "", err + } + return fr, r.ID, errors.WithStack(fosite.ErrInactiveToken) + } + + fr, err := r.toRequest(ctx, session, p) + if err != nil { + return nil, "", err + } + return fr, r.ID, nil +} + +// UpdateDeviceCodeSessionBySignature updates a device code session by the device_code signature. Implements FositeStorer. 
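The getters above return the request together with an "inactive token" error when the row exists but is no longer active, so callers can distinguish that case from a plain not-found. A minimal sketch of that pattern with stand-in types and sentinel errors; every name below is illustrative and not part of the fosite or Hydra APIs.

package main

import (
	"errors"
	"fmt"
)

// Stand-ins for the not-found and inactive-token errors; names are illustrative only.
var (
	errNotFound = errors.New("not found")
	errInactive = errors.New("inactive token")
)

type deviceRow struct {
	active bool
}

// getSession mirrors the shape of GetDeviceCodeSession: an inactive row is still
// returned alongside the "inactive" error so the caller can inspect it.
func getSession(rows map[string]deviceRow, signature string) (*deviceRow, error) {
	r, ok := rows[signature]
	if !ok {
		return nil, errNotFound
	}
	if !r.active {
		return &r, errInactive
	}
	return &r, nil
}

func main() {
	rows := map[string]deviceRow{"sig-1": {active: false}}
	if r, err := getSession(rows, "sig-1"); errors.Is(err, errInactive) {
		fmt.Printf("row is retrievable but inactive: %+v\n", *r)
	}
}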
+func (p *Persister) UpdateDeviceCodeSessionBySignature(ctx context.Context, signature string, requester fosite.DeviceRequester) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateDeviceCodeSessionBySignature") + defer otelx.End(span, &err) + + req, err := p.sqlDeviceSchemaFromRequest(ctx, signature, "", requester, requester.GetSession().GetExpiresAt(fosite.DeviceCode).UTC()) + if err != nil { + return err + } + + stmt := fmt.Sprintf( + "UPDATE %s SET granted_scope=?, granted_audience=?, session_data=?, user_code_state=?, subject=?, challenge_id=? WHERE device_code_signature=? AND nid = ?", + sqlTableDeviceAuthCodes, + ) + + /* #nosec G201 table is static */ + return sqlcon.HandleError( + p.Connection(ctx).RawQuery(stmt, + req.GrantedScope, req.GrantedAudience, + req.Session, req.UserCodeState, + req.Subject, req.ConsentChallenge, + signature, p.NetworkID(ctx), + ).Exec(), + ) +} diff --git a/persistence/sql/persister_grant_jwk.go b/persistence/sql/persister_grant_jwk.go index 3c1c668cfb7..001167ad0b7 100644 --- a/persistence/sql/persister_grant_jwk.go +++ b/persistence/sql/persister_grant_jwk.go @@ -5,122 +5,187 @@ package sql import ( "context" - "strings" + "fmt" "time" + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/pkg/errors" - "github.com/gobuffalo/pop/v6" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/hydra/oauth2/trust" - "github.com/ory/x/stringsx" - + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/pop/v6" + "github.com/ory/x/otelx" + keysetpagination "github.com/ory/x/pagination/keysetpagination_v2" "github.com/ory/x/sqlcon" + "github.com/ory/x/sqlxx" ) -var _ trust.GrantManager = &Persister{} +var _ trust.GrantManager = (*Persister)(nil) + +type SQLGrant struct { + ID uuid.UUID `db:"id"` + NID uuid.UUID `db:"nid"` + Issuer string `db:"issuer"` + Subject string `db:"subject"` + AllowAnySubject bool `db:"allow_any_subject"` + Scope sqlxx.StringSlicePipeDelimiter `db:"scope"` + KeySet string `db:"key_set"` + KeyID string `db:"key_id"` + CreatedAt time.Time `db:"created_at"` + ExpiresAt time.Time `db:"expires_at"` +} + +func (SQLGrant) TableName() string { + return "hydra_oauth2_trusted_jwt_bearer_issuer" +} + +func (SQLGrant) fromGrant(g trust.Grant) SQLGrant { + return SQLGrant{ + ID: g.ID, + Issuer: g.Issuer, + Subject: g.Subject, + AllowAnySubject: g.AllowAnySubject, + Scope: g.Scope, + KeySet: g.PublicKey.Set, + KeyID: g.PublicKey.KeyID, + CreatedAt: g.CreatedAt, + ExpiresAt: g.ExpiresAt, + } +} + +func (d SQLGrant) toGrant() trust.Grant { + return trust.Grant{ + ID: d.ID, + Issuer: d.Issuer, + Subject: d.Subject, + AllowAnySubject: d.AllowAnySubject, + Scope: d.Scope, + PublicKey: trust.PublicKey{ + Set: d.KeySet, + KeyID: d.KeyID, + }, + CreatedAt: d.CreatedAt, + ExpiresAt: d.ExpiresAt, + } +} -func (p *Persister) CreateGrant(ctx context.Context, g trust.Grant, publicKey jose.JSONWebKey) error { +// CreateGrant implements GrantManager +func (p *Persister) CreateGrant(ctx context.Context, g trust.Grant, publicKey jose.JSONWebKey) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateGrant") - defer span.End() + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.Transaction(ctx, func(ctx 
context.Context, c *pop.Connection) error { // add key, if it doesn't exist - if _, err := p.GetKey(ctx, g.PublicKey.Set, g.PublicKey.KeyID); err != nil { + if _, err := p.d.KeyManager().GetKey(ctx, g.PublicKey.Set, g.PublicKey.KeyID); err != nil { if !errors.Is(err, sqlcon.ErrNoRows) { return sqlcon.HandleError(err) } - if err = p.AddKey(ctx, g.PublicKey.Set, &publicKey); err != nil { + if err = p.d.KeyManager().AddKey(ctx, g.PublicKey.Set, &publicKey); err != nil { return sqlcon.HandleError(err) } } - data := p.sqlDataFromJWTGrant(g) + data := SQLGrant{}.fromGrant(g) return sqlcon.HandleError(p.CreateWithNetwork(ctx, &data)) }) } -func (p *Persister) GetConcreteGrant(ctx context.Context, id string) (trust.Grant, error) { +// GetConcreteGrant implements GrantManager +func (p *Persister) GetConcreteGrant(ctx context.Context, id uuid.UUID) (_ trust.Grant, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetConcreteGrant") - defer span.End() + defer otelx.End(span, &err) - var data trust.SQLData + var data SQLGrant if err := p.QueryWithNetwork(ctx).Where("id = ?", id).First(&data); err != nil { return trust.Grant{}, sqlcon.HandleError(err) } - return p.jwtGrantFromSQlData(data), nil + return data.toGrant(), nil } -func (p *Persister) DeleteGrant(ctx context.Context, id string) error { +// DeleteGrant implements GrantManager +func (p *Persister) DeleteGrant(ctx context.Context, id uuid.UUID) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteGrant") - defer span.End() + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { grant, err := p.GetConcreteGrant(ctx, id) if err != nil { return sqlcon.HandleError(err) } - if err := p.QueryWithNetwork(ctx).Where("id = ?", grant.ID).Delete(&trust.SQLData{}); err != nil { + if err := p.QueryWithNetwork(ctx).Where("id = ?", grant.ID).Delete(&SQLGrant{}); err != nil { return sqlcon.HandleError(err) } - return p.DeleteKey(ctx, grant.PublicKey.Set, grant.PublicKey.KeyID) + return p.d.KeyManager().DeleteKey(ctx, grant.PublicKey.Set, grant.PublicKey.KeyID) }) } -func (p *Persister) GetGrants(ctx context.Context, limit, offset int, optionalIssuer string) ([]trust.Grant, error) { +// GetGrants implements GrantManager +func (p *Persister) GetGrants(ctx context.Context, optionalIssuer string, pageOpts ...keysetpagination.Option) (_ []trust.Grant, _ *keysetpagination.Paginator, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetGrants") - defer span.End() + defer otelx.End(span, &err) - grantsData := make([]trust.SQLData, 0) + paginator := keysetpagination.NewPaginator(append(pageOpts, + keysetpagination.WithDefaultToken(keysetpagination.NewPageToken(keysetpagination.Column{Name: "id", Value: uuid.Nil})), + )...) - query := p.QueryWithNetwork(ctx). - Paginate(offset/limit+1, limit). 
- Order("id") + var grantsData []SQLGrant + query := p.QueryWithNetwork(ctx).Scope(keysetpagination.Paginate[SQLGrant](paginator)) if optionalIssuer != "" { query = query.Where("issuer = ?", optionalIssuer) } if err := query.All(&grantsData); err != nil { - return nil, sqlcon.HandleError(err) + return nil, nil, sqlcon.HandleError(err) } + grantsData, nextPage := keysetpagination.Result(grantsData, paginator) - grants := make([]trust.Grant, 0, len(grantsData)) - for _, data := range grantsData { - grants = append(grants, p.jwtGrantFromSQlData(data)) + grants := make([]trust.Grant, len(grantsData)) + for i := range grantsData { + grants[i] = grantsData[i].toGrant() } - return grants, nil + return grants, nextPage, nil } -func (p *Persister) CountGrants(ctx context.Context) (int, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CountGrants") - defer span.End() +// FlushInactiveGrants implements GrantManager +func (p *Persister) FlushInactiveGrants(ctx context.Context, notAfter time.Time, _ int, _ int) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FlushInactiveGrants") + defer otelx.End(span, &err) - n, err := p.QueryWithNetwork(ctx). - Count(&trust.SQLData{}) - return n, sqlcon.HandleError(err) + deleteUntil := time.Now().UTC() + if deleteUntil.After(notAfter) { + deleteUntil = notAfter + } + return sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("expires_at < ?", deleteUntil).Delete(&SQLGrant{})) } -func (p *Persister) GetPublicKey(ctx context.Context, issuer string, subject string, keyId string) (*jose.JSONWebKey, error) { +// GetPublicKey implements RFC7523KeyStorage +func (p *Persister) GetPublicKey(ctx context.Context, issuer string, subject string, keyId string) (_ *jose.JSONWebKey, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetPublicKey") - defer span.End() + defer otelx.End(span, &err) - var data trust.SQLData - query := p.QueryWithNetwork(ctx). - Where("issuer = ?", issuer). - Where("subject = ? OR allow_any_subject IS TRUE", subject). - Where("key_id = ?", keyId). - Where("nid = ?", p.NetworkID(ctx)) - if err := query.First(&data); err != nil { + tableName := SQLGrant{}.TableName() + // Index hint. + if p.Connection(ctx).Dialect.Name() == "cockroach" { + tableName += "@hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx" + } + + sql := fmt.Sprintf(`SELECT key_set FROM %s WHERE key_id = ? AND nid = ? AND issuer = ? AND (subject = ? OR allow_any_subject IS TRUE) LIMIT 1`, tableName) + query := p.Connection(ctx).RawQuery(sql, + keyId, p.NetworkID(ctx), issuer, subject, + ) + var keySetID string + if err := query.First(&keySetID); err != nil { return nil, sqlcon.HandleError(err) } - keySet, err := p.GetKey(ctx, data.KeySet, keyId) + // TODO: Consider merging this query with the one above using a `JOIN`. 
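	// key_set holds the set name the key was registered under (the issuer, per the
	// construction documented in GetPublicKeys below), so this second lookup resolves
	// the actual key material from the JWK store.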
+ keySet, err := p.d.KeyManager().GetKey(ctx, keySetID, keyId) if err != nil { return nil, err } @@ -128,15 +193,25 @@ func (p *Persister) GetPublicKey(ctx context.Context, issuer string, subject str return &keySet.Keys[0], nil } -func (p *Persister) GetPublicKeys(ctx context.Context, issuer string, subject string) (*jose.JSONWebKeySet, error) { +// GetPublicKeys implements RFC7523KeyStorage +func (p *Persister) GetPublicKeys(ctx context.Context, issuer string, subject string) (_ *jose.JSONWebKeySet, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetPublicKeys") - defer span.End() + defer otelx.End(span, &err) + + q := p.QueryWithNetwork(ctx) + expiresAt := "expires_at > NOW()" + if q.Connection.Dialect.Name() == "sqlite3" { + expiresAt = "expires_at > datetime('now')" + } - grantsData := make([]trust.SQLData, 0) - query := p.QueryWithNetwork(ctx). + grantsData := make([]SQLGrant, 0) + query := q. + Select("key_id"). + Where(expiresAt). Where("issuer = ?", issuer). - Where("subject = ? OR allow_any_subject IS TRUE", subject). - Where("nid = ?", p.NetworkID(ctx)) + Where("(subject = ? OR allow_any_subject IS TRUE)", subject). + Order("created_at DESC"). + Limit(100) // Load maximum of 100 keys if err := query.All(&grantsData); err != nil { return nil, sqlcon.HandleError(err) @@ -146,46 +221,69 @@ func (p *Persister) GetPublicKeys(ctx context.Context, issuer string, subject st return &jose.JSONWebKeySet{}, nil } - // because keys must be grouped by issuer, we can retrieve set name from first grant - keySet, err := p.GetKeySet(ctx, grantsData[0].KeySet) - if err != nil { - return nil, err + keyIDs := make([]interface{}, len(grantsData)) + for k, d := range grantsData { + keyIDs[k] = d.KeyID } - // find keys, that belong to grants - filteredKeySet := &jose.JSONWebKeySet{} - for _, data := range grantsData { - if keys := keySet.Key(data.KeyID); len(keys) > 0 { - filteredKeySet.Keys = append(filteredKeySet.Keys, keys...) - } + var js jwk.SQLDataRows + if err := p.QueryWithNetwork(ctx). + // key_set and issuer are set to the same value on creation: + // + // grant := Grant{ + // ID: uuid.New().String(), + // Issuer: grantRequest.Issuer, + // Subject: grantRequest.Subject, + // AllowAnySubject: grantRequest.AllowAnySubject, + // Scope: grantRequest.Scope, + // PublicKey: PublicKey{ + // Set: grantRequest.Issuer, // group all keys by issuer, so set=issuer + // KeyID: grantRequest.PublicKeyJWK.KeyID, + // }, + // CreatedAt: time.Now().UTC().Round(time.Second), + // ExpiresAt: grantRequest.ExpiresAt.UTC().Round(time.Second), + // } + // + // Therefore it is fine if we only look for the issuer here instead of the key set id. + Where("sid = ?", issuer). + Where("kid IN (?)", keyIDs). + Order("created_at DESC"). + All(&js); err != nil { + return nil, sqlcon.HandleError(err) } - return filteredKeySet, nil + return js.ToJWK(ctx, p.r.KeyCipher()) } -func (p *Persister) GetPublicKeyScopes(ctx context.Context, issuer string, subject string, keyId string) ([]string, error) { +// GetPublicKeyScopes implements RFC7523KeyStorage +func (p *Persister) GetPublicKeyScopes(ctx context.Context, issuer string, subject string, keyId string) (_ []string, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetPublicKeyScopes") - defer span.End() + defer otelx.End(span, &err) - var data trust.SQLData - query := p.QueryWithNetwork(ctx). - Where("issuer = ?", issuer). - Where("subject = ? OR allow_any_subject IS TRUE", subject). - Where("key_id = ?", keyId). 
- Where("nid = ?", p.NetworkID(ctx)) + tableName := SQLGrant{}.TableName() + // Index hint. + if p.Connection(ctx).Dialect.Name() == "cockroach" { + tableName += "@hydra_oauth2_trusted_jwt_bearer_issuer_nid_uq_idx" + } - if err := query.First(&data); err != nil { + sql := fmt.Sprintf(`SELECT scope FROM %s WHERE key_id = ? AND nid = ? AND issuer = ? AND (subject = ? OR allow_any_subject IS TRUE) LIMIT 1`, tableName) + query := p.Connection(ctx).RawQuery(sql, + keyId, p.NetworkID(ctx), issuer, subject, + ) + var scopes sqlxx.StringSlicePipeDelimiter + if err := query.First(&scopes); err != nil { return nil, sqlcon.HandleError(err) } - return p.jwtGrantFromSQlData(data).Scope, nil + return scopes, nil } -func (p *Persister) IsJWTUsed(ctx context.Context, jti string) (bool, error) { +// IsJWTUsed implements RFC7523KeyStorage +func (p *Persister) IsJWTUsed(ctx context.Context, jti string) (ok bool, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.IsJWTUsed") - defer span.End() + defer otelx.End(span, &err) - err := p.ClientAssertionJWTValid(ctx, jti) + err = p.ClientAssertionJWTValid(ctx, jti) if err != nil { return true, nil } @@ -193,50 +291,10 @@ func (p *Persister) IsJWTUsed(ctx context.Context, jti string) (bool, error) { return false, nil } -func (p *Persister) MarkJWTUsedForTime(ctx context.Context, jti string, exp time.Time) error { +// MarkJWTUsedForTime implements RFC7523KeyStorage +func (p *Persister) MarkJWTUsedForTime(ctx context.Context, jti string, exp time.Time) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.MarkJWTUsedForTime") - defer span.End() + defer otelx.End(span, &err) return p.SetClientAssertionJWT(ctx, jti, exp) } - -func (p *Persister) sqlDataFromJWTGrant(g trust.Grant) trust.SQLData { - return trust.SQLData{ - ID: g.ID, - Issuer: g.Issuer, - Subject: g.Subject, - AllowAnySubject: g.AllowAnySubject, - Scope: strings.Join(g.Scope, "|"), - KeySet: g.PublicKey.Set, - KeyID: g.PublicKey.KeyID, - CreatedAt: g.CreatedAt, - ExpiresAt: g.ExpiresAt, - } -} - -func (p *Persister) jwtGrantFromSQlData(data trust.SQLData) trust.Grant { - return trust.Grant{ - ID: data.ID, - Issuer: data.Issuer, - Subject: data.Subject, - AllowAnySubject: data.AllowAnySubject, - Scope: stringsx.Splitx(data.Scope, "|"), - PublicKey: trust.PublicKey{ - Set: data.KeySet, - KeyID: data.KeyID, - }, - CreatedAt: data.CreatedAt, - ExpiresAt: data.ExpiresAt, - } -} - -func (p *Persister) FlushInactiveGrants(ctx context.Context, notAfter time.Time, limit int, batchSize int) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FlushInactiveGrants") - defer span.End() - - deleteUntil := time.Now().UTC() - if deleteUntil.After(notAfter) { - deleteUntil = notAfter - } - return sqlcon.HandleError(p.QueryWithNetwork(ctx).Where("expires_at < ?", deleteUntil).Delete(&trust.SQLData{})) -} diff --git a/persistence/sql/persister_jwk.go b/persistence/sql/persister_jwk.go index ad4e8dc3bde..7a679f0f175 100644 --- a/persistence/sql/persister_jwk.go +++ b/persistence/sql/persister_jwk.go @@ -7,25 +7,42 @@ import ( "context" "encoding/json" - "github.com/gobuffalo/pop/v6" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/x/errorsx" - + "github.com/go-jose/go-jose/v3" + "github.com/gofrs/uuid" "github.com/pkg/errors" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" - "github.com/ory/hydra/jwk" - 
"github.com/ory/hydra/x" + "github.com/ory/hydra/v2/aead" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/pop/v6" + "github.com/ory/x/otelx" "github.com/ory/x/sqlcon" ) -var _ jwk.Manager = &Persister{} +var _ jwk.Manager = (*JWKPersister)(nil) + +type JWKPersister struct { + D interface { + BasePersisterProvider + baseDependencies + } +} -func (p *Persister) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (*jose.JSONWebKeySet, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GenerateAndPersistKey") - defer span.End() +// GenerateAndPersistKeySet implements jwk.Manager. +func (p *JWKPersister) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, use string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GenerateAndPersistKeySet", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid), + attribute.String("alg", alg))) + defer otelx.End(span, &err) + + if kid == "" { + kid = uuid.Must(uuid.NewV4()).String() + } - keys, err := jwk.GenerateJWK(ctx, jose.SignatureAlgorithm(alg), kid, use) + keys, err := jwk.GenerateJWK(jose.SignatureAlgorithm(alg), kid, use) if err != nil { return nil, errors.Wrapf(jwk.ErrUnsupportedKeyAlgorithm, "%s", err) } @@ -38,21 +55,25 @@ func (p *Persister) GenerateAndPersistKeySet(ctx context.Context, set, kid, alg, return keys, nil } -func (p *Persister) AddKey(ctx context.Context, set string, key *jose.JSONWebKey) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.AddKey") - defer span.End() +// AddKey implements jwk.Manager. +func (p *JWKPersister) AddKey(ctx context.Context, set string, key *jose.JSONWebKey) (err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.AddKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", key.KeyID))) + defer otelx.End(span, &err) out, err := json.Marshal(key) if err != nil { - return errorsx.WithStack(err) + return errors.WithStack(err) } - encrypted, err := p.r.KeyCipher().Encrypt(ctx, out) + encrypted, err := aead.NewAESGCM(p.D.Config()).Encrypt(ctx, out, nil) if err != nil { - return errorsx.WithStack(err) + return errors.WithStack(err) } - return sqlcon.HandleError(p.CreateWithNetwork(ctx, &jwk.SQLData{ + return sqlcon.HandleError(p.D.BasePersister().CreateWithNetwork(ctx, &jwk.SQLData{ Set: set, KID: key.KeyID, Version: 0, @@ -60,23 +81,24 @@ func (p *Persister) AddKey(ctx context.Context, set string, key *jose.JSONWebKey })) } -func (p *Persister) AddKeySet(ctx context.Context, set string, keys *jose.JSONWebKeySet) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.AddKey") - defer span.End() +// AddKeySet implements jwk.Manager. 
+func (p *JWKPersister) AddKeySet(ctx context.Context, set string, keys *jose.JSONWebKeySet) (err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.AddKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.D.BasePersister().Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { for _, key := range keys.Keys { out, err := json.Marshal(key) if err != nil { - return errorsx.WithStack(err) + return errors.WithStack(err) } - encrypted, err := p.r.KeyCipher().Encrypt(ctx, out) + encrypted, err := aead.NewAESGCM(p.D.Config()).Encrypt(ctx, out, nil) if err != nil { return err } - if err := p.CreateWithNetwork(ctx, &jwk.SQLData{ + if err := p.D.BasePersister().CreateWithNetwork(ctx, &jwk.SQLData{ Set: set, KID: key.KeyID, Version: 0, @@ -89,12 +111,15 @@ func (p *Persister) AddKeySet(ctx context.Context, set string, keys *jose.JSONWe }) } -// UpdateKey updates or creates the key. -func (p *Persister) UpdateKey(ctx context.Context, set string, key *jose.JSONWebKey) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateKey") - defer span.End() +// UpdateKey updates or creates the key. Implements jwk.Manager. +func (p *JWKPersister) UpdateKey(ctx context.Context, set string, key *jose.JSONWebKey) (err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", key.KeyID))) + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.D.BasePersister().Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { if err := p.DeleteKey(ctx, set, key.KeyID); err != nil { return err } @@ -105,12 +130,12 @@ func (p *Persister) UpdateKey(ctx context.Context, set string, key *jose.JSONWeb }) } -// UpdateKeySet updates or creates the key set. -func (p *Persister) UpdateKeySet(ctx context.Context, set string, keySet *jose.JSONWebKeySet) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateKeySet") - defer span.End() +// UpdateKeySet updates or creates the key set. Implements jwk.Manager. +func (p *JWKPersister) UpdateKeySet(ctx context.Context, set string, keySet *jose.JSONWebKeySet) (err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.UpdateKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) - return p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { + return p.D.BasePersister().Transaction(ctx, func(ctx context.Context, c *pop.Connection) error { if err := p.DeleteKeySet(ctx, set); err != nil { return err } @@ -121,26 +146,30 @@ func (p *Persister) UpdateKeySet(ctx context.Context, set string, keySet *jose.J }) } -func (p *Persister) GetKey(ctx context.Context, set, kid string) (*jose.JSONWebKeySet, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetKey") - defer span.End() +// GetKey implements jwk.Manager. +func (p *JWKPersister) GetKey(ctx context.Context, set, kid string) (_ *jose.JSONWebKeySet, err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid))) + defer otelx.End(span, &err) var j jwk.SQLData - if err := p.QueryWithNetwork(ctx). 
+ if err := p.D.BasePersister().QueryWithNetwork(ctx). Where("sid = ? AND kid = ?", set, kid). Order("created_at DESC"). First(&j); err != nil { return nil, sqlcon.HandleError(err) } - key, err := p.r.KeyCipher().Decrypt(ctx, j.Key) + key, err := aead.NewAESGCM(p.D.Config()).Decrypt(ctx, j.Key, nil) if err != nil { - return nil, errorsx.WithStack(err) + return nil, errors.WithStack(err) } var c jose.JSONWebKey if err := json.Unmarshal(key, &c); err != nil { - return nil, errorsx.WithStack(err) + return nil, errors.WithStack(err) } return &jose.JSONWebKeySet{ @@ -148,55 +177,39 @@ func (p *Persister) GetKey(ctx context.Context, set, kid string) (*jose.JSONWebK }, nil } -func (p *Persister) GetKeySet(ctx context.Context, set string) (*jose.JSONWebKeySet, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetKeySet") - defer span.End() +// GetKeySet implements jwk.Manager. +func (p *JWKPersister) GetKeySet(ctx context.Context, set string) (keys *jose.JSONWebKeySet, err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) - var js []jwk.SQLData - if err := p.QueryWithNetwork(ctx). + var js jwk.SQLDataRows + if err := p.D.BasePersister().QueryWithNetwork(ctx). Where("sid = ?", set). Order("created_at DESC"). All(&js); err != nil { return nil, sqlcon.HandleError(err) } - if len(js) == 0 { - return nil, errors.Wrap(x.ErrNotFound, "") - } - - keys := &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{}} - for _, d := range js { - key, err := p.r.KeyCipher().Decrypt(ctx, d.Key) - if err != nil { - return nil, errorsx.WithStack(err) - } - - var c jose.JSONWebKey - if err := json.Unmarshal(key, &c); err != nil { - return nil, errorsx.WithStack(err) - } - keys.Keys = append(keys.Keys, c) - } - - if len(keys.Keys) == 0 { - return nil, errorsx.WithStack(x.ErrNotFound) - } - - return keys, nil + return js.ToJWK(ctx, aead.NewAESGCM(p.D.Config())) } -func (p *Persister) DeleteKey(ctx context.Context, set, kid string) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteKey") - defer span.End() +// DeleteKey implements jwk.Manager. +func (p *JWKPersister) DeleteKey(ctx context.Context, set, kid string) (err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteKey", + trace.WithAttributes( + attribute.String("set", set), + attribute.String("kid", kid))) + defer otelx.End(span, &err) - err := p.QueryWithNetwork(ctx).Where("sid=? AND kid=?", set, kid).Delete(&jwk.SQLData{}) + err = p.D.BasePersister().QueryWithNetwork(ctx).Where("sid=? AND kid=?", set, kid).Delete(&jwk.SQLData{}) return sqlcon.HandleError(err) } -func (p *Persister) DeleteKeySet(ctx context.Context, set string) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteKeySet") - defer span.End() +// DeleteKeySet implements jwk.Manager. 
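GetKey and GetKeySet above order rows by created_at DESC, so the most recently added key surfaces first. A small sketch of that newest-first selection; keyRow and newestFirst are illustrative stand-ins, not types from the change set.

package main

import (
	"fmt"
	"sort"
	"time"
)

// keyRow is a stand-in for a stored JWK row; only the fields needed to show the
// ordering are included.
type keyRow struct {
	KID       string
	CreatedAt time.Time
}

// newestFirst mirrors the ORDER BY created_at DESC used above: the most recently
// created key ends up at index 0 and is the one handed out.
func newestFirst(rows []keyRow) []keyRow {
	sort.Slice(rows, func(i, j int) bool { return rows[i].CreatedAt.After(rows[j].CreatedAt) })
	return rows
}

func main() {
	now := time.Now()
	rows := newestFirst([]keyRow{
		{KID: "old", CreatedAt: now.Add(-time.Hour)},
		{KID: "new", CreatedAt: now},
	})
	fmt.Println(rows[0].KID) // new
}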
+func (p *JWKPersister) DeleteKeySet(ctx context.Context, set string) (err error) { + ctx, span := p.D.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteKeySet", trace.WithAttributes(attribute.String("set", set))) + defer otelx.End(span, &err) - err := p.QueryWithNetwork(ctx).Where("sid=?", set).Delete(&jwk.SQLData{}) + err = p.D.BasePersister().QueryWithNetwork(ctx).Where("sid=?", set).Delete(&jwk.SQLData{}) return sqlcon.HandleError(err) } diff --git a/persistence/sql/persister_migration.go b/persistence/sql/persister_migration.go index f82605c90b4..7fed0cf2a63 100644 --- a/persistence/sql/persister_migration.go +++ b/persistence/sql/persister_migration.go @@ -7,60 +7,111 @@ import ( "context" "embed" "fmt" + "io/fs" "strconv" "strings" - "time" - "github.com/gobuffalo/pop/v6" + "github.com/pkg/errors" + "github.com/sirupsen/logrus/hooks/test" + "github.com/ory/hydra/v2/x" + "github.com/ory/pop/v6" + "github.com/ory/x/fsx" + "github.com/ory/x/logrusx" "github.com/ory/x/popx" - - "github.com/ory/x/errorsx" - "github.com/ory/x/sqlcon" ) //go:embed migrations/*.sql -var migrations embed.FS +var Migrations embed.FS + +var SilenceMigrations = false + +type ( + MigrationManager struct { + d migrationDependencies + conn *pop.Connection + extraMigrations []fs.FS + goMigrations []popx.Migration + + // cached values + mb *popx.MigrationBox + mbs popx.MigrationStatuses + } + migrationDependencies interface { + x.RegistryLogger + } +) + +func NewMigrationManager(c *pop.Connection, d migrationDependencies, extraMigrations []fs.FS, goMigrations []popx.Migration) *MigrationManager { + return &MigrationManager{ + d: d, + conn: c, + extraMigrations: extraMigrations, + goMigrations: goMigrations, + } +} + +func (m *MigrationManager) migrationBox() (_ *popx.MigrationBox, err error) { + if m.mb == nil { + logger := m.d.Logger() + if SilenceMigrations { + inner, _ := test.NewNullLogger() + logger = logrusx.New("hydra", "", logrusx.UseLogger(inner)) + } + m.mb, err = popx.NewMigrationBox( + fsx.Merge(append([]fs.FS{Migrations}, m.extraMigrations...)...), + m.conn, logger, + popx.WithGoMigrations(m.goMigrations)) + if err != nil { + return nil, err + } + } + return m.mb, nil +} -func (p *Persister) MigrationStatus(ctx context.Context) (popx.MigrationStatuses, error) { - if p.mbs != nil { - return p.mbs, nil +func (m *MigrationManager) MigrationStatus(ctx context.Context) (popx.MigrationStatuses, error) { + if m.mbs != nil { + return m.mbs, nil } - status, err := p.mb.Status(ctx) + mb, err := m.migrationBox() + if err != nil { + return nil, err + } + status, err := mb.Status(ctx) if err != nil { return nil, err } if !status.HasPending() { - p.mbs = status + m.mbs = status } return status, nil } -func (p *Persister) MigrateDown(ctx context.Context, steps int) error { - return errorsx.WithStack(p.mb.Down(ctx, steps)) -} - -func (p *Persister) MigrateUp(ctx context.Context) error { - if err := p.migrateOldMigrationTables(); err != nil { +func (m *MigrationManager) MigrateDown(ctx context.Context, steps int) error { + mb, err := m.migrationBox() + if err != nil { return err } - return errorsx.WithStack(p.mb.Up(ctx)) + return mb.Down(ctx, steps) } -func (p *Persister) MigrateUpTo(ctx context.Context, steps int) (int, error) { - if err := p.migrateOldMigrationTables(); err != nil { - return 0, err +func (m *MigrationManager) MigrateUp(ctx context.Context) error { + if err := 
m.migrateOldMigrationTables(); err != nil { + return err } - n, err := p.mb.UpTo(ctx, steps) - return n, errorsx.WithStack(err) + mb, err := m.migrationBox() + if err != nil { + return err + } + return mb.Up(ctx) } -func (p *Persister) PrepareMigration(_ context.Context) error { - return p.migrateOldMigrationTables() +func (m *MigrationManager) PrepareMigration(_ context.Context) error { + return m.migrateOldMigrationTables() } type oldTableName string @@ -72,21 +123,15 @@ const ( oauth2MigrationTableName oldTableName = "hydra_oauth2_migration" ) -// this type is copied from sql-migrate to remove the dependency -type OldMigrationRecord struct { - ID string `db:"id"` - AppliedAt time.Time `db:"applied_at"` -} - // this function is idempotent -func (p *Persister) migrateOldMigrationTables() error { - if err := p.conn.RawQuery(fmt.Sprintf("SELECT * FROM %s", clientMigrationTableName)).Exec(); err != nil { +func (m *MigrationManager) migrateOldMigrationTables() error { + if err := m.conn.RawQuery(fmt.Sprintf("SELECT * FROM %s", clientMigrationTableName)).Exec(); err != nil { // assume there are no old migration tables => done return nil } - if err := pop.CreateSchemaMigrations(p.conn); err != nil { - return errorsx.WithStack(err) + if err := pop.CreateSchemaMigrations(m.conn); err != nil { + return errors.WithStack(err) } // in this order the migrations only depend on already done ones @@ -98,10 +143,10 @@ func (p *Persister) migrateOldMigrationTables() error { // https://github.com/jackc/pgx/issues/110 // https://github.com/flynn/flynn/issues/2235 // get old migrations - var migrations []OldMigrationRecord + var migrations []string /* #nosec G201 table is static */ - if err := p.conn.RawQuery(fmt.Sprintf("SELECT * FROM %s", table)).All(&migrations); err != nil { + if err := m.conn.RawQuery(fmt.Sprintf("SELECT id FROM %s", table)).All(&migrations); err != nil { if strings.Contains(err.Error(), string(table)) { continue } @@ -109,24 +154,24 @@ func (p *Persister) migrateOldMigrationTables() error { } // translate migrations - for _, m := range migrations { + for _, migration := range migrations { // mark the migration as run for fizz // fizz standard version pattern: YYYYMMDDhhmmss - migrationNumber, err := strconv.ParseInt(m.ID, 10, 0) + migrationNumber, err := strconv.ParseInt(migration, 10, 0) if err != nil { - return errorsx.WithStack(err) + return errors.WithStack(err) } /* #nosec G201 - i is static (0..3) and migrationNumber is from the database */ - if err := p.conn.RawQuery( + if err := m.conn.RawQuery( fmt.Sprintf("INSERT INTO schema_migration (version) VALUES ('2019%02d%08d')", i+1, migrationNumber)). 
Exec(); err != nil { - return errorsx.WithStack(err) + return errors.WithStack(err) } } // delete old migration table - if err := p.conn.RawQuery(fmt.Sprintf("DROP TABLE %s", table)).Exec(); err != nil { + if err := m.conn.RawQuery(fmt.Sprintf("DROP TABLE %s", table)).Exec(); err != nil { return sqlcon.HandleError(err) } } diff --git a/persistence/sql/persister_nid_test.go b/persistence/sql/persister_nid_test.go index ae4410a4af4..5a99cd00b78 100644 --- a/persistence/sql/persister_nid_test.go +++ b/persistence/sql/persister_nid_test.go @@ -6,55 +6,57 @@ package sql_test import ( "context" "database/sql" + "encoding/json" "testing" "time" - "github.com/ory/x/uuidx" - - "github.com/ory/x/assertx" - + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" - "github.com/instana/testify/require" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "gopkg.in/square/go-jose.v2" - - "github.com/ory/fosite" - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/flow" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/jwk" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/oauth2/trust" - persistencesql "github.com/ory/hydra/persistence/sql" - "github.com/ory/hydra/x" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/flow" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/oauth2/trust" + "github.com/ory/hydra/v2/persistence" + persistencesql "github.com/ory/hydra/v2/persistence/sql" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/assertx" "github.com/ory/x/contextx" - "github.com/ory/x/dbal" "github.com/ory/x/networkx" + "github.com/ory/x/pointerx" + "github.com/ory/x/servicelocatorx" + "github.com/ory/x/sqlcon" "github.com/ory/x/sqlxx" + "github.com/ory/x/uuidx" ) type PersisterTestSuite struct { suite.Suite - registries map[string]driver.Registry - clean func(*testing.T) + registries map[string]*driver.RegistrySQL t1 context.Context t2 context.Context t1NID uuid.UUID t2NID uuid.UUID } -var _ PersisterTestSuite = PersisterTestSuite{} +var _ interface { + suite.SetupAllSuite + suite.TearDownTestSuite +} = (*PersisterTestSuite)(nil) func (s *PersisterTestSuite) SetupSuite() { - s.registries = map[string]driver.Registry{ - "memory": internal.NewRegistrySQLFromURL(s.T(), dbal.NewSQLiteTestDatabase(s.T()), true, &contextx.Default{}), - } + withCtxer := driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.TestContextualizer{})) - if !testing.Short() { - s.registries["postgres"], s.registries["mysql"], s.registries["cockroach"], s.clean = internal.ConnectDatabases(s.T(), true, &contextx.Default{}) - } + s.registries = 
testhelpers.ConnectDatabases(s.T(), true, withCtxer) s.t1NID, s.t2NID = uuid.Must(uuid.NewV4()), uuid.Must(uuid.NewV4()) s.t1 = contextx.SetNIDContext(context.Background(), s.t1NID) @@ -63,34 +65,31 @@ func (s *PersisterTestSuite) SetupSuite() { for _, r := range s.registries { require.NoError(s.T(), r.Persister().Connection(context.Background()).Create(&networkx.Network{ID: s.t1NID})) require.NoError(s.T(), r.Persister().Connection(context.Background()).Create(&networkx.Network{ID: s.t2NID})) - r.WithContextualizer(&contextx.TestContextualizer{}) } } func (s *PersisterTestSuite) TearDownTest() { for _, r := range s.registries { - r.WithContextualizer(&contextx.TestContextualizer{}) x.DeleteHydraRows(s.T(), r.Persister().Connection(context.Background())) } } func (s *PersisterTestSuite) TestAcceptLogoutRequest() { - t := s.T() lr := newLogoutRequest() for k, r := range s.registries { - t.Run("dialect="+k, func(*testing.T) { - require.NoError(t, r.ConsentManager().CreateLogoutRequest(s.t1, lr)) + s.T().Run("dialect="+k, func(t *testing.T) { + require.NoError(t, r.LogoutManager().CreateLogoutRequest(s.t1, lr)) - expected, err := r.ConsentManager().GetLogoutRequest(s.t1, lr.ID) + expected, err := r.LogoutManager().GetLogoutRequest(s.t1, lr.ID) require.NoError(t, err) require.Equal(t, false, expected.Accepted) - lrAccepted, err := r.ConsentManager().AcceptLogoutRequest(s.t2, lr.ID) + lrAccepted, err := r.LogoutManager().AcceptLogoutRequest(s.t2, lr.ID) require.Error(t, err) - require.Equal(t, &consent.LogoutRequest{}, lrAccepted) + require.Equal(t, &flow.LogoutRequest{}, lrAccepted) - actual, err := r.ConsentManager().GetLogoutRequest(s.t1, lr.ID) + actual, err := r.LogoutManager().GetLogoutRequest(s.t1, lr.ID) require.NoError(t, err) require.Equal(t, expected, actual) }) @@ -98,65 +97,62 @@ func (s *PersisterTestSuite) TestAcceptLogoutRequest() { } func (s *PersisterTestSuite) TestAddKeyGetKeyDeleteKey() { - t := s.T() key := newKey("test-ks", "test") for k, r := range s.registries { - t.Run("dialect="+k, func(*testing.T) { + s.T().Run("dialect="+k, func(t *testing.T) { ks := "key-set" - require.NoError(t, r.Persister().AddKey(s.t1, ks, &key)) - actual, err := r.Persister().GetKey(s.t2, ks, key.KeyID) + require.NoError(t, r.KeyManager().AddKey(s.t1, ks, &key)) + actual, err := r.KeyManager().GetKey(s.t2, ks, key.KeyID) require.Error(t, err) require.Equal(t, (*jose.JSONWebKeySet)(nil), actual) - actual, err = r.Persister().GetKey(s.t1, ks, key.KeyID) + actual, err = r.KeyManager().GetKey(s.t1, ks, key.KeyID) require.NoError(t, err) assertx.EqualAsJSON(t, &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{key}}, actual) - r.Persister().DeleteKey(s.t2, ks, key.KeyID) - _, err = r.Persister().GetKey(s.t1, ks, key.KeyID) + require.NoError(t, r.KeyManager().DeleteKey(s.t2, ks, key.KeyID)) + _, err = r.KeyManager().GetKey(s.t1, ks, key.KeyID) require.NoError(t, err) - r.Persister().DeleteKey(s.t1, ks, key.KeyID) - _, err = r.Persister().GetKey(s.t1, ks, key.KeyID) + require.NoError(t, r.KeyManager().DeleteKey(s.t1, ks, key.KeyID)) + _, err = r.KeyManager().GetKey(s.t1, ks, key.KeyID) require.Error(t, err) }) } } func (s *PersisterTestSuite) TestAddKeySetGetKeySetDeleteKeySet() { - t := s.T() ks := newKeySet("test-ks", "test") for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ksID := "key-set" - r.Persister().AddKeySet(s.t1, ksID, ks) - actual, err := r.Persister().GetKeySet(s.t2, ksID) + require.NoError(t, r.KeyManager().AddKeySet(s.t1, ksID, ks)) + 
actual, err := r.KeyManager().GetKeySet(s.t2, ksID) require.Error(t, err) require.Equal(t, (*jose.JSONWebKeySet)(nil), actual) - actual, err = r.Persister().GetKeySet(s.t1, ksID) + actual, err = r.KeyManager().GetKeySet(s.t1, ksID) require.NoError(t, err) requireKeySetEqual(t, ks, actual) - r.Persister().DeleteKeySet(s.t2, ksID) - _, err = r.Persister().GetKeySet(s.t1, ksID) + require.NoError(t, r.KeyManager().DeleteKeySet(s.t2, ksID)) + _, err = r.KeyManager().GetKeySet(s.t1, ksID) require.NoError(t, err) - r.Persister().DeleteKeySet(s.t1, ksID) - _, err = r.Persister().GetKeySet(s.t1, ksID) + require.NoError(t, r.KeyManager().DeleteKeySet(s.t1, ksID)) + _, err = r.KeyManager().GetKeySet(s.t1, ksID) require.Error(t, err) }) } } func (s *PersisterTestSuite) TestAuthenticate() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id", Secret: "secret"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id", Secret: "secret"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) - actual, err := r.Persister().Authenticate(s.t2, "client-id", []byte("secret")) + actual, err := r.Persister().AuthenticateClient(s.t2, "client-id", []byte("secret")) require.Error(t, err) require.Nil(t, actual) - actual, err = r.Persister().Authenticate(s.t1, "client-id", []byte("secret")) + actual, err = r.Persister().AuthenticateClient(s.t1, "client-id", []byte("secret")) require.NoError(t, err) require.NotNil(t, actual) }) @@ -164,9 +160,8 @@ func (s *PersisterTestSuite) TestAuthenticate() { } func (s *PersisterTestSuite) TestClientAssertionJWTValid() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(24*time.Hour)) require.NoError(t, r.Persister().SetClientAssertionJWT(s.t1, jti.JTI, jti.Expiry)) @@ -177,160 +172,78 @@ func (s *PersisterTestSuite) TestClientAssertionJWTValid() { } func (s *PersisterTestSuite) TestConfirmLoginSession() { - t := s.T() ls := newLoginSession() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - require.NoError(t, r.Persister().CreateLoginSession(s.t1, ls)) - expected := &consent.LoginSession{} - require.NoError(t, r.Persister().Connection(context.Background()).Find(expected, ls.ID)) + ls.AuthenticatedAt = sqlxx.NullTime(time.Now().UTC()) + ls.Remember = true + ls.NID = s.t1NID - require.NoError(t, r.Persister().ConfirmLoginSession(s.t2, expected.ID, time.Now(), expected.Subject, !expected.Remember)) - actual := &consent.LoginSession{} + for k, r := range s.registries { + s.T().Run(k, func(t *testing.T) { + // Expects the login session to be confirmed in the correct context. 
+ require.NoError(t, r.Persister().ConfirmLoginSession(s.t1, ls)) + actual := &flow.LoginSession{} require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, ls.ID)) - require.Equal(t, expected, actual) - require.NoError(t, r.Persister().ConfirmLoginSession(s.t1, expected.ID, time.Now(), expected.Subject, !expected.Remember)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, ls.ID)) - require.NotEqual(t, expected, actual) + require.True(t, time.Time(ls.AuthenticatedAt).UTC().Equal(time.Time(actual.AuthenticatedAt).UTC())) + require.True(t, time.Time(ls.ExpiresAt).UTC().Equal(time.Time(actual.ExpiresAt).UTC())) + + exp, _ := json.Marshal(ls) + act, _ := json.Marshal(actual) + require.JSONEq(t, string(exp), string(act)) + + // Can't find the login session in the wrong context. + require.ErrorIs(t, + r.Persister().ConfirmLoginSession(s.t2, ls), + x.ErrNotFound, + ) }) } } func (s *PersisterTestSuite) TestCreateSession() { - t := s.T() - ls := newLoginSession() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - require.NoError(t, r.Persister().CreateLoginSession(s.t1, ls)) - actual := &consent.LoginSession{} + s.T().Run(k, func(t *testing.T) { + ls := newLoginSession() + require.NoError(t, r.Persister().ConfirmLoginSession(s.t1, ls)) + actual := &flow.LoginSession{} require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, ls.ID)) require.Equal(t, s.t1NID, actual.NID) - ls.NID = actual.NID require.Equal(t, ls, actual) }) } } -func (s *PersisterTestSuite) TestCountClients() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - count, err := r.Persister().CountClients(s.t1) - require.NoError(t, err) - require.Equal(t, 0, count) - - count, err = r.Persister().CountClients(s.t2) - require.NoError(t, err) - require.Equal(t, 0, count) - - require.NoError(t, r.Persister().CreateClient(s.t1, newClient())) - - count, err = r.Persister().CountClients(s.t1) - require.NoError(t, err) - require.Equal(t, 1, count) - - count, err = r.Persister().CountClients(s.t2) - require.NoError(t, err) - require.Equal(t, 0, count) - }) - } -} - -func (s *PersisterTestSuite) TestCountGrants() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - count, err := r.Persister().CountGrants(s.t1) - require.NoError(t, err) - require.Equal(t, 0, count) - - count, err = r.Persister().CountGrants(s.t2) - require.NoError(t, err) - require.Equal(t, 0, count) - - keySet := uuid.Must(uuid.NewV4()).String() - publicKey := newKey(keySet, "use") - grant := newGrant(keySet, publicKey.KeyID) - require.NoError(t, r.Persister().AddKey(s.t1, keySet, &publicKey)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, publicKey)) - - count, err = r.Persister().CountGrants(s.t1) - require.NoError(t, err) - require.Equal(t, 1, count) - - count, err = r.Persister().CountGrants(s.t2) - require.NoError(t, err) - require.Equal(t, 0, count) - }) - } -} - -func (s *PersisterTestSuite) TestCountSubjectsGrantedConsentRequests() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - sub := uuid.Must(uuid.NewV4()).String() - count, err := r.Persister().CountSubjectsGrantedConsentRequests(s.t1, sub) - require.NoError(t, err) - require.Equal(t, 0, count) - - count, err = r.Persister().CountSubjectsGrantedConsentRequests(s.t2, sub) - require.NoError(t, err) - require.Equal(t, 0, count) - - sessionID := uuid.Must(uuid.NewV4()).String() - require.NoError(t, 
r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - f := newFlow(s.t1NID, client.LegacyClientID, sub, sqlxx.NullString(sessionID)) - f.ConsentSkip = false - f.ConsentError = &consent.RequestDeniedError{} - f.State = flow.FlowStateConsentUnused - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - count, err = r.Persister().CountSubjectsGrantedConsentRequests(s.t1, sub) - require.NoError(t, err) - require.Equal(t, 1, count) - - count, err = r.Persister().CountSubjectsGrantedConsentRequests(s.t2, sub) - require.NoError(t, err) - require.Equal(t, 0, count) - }) - } -} - func (s *PersisterTestSuite) TestCreateAccessTokenSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - c1 := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + c1 := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, c1)) - c2 := &client.Client{LegacyClientID: "client-id"} + c2 := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t2, c2)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: c1.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: c1.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, sig, fr)) actual := persistencesql.OAuth2RequestSQL{Table: "access"} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) require.Equal(t, s.t1NID, actual.NID) }) } } func (s *PersisterTestSuite) TestCreateAuthorizeCodeSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - c1 := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + c1 := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, c1)) - c2 := &client.Client{LegacyClientID: "client-id"} + c2 := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t2, c2)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: c1.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: c1.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAuthorizeCodeSession(s.t1, sig, fr)) actual := persistencesql.OAuth2RequestSQL{Table: "code"} require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) @@ -340,10 +253,9 @@ func (s *PersisterTestSuite) TestCreateAuthorizeCodeSession() { } func (s *PersisterTestSuite) TestCreateClient() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - expected := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + expected := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, expected)) actual := client.Client{} require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, expected.ID)) @@ -352,42 +264,14 @@ func (s *PersisterTestSuite) TestCreateClient() { } } -func (s *PersisterTestSuite) TestCreateConsentRequest() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t 
*testing.T) { - sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - req := &consent.OAuth2ConsentRequest{ - ID: "consent-request-id", - LoginChallenge: sqlxx.NullString(f.ID), - Skip: false, - Verifier: "verifier", - CSRF: "csrf", - } - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, req)) - - actual := flow.Flow{} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, f.ID)) - require.Equal(t, s.t1NID, actual.NID) - }) - } -} - func (s *PersisterTestSuite) TestCreateForcedObfuscatedLoginSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - session := &consent.ForcedObfuscatedLoginSession{ClientID: client.LegacyClientID} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + session := &consent.ForcedObfuscatedLoginSession{ClientID: cl.ID} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) require.NoError(t, r.Persister().CreateForcedObfuscatedLoginSession(s.t1, session)) - actual, err := r.Persister().GetForcedObfuscatedLoginSession(s.t1, client.LegacyClientID, "") + actual, err := r.Persister().GetForcedObfuscatedLoginSession(s.t1, cl.ID, "") require.NoError(t, err) require.Equal(t, s.t1NID, actual.NID) }) @@ -395,65 +279,33 @@ func (s *PersisterTestSuite) TestCreateForcedObfuscatedLoginSession() { } func (s *PersisterTestSuite) TestCreateGrant() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) - actual := trust.SQLData{} + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) + actual := persistencesql.SQLGrant{} require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, grant.ID)) require.Equal(t, s.t1NID, actual.NID) }) } } -func (s *PersisterTestSuite) TestCreateLoginRequest() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - lr := consent.LoginRequest{ID: "lr-id", ClientID: client.LegacyClientID, RequestedAt: time.Now()} - - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.ConsentManager().CreateLoginRequest(s.t1, &lr)) - f := flow.Flow{} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&f, lr.ID)) - require.Equal(t, s.t1NID, f.NID) - }) - } -} - -func (s *PersisterTestSuite) TestCreateLoginSession() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - ls := consent.LoginSession{ID: uuid.Must(uuid.NewV4()).String(), Remember: true} - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &ls)) - actual, err := 
r.Persister().GetRememberedLoginSession(s.t1, ls.ID) - require.NoError(t, err) - require.Equal(t, s.t1NID, actual.NID) - }) - } -} - func (s *PersisterTestSuite) TestCreateLogoutRequest() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - lr := consent.LogoutRequest{ + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + lr := flow.LogoutRequest{ // TODO there is not FK for SessionID so we don't need it here; TODO make sure the missing FK is intentional ID: uuid.Must(uuid.NewV4()).String(), - ClientID: sql.NullString{Valid: true, String: client.LegacyClientID}, + ClientID: sql.NullString{Valid: true, String: cl.ID}, } - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) require.NoError(t, r.Persister().CreateLogoutRequest(s.t1, &lr)) actual, err := r.Persister().GetLogoutRequest(s.t1, lr.ID) require.NoError(t, err) @@ -463,14 +315,14 @@ func (s *PersisterTestSuite) TestCreateLogoutRequest() { } func (s *PersisterTestSuite) TestCreateOpenIDConnectSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} authorizeCode := uuid.Must(uuid.NewV4()).String() require.NoError(t, r.Persister().CreateOpenIDConnectSession(s.t1, authorizeCode, request)) @@ -483,14 +335,14 @@ func (s *PersisterTestSuite) TestCreateOpenIDConnectSession() { } func (s *PersisterTestSuite) TestCreatePKCERequestSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} authorizeCode := uuid.Must(uuid.NewV4()).String() @@ -504,19 +356,19 @@ func (s *PersisterTestSuite) TestCreatePKCERequestSession() { } func (s *PersisterTestSuite) TestCreateRefreshTokenSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} authorizeCode := uuid.Must(uuid.NewV4()).String() - actual := persistencesql.OAuth2RequestSQL{Table: "refresh"} + actual := persistencesql.OAuth2RefreshTable{} require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, authorizeCode)) - require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, authorizeCode, request)) + require.NoError(t, 
r.Persister().CreateRefreshTokenSession(s.t1, authorizeCode, "", request)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, authorizeCode)) require.Equal(t, s.t1NID, actual.NID) }) @@ -524,96 +376,90 @@ func (s *PersisterTestSuite) TestCreateRefreshTokenSession() { } func (s *PersisterTestSuite) TestCreateWithNetwork() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - expected := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + expected := &client.Client{ID: "client-id"} store, ok := r.OAuth2Storage().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } - store.CreateWithNetwork(s.t1, expected) + require.True(t, ok) + require.NoError(t, store.CreateWithNetwork(s.t1, expected)) actual := &client.Client{} - require.NoError(t, r.Persister().Connection(context.Background()).Where("id = ?", expected.LegacyClientID).First(actual)) + require.NoError(t, r.Persister().Connection(context.Background()).Where("id = ?", expected.ID).First(actual)) require.Equal(t, s.t1NID, actual.NID) }) } } func (s *PersisterTestSuite) DeleteAccessTokenSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, sig, fr)) require.NoError(t, r.Persister().DeleteAccessTokenSession(s.t2, sig)) actual := persistencesql.OAuth2RequestSQL{Table: "access"} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) require.Equal(t, s.t1NID, actual.NID) require.NoError(t, r.Persister().DeleteAccessTokenSession(s.t1, sig)) - require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) }) } } func (s *PersisterTestSuite) TestDeleteAccessTokens() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, sig, fr)) - require.NoError(t, r.Persister().DeleteAccessTokens(s.t2, client.LegacyClientID)) + require.NoError(t, r.Persister().DeleteAccessTokens(s.t2, cl.ID)) actual := persistencesql.OAuth2RequestSQL{Table: "access"} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.NoError(t, 
r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) require.Equal(t, s.t1NID, actual.NID) - require.NoError(t, r.Persister().DeleteAccessTokens(s.t1, client.LegacyClientID)) - require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.NoError(t, r.Persister().DeleteAccessTokens(s.t1, cl.ID)) + require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) }) } } func (s *PersisterTestSuite) TestDeleteClient() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - c := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + c := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, c)) actual := client.Client{} - require.Error(t, r.Persister().DeleteClient(s.t2, c.LegacyClientID)) + require.Error(t, r.Persister().DeleteClient(s.t2, c.ID)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, c.ID)) - require.NoError(t, r.Persister().DeleteClient(s.t1, c.LegacyClientID)) + require.NoError(t, r.Persister().DeleteClient(s.t1, c.ID)) require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, c.ID)) }) } } func (s *PersisterTestSuite) TestDeleteGrant() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) - actual := trust.SQLData{} + actual := persistencesql.SQLGrant{} require.Error(t, r.Persister().DeleteGrant(s.t2, grant.ID)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, grant.ID)) require.NoError(t, r.Persister().DeleteGrant(s.t1, grant.ID)) @@ -623,32 +469,39 @@ func (s *PersisterTestSuite) TestDeleteGrant() { } func (s *PersisterTestSuite) TestDeleteLoginSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - ls := consent.LoginSession{ID: uuid.Must(uuid.NewV4()).String(), Remember: true} - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &ls)) + s.T().Run(k, func(t *testing.T) { + ls := flow.LoginSession{ + ID: uuid.Must(uuid.NewV4()).String(), + Remember: true, + IdentityProviderSessionID: sqlxx.NullString(uuid.Must(uuid.NewV4()).String()), + } + require.NoError(t, r.Persister().ConfirmLoginSession(s.t1, &ls)) - require.Error(t, r.Persister().DeleteLoginSession(s.t2, ls.ID)) - _, err := r.Persister().GetRememberedLoginSession(s.t1, ls.ID) + deletedLS, err := r.Persister().DeleteLoginSession(s.t2, ls.ID) + require.ErrorIs(t, err, sqlcon.ErrNoRows) + assert.Nil(t, deletedLS) + _, err = r.Persister().GetRememberedLoginSession(s.t1, ls.ID) require.NoError(t, err) - require.NoError(t, r.Persister().DeleteLoginSession(s.t1, ls.ID)) + deletedLS, err = r.Persister().DeleteLoginSession(s.t1, ls.ID) + require.NoError(t, err) + assert.Equal(t, ls, *deletedLS) _, err = r.Persister().GetRememberedLoginSession(s.t1, ls.ID) - require.Error(t, err) + require.ErrorIs(t, err, x.ErrNotFound) }) } } func (s *PersisterTestSuite) TestDeleteOpenIDConnectSession() { - t := s.T() for k, r := range 
s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} authorizeCode := uuid.Must(uuid.NewV4()).String() require.NoError(t, r.Persister().CreateOpenIDConnectSession(s.t1, authorizeCode, request)) @@ -664,17 +517,17 @@ func (s *PersisterTestSuite) TestDeleteOpenIDConnectSession() { } func (s *PersisterTestSuite) TestDeletePKCERequestSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} authorizeCode := uuid.Must(uuid.NewV4()).String() - r.Persister().CreatePKCERequestSession(s.t1, authorizeCode, request) + require.NoError(t, r.Persister().CreatePKCERequestSession(s.t1, authorizeCode, request)) actual := persistencesql.OAuth2RequestSQL{Table: "pkce"} @@ -687,19 +540,19 @@ func (s *PersisterTestSuite) TestDeletePKCERequestSession() { } func (s *PersisterTestSuite) TestDeleteRefreshTokenSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} signature := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, request)) + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, "", request)) - actual := persistencesql.OAuth2RequestSQL{Table: "refresh"} + actual := persistencesql.OAuth2RefreshTable{} require.NoError(t, r.Persister().DeleteRefreshTokenSession(s.t2, signature)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, signature)) @@ -710,156 +563,126 @@ func (s *PersisterTestSuite) TestDeleteRefreshTokenSession() { } func (s *PersisterTestSuite) TestDetermineNetwork() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { store, ok := r.OAuth2Storage().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } + require.True(t, ok) - r.Persister().Connection(context.Background()).Where("id <> ? AND id <> ?", s.t1NID, s.t2NID).Delete(&networkx.Network{}) + require.NoError(t, r.Persister().Connection(t.Context()).Where("id <> ? 
AND id <> ?", s.t1NID, s.t2NID).Delete(&networkx.Network{})) - actual, err := store.DetermineNetwork(context.Background()) + actual, err := store.DetermineNetwork(t.Context()) require.NoError(t, err) - require.True(t, actual.ID == s.t1NID || actual.ID == s.t2NID) + assert.Contains(t, []uuid.UUID{s.t1NID, s.t2NID}, actual.ID) }) } } func (s *PersisterTestSuite) TestFindGrantedAndRememberedConsentRequests() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - req := &consent.OAuth2ConsentRequest{ - ID: "consent-request-id", - LoginChallenge: sqlxx.NullString(f.ID), - Skip: false, - Verifier: "verifier", - CSRF: "csrf", + cl := &client.Client{ID: "client-id"} + f := newFlow(s.t1NID, cl.ID, "sub", sqlxx.NullString(sessionID)) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: sessionID}) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + + req := &flow.OAuth2ConsentRequest{ + ConsentRequestID: "consent-request-id", + LoginChallenge: sqlxx.NullString(f.ID), + Skip: false, } - hcr := &consent.AcceptOAuth2ConsentRequest{ - ID: req.ID, - HandledAt: sqlxx.NullTime(time.Now()), - Remember: true, - } - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, req)) - _, err := r.Persister().HandleConsentRequest(s.t1, hcr) - require.NoError(t, err) + f.ConsentRequestID = sqlxx.NullString(req.ConsentRequestID) + require.NoError(t, f.HandleConsentRequest(&flow.AcceptOAuth2ConsentRequest{ + Remember: true, + })) - actual, err := r.Persister().FindGrantedAndRememberedConsentRequests(s.t2, client.LegacyClientID, f.Subject) - require.Error(t, err) - require.Equal(t, 0, len(actual)) + f.State = flow.FlowStateConsentUsed + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t1, f)) + + actual, err := r.ConsentManager().FindGrantedAndRememberedConsentRequest(s.t2, cl.ID, f.Subject) + require.ErrorIs(t, err, consent.ErrNoPreviousConsentFound) + assert.Nil(t, actual) - actual, err = r.Persister().FindGrantedAndRememberedConsentRequests(s.t1, client.LegacyClientID, f.Subject) + actual, err = r.ConsentManager().FindGrantedAndRememberedConsentRequest(s.t1, cl.ID, f.Subject) require.NoError(t, err) - require.Equal(t, 1, len(actual)) + assert.EqualValues(t, req.ConsentRequestID, actual.ConsentRequestID) }) } } func (s *PersisterTestSuite) TestFindSubjectsGrantedConsentRequests() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - req := &consent.OAuth2ConsentRequest{ - ID: "consent-request-id", - LoginChallenge: sqlxx.NullString(f.ID), - Skip: false, - Verifier: "verifier", - CSRF: "csrf", - } - - hcr := 
&consent.AcceptOAuth2ConsentRequest{ - ID: req.ID, - HandledAt: sqlxx.NullTime(time.Now()), - Remember: true, - } - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, req)) - _, err := r.Persister().HandleConsentRequest(s.t1, hcr) - require.NoError(t, err) + cl := &client.Client{ID: "client-id"} + f := newFlow(s.t1NID, cl.ID, "sub", sqlxx.NullString(sessionID)) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: sessionID}) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t1, f)) - actual, err := r.Persister().FindSubjectsGrantedConsentRequests(s.t2, f.Subject, 100, 0) - require.Error(t, err) - require.Equal(t, 0, len(actual)) + _, _, err := r.ConsentManager().FindSubjectsGrantedConsentRequests(s.t2, f.Subject) + require.ErrorIs(t, err, consent.ErrNoPreviousConsentFound) - actual, err = r.Persister().FindSubjectsGrantedConsentRequests(s.t1, f.Subject, 100, 0) + actual, nextPage, err := r.ConsentManager().FindSubjectsGrantedConsentRequests(s.t1, f.Subject) require.NoError(t, err) - require.Equal(t, 1, len(actual)) + require.Len(t, actual, 1) + assert.Equal(t, f.ConsentRequestID.String(), actual[0].ConsentRequestID.String()) + assert.True(t, nextPage.IsLast()) }) } } func (s *PersisterTestSuite) TestFlushInactiveAccessTokens() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() fr.RequestedAt = time.Now().UTC().Add(-24 * time.Hour) - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, sig, fr)) actual := persistencesql.OAuth2RequestSQL{Table: "access"} require.NoError(t, r.Persister().FlushInactiveAccessTokens(s.t2, time.Now().Add(time.Hour), 100, 100)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) require.NoError(t, r.Persister().FlushInactiveAccessTokens(s.t1, time.Now().Add(time.Hour), 100, 100)) - require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) }) } } func (s *PersisterTestSuite) TestGenerateAndPersistKeySet() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - store, ok := r.OAuth2Storage().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } - + s.T().Run(k, func(t *testing.T) { actual := &jwk.SQLData{} - ks, err := store.GenerateAndPersistKeySet(s.t1, "ks", "kid", "RS256", "use") + key, err := jwk.GenerateJWK("RS256", "kid", "use") require.NoError(t, err) - require.Error(t, r.Persister().Connection(context.Background()).Where("sid = ? AND kid = ? AND nid = ?", "ks", ks.Keys[0].KeyID, s.t2NID).First(actual)) - require.NoError(t, r.Persister().Connection(context.Background()).Where("sid = ? AND kid = ? 
AND nid = ?", "ks", ks.Keys[0].KeyID, s.t1NID).First(actual)) + require.NoError(t, r.KeyManager().AddKey(s.t1, "ks", pointerx.Ptr(key.Keys[0].Public()))) + + err = sqlcon.HandleError(r.Persister().Connection(t.Context()).Where("sid = ? AND kid = ? AND nid = ?", "ks", "kid", s.t2NID).First(actual)) + require.ErrorIs(t, err, sqlcon.ErrNoRows) + require.NoError(t, r.Persister().Connection(t.Context()).Where("sid = ? AND kid = ? AND nid = ?", "ks", "kid", s.t1NID).First(actual)) }) } } func (s *PersisterTestSuite) TestFlushInactiveGrants() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), ExpiresAt: time.Now().Add(-24 * time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) - actual := trust.SQLData{} + actual := persistencesql.SQLGrant{} require.NoError(t, r.Persister().FlushInactiveGrants(s.t2, time.Now(), 100, 100)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, grant.ID)) require.NoError(t, r.Persister().FlushInactiveGrants(s.t1, time.Now(), 100, 100)) @@ -869,41 +692,45 @@ func (s *PersisterTestSuite) TestFlushInactiveGrants() { } func (s *PersisterTestSuite) TestFlushInactiveLoginConsentRequests() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) + s.T().Run(k, func(t *testing.T) { + sessionID := uuidx.NewV4().String() + cl := &client.Client{ID: uuidx.NewV4().String()} + f := newFlow(s.t1NID, cl.ID, "sub", sqlxx.NullString(sessionID)) f.RequestedAt = time.Now().Add(-24 * time.Hour) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: sessionID}) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + + type legacyFlow struct { + *flow.Flow + State flow.State `db:"state"` + } + require.NoError(t, r.Persister().Connection(s.t1).Create(&legacyFlow{Flow: f, State: f.State})) actual := flow.Flow{} require.NoError(t, r.Persister().FlushInactiveLoginConsentRequests(s.t2, time.Now(), 100, 100)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, f.ID)) require.NoError(t, r.Persister().FlushInactiveLoginConsentRequests(s.t1, time.Now(), 100, 100)) - require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, f.ID)) + require.ErrorIs(t, r.Persister().Connection(context.Background()).Find(&actual, f.ID), sql.ErrNoRows) }) } } func (s *PersisterTestSuite) TestFlushInactiveRefreshTokens() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} request := fosite.NewRequest() request.RequestedAt = time.Now().Add(-240 * 365 * time.Hour) request.Client = 
&fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} signature := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, request)) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, "", request)) - actual := persistencesql.OAuth2RequestSQL{Table: "refresh"} + actual := persistencesql.OAuth2RefreshTable{} require.NoError(t, r.Persister().FlushInactiveRefreshTokens(s.t2, time.Now(), 100, 100)) require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, signature)) @@ -914,14 +741,14 @@ func (s *PersisterTestSuite) TestFlushInactiveRefreshTokens() { } func (s *PersisterTestSuite) TestGetAccessTokenSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, sig, fr)) actual, err := r.Persister().GetAccessTokenSession(s.t2, sig, &fosite.DefaultSession{}) @@ -935,14 +762,14 @@ func (s *PersisterTestSuite) TestGetAccessTokenSession() { } func (s *PersisterTestSuite) TestGetAuthorizeCodeSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAuthorizeCodeSession(s.t1, sig, fr)) actual, err := r.Persister().GetAuthorizeCodeSession(s.t2, sig, &fosite.DefaultSession{}) @@ -956,30 +783,26 @@ func (s *PersisterTestSuite) TestGetAuthorizeCodeSession() { } func (s *PersisterTestSuite) TestGetClient() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - expected := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + expected := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, expected)) - actual, err := r.Persister().GetClient(s.t2, expected.LegacyClientID) + actual, err := r.Persister().GetClient(s.t2, expected.ID) require.Error(t, err) require.Nil(t, actual) - actual, err = r.Persister().GetClient(s.t1, expected.LegacyClientID) + actual, err = r.Persister().GetClient(s.t1, expected.ID) require.NoError(t, err) - require.Equal(t, expected.LegacyClientID, actual.GetID()) + require.Equal(t, expected.ID, actual.GetID()) }) } } func (s *PersisterTestSuite) TestGetClientAssertionJWT() { - t := s.T() for k, r := range s.registries { - t.Run(k, 
func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { store, ok := r.OAuth2Storage().(oauth2.AssertionJWTReader) - if !ok { - t.Fatal("type assertion failed") - } + require.True(t, ok) expected := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(24*time.Hour)) require.NoError(t, r.Persister().SetClientAssertionJWT(s.t1, expected.JTI, expected.Expiry)) @@ -992,51 +815,50 @@ func (s *PersisterTestSuite) TestGetClientAssertionJWT() { } func (s *PersisterTestSuite) TestGetClients() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - c := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + c := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, c)) - actual, err := r.Persister().GetClients(s.t2, client.Filter{Offset: 0, Limit: 100}) + actual, nextPage, err := r.Persister().GetClients(s.t2, client.Filter{}) require.NoError(t, err) - require.Equal(t, 0, len(actual)) - actual, err = r.Persister().GetClients(s.t1, client.Filter{Offset: 0, Limit: 100}) + assert.Len(t, actual, 0) + assert.True(t, nextPage.IsLast()) + + actual, nextPage, err = r.Persister().GetClients(s.t1, client.Filter{}) require.NoError(t, err) - require.Equal(t, 1, len(actual)) + assert.Len(t, actual, 1) + assert.True(t, nextPage.IsLast()) }) } } func (s *PersisterTestSuite) TestGetConcreteClient() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - expected := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + expected := &client.Client{ID: "client-id"} require.NoError(t, r.Persister().CreateClient(s.t1, expected)) - actual, err := r.Persister().GetConcreteClient(s.t2, expected.LegacyClientID) + actual, err := r.Persister().GetConcreteClient(s.t2, expected.ID) require.Error(t, err) require.Nil(t, actual) - actual, err = r.Persister().GetConcreteClient(s.t1, expected.LegacyClientID) + actual, err = r.Persister().GetConcreteClient(s.t1, expected.ID) require.NoError(t, err) - require.Equal(t, expected.LegacyClientID, actual.GetID()) + require.Equal(t, expected.ID, actual.GetID()) }) } } func (s *PersisterTestSuite) TestGetConcreteGrant() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) actual, err := r.Persister().GetConcreteGrant(s.t2, grant.ID) require.Error(t, err) @@ -1049,101 +871,19 @@ func (s *PersisterTestSuite) TestGetConcreteGrant() { } } -func (s *PersisterTestSuite) TestGetConsentRequest() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - req := &consent.OAuth2ConsentRequest{ - 
ID: "consent-request-id", - LoginChallenge: sqlxx.NullString(f.ID), - Skip: false, - Verifier: "verifier", - CSRF: "csrf", - } - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, req)) - - actual, err := r.Persister().GetConsentRequest(s.t2, req.ID) - require.Error(t, err) - require.Nil(t, actual) - - actual, err = r.Persister().GetConsentRequest(s.t1, req.ID) - require.NoError(t, err) - require.NotNil(t, actual) - }) - } -} - -func (s *PersisterTestSuite) TestGetFlow() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } - - _, err := store.GetFlow(s.t2, f.ID) - require.Error(t, err) - - _, err = store.GetFlow(s.t1, f.ID) - require.NoError(t, err) - }) - } -} - -func (s *PersisterTestSuite) TestGetFlowByConsentChallenge() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } - - _, err := store.GetFlowByConsentChallenge(s.t2, f.ConsentChallengeID.String()) - require.Error(t, err) - - _, err = store.GetFlowByConsentChallenge(s.t1, f.ConsentChallengeID.String()) - require.NoError(t, err) - }) - } -} - func (s *PersisterTestSuite) TestGetForcedObfuscatedLoginSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - session := &consent.ForcedObfuscatedLoginSession{ClientID: client.LegacyClientID} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + session := &consent.ForcedObfuscatedLoginSession{ClientID: cl.ID} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) require.NoError(t, r.Persister().CreateForcedObfuscatedLoginSession(s.t1, session)) - actual, err := r.Persister().GetForcedObfuscatedLoginSession(s.t2, client.LegacyClientID, "") + actual, err := r.Persister().GetForcedObfuscatedLoginSession(s.t2, cl.ID, "") require.Error(t, err) require.Nil(t, actual) - actual, err = r.Persister().GetForcedObfuscatedLoginSession(s.t1, client.LegacyClientID, "") + actual, err = r.Persister().GetForcedObfuscatedLoginSession(s.t1, cl.ID, "") require.NoError(t, err) require.NotNil(t, actual) }) @@ -1151,87 +891,62 @@ func (s *PersisterTestSuite) TestGetForcedObfuscatedLoginSession() { } func (s *PersisterTestSuite) TestGetGrants() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - 
ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) - - actual, err := r.Persister().GetGrants(s.t2, 100, 0, "") - require.NoError(t, err) - require.Equal(t, 0, len(actual)) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) - actual, err = r.Persister().GetGrants(s.t1, 100, 0, "") + actual, nextPage, err := r.Persister().GetGrants(s.t2, "") require.NoError(t, err) - require.Equal(t, 1, len(actual)) - }) - } -} + assert.Len(t, actual, 0) + assert.True(t, nextPage.IsLast()) -func (s *PersisterTestSuite) TestGetLoginRequest() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - lr := consent.LoginRequest{ID: "lr-id", ClientID: client.LegacyClientID, RequestedAt: time.Now()} - - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.ConsentManager().CreateLoginRequest(s.t1, &lr)) - f := flow.Flow{} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&f, lr.ID)) - require.Equal(t, s.t1NID, f.NID) - - actual, err := r.Persister().GetLoginRequest(s.t2, lr.ID) - require.Error(t, err) - require.Nil(t, actual) - - actual, err = r.Persister().GetLoginRequest(s.t1, lr.ID) + actual, nextPage, err = r.Persister().GetGrants(s.t1, "") require.NoError(t, err) - require.NotNil(t, actual) + assert.Len(t, actual, 1) + assert.True(t, nextPage.IsLast()) }) } } func (s *PersisterTestSuite) TestGetLogoutRequest() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - lr := consent.LogoutRequest{ + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + lr := flow.LogoutRequest{ ID: uuid.Must(uuid.NewV4()).String(), - ClientID: sql.NullString{Valid: true, String: client.LegacyClientID}, + ClientID: sql.NullString{Valid: true, String: cl.ID}, } - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) require.NoError(t, r.Persister().CreateLogoutRequest(s.t1, &lr)) actual, err := r.Persister().GetLogoutRequest(s.t2, lr.ID) require.Error(t, err) - require.Equal(t, &consent.LogoutRequest{}, actual) + require.Equal(t, &flow.LogoutRequest{}, actual) actual, err = r.Persister().GetLogoutRequest(s.t1, lr.ID) require.NoError(t, err) - require.NotEqual(t, &consent.LogoutRequest{}, actual) + require.NotEqual(t, &flow.LogoutRequest{}, actual) }) } } func (s *PersisterTestSuite) TestGetOpenIDConnectSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} request := fosite.NewRequest() request.SetID("request-id") request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} authorizeCode := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) require.NoError(t, r.Persister().CreateOpenIDConnectSession(s.t1, authorizeCode, request)) actual, err := r.Persister().GetOpenIDConnectSession(s.t2, 
authorizeCode, &fosite.Request{}) @@ -1246,15 +961,15 @@ func (s *PersisterTestSuite) TestGetOpenIDConnectSession() { } func (s *PersisterTestSuite) TestGetPKCERequestSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} request := fosite.NewRequest() request.SetID("request-id") request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} sig := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) require.NoError(t, r.Persister().CreatePKCERequestSession(s.t1, sig, request)) actual, err := r.Persister().GetPKCERequestSession(s.t2, sig, &fosite.DefaultSession{}) @@ -1269,17 +984,15 @@ func (s *PersisterTestSuite) TestGetPKCERequestSession() { } func (s *PersisterTestSuite) TestGetPublicKey() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) actual, err := r.Persister().GetPublicKey(s.t2, grant.Issuer, grant.Subject, grant.PublicKey.KeyID) require.Error(t, err) @@ -1293,18 +1006,16 @@ func (s *PersisterTestSuite) TestGetPublicKey() { } func (s *PersisterTestSuite) TestGetPublicKeyScopes() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { ks := newKeySet("ks-id", "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), + ID: uuid.Must(uuid.NewV4()), Scope: []string{"a", "b", "c"}, ExpiresAt: time.Now().Add(time.Hour), PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) actual, err := r.Persister().GetPublicKeyScopes(s.t2, grant.Issuer, grant.Subject, grant.PublicKey.KeyID) require.Error(t, err) @@ -1318,17 +1029,17 @@ func (s *PersisterTestSuite) TestGetPublicKeyScopes() { } func (s *PersisterTestSuite) TestGetPublicKeys() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - ks := newKeySet("ks-id", "use") + s.T().Run(k, func(t *testing.T) { + const issuer = "ks-id" + ks := newKeySet(issuer, "use") grant := trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), - ExpiresAt: time.Now().Add(time.Hour), - PublicKey: trust.PublicKey{Set: "ks-id", KeyID: ks.Keys[0].KeyID}, + ID: uuid.Must(uuid.NewV4()), + ExpiresAt: time.Now().UTC().Add(time.Hour), + Issuer: issuer, + PublicKey: trust.PublicKey{Set: issuer, KeyID: ks.Keys[0].KeyID}, } - require.NoError(t, r.Persister().AddKeySet(s.t1, "ks-id", ks)) - require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0])) + require.NoError(t, r.Persister().CreateGrant(s.t1, grant, ks.Keys[0].Public())) actual, err := r.Persister().GetPublicKeys(s.t2, 
grant.Issuer, grant.Subject) require.NoError(t, err) @@ -1342,16 +1053,16 @@ func (s *PersisterTestSuite) TestGetPublicKeys() { } func (s *PersisterTestSuite) TestGetRefreshTokenSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} request := fosite.NewRequest() request.SetID("request-id") request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} sig := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, sig, request)) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, sig, "", request)) actual, err := r.Persister().GetRefreshTokenSession(s.t2, sig, &fosite.DefaultSession{}) require.Error(t, err) @@ -1365,14 +1076,17 @@ func (s *PersisterTestSuite) TestGetRefreshTokenSession() { } func (s *PersisterTestSuite) TestGetRememberedLoginSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - ls := consent.LoginSession{ID: uuid.Must(uuid.NewV4()).String(), Remember: true} - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &ls)) + s.T().Run(k, func(t *testing.T) { + ls := flow.LoginSession{ + ID: uuid.Must(uuid.NewV4()).String(), + NID: s.t1NID, + Remember: true, + } + require.NoError(t, r.Persister().ConfirmLoginSession(s.t1, &ls)) actual, err := r.Persister().GetRememberedLoginSession(s.t2, ls.ID) - require.Error(t, err) + require.ErrorIs(t, err, x.ErrNotFound) require.Nil(t, actual) actual, err = r.Persister().GetRememberedLoginSession(s.t1, ls.ID) @@ -1383,61 +1097,46 @@ func (s *PersisterTestSuite) TestGetRememberedLoginSession() { } func (s *PersisterTestSuite) TestHandleConsentRequest() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { sessionID := uuid.Must(uuid.NewV4()).String() - c1 := &client.Client{LegacyClientID: uuidx.NewV4().String()} - f := newFlow(s.t1NID, c1.LegacyClientID, "sub", sqlxx.NullString(sessionID)) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) + c1 := &client.Client{ID: uuidx.NewV4().String()} + f := newFlow(s.t1NID, c1.ID, "sub", sqlxx.NullString(sessionID)) + + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: sessionID}) require.NoError(t, r.Persister().CreateClient(s.t1, c1)) - c1.ID = uuid.Nil require.NoError(t, r.Persister().CreateClient(s.t2, c1)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - req := &consent.OAuth2ConsentRequest{ - ID: "consent-request-id", - LoginChallenge: sqlxx.NullString(f.ID), - Skip: false, - Verifier: "verifier", - CSRF: "csrf", - } - hcr := &consent.AcceptOAuth2ConsentRequest{ - ID: req.ID, - HandledAt: sqlxx.NullTime(time.Now()), - Remember: true, - } - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, req)) + f.ConsentRequestID = "consent-request-id" - actualCR, err := r.Persister().HandleConsentRequest(s.t2, hcr) - require.Error(t, err) - require.Nil(t, actualCR) - actual, err := r.Persister().FindGrantedAndRememberedConsentRequests(s.t1, c1.LegacyClientID, f.Subject) - require.Error(t, err) - require.Equal(t, 0, len(actual)) + actual, err := 
r.ConsentManager().FindGrantedAndRememberedConsentRequest(s.t1, c1.ID, f.Subject) + require.ErrorIs(t, err, consent.ErrNoPreviousConsentFound) + assert.Nil(t, actual) - actualCR, err = r.Persister().HandleConsentRequest(s.t1, hcr) - require.NoError(t, err) - require.NotNil(t, actualCR) - actual, err = r.Persister().FindGrantedAndRememberedConsentRequests(s.t1, c1.LegacyClientID, f.Subject) + require.NoError(t, f.HandleConsentRequest(&flow.AcceptOAuth2ConsentRequest{ + Remember: true, + })) + + f.State = flow.FlowStateConsentUsed + + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t1, f)) + actual, err = r.ConsentManager().FindGrantedAndRememberedConsentRequest(s.t1, c1.ID, f.Subject) require.NoError(t, err) - require.Equal(t, 1, len(actual)) + assert.EqualValues(t, f.ConsentRequestID, actual.ConsentRequestID) }) } } func (s *PersisterTestSuite) TestInvalidateAuthorizeCodeSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: uuidx.NewV4().String()} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - client.ID = uuid.Nil - require.NoError(t, r.Persister().CreateClient(s.t2, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: uuidx.NewV4().String()} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + require.NoError(t, r.Persister().CreateClient(s.t2, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAuthorizeCodeSession(s.t1, sig, fr)) require.NoError(t, r.Persister().InvalidateAuthorizeCodeSession(s.t2, sig)) @@ -1454,9 +1153,8 @@ func (s *PersisterTestSuite) TestInvalidateAuthorizeCodeSession() { } func (s *PersisterTestSuite) TestIsJWTUsed() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(24*time.Hour)) require.NoError(t, r.Persister().SetClientAssertionJWT(s.t1, jti.JTI, jti.Expiry)) @@ -1472,53 +1170,38 @@ func (s *PersisterTestSuite) TestIsJWTUsed() { } func (s *PersisterTestSuite) TestListUserAuthenticatedClientsWithBackChannelLogout() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - c1 := &client.Client{LegacyClientID: "client-1", BackChannelLogoutURI: "not-null"} - c2 := &client.Client{LegacyClientID: "client-2", BackChannelLogoutURI: "not-null"} + s.T().Run(k, func(t *testing.T) { + c1 := &client.Client{ID: "client-1", BackChannelLogoutURI: "not-null"} + c2 := &client.Client{ID: "client-2", BackChannelLogoutURI: "not-null"} require.NoError(t, r.Persister().CreateClient(s.t1, c1)) - c1.ID = uuid.Nil require.NoError(t, r.Persister().CreateClient(s.t2, c1)) require.NoError(t, r.Persister().CreateClient(s.t2, c2)) - t1f1 := newFlow(s.t1NID, c1.LegacyClientID, "sub", sqlxx.NullString(uuid.Must(uuid.NewV4()).String())) - t1f1.ConsentChallengeID = "t1f1-consent-challenge" - t1f1.LoginVerifier = "t1f1-login-verifier" - t1f1.ConsentVerifier = "t1f1-consent-verifier" + t1f1 := newFlow(s.t1NID, c1.ID, "sub", sqlxx.NullString(uuid.Must(uuid.NewV4()).String())) + t1f1.ConsentRequestID = "t1f1-consent-challenge" - t2f1 := newFlow(s.t2NID, c1.LegacyClientID, "sub", t1f1.SessionID) - t2f1.ConsentChallengeID = "t2f1-consent-challenge" - 
t2f1.LoginVerifier = "t2f1-login-verifier" - t2f1.ConsentVerifier = "t2f1-consent-verifier" + t2f1 := newFlow(s.t2NID, c1.ID, "sub", t1f1.SessionID) + t2f1.ConsentRequestID = "t2f1-consent-challenge" - t2f2 := newFlow(s.t2NID, c2.LegacyClientID, "sub", t1f1.SessionID) - t2f2.ConsentChallengeID = "t2f2-consent-challenge" - t2f2.LoginVerifier = "t2f2-login-verifier" - t2f2.ConsentVerifier = "t2f2-consent-verifier" + t2f2 := newFlow(s.t2NID, c2.ID, "sub", t1f1.SessionID) + t2f2.ConsentRequestID = "t2f2-consent-challenge" - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: t1f1.SessionID.String()})) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: t1f1.SessionID.String()}) - require.NoError(t, r.Persister().Connection(context.Background()).Create(t1f1)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(t2f1)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(t2f2)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t1, t1f1)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t2, t2f1)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t2, t2f2)) - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, &consent.OAuth2ConsentRequest{ID: t1f1.ID, LoginChallenge: sqlxx.NullString(t1f1.ID), Skip: false, Verifier: t1f1.ConsentVerifier.String(), CSRF: "csrf"})) - require.NoError(t, r.Persister().CreateConsentRequest(s.t2, &consent.OAuth2ConsentRequest{ID: t2f1.ID, LoginChallenge: sqlxx.NullString(t2f1.ID), Skip: false, Verifier: t2f1.ConsentVerifier.String(), CSRF: "csrf"})) - require.NoError(t, r.Persister().CreateConsentRequest(s.t2, &consent.OAuth2ConsentRequest{ID: t2f2.ID, LoginChallenge: sqlxx.NullString(t2f2.ID), Skip: false, Verifier: t2f2.ConsentVerifier.String(), CSRF: "csrf"})) - - _, err := r.Persister().HandleConsentRequest(s.t1, &consent.AcceptOAuth2ConsentRequest{ID: t1f1.ID, HandledAt: sqlxx.NullTime(time.Now()), Remember: true}) - require.NoError(t, err) - _, err = r.Persister().HandleConsentRequest(s.t2, &consent.AcceptOAuth2ConsentRequest{ID: t2f1.ID, HandledAt: sqlxx.NullTime(time.Now()), Remember: true}) - require.NoError(t, err) - _, err = r.Persister().HandleConsentRequest(s.t2, &consent.AcceptOAuth2ConsentRequest{ID: t2f2.ID, HandledAt: sqlxx.NullTime(time.Now()), Remember: true}) - require.NoError(t, err) + t1f1.ConsentRequestID = sqlxx.NullString(t1f1.ID) + t2f1.ConsentRequestID = sqlxx.NullString(t2f1.ID) + t2f2.ConsentRequestID = sqlxx.NullString(t2f2.ID) - cs, err := r.Persister().ListUserAuthenticatedClientsWithBackChannelLogout(s.t1, "sub", t1f1.SessionID.String()) + cs, err := r.ConsentManager().ListUserAuthenticatedClientsWithBackChannelLogout(s.t1, "sub", t1f1.SessionID.String()) require.NoError(t, err) require.Equal(t, 1, len(cs)) - cs, err = r.Persister().ListUserAuthenticatedClientsWithBackChannelLogout(s.t2, "sub", t1f1.SessionID.String()) + cs, err = r.ConsentManager().ListUserAuthenticatedClientsWithBackChannelLogout(s.t2, "sub", t1f1.SessionID.String()) require.NoError(t, err) require.Equal(t, 2, len(cs)) }) @@ -1526,53 +1209,38 @@ func (s *PersisterTestSuite) TestListUserAuthenticatedClientsWithBackChannelLogo } func (s *PersisterTestSuite) TestListUserAuthenticatedClientsWithFrontChannelLogout() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - c1 := &client.Client{LegacyClientID: "client-1", FrontChannelLogoutURI: "not-null"} - c2 := &client.Client{LegacyClientID: "client-2", 
FrontChannelLogoutURI: "not-null"} + s.T().Run(k, func(t *testing.T) { + c1 := &client.Client{ID: "client-1", FrontChannelLogoutURI: "not-null"} + c2 := &client.Client{ID: "client-2", FrontChannelLogoutURI: "not-null"} require.NoError(t, r.Persister().CreateClient(s.t1, c1)) - c1.ID = uuid.Nil require.NoError(t, r.Persister().CreateClient(s.t2, c1)) require.NoError(t, r.Persister().CreateClient(s.t2, c2)) - t1f1 := newFlow(s.t1NID, c1.LegacyClientID, "sub", sqlxx.NullString(uuid.Must(uuid.NewV4()).String())) - t1f1.ConsentChallengeID = "t1f1-consent-challenge" - t1f1.LoginVerifier = "t1f1-login-verifier" - t1f1.ConsentVerifier = "t1f1-consent-verifier" + t1f1 := newFlow(s.t1NID, c1.ID, "sub", sqlxx.NullString(uuid.Must(uuid.NewV4()).String())) + t1f1.ConsentRequestID = "t1f1-consent-challenge" - t2f1 := newFlow(s.t2NID, c1.LegacyClientID, "sub", t1f1.SessionID) - t2f1.ConsentChallengeID = "t2f1-consent-challenge" - t2f1.LoginVerifier = "t2f1-login-verifier" - t2f1.ConsentVerifier = "t2f1-consent-verifier" + t2f1 := newFlow(s.t2NID, c1.ID, "sub", t1f1.SessionID) + t2f1.ConsentRequestID = "t2f1-consent-challenge" - t2f2 := newFlow(s.t2NID, c2.LegacyClientID, "sub", t1f1.SessionID) - t2f2.ConsentChallengeID = "t2f2-consent-challenge" - t2f2.LoginVerifier = "t2f2-login-verifier" - t2f2.ConsentVerifier = "t2f2-consent-verifier" + t2f2 := newFlow(s.t2NID, c2.ID, "sub", t1f1.SessionID) + t2f2.ConsentRequestID = "t2f2-consent-challenge" - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: t1f1.SessionID.String()})) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: t1f1.SessionID.String()}) - require.NoError(t, r.Persister().Connection(context.Background()).Create(t1f1)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(t2f1)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(t2f2)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t1, t1f1)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t2, t2f1)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t2, t2f2)) - require.NoError(t, r.Persister().CreateConsentRequest(s.t1, &consent.OAuth2ConsentRequest{ID: t1f1.ID, LoginChallenge: sqlxx.NullString(t1f1.ID), Skip: false, Verifier: t1f1.ConsentVerifier.String(), CSRF: "csrf"})) - require.NoError(t, r.Persister().CreateConsentRequest(s.t2, &consent.OAuth2ConsentRequest{ID: t2f1.ID, LoginChallenge: sqlxx.NullString(t2f1.ID), Skip: false, Verifier: t2f1.ConsentVerifier.String(), CSRF: "csrf"})) - require.NoError(t, r.Persister().CreateConsentRequest(s.t2, &consent.OAuth2ConsentRequest{ID: t2f2.ID, LoginChallenge: sqlxx.NullString(t2f2.ID), Skip: false, Verifier: t2f2.ConsentVerifier.String(), CSRF: "csrf"})) + t1f1.ConsentRequestID = sqlxx.NullString(t1f1.ID) + t2f1.ConsentRequestID = sqlxx.NullString(t2f1.ID) + t2f2.ConsentRequestID = sqlxx.NullString(t2f2.ID) - _, err := r.Persister().HandleConsentRequest(s.t1, &consent.AcceptOAuth2ConsentRequest{ID: t1f1.ID, HandledAt: sqlxx.NullTime(time.Now()), Remember: true}) - require.NoError(t, err) - _, err = r.Persister().HandleConsentRequest(s.t2, &consent.AcceptOAuth2ConsentRequest{ID: t2f1.ID, HandledAt: sqlxx.NullTime(time.Now()), Remember: true}) - require.NoError(t, err) - _, err = r.Persister().HandleConsentRequest(s.t2, &consent.AcceptOAuth2ConsentRequest{ID: t2f2.ID, HandledAt: sqlxx.NullTime(time.Now()), Remember: true}) - require.NoError(t, err) - - cs, err := 
r.Persister().ListUserAuthenticatedClientsWithFrontChannelLogout(s.t1, "sub", t1f1.SessionID.String()) + cs, err := r.ConsentManager().ListUserAuthenticatedClientsWithFrontChannelLogout(s.t1, "sub", t1f1.SessionID.String()) require.NoError(t, err) require.Equal(t, 1, len(cs)) - cs, err = r.Persister().ListUserAuthenticatedClientsWithFrontChannelLogout(s.t2, "sub", t1f1.SessionID.String()) + cs, err = r.ConsentManager().ListUserAuthenticatedClientsWithFrontChannelLogout(s.t2, "sub", t1f1.SessionID.String()) require.NoError(t, err) require.Equal(t, 2, len(cs)) }) @@ -1580,19 +1248,16 @@ func (s *PersisterTestSuite) TestListUserAuthenticatedClientsWithFrontChannelLog } func (s *PersisterTestSuite) TestMarkJWTUsedForTime() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - r.Persister().SetClientAssertionJWT(s.t1, "a", time.Now().Add(-24*time.Hour)) - r.Persister().SetClientAssertionJWT(s.t2, "a", time.Now().Add(-24*time.Hour)) - r.Persister().SetClientAssertionJWT(s.t2, "b", time.Now().Add(-24*time.Hour)) + s.T().Run(k, func(t *testing.T) { + require.NoError(t, r.Persister().SetClientAssertionJWT(s.t1, "a", time.Now().Add(-24*time.Hour))) + require.NoError(t, r.Persister().SetClientAssertionJWT(s.t2, "a", time.Now().Add(-24*time.Hour))) + require.NoError(t, r.Persister().SetClientAssertionJWT(s.t2, "b", time.Now().Add(-24*time.Hour))) require.NoError(t, r.Persister().MarkJWTUsedForTime(s.t2, "a", time.Now().Add(48*time.Hour))) store, ok := r.OAuth2Storage().(oauth2.AssertionJWTReader) - if !ok { - t.Fatal("type assertion failed") - } + require.True(t, ok) _, err := store.GetClientAssertionJWT(s.t1, "a") require.NoError(t, err) @@ -1605,148 +1270,209 @@ func (s *PersisterTestSuite) TestMarkJWTUsedForTime() { } func (s *PersisterTestSuite) TestQueryWithNetwork() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - r.Persister().CreateClient(s.t1, &client.Client{LegacyClientID: "client-1", FrontChannelLogoutURI: "not-null"}) + s.T().Run(k, func(t *testing.T) { + require.NoError(t, r.Persister().CreateClient(s.t1, &client.Client{ID: "client-1", FrontChannelLogoutURI: "not-null"})) store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } + require.True(t, ok) var actual []client.Client - store.QueryWithNetwork(s.t2).All(&actual) - require.Equal(t, 0, len(actual)) - store.QueryWithNetwork(s.t1).All(&actual) - require.Equal(t, 1, len(actual)) + require.NoError(t, store.QueryWithNetwork(s.t2).All(&actual)) + require.Len(t, actual, 0) + require.NoError(t, store.QueryWithNetwork(s.t1).All(&actual)) + require.Len(t, actual, 1) }) } } func (s *PersisterTestSuite) TestRejectLogoutRequest() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { lr := newLogoutRequest() - require.NoError(t, r.ConsentManager().CreateLogoutRequest(s.t1, lr)) + require.NoError(t, r.LogoutManager().CreateLogoutRequest(s.t1, lr)) - require.Error(t, r.ConsentManager().RejectLogoutRequest(s.t2, lr.ID)) - actual, err := r.ConsentManager().GetLogoutRequest(s.t1, lr.ID) + require.Error(t, r.LogoutManager().RejectLogoutRequest(s.t2, lr.ID)) + actual, err := r.LogoutManager().GetLogoutRequest(s.t1, lr.ID) require.NoError(t, err) require.Equal(t, lr, actual) - require.NoError(t, r.ConsentManager().RejectLogoutRequest(s.t1, lr.ID)) - actual, err = r.ConsentManager().GetLogoutRequest(s.t1, lr.ID) + require.NoError(t, r.LogoutManager().RejectLogoutRequest(s.t1, lr.ID)) + 
actual, err = r.LogoutManager().GetLogoutRequest(s.t1, lr.ID) require.Error(t, err) - require.Equal(t, &consent.LogoutRequest{}, actual) + require.Equal(t, &flow.LogoutRequest{}, actual) }) } } func (s *PersisterTestSuite) TestRevokeAccessToken() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) sig := uuid.Must(uuid.NewV4()).String() fr := fosite.NewRequest() - fr.Client = &fosite.DefaultClient{ID: client.LegacyClientID} + fr.Client = &fosite.DefaultClient{ID: cl.ID} + fr.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, sig, fr)) require.NoError(t, r.Persister().RevokeAccessToken(s.t2, fr.ID)) actual := persistencesql.OAuth2RequestSQL{Table: "access"} - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) require.Equal(t, s.t1NID, actual.NID) require.NoError(t, r.Persister().RevokeAccessToken(s.t1, fr.ID)) - require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, sig)) + require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, x.SignatureHash(sig))) }) } } func (s *PersisterTestSuite) TestRevokeRefreshToken() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) request := fosite.NewRequest() request.Client = &fosite.DefaultClient{ID: "client-id"} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} signature := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, request)) - - actual := persistencesql.OAuth2RequestSQL{Table: "refresh"} + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, "", request)) + var actualt2 persistencesql.OAuth2RefreshTable require.NoError(t, r.Persister().RevokeRefreshToken(s.t2, request.ID)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, signature)) - require.Equal(t, true, actual.Active) + require.NoError(t, r.Persister().Connection(context.Background()).Find(&actualt2, signature)) + require.Equal(t, true, actualt2.Active) + require.NoError(t, r.Persister().RevokeRefreshToken(s.t1, request.ID)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, signature)) - require.Equal(t, false, actual.Active) + require.ErrorIs(t, r.Persister().Connection(context.Background()).Find(new(persistencesql.OAuth2RefreshTable), signature), sql.ErrNoRows) }) } } -func (s *PersisterTestSuite) TestRevokeRefreshTokenMaybeGracePeriod() { - t := s.T() +func (s *PersisterTestSuite) TestRotateRefreshToken() { for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) + s.T().Run(k, func(t *testing.T) { + t.Run("with access signature", func(t *testing.T) { 
+ clientID := uuid.Must(uuid.NewV4()).String() + require.NoError(t, r.Persister().CreateClient(s.t1, &client.Client{ID: clientID})) + require.NoError(t, r.Persister().CreateClient(s.t2, &client.Client{ID: clientID})) - request := fosite.NewRequest() - request.Client = &fosite.DefaultClient{ID: "client-id"} + request := fosite.NewRequest() + request.Client = &fosite.DefaultClient{ID: clientID} + request.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} - signature := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, request)) + // Create token T1 + signatureT1 := uuid.Must(uuid.NewV4()).String() + accessSignatureT1 := uuid.Must(uuid.NewV4()).String() + require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, accessSignatureT1, request)) + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signatureT1, accessSignatureT1, request)) - actual := persistencesql.OAuth2RequestSQL{Table: "refresh"} + // Create token T2 + signatureT2 := uuid.Must(uuid.NewV4()).String() + accessSignatureT2 := uuid.Must(uuid.NewV4()).String() + require.ErrorIs(t, r.Persister().RotateRefreshToken(s.t2, request.ID, signatureT2), fosite.ErrNotFound, "Rotation fails as token is non-existent.") + require.NoError(t, r.Persister().CreateAccessTokenSession(s.t2, accessSignatureT2, request)) + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t2, signatureT2, accessSignatureT2, request)) - store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } + accessT2 := persistencesql.OAuth2RequestSQL{Table: "access"} + assert.NoError(t, r.Persister().Connection(s.t2).Where("signature = ?", x.SignatureHash(accessSignatureT2)).First(&accessT2)) + require.Equal(t, true, accessT2.Active) - require.NoError(t, store.RevokeRefreshTokenMaybeGracePeriod(s.t2, request.ID, signature)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, signature)) - require.Equal(t, true, actual.Active) - require.NoError(t, store.RevokeRefreshTokenMaybeGracePeriod(s.t1, request.ID, signature)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, signature)) - require.Equal(t, false, actual.Active) + accessT1 := persistencesql.OAuth2RequestSQL{Table: "access"} + assert.NoError(t, r.Persister().Connection(s.t1).Where("signature = ?", x.SignatureHash(accessSignatureT1)).First(&accessT1)) + require.Equal(t, true, accessT2.Active) + + // Rotate token T1 + require.NoError(t, r.Persister().RotateRefreshToken(s.t1, request.ID, signatureT1)) + { + refreshT1 := persistencesql.OAuth2RefreshTable{} + require.NoError(t, r.Persister().Connection(s.t1).Where("signature = ?", signatureT1).First(&refreshT1)) + require.Equal(t, false, refreshT1.Active) + + accessT1 := persistencesql.OAuth2RequestSQL{Table: "access"} + require.ErrorIs(t, r.Persister().Connection(s.t1).Where("signature = ?", x.SignatureHash(accessSignatureT1)).First(&accessT1), sql.ErrNoRows) + + refreshT2 := persistencesql.OAuth2RefreshTable{} + require.NoError(t, r.Persister().Connection(s.t2).Where("signature = ?", signatureT2).First(&refreshT2)) + require.Equal(t, true, refreshT2.Active) + + accessT2 := persistencesql.OAuth2RequestSQL{Table: "access"} + require.NoError(t, r.Persister().Connection(s.t2).Where("signature = ?", x.SignatureHash(accessSignatureT2)).First(&accessT2)) + require.Equal(t, true, accessT2.Active) + } + + require.NoError(t, r.Persister().RotateRefreshToken(s.t2, 
request.ID, signatureT2)) + { + refreshT2 := persistencesql.OAuth2RefreshTable{} + require.NoError(t, r.Persister().Connection(s.t2).Where("signature = ?", signatureT2).First(&refreshT2)) + require.Equal(t, false, refreshT2.Active) + + accessT2 := persistencesql.OAuth2RequestSQL{Table: "access"} + require.ErrorIs(t, r.Persister().Connection(s.t2).Where("signature = ?", x.SignatureHash(accessSignatureT2)).First(&accessT2), sql.ErrNoRows) + require.Equal(t, false, accessT2.Active) + } + }) + + t.Run("without access signature", func(t *testing.T) { + clientID := uuid.Must(uuid.NewV4()).String() + require.NoError(t, r.Persister().CreateClient(s.t1, &client.Client{ID: clientID})) + + request1 := fosite.NewRequest() + request1.Client = &fosite.DefaultClient{ID: clientID} + request1.Session = &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "sub"}} + + signature := uuid.Must(uuid.NewV4()).String() + require.NoError(t, r.Persister().CreateRefreshTokenSession(s.t1, signature, "", request1)) + + accessSignature1 := uuid.Must(uuid.NewV4()).String() + require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, accessSignature1, request1)) + + accessSignature2 := uuid.Must(uuid.NewV4()).String() + require.NoError(t, r.Persister().CreateAccessTokenSession(s.t1, accessSignature2, request1)) + + require.NoError(t, r.Persister().RotateRefreshToken(s.t1, request1.ID, signature)) + { + accessT1 := persistencesql.OAuth2RequestSQL{Table: "access"} + require.ErrorIs(t, r.Persister().Connection(s.t1).Where("signature = ?", x.SignatureHash(accessSignature1)).First(&accessT1), sql.ErrNoRows) + + refresh := persistencesql.OAuth2RefreshTable{} + require.NoError(t, r.Persister().Connection(s.t1).Where("signature = ?", signature).First(&refresh)) + require.Equal(t, false, refresh.Active) + + accessT2 := persistencesql.OAuth2RequestSQL{Table: "access"} + require.ErrorIs(t, r.Persister().Connection(s.t1).Where("signature = ?", x.SignatureHash(accessSignature2)).First(&accessT2), sql.ErrNoRows) + } + }) }) } } func (s *PersisterTestSuite) TestRevokeSubjectClientConsentSession() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { sessionID := uuid.Must(uuid.NewV4()).String() - client := &client.Client{LegacyClientID: "client-id"} - f := newFlow(s.t1NID, client.LegacyClientID, "sub", sqlxx.NullString(sessionID)) + cl := &client.Client{ID: "client-id"} + f := newFlow(s.t1NID, cl.ID, "sub", sqlxx.NullString(sessionID)) f.RequestedAt = time.Now().Add(-24 * time.Hour) - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: sessionID}) + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + require.NoError(t, r.ConsentManager().CreateConsentSession(s.t1, f)) actual := flow.Flow{} - require.Error(t, r.Persister().RevokeSubjectClientConsentSession(s.t2, "sub", client.LegacyClientID)) + require.NoError(t, r.ConsentManager().RevokeSubjectClientConsentSession(s.t2, "sub", cl.ID), "should not error if nothing was found") require.NoError(t, r.Persister().Connection(context.Background()).Find(&actual, f.ID)) - require.NoError(t, r.Persister().RevokeSubjectClientConsentSession(s.t1, "sub", client.LegacyClientID)) + require.NoError(t, r.ConsentManager().RevokeSubjectClientConsentSession(s.t1, "sub", 
cl.ID)) require.Error(t, r.Persister().Connection(context.Background()).Find(&actual, f.ID)) }) } } func (s *PersisterTestSuite) TestSetClientAssertionJWT() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(24*time.Hour)) require.NoError(t, r.Persister().SetClientAssertionJWT(s.t1, jti.JTI, jti.Expiry)) @@ -1758,13 +1484,10 @@ func (s *PersisterTestSuite) TestSetClientAssertionJWT() { } func (s *PersisterTestSuite) TestSetClientAssertionJWTRaw() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } + require.True(t, ok) jti := oauth2.NewBlacklistedJTI(uuid.Must(uuid.NewV4()).String(), time.Now().Add(24*time.Hour)) require.NoError(t, store.SetClientAssertionJWTRaw(s.t1, jti)) @@ -1777,63 +1500,75 @@ func (s *PersisterTestSuite) TestSetClientAssertionJWTRaw() { } func (s *PersisterTestSuite) TestUpdateClient() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - t1c1 := &client.Client{LegacyClientID: "client-id", Name: "original", Secret: "original-secret"} - t2c1 := &client.Client{LegacyClientID: "client-id", Name: "original", Secret: "original-secret"} + s.T().Run(k, func(t *testing.T) { + t1c1 := &client.Client{ID: "client-id", Name: "original", Secret: "original-secret"} + t2c1 := &client.Client{ID: "client-id", Name: "original", Secret: "original-secret"} require.NoError(t, r.Persister().CreateClient(s.t1, t1c1)) require.NoError(t, r.Persister().CreateClient(s.t2, t2c1)) - expectedHash := t1c1.Secret + t1Hash, t2Hash := t1c1.Secret, t2c1.Secret u1 := *t1c1 u1.Name = "updated" u1.Secret = "" require.NoError(t, r.Persister().UpdateClient(s.t2, &u1)) - actual := &client.Client{} - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, t1c1.ID)) + actual, err := r.Persister().GetConcreteClient(s.t1, t1c1.ID) + require.NoError(t, err) require.Equal(t, "original", actual.Name) - require.Equal(t, expectedHash, actual.Secret) + require.Equal(t, t1Hash, actual.Secret) + + actual, err = r.Persister().GetConcreteClient(s.t2, t1c1.ID) + require.NoError(t, err) + require.Equal(t, "updated", actual.Name) + require.Equal(t, t2Hash, actual.Secret) u2 := *t1c1 u2.Name = "updated" u2.Secret = "" require.NoError(t, r.Persister().UpdateClient(s.t1, &u2)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, t1c1.ID)) + + actual, err = r.Persister().GetConcreteClient(s.t1, t1c1.ID) + require.NoError(t, err) require.Equal(t, "updated", actual.Name) - require.Equal(t, expectedHash, actual.Secret) + require.Equal(t, t1Hash, actual.Secret) u3 := *t1c1 u3.Name = "updated" u3.Secret = "updated-secret" require.NoError(t, r.Persister().UpdateClient(s.t1, &u3)) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, t1c1.ID)) + + actual, err = r.Persister().GetConcreteClient(s.t1, t1c1.ID) + require.NoError(t, err) + require.Equal(t, "updated", actual.Name) + require.NotEqual(t, t1Hash, actual.Secret) + + actual, err = r.Persister().GetConcreteClient(s.t2, t2c1.ID) + require.NoError(t, err) require.Equal(t, "updated", actual.Name) - require.NotEqual(t, expectedHash, actual.Secret) + require.Equal(t, t2Hash, actual.Secret) }) } } func (s *PersisterTestSuite) TestUpdateKey() { - t := s.T() for k, r := range 
s.registries { - t.Run("dialect="+k, func(*testing.T) { + s.T().Run("dialect="+k, func(t *testing.T) { k1 := newKey("test-ks", "test") ks := "key-set" - require.NoError(t, r.Persister().AddKey(s.t1, ks, &k1)) - actual, err := r.Persister().GetKey(s.t1, ks, k1.KeyID) + require.NoError(t, r.KeyManager().AddKey(s.t1, ks, &k1)) + actual, err := r.KeyManager().GetKey(s.t1, ks, k1.KeyID) require.NoError(t, err) assertx.EqualAsJSON(t, &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{k1}}, actual) k2 := newKey("test-ks", "test") - r.Persister().UpdateKey(s.t2, ks, &k2) - actual, err = r.Persister().GetKey(s.t1, ks, k1.KeyID) + require.NoError(t, r.KeyManager().UpdateKey(s.t2, ks, &k2)) + actual, err = r.KeyManager().GetKey(s.t1, ks, k1.KeyID) require.NoError(t, err) assertx.EqualAsJSON(t, &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{k1}}, actual) - r.Persister().UpdateKey(s.t1, ks, &k2) - actual, err = r.Persister().GetKey(s.t1, ks, k2.KeyID) + require.NoError(t, r.KeyManager().UpdateKey(s.t1, ks, &k2)) + actual, err = r.KeyManager().GetKey(s.t1, ks, k2.KeyID) require.NoError(t, err) require.NotEqual(t, &jose.JSONWebKeySet{Keys: []jose.JSONWebKey{k1}}, actual) }) @@ -1841,24 +1576,23 @@ func (s *PersisterTestSuite) TestUpdateKey() { } func (s *PersisterTestSuite) TestUpdateKeySet() { - t := s.T() for k, r := range s.registries { - t.Run("dialect="+k, func(*testing.T) { + s.T().Run("dialect="+k, func(t *testing.T) { ks := "key-set" ks1 := newKeySet(ks, "test") - require.NoError(t, r.Persister().AddKeySet(s.t1, ks, ks1)) - actual, err := r.Persister().GetKeySet(s.t1, ks) + require.NoError(t, r.KeyManager().AddKeySet(s.t1, ks, ks1)) + actual, err := r.KeyManager().GetKeySet(s.t1, ks) require.NoError(t, err) requireKeySetEqual(t, ks1, actual) ks2 := newKeySet(ks, "test") - r.Persister().UpdateKeySet(s.t2, ks, ks2) - actual, err = r.Persister().GetKeySet(s.t1, ks) + require.NoError(t, r.KeyManager().UpdateKeySet(s.t2, ks, ks2)) + actual, err = r.KeyManager().GetKeySet(s.t1, ks) require.NoError(t, err) requireKeySetEqual(t, ks1, actual) - r.Persister().UpdateKeySet(s.t1, ks, ks2) - actual, err = r.Persister().GetKeySet(s.t1, ks) + require.NoError(t, r.KeyManager().UpdateKeySet(s.t1, ks, ks2)) + actual, err = r.KeyManager().GetKeySet(s.t1, ks) require.NoError(t, err) requireKeySetEqual(t, ks2, actual) }) @@ -1866,20 +1600,17 @@ func (s *PersisterTestSuite) TestUpdateKeySet() { } func (s *PersisterTestSuite) TestUpdateWithNetwork() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - t1c1 := &client.Client{LegacyClientID: "client-id", Name: "original", Secret: "original-secret"} - t2c1 := &client.Client{LegacyClientID: "client-id", Name: "original", Secret: "original-secret", Owner: "erase-me"} + s.T().Run(k, func(t *testing.T) { + t1c1 := &client.Client{ID: "client-id", Name: "original", Secret: "original-secret"} + t2c1 := &client.Client{ID: "client-id", Name: "original", Secret: "original-secret", Owner: "erase-me"} require.NoError(t, r.Persister().CreateClient(s.t1, t1c1)) require.NoError(t, r.Persister().CreateClient(s.t2, t2c1)) store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } + require.True(t, ok) - count, err := store.UpdateWithNetwork(s.t1, &client.Client{ID: t1c1.ID, LegacyClientID: "client-id", Name: "updated", Secret: "original-secret"}) + count, err := store.UpdateWithNetwork(s.t1, &client.Client{ID: "client-id", Name: "updated", Secret: "original-secret"}) require.NoError(t, err) require.Equal(t, int64(1), count) 
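// Illustrative sketch (assumption, not code from this changeset): the Update* tests in this
// file all assert the same property — every read and write is scoped to the network ID carried
// in the context (s.t1 vs. s.t2), so a change made through one network must never be visible
// through the other. A minimal in-memory model of that isolation, with hypothetical names:
package main

import "fmt"

type key struct{ nid, id string }

// networkScopedStore keeps one record per (network, id) pair, mirroring the
// "WithNetwork" queries exercised by the persister tests.
type networkScopedStore struct{ data map[key]string }

func newStore() *networkScopedStore { return &networkScopedStore{data: map[key]string{}} }

func (s *networkScopedStore) upsert(nid, id, name string) { s.data[key{nid, id}] = name }

func (s *networkScopedStore) get(nid, id string) (string, bool) {
	v, ok := s.data[key{nid, id}]
	return v, ok
}

func main() {
	s := newStore()
	s.upsert("network-1", "client-id", "original")
	s.upsert("network-2", "client-id", "original")

	// Updating through network-2 must not be visible through network-1,
	// which is what TestUpdateClient and TestUpdateWithNetwork assert.
	s.upsert("network-2", "client-id", "updated")

	n1, _ := s.get("network-1", "client-id")
	n2, _ := s.get("network-2", "client-id")
	fmt.Println(n1, n2) // original updated
}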
actualt1, err := store.GetConcreteClient(s.t1, "client-id") @@ -1895,169 +1626,133 @@ func (s *PersisterTestSuite) TestUpdateWithNetwork() { } func (s *PersisterTestSuite) TestVerifyAndInvalidateConsentRequest() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { + s.T().Run(k, func(t *testing.T) { sub := uuid.Must(uuid.NewV4()).String() sessionID := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - f := newFlow(s.t1NID, client.LegacyClientID, sub, sqlxx.NullString(sessionID)) + persistLoginSession(s.t1, t, r.Persister(), &flow.LoginSession{ID: sessionID}) + cl := &client.Client{ID: "client-id"} + require.NoError(t, r.Persister().CreateClient(s.t1, cl)) + f := newFlow(s.t1NID, cl.ID, sub, sqlxx.NullString(sessionID)) f.ConsentSkip = false f.GrantedScope = sqlxx.StringSliceJSONFormat{} f.ConsentRemember = false crf := 86400 f.ConsentRememberFor = &crf - f.ConsentError = &consent.RequestDeniedError{} + f.ConsentError = &flow.RequestDeniedError{} f.SessionAccessToken = map[string]interface{}{} f.SessionIDToken = map[string]interface{}{} - f.ConsentWasHandled = false f.State = flow.FlowStateConsentUnused - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - actual := &flow.Flow{} - _, err := r.ConsentManager().VerifyAndInvalidateConsentRequest(s.t2, f.ConsentVerifier.String()) - require.Error(t, err) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, f.ID)) - require.Equal(t, flow.FlowStateConsentUnused, actual.State) - require.Equal(t, false, actual.ConsentWasHandled) - _, err = r.ConsentManager().VerifyAndInvalidateConsentRequest(s.t1, f.ConsentVerifier.String()) - require.NoError(t, err) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, f.ID)) - require.Equal(t, flow.FlowStateConsentUsed, actual.State) - require.Equal(t, true, actual.ConsentWasHandled) - }) - } -} + require.NoError(t, f.InvalidateConsentRequest()) -func (s *PersisterTestSuite) TestVerifyAndInvalidateLoginRequest() { - t := s.T() - for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - sub := uuid.Must(uuid.NewV4()).String() - sessionID := uuid.Must(uuid.NewV4()).String() - require.NoError(t, r.Persister().CreateLoginSession(s.t1, &consent.LoginSession{ID: sessionID})) - client := &client.Client{LegacyClientID: "client-id"} - require.NoError(t, r.Persister().CreateClient(s.t1, client)) - f := newFlow(s.t1NID, client.LegacyClientID, sub, sqlxx.NullString(sessionID)) - f.State = flow.FlowStateLoginUnused - require.NoError(t, r.Persister().Connection(context.Background()).Create(f)) - - actual := &flow.Flow{} - _, err := r.ConsentManager().VerifyAndInvalidateLoginRequest(s.t2, f.LoginVerifier) - require.Error(t, err) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, f.ID)) - require.Equal(t, flow.FlowStateLoginUnused, actual.State) - require.Equal(t, false, actual.LoginWasUsed) - _, err = r.ConsentManager().VerifyAndInvalidateLoginRequest(s.t1, f.LoginVerifier) + err := r.ConsentManager().CreateConsentSession(s.t2, f) + require.ErrorIs(t, err, sqlcon.ErrNoRows) + + err = r.ConsentManager().CreateConsentSession(s.t1, f) require.NoError(t, err) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, f.ID)) - require.Equal(t, flow.FlowStateLoginUsed, 
actual.State) - require.Equal(t, true, actual.LoginWasUsed) }) } } func (s *PersisterTestSuite) TestVerifyAndInvalidateLogoutRequest() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - lr := newLogoutRequest() - lr.Verifier = uuid.Must(uuid.NewV4()).String() - lr.Accepted = true - lr.Rejected = false - require.NoError(t, r.ConsentManager().CreateLogoutRequest(s.t1, lr)) + s.T().Run(k, func(t *testing.T) { + run := func(t *testing.T, lr *flow.LogoutRequest) { + lr.Verifier = uuid.Must(uuid.NewV4()).String() + lr.Accepted = true + lr.Rejected = false + require.NoError(t, r.LogoutManager().CreateLogoutRequest(s.t1, lr)) + + expected, err := r.LogoutManager().GetLogoutRequest(s.t1, lr.ID) + require.NoError(t, err) + + lrInvalidated, err := r.LogoutManager().VerifyAndInvalidateLogoutRequest(s.t2, lr.Verifier) + require.Error(t, err) + require.Nil(t, lrInvalidated) + actual := &flow.LogoutRequest{} + require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, lr.ID)) + require.Equal(t, expected, actual) + + lrInvalidated, err = r.LogoutManager().VerifyAndInvalidateLogoutRequest(s.t1, lr.Verifier) + require.NoError(t, err) + require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, lr.ID)) + require.Equal(t, lrInvalidated, actual) + require.Equal(t, true, actual.WasHandled) + } - expected, err := r.ConsentManager().GetLogoutRequest(s.t1, lr.ID) - require.NoError(t, err) + t.Run("case=legacy logout request without expiry", func(t *testing.T) { + lr := newLogoutRequest() + run(t, lr) + }) - lrInvalidated, err := r.ConsentManager().VerifyAndInvalidateLogoutRequest(s.t2, lr.Verifier) - require.Error(t, err) - require.Equal(t, &consent.LogoutRequest{}, lrInvalidated) - actual := &consent.LogoutRequest{} - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, lr.ID)) - require.Equal(t, expected, actual) + t.Run("case=logout request with expiry", func(t *testing.T) { + lr := newLogoutRequest() + lr.ExpiresAt = sqlxx.NullTime(time.Now().Add(time.Hour)) + run(t, lr) + }) - lrInvalidated, err = r.ConsentManager().VerifyAndInvalidateLogoutRequest(s.t1, lr.Verifier) - require.NoError(t, err) - require.NoError(t, r.Persister().Connection(context.Background()).Find(actual, lr.ID)) - require.Equal(t, lrInvalidated, actual) - require.Equal(t, true, actual.WasHandled) + t.Run("case=logout request that expired returns error", func(t *testing.T) { + lr := newLogoutRequest() + lr.ExpiresAt = sqlxx.NullTime(time.Now().UTC().Add(-time.Hour)) + lr.Verifier = uuid.Must(uuid.NewV4()).String() + lr.Accepted = true + lr.Rejected = false + require.NoError(t, r.LogoutManager().CreateLogoutRequest(s.t1, lr)) + + _, err := r.LogoutManager().VerifyAndInvalidateLogoutRequest(s.t2, lr.Verifier) + require.ErrorIs(t, err, x.ErrNotFound) + + _, err = r.LogoutManager().VerifyAndInvalidateLogoutRequest(s.t1, lr.Verifier) + require.ErrorIs(t, err, flow.ErrorLogoutFlowExpired) + }) }) } } func (s *PersisterTestSuite) TestWithFallbackNetworkID() { - t := s.T() for k, r := range s.registries { - t.Run(k, func(t *testing.T) { - r.WithContextualizer(&contextx.Default{}) - store, ok := r.Persister().(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } - original := store.NetworkID(context.Background()) + s.T().Run(k, func(t *testing.T) { + store1, ok := r.Persister().(*persistencesql.Persister) + require.True(t, ok) + original := store1.NetworkID(context.Background()) expected := uuid.Must(uuid.NewV4()) - store, ok = 
store.WithFallbackNetworkID(expected).(*persistencesql.Persister) - if !ok { - t.Fatal("type assertion failed") - } + store2 := store1.WithFallbackNetworkID(expected) - require.NotEqual(t, original, expected) - require.Equal(t, expected, store.NetworkID(context.Background())) + assert.NotEqual(t, original, expected) + assert.Equal(t, expected, store2.NetworkID(context.Background())) }) } } func TestPersisterTestSuite(t *testing.T) { + t.Parallel() + suite.Run(t, new(PersisterTestSuite)) } -func newClient() *client.Client { - return &client.Client{ - ID: uuid.Must(uuid.NewV4()), +func newFlow(nid uuid.UUID, clientID string, subject string, sessionID sqlxx.NullString) *flow.Flow { + return &flow.Flow{ + NID: nid, + ID: uuid.Must(uuid.NewV4()).String(), + ClientID: clientID, + Subject: subject, + State: flow.FlowStateConsentUnused, + ConsentRequestID: "not-null", + ConsentCSRF: "not-null", + SessionID: sessionID, + RequestedAt: time.Now(), } } -func newFlow(nid uuid.UUID, clientID string, subject string, sessionID sqlxx.NullString) *flow.Flow { - return &flow.Flow{ - NID: nid, - ID: uuid.Must(uuid.NewV4()).String(), - ClientID: clientID, - Subject: subject, - ConsentError: &consent.RequestDeniedError{}, - State: flow.FlowStateConsentUnused, - LoginError: &consent.RequestDeniedError{}, - Context: sqlxx.JSONRawMessage{}, - AMR: sqlxx.StringSliceJSONFormat{}, - ConsentChallengeID: sqlxx.NullString("not-null"), - ConsentVerifier: sqlxx.NullString("not-null"), - ConsentCSRF: sqlxx.NullString("not-null"), - SessionID: sessionID, - RequestedAt: time.Now(), - } -} - -func newGrant(keySet string, keyID string) trust.Grant { - return trust.Grant{ - ID: uuid.Must(uuid.NewV4()).String(), - ExpiresAt: time.Now().Add(time.Hour), - PublicKey: trust.PublicKey{ - Set: keySet, - KeyID: keyID, - }, - } -} - -func newLogoutRequest() *consent.LogoutRequest { - return &consent.LogoutRequest{ +func newLogoutRequest() *flow.LogoutRequest { + return &flow.LogoutRequest{ ID: uuid.Must(uuid.NewV4()).String(), } } func newKey(ksID string, use string) jose.JSONWebKey { - ks, err := jwk.GenerateJWK(context.Background(), jose.RS256, ksID, use) + ks, err := jwk.GenerateJWK(jose.RS256, ksID, use) if err != nil { panic(err) } @@ -2065,15 +1760,11 @@ func newKey(ksID string, use string) jose.JSONWebKey { } func newKeySet(id string, use string) *jose.JSONWebKeySet { - ks, err := jwk.GenerateJWK(context.Background(), jose.RS256, id, use) - if err != nil { - panic(err) - } - return ks + return x.Must(jwk.GenerateJWK(jose.RS256, id, use)) } -func newLoginSession() *consent.LoginSession { - return &consent.LoginSession{ +func newLoginSession() *flow.LoginSession { + return &flow.LoginSession{ ID: uuid.Must(uuid.NewV4()).String(), AuthenticatedAt: sqlxx.NullTime(time.Time{}), Subject: uuid.Must(uuid.NewV4()).String(), @@ -2084,3 +1775,8 @@ func newLoginSession() *consent.LoginSession { func requireKeySetEqual(t *testing.T, expected *jose.JSONWebKeySet, actual *jose.JSONWebKeySet) { assertx.EqualAsJSON(t, expected, actual) } + +func persistLoginSession(ctx context.Context, t *testing.T, p persistence.Persister, session *flow.LoginSession) { + t.Helper() + require.NoError(t, p.ConfirmLoginSession(ctx, session)) +} diff --git a/persistence/sql/persister_nonce.go b/persistence/sql/persister_nonce.go new file mode 100644 index 00000000000..e31a57f5a74 --- /dev/null +++ b/persistence/sql/persister_nonce.go @@ -0,0 +1,55 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sql + +import ( + "context" + "time" 
+ + "github.com/pkg/errors" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/verifiable" + "github.com/ory/hydra/v2/x" + "github.com/ory/x/otelx" +) + +var _ verifiable.NonceManager = (*Persister)(nil) + +// Set the aadAccessTokenPrefix to something unique to avoid ciphertext confusion with other usages of the AEAD cipher. +var aadAccessTokenPrefix = "vc-nonce-at:" // nolint:gosec + +// NewNonce implements NonceManager. +func (p *Persister) NewNonce(ctx context.Context, accessToken string, expiresIn time.Time) (res string, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.NewNonce") + defer otelx.End(span, &err) + + plaintext := x.IntToBytes(expiresIn.Unix()) + aad := []byte(aadAccessTokenPrefix + accessToken) + + return p.r.FlowCipher().Encrypt(ctx, plaintext, aad) +} + +// IsNonceValid implements NonceManager. +func (p *Persister) IsNonceValid(ctx context.Context, accessToken, nonce string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.IsNonceValid") + defer otelx.End(span, &err) + + aad := []byte(aadAccessTokenPrefix + accessToken) + plaintext, err := p.r.FlowCipher().Decrypt(ctx, nonce, aad) + if err != nil { + return errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The nonce is invalid.")) + } + + exp, err := x.BytesToInt(plaintext) + if err != nil { + return errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The nonce is invalid.")) // should never happen + } + + if exp < time.Now().Unix() { + return errors.WithStack(fosite.ErrInvalidRequest.WithHintf("The nonce has expired.")) + } + + return nil +} diff --git a/persistence/sql/persister_nonce_test.go b/persistence/sql/persister_nonce_test.go new file mode 100644 index 00000000000..4ecf4344c18 --- /dev/null +++ b/persistence/sql/persister_nonce_test.go @@ -0,0 +1,63 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package sql_test + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/x/randx" +) + +func TestPersister_Nonce(t *testing.T) { + t.Parallel() + + ctx := context.Background() + p := testhelpers.NewRegistryMemory(t).Persister() + + accessToken := randx.MustString(100, randx.AlphaNum) + anotherToken := randx.MustString(100, randx.AlphaNum) + validNonce, err := p.NewNonce(ctx, accessToken, time.Now().Add(1*time.Hour)) + require.NoError(t, err) + + expiredNonce, err := p.NewNonce(ctx, accessToken, time.Now().Add(-1*time.Hour)) + require.NoError(t, err) + + nonceForAnotherAccessToken, err := p.NewNonce(ctx, anotherToken, time.Now().Add(-1*time.Hour)) + require.NoError(t, err) + + for _, tc := range []struct { + name string + nonce string + assertErr assert.ErrorAssertionFunc + }{{ + name: "valid nonce", + nonce: validNonce, + assertErr: assert.NoError, + }, { + name: "expired nonce", + nonce: expiredNonce, + assertErr: assertInvalidRequest, + }, { + name: "nonce for another access token", + nonce: nonceForAnotherAccessToken, + assertErr: assertInvalidRequest, + }, + } { + t.Run("case="+tc.name, func(t *testing.T) { + err := p.IsNonceValid(ctx, accessToken, tc.nonce) + tc.assertErr(t, err) + }) + } +} + +func assertInvalidRequest(t assert.TestingT, err error, i ...interface{}) bool { + return 
assert.ErrorIs(t, err, fosite.ErrInvalidRequest) +} diff --git a/persistence/sql/persister_oauth2.go b/persistence/sql/persister_oauth2.go index 5d3acc585af..f64289e5ae4 100644 --- a/persistence/sql/persister_oauth2.go +++ b/persistence/sql/persister_oauth2.go @@ -5,34 +5,32 @@ package sql import ( "context" - "crypto/sha512" + "crypto/sha256" "database/sql" + "encoding/hex" "encoding/json" "fmt" "net/url" "strings" "time" - "github.com/gobuffalo/pop/v6" "github.com/gofrs/uuid" - - "github.com/ory/x/errorsx" - - "github.com/ory/fosite/storage" - "github.com/pkg/errors" "github.com/tidwall/gjson" - - "github.com/ory/fosite" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/hydra/v2/x/events" + "github.com/ory/x/dbal" + "github.com/ory/x/otelx" "github.com/ory/x/sqlcon" + "github.com/ory/x/sqlxx" "github.com/ory/x/stringsx" - - "github.com/ory/hydra/oauth2" ) -var _ oauth2.AssertionJWTReader = &Persister{} -var _ storage.Transactional = &Persister{} - type ( tableName string OAuth2RequestSQL struct { @@ -51,6 +49,14 @@ type ( Active bool `db:"active"` Session []byte `db:"session_data"` Table tableName `db:"-"` + // InternalExpiresAt denormalizes the expiry from the session to additionally store it as a row. + InternalExpiresAt sqlxx.NullTime `db:"expires_at" json:"-"` + } + OAuth2RefreshTable struct { + OAuth2RequestSQL + FirstUsedAt sql.NullTime `db:"first_used_at"` + AccessTokenSignature sql.NullString `db:"access_token_signature"` + UsedTimes sql.NullInt32 `db:"used_times"` } ) @@ -62,29 +68,189 @@ const ( sqlTablePKCE tableName = "pkce" ) +func (r OAuth2RefreshTable) TableName() string { + return "hydra_oauth2_refresh" +} + func (r OAuth2RequestSQL) TableName() string { return "hydra_oauth2_" + string(r.Table) } -func (p *Persister) sqlSchemaFromRequest(ctx context.Context, rawSignature string, r fosite.Requester, table tableName) (*OAuth2RequestSQL, error) { - subject := "" - if r.GetSession() == nil { - p.l.Debugf("Got an empty session in sqlSchemaFromRequest") +func (r *OAuth2RequestSQL) toRequest(ctx context.Context, session fosite.Session, p *Persister) (_ *fosite.Request, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.toRequest") + defer otelx.End(span, &err) + + sess := r.Session + if !gjson.ValidBytes(sess) { + var err error + sess, err = p.r.KeyCipher().Decrypt(ctx, string(sess), nil) + if err != nil { + return nil, err + } + } + + if session != nil { + if err := json.Unmarshal(sess, session); err != nil { + return nil, errors.WithStack(err) + } } else { - subject = r.GetSession().GetSubject() + p.l.Debugf("Got an empty session in toRequest") } - session, err := json.Marshal(r.GetSession()) + c, err := p.GetClient(ctx, r.Client) if err != nil { - return nil, errorsx.WithStack(err) + return nil, err } - if p.config.EncryptSessionData(ctx) { - ciphertext, err := p.r.KeyCipher().Encrypt(ctx, session) - if err != nil { - return nil, errorsx.WithStack(err) + val, err := url.ParseQuery(r.Form) + if err != nil { + return nil, errors.WithStack(err) + } + + return &fosite.Request{ + ID: r.Request, + RequestedAt: r.RequestedAt, + // ExpiresAt does not need to be populated as we 
get the expiry time from the session. + Client: c, + RequestedScope: stringsx.Splitx(r.Scopes, "|"), + GrantedScope: stringsx.Splitx(r.GrantedScope, "|"), + RequestedAudience: stringsx.Splitx(r.RequestedAudience, "|"), + GrantedAudience: stringsx.Splitx(r.GrantedAudience, "|"), + Form: val, + Session: session, + }, nil +} + +// strictRefreshRotation implements the strict refresh token rotation strategy. In strict rotation, we disable all +// refresh and access tokens associated with a request ID and subsequently create the only valid, new token pair. +func (p *Persister) strictRefreshRotation(ctx context.Context, requestID string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.strictRefreshRotation", + trace.WithAttributes( + attribute.String("request_id", requestID), + attribute.String("network_id", p.NetworkID(ctx).String()))) + defer otelx.End(span, &err) + + c := p.Connection(ctx) + + // In strict rotation we only have one token chain for every request. Therefore, we remove all + // access tokens associated with the request ID. + if err := p.deleteSessionByRequestID(ctx, requestID, sqlTableAccess); errors.Is(err, fosite.ErrNotFound) { + return nil // Tokens may have been pruned earlier, so we do not return an error here. + } else if err != nil { + return err + } + + // The same applies to refresh tokens in strict mode. We disable all old refresh tokens when rotating. + count, err := c.RawQuery( + "UPDATE hydra_oauth2_refresh SET active=false, expires_at = ? WHERE request_id=? AND nid = ? AND active", + // We don't expire immediately, but in 30 minutes to avoid prematurely removing + // rows while they may still be needed (e.g. for reuse detection). + newUsedExpiry(), + requestID, + p.NetworkID(ctx), + ).ExecWithCount() + if err != nil { + return sqlcon.HandleError(err) + } else if count == 0 { + return errors.WithStack(fosite.ErrNotFound) + } + + return nil +} + +func (p *Persister) gracefulRefreshRotation(ctx context.Context, requestID, refreshSignature string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.gracefulRefreshRotation", + trace.WithAttributes( + attribute.String("request_id", requestID), + attribute.String("network_id", p.NetworkID(ctx).String()))) + defer otelx.End(span, &err) + + c := p.Connection(ctx) + now := time.Now().UTC().Round(time.Millisecond) + // The new expiry of the token starts now and ends at the end of the graceful token period. + // After that, we can prune tokens from the store. + expiresAt := newUsedExpiry().Add(p.r.Config().GracefulRefreshTokenRotation(ctx).Period) + + // Signature is the primary key so no limit needed. We only update first_used_at if it is not set yet (otherwise + // we would "refresh" the grace period again and again, and the refresh token would never "expire"). + query := ` +UPDATE hydra_oauth2_refresh +SET + active=false, + first_used_at = COALESCE(first_used_at, ?), + used_times = COALESCE(used_times, 0) + 1, + expires_at = ? +WHERE signature = ? AND nid = ?` + + args := make([]any, 0, 5) + args = append(args, + now, + expiresAt, + refreshSignature, + p.NetworkID(ctx), + ) + + if l := p.r.Config().GracefulRefreshTokenRotation(ctx).Count; l > 0 { + query += " AND (used_times IS NULL OR used_times < ?)" + args = append(args, l) + } + + var accessTokenSignature sql.NullString + if c.Dialect.Name() == dbal.DriverMySQL { + // MySQL does not support returning values from an update query, so we need to do two queries. 
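// Illustrative sketch (assumption, not code from this changeset): as the comment above notes,
// MySQL cannot return the linked access_token_signature from the UPDATE itself, so it needs a
// SELECT first, while other dialects can use UPDATE ... RETURNING in one round trip. A
// trimmed-down, driver-agnostic version of both paths (helper name and placeholders are
// assumptions; shown against *sql.DB only, not runnable without a driver):
package sketch

import "database/sql"

// deactivateAndFetch deactivates a refresh token and returns the access-token signature linked
// to it. Table and column names follow the schema referenced in the hunk above.
func deactivateAndFetch(db *sql.DB, supportsReturning bool, refreshSig string) (string, error) {
	if supportsReturning {
		var accessSig sql.NullString
		err := db.QueryRow(
			`UPDATE hydra_oauth2_refresh SET active = false WHERE signature = $1 RETURNING access_token_signature`,
			refreshSig,
		).Scan(&accessSig)
		return accessSig.String, err
	}

	// MySQL path: read the linked signature first, then run the plain UPDATE.
	var accessSig sql.NullString
	if err := db.QueryRow(
		`SELECT access_token_signature FROM hydra_oauth2_refresh WHERE signature = ?`,
		refreshSig,
	).Scan(&accessSig); err != nil {
		return "", err
	}
	if _, err := db.Exec(`UPDATE hydra_oauth2_refresh SET active = false WHERE signature = ?`, refreshSig); err != nil {
		return "", err
	}
	return accessSig.String, nil
}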
+ var tokenToRevoke OAuth2RefreshTable + if err := c. + Select("access_token_signature"). + // Filtering by "active" status would break graceful token rotation. We know and trust (verified by tests) + // that Fosite handles the refresh token reuse detection logic without + // relying on the active filter here. + Where("signature = ? AND nid = ?", refreshSignature, p.NetworkID(ctx)). + First(&tokenToRevoke); err != nil { + return sqlcon.HandleError(err) + } + accessTokenSignature = tokenToRevoke.AccessTokenSignature + + if count, err := c.RawQuery(query, args...).ExecWithCount(); err != nil { + return sqlcon.HandleError(err) + } else if count == 0 { + return errors.WithStack(fosite.ErrNotFound) + } + } else { + // Same query as in the MySQL case, but we can return the access token signature directly. + if err := c.RawQuery(query+` RETURNING access_token_signature`, args...).First(&accessTokenSignature); errors.Is(err, sql.ErrNoRows) { + return errors.WithStack(fosite.ErrNotFound) + } else if err != nil { + return sqlcon.HandleError(err) } - session = []byte(ciphertext) + } + + if !accessTokenSignature.Valid { + // If the access token is not found, we fall back to deleting all access tokens associated with the request ID. + if err := p.deleteSessionByRequestID(ctx, requestID, sqlTableAccess); errors.Is(err, fosite.ErrNotFound) { + // Tokens may have been pruned earlier, so we do not return an error here. + return nil + } else if err != nil { + return err + } + } + + // We have the signature and we will only remove that specific access token as part of the rotation. + if err := p.deleteSessionBySignature(ctx, accessTokenSignature.String, sqlTableAccess); errors.Is(err, fosite.ErrNotFound) { + // Tokens may have been pruned earlier, so we do not return an error here.
+ return nil + } else if err != nil { + return err + } + + return nil +} + +func (p *Persister) sqlSchemaFromRequest(ctx context.Context, signature string, r fosite.Requester, table tableName, expiresAt time.Time) (*OAuth2RequestSQL, error) { + subject := "" + if r.GetSession() == nil { + p.l.Debugf("Got an empty session in sqlSchemaFromRequest") + } else { + subject = r.GetSession().GetSubject() } var challenge sql.NullString @@ -97,11 +263,25 @@ func (p *Persister) sqlSchemaFromRequest(ctx context.Context, rawSignature strin } } + session, err := json.Marshal(rr) + if err != nil { + return nil, errors.WithStack(err) + } + + if p.r.Config().EncryptSessionData(ctx) { + ciphertext, err := p.r.KeyCipher().Encrypt(ctx, session, nil) + if err != nil { + return nil, err + } + session = []byte(ciphertext) + } + return &OAuth2RequestSQL{ Request: r.GetID(), ConsentChallenge: challenge, - ID: p.hashSignature(ctx, rawSignature, table), + ID: signature, RequestedAt: r.GetRequestedAt(), + InternalExpiresAt: sqlxx.NullTime(expiresAt), Client: r.GetClient().GetID(), Scopes: strings.Join(r.GetRequestedScopes(), "|"), GrantedScope: strings.Join(r.GetGrantedScopes(), "|"), @@ -115,61 +295,151 @@ func (p *Persister) sqlSchemaFromRequest(ctx context.Context, rawSignature strin }, nil } -func (r *OAuth2RequestSQL) toRequest(ctx context.Context, session fosite.Session, p *Persister) (*fosite.Request, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.toRequest") - defer span.End() +func (p *Persister) createSession(ctx context.Context, signature string, requester fosite.Requester, table tableName, expiresAt time.Time) error { + req, err := p.sqlSchemaFromRequest(ctx, signature, requester, table, expiresAt) + if err != nil { + return err + } - sess := r.Session - if !gjson.ValidBytes(sess) { - var err error - sess, err = p.r.KeyCipher().Decrypt(ctx, string(sess)) + if err = sqlcon.HandleError(p.CreateWithNetwork(ctx, req)); errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return fosite.ErrSerializationFailure.WithWrap(err) + } else if err != nil { + return err + } + return nil +} + +func (p *Persister) findSessionBySignature(ctx context.Context, signature string, session fosite.Session, table tableName) (fosite.Requester, error) { + r := OAuth2RequestSQL{Table: table} + err := p.QueryWithNetwork(ctx).Where("signature = ?", signature).First(&r) + if errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(fosite.ErrNotFound) + } + if err != nil { + return nil, sqlcon.HandleError(err) + } + if !r.Active { + fr, err := r.toRequest(ctx, session, p) if err != nil { - return nil, errorsx.WithStack(err) + return nil, err + } + if table == sqlTableCode { + return fr, errors.WithStack(fosite.ErrInvalidatedAuthorizeCode) } + return fr, errors.WithStack(fosite.ErrInactiveToken) } - if session != nil { - if err := json.Unmarshal(sess, session); err != nil { - return nil, errorsx.WithStack(err) + return r.toRequest(ctx, session, p) +} + +func (p *Persister) deleteSessionBySignature(ctx context.Context, signature string, table tableName) error { + err := sqlcon.HandleError( + p.QueryWithNetwork(ctx). + Where("signature = ?", signature). 
+ Delete(OAuth2RequestSQL{Table: table}.TableName())) + if errors.Is(err, sqlcon.ErrNoRows) { + return errors.WithStack(fosite.ErrNotFound) + } + if errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return fosite.ErrSerializationFailure.WithWrap(err) + } + return err +} + +func (p *Persister) deleteSessionByRequestID(ctx context.Context, id string, table tableName) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.deleteSessionByRequestID") + defer otelx.End(span, &err) + + err = p.QueryWithNetwork(ctx). + Where("request_id=?", id). + Delete(OAuth2RequestSQL{Table: table}.TableName()) + if errors.Is(err, sql.ErrNoRows) { + return errors.WithStack(fosite.ErrNotFound) + } + if err := sqlcon.HandleError(err); err != nil { + if errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return fosite.ErrSerializationFailure.WithWrap(err) } - } else { - p.l.Debugf("Got an empty session in toRequest") + if strings.Contains(err.Error(), "Error 1213") { // InnoDB Deadlock? + return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) + } + return err } + return nil +} - c, err := p.GetClient(ctx, r.Client) - if err != nil { - return nil, err +func (p *Persister) flushInactiveTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int, table tableName, lifespan time.Duration) (err error) { + /* #nosec G201 table is static */ + // The value of notAfter should be the minimum between input parameter and token max expire based on its configured age + requestMaxExpire := time.Now().Add(-lifespan) + if requestMaxExpire.Before(notAfter) { + notAfter = requestMaxExpire } - val, err := url.ParseQuery(r.Form) - if err != nil { - return nil, errorsx.WithStack(err) + totalDeletedCount := 0 + for deletedRecords := batchSize; totalDeletedCount < limit && deletedRecords == batchSize; { + d := batchSize + if limit-totalDeletedCount < batchSize { + d = limit - totalDeletedCount + } + // Delete in batches + // The outer SELECT is necessary because our version of MySQL doesn't yet support 'LIMIT & IN/ALL/ANY/SOME subquery + deletedRecords, err = p.Connection(ctx).RawQuery( + fmt.Sprintf(`DELETE FROM %s WHERE signature in ( + SELECT signature FROM (SELECT signature FROM %s hoa WHERE requested_at < ? and nid = ? 
ORDER BY requested_at LIMIT %d ) as s + )`, OAuth2RequestSQL{Table: table}.TableName(), OAuth2RequestSQL{Table: table}.TableName(), d), + notAfter, + p.NetworkID(ctx), + ).ExecWithCount() + totalDeletedCount += deletedRecords + + if err != nil { + break + } + p.l.Debugf("Flushing tokens...: %d/%d", totalDeletedCount, limit) } + p.l.Debugf("Flush Refresh Tokens flushed_records: %d", totalDeletedCount) + return sqlcon.HandleError(err) +} - return &fosite.Request{ - ID: r.Request, - RequestedAt: r.RequestedAt, - Client: c, - RequestedScope: stringsx.Splitx(r.Scopes, "|"), - GrantedScope: stringsx.Splitx(r.GrantedScope, "|"), - RequestedAudience: stringsx.Splitx(r.RequestedAudience, "|"), - GrantedAudience: stringsx.Splitx(r.GrantedAudience, "|"), - Form: val, - Session: session, - }, nil +func toEventOptions(requester fosite.Requester) []trace.EventOption { + sub := "" + if requester.GetSession() != nil { + hash := sha256.Sum256([]byte(requester.GetSession().GetSubject())) + sub = hex.EncodeToString(hash[:]) + } + return []trace.EventOption{ + events.WithGrantType(requester.GetRequestForm().Get("grant_type")), + events.WithSubject(sub), + events.WithRequest(requester), + events.WithClientID(requester.GetClient().GetID()), + } } -// hashSignature prevents errors where the signature is longer than 128 characters (and thus doesn't fit into the pk). -func (p *Persister) hashSignature(ctx context.Context, signature string, table tableName) string { - if table == sqlTableAccess && p.config.IsUsingJWTAsAccessTokens(ctx) { - return fmt.Sprintf("%x", sha512.Sum384([]byte(signature))) +func handleRetryError(err error) error { + if err == nil { + return nil + } + + if errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return fosite.ErrSerializationFailure.WithWrap(err) + } + if strings.Contains(err.Error(), "Error 1213") { // InnoDB Deadlock + return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) } - return signature + return err +} + +func newUsedExpiry() time.Time { + // Reuse detection is racy and would generally happen within seconds. Using 30 minutes here is a paranoid + // setting but ensures that we do not prematurely remove rows while they may still be needed (e.g. for reuse detection). 
+ return time.Now().UTC().Round(time.Millisecond).Add(time.Minute * 30) } -func (p *Persister) ClientAssertionJWTValid(ctx context.Context, jti string) error { +// ClientAssertionJWTValid implements fosite.ClientManager +func (p *Persister) ClientAssertionJWTValid(ctx context.Context, jti string) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.ClientAssertionJWTValid") - defer span.End() + defer otelx.End(span, &err) j, err := p.GetClientAssertionJWT(ctx, jti) if errors.Is(err, sqlcon.ErrNoRows) { @@ -180,15 +450,16 @@ func (p *Persister) ClientAssertionJWTValid(ctx context.Context, jti string) err } if j.Expiry.After(time.Now()) { // the jti is not expired yet => invalid - return errorsx.WithStack(fosite.ErrJTIKnown) + return errors.WithStack(fosite.ErrJTIKnown) } // the jti is expired => valid return nil } -func (p *Persister) SetClientAssertionJWT(ctx context.Context, jti string, exp time.Time) error { +// SetClientAssertionJWT implements fosite.ClientManager +func (p *Persister) SetClientAssertionJWT(ctx context.Context, jti string, exp time.Time) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.SetClientAssertionJWT") - defer span.End() + defer otelx.End(span, &err) // delete expired; this cleanup spares us the need for a background worker if err := p.QueryWithNetwork(ctx).Where("expires_at < CURRENT_TIMESTAMP").Delete(&oauth2.BlacklistedJTI{}); err != nil { @@ -197,7 +468,7 @@ func (p *Persister) SetClientAssertionJWT(ctx context.Context, jti string, exp t if err := p.SetClientAssertionJWTRaw(ctx, oauth2.NewBlacklistedJTI(jti, exp)); errors.Is(err, sqlcon.ErrUniqueViolation) { // found a jti - return errorsx.WithStack(fosite.ErrJTIKnown) + return errors.WithStack(fosite.ErrJTIKnown) } else if err != nil { return err } @@ -206,149 +477,54 @@ func (p *Persister) SetClientAssertionJWT(ctx context.Context, jti string, exp t return nil } -func (p *Persister) GetClientAssertionJWT(ctx context.Context, j string) (*oauth2.BlacklistedJTI, error) { +// GetClientAssertionJWT implements AssertionJWTReader +func (p *Persister) GetClientAssertionJWT(ctx context.Context, j string) (_ *oauth2.BlacklistedJTI, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetClientAssertionJWT") - defer span.End() + defer otelx.End(span, &err) jti := oauth2.NewBlacklistedJTI(j, time.Time{}) return jti, sqlcon.HandleError(p.QueryWithNetwork(ctx).Find(jti, jti.ID)) } -func (p *Persister) SetClientAssertionJWTRaw(ctx context.Context, jti *oauth2.BlacklistedJTI) error { +// SetClientAssertionJWTRaw implements AssertionJWTReader +func (p *Persister) SetClientAssertionJWTRaw(ctx context.Context, jti *oauth2.BlacklistedJTI) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.SetClientAssertionJWTRaw") - defer span.End() + defer otelx.End(span, &err) return sqlcon.HandleError(p.CreateWithNetwork(ctx, jti)) } -func (p *Persister) createSession(ctx context.Context, signature string, requester fosite.Requester, table tableName) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.createSession") - defer span.End() - - req, err := p.sqlSchemaFromRequest(ctx, signature, requester, table) - if err != nil { - return err - } - - if err := sqlcon.HandleError(p.CreateWithNetwork(ctx, req)); errors.Is(err, sqlcon.ErrConcurrentUpdate) { - return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) - } else if err != nil { - return err - } - return nil -} - -func (p *Persister) 
findSessionBySignature(ctx context.Context, rawSignature string, session fosite.Session, table tableName) (fosite.Requester, error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.findSessionBySignature") - defer span.End() - - rawSignature = p.hashSignature(ctx, rawSignature, table) - - r := OAuth2RequestSQL{Table: table} - var fr fosite.Requester - - return fr, p.transaction(ctx, func(ctx context.Context, c *pop.Connection) error { - err := p.QueryWithNetwork(ctx).Where("signature = ?", rawSignature).First(&r) - if errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(fosite.ErrNotFound) - } else if err != nil { - return sqlcon.HandleError(err) - } else if !r.Active { - fr, err = r.toRequest(ctx, session, p) - if err != nil { - return err - } else if table == sqlTableCode { - return errorsx.WithStack(fosite.ErrInvalidatedAuthorizeCode) - } - - return errorsx.WithStack(fosite.ErrInactiveToken) - } - - fr, err = r.toRequest(ctx, session, p) - return err +// CreateAuthorizeCodeSession implements AuthorizeCodeStorage +func (p *Persister) CreateAuthorizeCodeSession(ctx context.Context, signature string, requester fosite.Requester) error { + return otelx.WithSpan(ctx, "persistence.sql.CreateAuthorizeCodeSession", func(ctx context.Context) error { + return p.createSession(ctx, signature, requester, sqlTableCode, requester.GetSession().GetExpiresAt(fosite.AuthorizeCode).UTC()) }) } -func (p *Persister) deleteSessionBySignature(ctx context.Context, signature string, table tableName) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.deleteSessionBySignature") - defer span.End() - - signature = p.hashSignature(ctx, signature, table) - - err := sqlcon.HandleError( - p.QueryWithNetwork(ctx). - Where("signature=?", signature). - Delete(&OAuth2RequestSQL{Table: table})) - - if errors.Is(err, sqlcon.ErrNoRows) { - return errorsx.WithStack(fosite.ErrNotFound) - } else if errors.Is(err, sqlcon.ErrConcurrentUpdate) { - return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) - } else if err != nil { - return err - } - return nil -} - -func (p *Persister) deleteSessionByRequestID(ctx context.Context, id string, table tableName) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.deleteSessionByRequestID") - defer span.End() - - /* #nosec G201 table is static */ - if err := p.QueryWithNetwork(ctx). - Where("request_id=?", id). - Delete(&OAuth2RequestSQL{Table: table}); errors.Is(err, sql.ErrNoRows) { - return errorsx.WithStack(fosite.ErrNotFound) - } else if err := sqlcon.HandleError(err); err != nil { - if errors.Is(err, sqlcon.ErrConcurrentUpdate) { - return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) - } else if strings.Contains(err.Error(), "Error 1213") { // InnoDB Deadlock? - return errors.Wrap(fosite.ErrSerializationFailure, err.Error()) - } - return err - } - return nil -} - -func (p *Persister) deactivateSessionByRequestID(ctx context.Context, id string, table tableName) error { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.deactivateSessionByRequestID") - defer span.End() - - /* #nosec G201 table is static */ - return sqlcon.HandleError( - p.Connection(ctx). - RawQuery( - fmt.Sprintf("UPDATE %s SET active=false WHERE request_id=? AND nid = ? AND active=true", OAuth2RequestSQL{Table: table}.TableName()), - id, - p.NetworkID(ctx), - ). 
- Exec(), - ) -} - -func (p *Persister) CreateAuthorizeCodeSession(ctx context.Context, signature string, requester fosite.Requester) (err error) { - ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateAuthorizeCodeSession") - defer span.End() - - return p.createSession(ctx, signature, requester, sqlTableCode) -} - +// GetAuthorizeCodeSession implements AuthorizeCodeStorage func (p *Persister) GetAuthorizeCodeSession(ctx context.Context, signature string, session fosite.Session) (request fosite.Requester, err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetAuthorizeCodeSession") - defer span.End() + defer otelx.End(span, &err) return p.findSessionBySignature(ctx, signature, session, sqlTableCode) } +// InvalidateAuthorizeCodeSession implements AuthorizeCodeStorage func (p *Persister) InvalidateAuthorizeCodeSession(ctx context.Context, signature string) (err error) { ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.InvalidateAuthorizeCodeSession") - defer span.End() + defer otelx.End(span, &err) /* #nosec G201 table is static */ return sqlcon.HandleError( p.Connection(ctx). RawQuery( - fmt.Sprintf("UPDATE %s SET active=false WHERE signature=? AND nid = ?", OAuth2RequestSQL{Table: sqlTableCode}.TableName()), + fmt.Sprintf( + "UPDATE %s SET active = false, expires_at = ? WHERE signature = ? AND nid = ?", + OAuth2RequestSQL{Table: sqlTableCode}.TableName(), + ), + // We don't expire immediately, but in 30 minutes to avoid prematurely removing + // rows while they may still be needed (e.g. for reuse detection). + newUsedExpiry(), signature, p.NetworkID(ctx), ). @@ -356,111 +532,255 @@ func (p *Persister) InvalidateAuthorizeCodeSession(ctx context.Context, signatur ) } +// CreateAccessTokenSession implements AccessTokenStorage func (p *Persister) CreateAccessTokenSession(ctx context.Context, signature string, requester fosite.Requester) (err error) { - return p.createSession(ctx, signature, requester, sqlTableAccess) + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateAccessTokenSession", + trace.WithAttributes(events.AccessTokenSignature(signature)), + ) + defer otelx.End(span, &err) + + events.Trace(ctx, events.AccessTokenIssued, + append(toEventOptions(requester), events.WithGrantType(requester.GetRequestForm().Get("grant_type")))..., + ) + + return p.createSession(ctx, x.SignatureHash(signature), requester, sqlTableAccess, requester.GetSession().GetExpiresAt(fosite.AccessToken).UTC()) } +// GetAccessTokenSession implements AccessTokenStorage func (p *Persister) GetAccessTokenSession(ctx context.Context, signature string, session fosite.Session) (request fosite.Requester, err error) { - return p.findSessionBySignature(ctx, signature, session, sqlTableAccess) + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetAccessTokenSession", + trace.WithAttributes(events.AccessTokenSignature(signature)), + ) + defer otelx.End(span, &err) + + r := OAuth2RequestSQL{Table: sqlTableAccess} + err = p.QueryWithNetwork(ctx).Where("signature = ?", x.SignatureHash(signature)).First(&r) + if errors.Is(err, sql.ErrNoRows) { + // Backwards compatibility: we previously did not always hash the + // signature before inserting. In case there are still very old (but + // valid) access tokens in the database, this should get them. 
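// Editor's aside, not part of the patch: the hash-then-fallback lookup in isolation.
// The digest below assumes x.SignatureHash matches the hashSignature helper removed
// earlier in this diff (SHA-512/384, hex encoded); treat that, the function name and
// the row shape as illustrative assumptions, not Hydra's API.
package example

import (
	"crypto/sha512"
	"database/sql"
	"errors"
	"fmt"
)

func signatureHash(signature string) string {
	return fmt.Sprintf("%x", sha512.Sum384([]byte(signature)))
}

// findAccessTokenRequestID prefers the hashed signature and only retries with the
// raw value for rows written before hashing became unconditional.
func findAccessTokenRequestID(db *sql.DB, signature string) (requestID string, err error) {
	const q = "SELECT request_id FROM hydra_oauth2_access WHERE signature = ?"
	err = db.QueryRow(q, signatureHash(signature)).Scan(&requestID)
	if errors.Is(err, sql.ErrNoRows) {
		// Legacy row stored under the unhashed signature.
		err = db.QueryRow(q, signature).Scan(&requestID)
	}
	return requestID, err
}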
+ err = p.QueryWithNetwork(ctx).Where("signature = ?", signature).First(&r) + if errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(fosite.ErrNotFound) + } + } + if err != nil { + return nil, sqlcon.HandleError(err) + } + if !r.Active { + fr, err := r.toRequest(ctx, session, p) + if err != nil { + return nil, err + } + return fr, errors.WithStack(fosite.ErrInactiveToken) + } + + return r.toRequest(ctx, session, p) } +// DeleteAccessTokenSession implements AccessTokenStorage func (p *Persister) DeleteAccessTokenSession(ctx context.Context, signature string) (err error) { - return p.deleteSessionBySignature(ctx, signature, sqlTableAccess) + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteAccessTokenSession", + trace.WithAttributes(events.AccessTokenSignature(signature)), + ) + defer otelx.End(span, &err) + + err = sqlcon.HandleError( + p.QueryWithNetwork(ctx). + Where("signature = ?", x.SignatureHash(signature)). + Delete(OAuth2RequestSQL{Table: sqlTableAccess}.TableName())) + if errors.Is(err, sqlcon.ErrNoRows) { + // Backwards compatibility: we previously did not always hash the + // signature before inserting. In case there are still very old (but + // valid) access tokens in the database, this should get them. + err = sqlcon.HandleError( + p.QueryWithNetwork(ctx). + Where("signature = ?", signature). + Delete(OAuth2RequestSQL{Table: sqlTableAccess}.TableName())) + if errors.Is(err, sqlcon.ErrNoRows) { + return errors.WithStack(fosite.ErrNotFound) + } + } + if errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return fosite.ErrSerializationFailure.WithWrap(err) + } + return err } -func (p *Persister) CreateRefreshTokenSession(ctx context.Context, signature string, requester fosite.Requester) (err error) { - return p.createSession(ctx, signature, requester, sqlTableRefresh) +// CreateRefreshTokenSession implements RefreshTokenStorage +func (p *Persister) CreateRefreshTokenSession(ctx context.Context, signature string, accessTokenSignature string, requester fosite.Requester) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateRefreshTokenSession", + trace.WithAttributes(events.RefreshTokenSignature(signature)), + ) + defer otelx.End(span, &err) + events.Trace(ctx, events.RefreshTokenIssued, toEventOptions(requester)...) 
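// Editor's aside, not part of the patch: roughly what an events.Trace call such as the
// one above amounts to in plain OpenTelemetry terms. The events package is not shown in
// this diff, so the mapping is an assumption; only the otel API below is real.
package example

import (
	"context"
	"crypto/sha256"
	"encoding/hex"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"
)

// recordTokenIssued attaches a span event to the span already carried in ctx. The
// subject is hashed, mirroring toEventOptions earlier in this file, so no raw
// identifier ends up in traces.
func recordTokenIssued(ctx context.Context, event, clientID, subject string) {
	sub := sha256.Sum256([]byte(subject))
	trace.SpanFromContext(ctx).AddEvent(event, trace.WithAttributes(
		attribute.String("client_id", clientID),
		attribute.String("subject", hex.EncodeToString(sub[:])),
	))
}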
+ + req, err := p.sqlSchemaFromRequest(ctx, signature, requester, sqlTableRefresh, requester.GetSession().GetExpiresAt(fosite.RefreshToken).UTC()) + if err != nil { + return err + } + + var sig sql.NullString + if len(accessTokenSignature) > 0 { + sig = sql.NullString{ + Valid: true, + String: x.SignatureHash(accessTokenSignature), + } + } + + if err = sqlcon.HandleError(p.CreateWithNetwork(ctx, &OAuth2RefreshTable{ + OAuth2RequestSQL: *req, + AccessTokenSignature: sig, + })); errors.Is(err, sqlcon.ErrConcurrentUpdate) { + return fosite.ErrSerializationFailure.WithWrap(err) + } else if err != nil { + return err + } + + return nil } +// GetRefreshTokenSession implements RefreshTokenStorage func (p *Persister) GetRefreshTokenSession(ctx context.Context, signature string, session fosite.Session) (request fosite.Requester, err error) { - return p.findSessionBySignature(ctx, signature, session, sqlTableRefresh) + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetRefreshTokenSession", + trace.WithAttributes(events.RefreshTokenSignature(signature)), + ) + defer otelx.End(span, &err) + + var row OAuth2RefreshTable + if err := p.QueryWithNetwork(ctx).Where("signature = ?", signature).First(&row); errors.Is(err, sql.ErrNoRows) { + return nil, errors.WithStack(fosite.ErrNotFound) + } else if err != nil { + return nil, sqlcon.HandleError(err) + } + + if row.Active { + // Token is active + return row.toRequest(ctx, session, p) + } + + if graceful := p.r.Config().GracefulRefreshTokenRotation(ctx); graceful.Period > 0 && + row.FirstUsedAt.Valid && + row.FirstUsedAt.Time.Add(graceful.Period).After(time.Now()) && + (graceful.Count == 0 || // no limit + (row.UsedTimes.Int32 < graceful.Count)) { + // We return the request as is, which indicates that the token is active (because we are in the grace period still). + return row.toRequest(ctx, session, p) + } + + fositeRequest, err := row.toRequest(ctx, session, p) + if err != nil { + return nil, err + } + + return fositeRequest, errors.WithStack(fosite.ErrInactiveToken) } +// DeleteRefreshTokenSession implements RefreshTokenStorage func (p *Persister) DeleteRefreshTokenSession(ctx context.Context, signature string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteRefreshTokenSession", + trace.WithAttributes(events.RefreshTokenSignature(signature)), + ) + defer otelx.End(span, &err) return p.deleteSessionBySignature(ctx, signature, sqlTableRefresh) } -func (p *Persister) CreateOpenIDConnectSession(ctx context.Context, signature string, requester fosite.Requester) error { - return p.createSession(ctx, signature, requester, sqlTableOpenID) +// RotateRefreshToken implements RefreshTokenStorage +func (p *Persister) RotateRefreshToken(ctx context.Context, requestID, refreshTokenSignature string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RotateRefreshToken") + defer otelx.End(span, &err) + + // If we end up here, we have a valid refresh token and can proceed with the rotation. 
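// Editor's aside, not part of the patch: the grace-period predicate used by
// GetRefreshTokenSession above, pulled out as a pure function. The parameter names
// stand in for the configuration values and row fields from the patch and are
// assumptions for illustration only.
package example

import (
	"database/sql"
	"time"
)

// stillInGracePeriod reports whether an already-rotated refresh token may still be
// accepted: rotation must be graceful (period > 0), the first use must fall inside
// the grace window, and the reuse count must be below the limit (0 means unlimited).
func stillInGracePeriod(firstUsedAt sql.NullTime, usedTimes sql.NullInt32, period time.Duration, count int32, now time.Time) bool {
	if period <= 0 || !firstUsedAt.Valid {
		return false
	}
	if !firstUsedAt.Time.Add(period).After(now) {
		return false // grace window elapsed
	}
	return count == 0 || usedTimes.Int32 < count
}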
+ if p.r.Config().GracefulRefreshTokenRotation(ctx).Period > 0 { + return handleRetryError(p.gracefulRefreshRotation(ctx, requestID, refreshTokenSignature)) + } + + return handleRetryError(p.strictRefreshRotation(ctx, requestID)) } -func (p *Persister) GetOpenIDConnectSession(ctx context.Context, signature string, requester fosite.Requester) (fosite.Requester, error) { +// CreateOpenIDConnectSession implements OpenIDConnectRequestStorage +func (p *Persister) CreateOpenIDConnectSession(ctx context.Context, signature string, requester fosite.Requester) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreateOpenIDConnectSession") + defer otelx.End(span, &err) + events.Trace(ctx, events.IdentityTokenIssued, toEventOptions(requester)...) + // The expiry of an OIDC session is equal to the expiry of the authorization code. If the code is invalid, so is this OIDC request. + return p.createSession(ctx, signature, requester, sqlTableOpenID, requester.GetSession().GetExpiresAt(fosite.AuthorizeCode).UTC()) +} + +// GetOpenIDConnectSession implements OpenIDConnectRequestStorage +func (p *Persister) GetOpenIDConnectSession(ctx context.Context, signature string, requester fosite.Requester) (_ fosite.Requester, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetOpenIDConnectSession") + defer otelx.End(span, &err) return p.findSessionBySignature(ctx, signature, requester.GetSession(), sqlTableOpenID) } -func (p *Persister) DeleteOpenIDConnectSession(ctx context.Context, signature string) error { +// DeleteOpenIDConnectSession implements OpenIDConnectRequestStorage +func (p *Persister) DeleteOpenIDConnectSession(ctx context.Context, signature string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteOpenIDConnectSession") + defer otelx.End(span, &err) return p.deleteSessionBySignature(ctx, signature, sqlTableOpenID) } -func (p *Persister) GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (fosite.Requester, error) { +// GetPKCERequestSession implements PKCERequestStorage +func (p *Persister) GetPKCERequestSession(ctx context.Context, signature string, session fosite.Session) (_ fosite.Requester, err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.GetPKCERequestSession") + defer otelx.End(span, &err) return p.findSessionBySignature(ctx, signature, session, sqlTablePKCE) } -func (p *Persister) CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) error { - return p.createSession(ctx, signature, requester, sqlTablePKCE) +// CreatePKCERequestSession implements PKCERequestStorage +func (p *Persister) CreatePKCERequestSession(ctx context.Context, signature string, requester fosite.Requester) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.CreatePKCERequestSession") + defer otelx.End(span, &err) + // The expiry of a PKCE session is equal to the expiry of the authorization code. If the code is invalid, so is this PKCE request. 
+ return p.createSession(ctx, signature, requester, sqlTablePKCE, requester.GetSession().GetExpiresAt(fosite.AuthorizeCode).UTC()) } -func (p *Persister) DeletePKCERequestSession(ctx context.Context, signature string) error { +// DeletePKCERequestSession implements PKCERequestStorage +func (p *Persister) DeletePKCERequestSession(ctx context.Context, signature string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeletePKCERequestSession") + defer otelx.End(span, &err) return p.deleteSessionBySignature(ctx, signature, sqlTablePKCE) } -func (p *Persister) RevokeRefreshToken(ctx context.Context, id string) error { - return p.deactivateSessionByRequestID(ctx, id, sqlTableRefresh) -} - -func (p *Persister) RevokeRefreshTokenMaybeGracePeriod(ctx context.Context, id string, _ string) error { - return p.deactivateSessionByRequestID(ctx, id, sqlTableRefresh) +// RevokeRefreshToken implements TokenRevocationStorage +func (p *Persister) RevokeRefreshToken(ctx context.Context, id string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeRefreshToken", + trace.WithAttributes(events.ConsentRequestID(id)), + ) + defer otelx.End(span, &err) + return p.deleteSessionByRequestID(ctx, id, sqlTableRefresh) } -func (p *Persister) RevokeAccessToken(ctx context.Context, id string) error { +// RevokeAccessToken implements TokenRevocationStorage +func (p *Persister) RevokeAccessToken(ctx context.Context, id string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.RevokeAccessToken", + trace.WithAttributes(events.ConsentRequestID(id)), + ) + defer otelx.End(span, &err) return p.deleteSessionByRequestID(ctx, id, sqlTableAccess) } -func (p *Persister) flushInactiveTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int, table tableName, lifespan time.Duration) error { - /* #nosec G201 table is static */ - // The value of notAfter should be the minimum between input parameter and token max expire based on its configured age - requestMaxExpire := time.Now().Add(-lifespan) - if requestMaxExpire.Before(notAfter) { - notAfter = requestMaxExpire - } - - var err error - - totalDeletedCount := 0 - for deletedRecords := batchSize; totalDeletedCount < limit && deletedRecords == batchSize; { - d := batchSize - if limit-totalDeletedCount < batchSize { - d = limit - totalDeletedCount - } - // Delete in batches - // The outer SELECT is necessary because our version of MySQL doesn't yet support 'LIMIT & IN/ALL/ANY/SOME subquery - deletedRecords, err = p.Connection(ctx).RawQuery( - fmt.Sprintf(`DELETE FROM %s WHERE signature in ( - SELECT signature FROM (SELECT signature FROM %s hoa WHERE requested_at < ? and nid = ? 
ORDER BY signature LIMIT %d ) as s - )`, OAuth2RequestSQL{Table: table}.TableName(), OAuth2RequestSQL{Table: table}.TableName(), d), - notAfter, - p.NetworkID(ctx), - ).ExecWithCount() - totalDeletedCount += deletedRecords - - if err != nil { - break - } - p.l.Debugf("Flushing tokens...: %d/%d", totalDeletedCount, limit) - } - p.l.Debugf("Flush Refresh Tokens flushed_records: %d", totalDeletedCount) - return sqlcon.HandleError(err) +// FlushInactiveAccessTokens implements FositeStorer +func (p *Persister) FlushInactiveAccessTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FlushInactiveAccessTokens") + defer otelx.End(span, &err) + return p.flushInactiveTokens(ctx, notAfter, limit, batchSize, sqlTableAccess, p.r.Config().GetAccessTokenLifespan(ctx)) } -func (p *Persister) FlushInactiveAccessTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int) error { - return p.flushInactiveTokens(ctx, notAfter, limit, batchSize, sqlTableAccess, p.config.GetAccessTokenLifespan(ctx)) +// FlushInactiveRefreshTokens implements FositeStorer +func (p *Persister) FlushInactiveRefreshTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.FlushInactiveRefreshTokens") + defer otelx.End(span, &err) + return p.flushInactiveTokens(ctx, notAfter, limit, batchSize, sqlTableRefresh, p.r.Config().GetRefreshTokenLifespan(ctx)) } -func (p *Persister) FlushInactiveRefreshTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int) error { - return p.flushInactiveTokens(ctx, notAfter, limit, batchSize, sqlTableRefresh, p.config.GetRefreshTokenLifespan(ctx)) -} - -func (p *Persister) DeleteAccessTokens(ctx context.Context, clientID string) error { +// DeleteAccessTokens implements FositeStorer +func (p *Persister) DeleteAccessTokens(ctx context.Context, clientID string) (err error) { + ctx, span := p.r.Tracer(ctx).Tracer().Start(ctx, "persistence.sql.DeleteAccessTokens", + trace.WithAttributes(events.ClientID(clientID)), + ) + defer otelx.End(span, &err) /* #nosec G201 table is static */ return sqlcon.HandleError( p.QueryWithNetwork(ctx).Where("client_id=?", clientID).Delete(&OAuth2RequestSQL{Table: sqlTableAccess}), diff --git a/persistence/sql/persister_test.go b/persistence/sql/persister_test.go index f870e4a81dc..71789716427 100644 --- a/persistence/sql/persister_test.go +++ b/persistence/sql/persister_test.go @@ -8,26 +8,23 @@ import ( "testing" "time" - "gopkg.in/square/go-jose.v2" - - "github.com/gobuffalo/pop/v6" + "github.com/go-jose/go-jose/v3" "github.com/gofrs/uuid" - "github.com/instana/testify/assert" - "github.com/instana/testify/require" "github.com/pkg/errors" - - "github.com/ory/hydra/client" - "github.com/ory/hydra/consent" - "github.com/ory/hydra/internal/testhelpers" - "github.com/ory/hydra/oauth2/trust" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/consent/test" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/jwk" + "github.com/ory/hydra/v2/oauth2/trust" + 
"github.com/ory/pop/v6" "github.com/ory/x/contextx" "github.com/ory/x/dbal" "github.com/ory/x/networkx" - - "github.com/ory/hydra/jwk" - - "github.com/ory/hydra/driver" - "github.com/ory/hydra/internal" + "github.com/ory/x/servicelocatorx" ) func init() { @@ -36,31 +33,62 @@ func init() { }) } -func testRegistry(t *testing.T, ctx context.Context, k string, t1 driver.Registry, t2 driver.Registry) { - t.Run("package=client/manager="+k, func(t *testing.T) { - t.Run("case=create-get-update-delete", client.TestHelperCreateGetUpdateDeleteClient(k, t1.Persister().Connection(context.Background()), t1.ClientManager(), t2.ClientManager())) +func testRegistry(t *testing.T, db string, t1, t2 *driver.RegistrySQL) { + // TODO enable parallel tests for mysql once we support automatic transaction retries + var parallel bool + switch db { + case "mysql", "sqlite": + parallel = false + default: + parallel = true + } + + t.Run("client", func(t *testing.T) { + if parallel { + // currently not possible as we have a lot of side-effects on listing of the clients between this and other tests + // t.Parallel() + } + + t.Run("case=create-get-update-delete", client.TestHelperCreateGetUpdateDeleteClient(t1.ClientManager(), t2.ClientManager())) - t.Run("case=autogenerate-key", client.TestHelperClientAutoGenerateKey(k, t1.ClientManager())) + t.Run("case=autogenerate-key", client.TestHelperClientAutoGenerateKey(t1.ClientManager())) - t.Run("case=auth-client", client.TestHelperClientAuthenticate(k, t1.ClientManager())) + t.Run("case=auth-client", client.TestHelperClientAuthenticate(t1.ClientManager())) - t.Run("case=update-two-clients", client.TestHelperUpdateTwoClients(k, t1.ClientManager())) + t.Run("case=update-two-clients", client.TestHelperUpdateTwoClients(t1.ClientManager())) }) - parallel := true - if k == "memory" || k == "mysql" || k == "cockroach" { // TODO enable parallel tests for cockroach once we configure the cockroach integration test server to support retry - parallel = false - } + for _, reg := range []*driver.RegistrySQL{t1, t2} { + t.Run("consent", func(t *testing.T) { + if parallel { + t.Parallel() + } + test.ConsentManagerTests(t, reg, reg.ConsentManager(), reg.LoginManager(), reg.ClientManager(), reg.OAuth2Storage()) + }) + + t.Run("login", func(t *testing.T) { + if parallel { + t.Parallel() + } + test.LoginManagerTest(t, reg, reg.LoginManager()) + }) - t.Run("package=consent/manager="+k, consent.ManagerTests(t1.ConsentManager(), t1.ClientManager(), t1.OAuth2Storage(), "t1", parallel)) - t.Run("package=consent/manager="+k, consent.ManagerTests(t2.ConsentManager(), t2.ClientManager(), t2.OAuth2Storage(), "t2", parallel)) + t.Run("obfuscated subject", func(t *testing.T) { + if parallel { + t.Parallel() + } + test.ObfuscatedSubjectManagerTest(t, reg, reg.ObfuscatedSubjectManager(), reg.ClientManager()) + }) - t.Run("parallel-boundary", func(t *testing.T) { - t.Run("package=consent/janitor="+k, testhelpers.JanitorTests(t1.Config(), t1.ConsentManager(), t1.ClientManager(), t1.OAuth2Storage(), "t1", parallel)) - t.Run("package=consent/janitor="+k, testhelpers.JanitorTests(t2.Config(), t2.ConsentManager(), t2.ClientManager(), t2.OAuth2Storage(), "t2", parallel)) - }) + t.Run("logout", func(t *testing.T) { + if parallel { + t.Parallel() + } + test.LogoutManagerTest(t, reg.LogoutManager(), reg.ClientManager()) + }) + } - t.Run("package=jwk/manager="+k, func(t *testing.T) { + t.Run("jwk", func(t 
*testing.T) { for _, tc := range []struct { alg string skip bool @@ -83,7 +111,7 @@ func testRegistry(t *testing.T, ctx context.Context, k string, t1 driver.Registr } else { kid, err := uuid.NewV4() require.NoError(t, err) - ks, err := jwk.GenerateJWK(context.Background(), jose.SignatureAlgorithm(tc.alg), kid.String(), "sig") + ks, err := jwk.GenerateJWK(jose.SignatureAlgorithm(tc.alg), kid.String(), "sig") require.NoError(t, err) t.Run("TestManagerKey", jwk.TestHelperManagerKey(t1.KeyManager(), tc.alg, ks, kid.String())) t.Run("Parallel", func(t *testing.T) { @@ -105,28 +133,24 @@ func testRegistry(t *testing.T, ctx context.Context, k string, t1 driver.Registr }) }) - t.Run("package=grant/trust/manager="+k, func(t *testing.T) { - t.Run("parallel-boundary", func(t *testing.T) { + t.Run("trust", func(t *testing.T) { + t.Run("parallel boundary", func(t *testing.T) { t.Run("case=create-get-delete/network=t1", trust.TestHelperGrantManagerCreateGetDeleteGrant(t1.GrantManager(), t1.KeyManager(), parallel)) t.Run("case=create-get-delete/network=t2", trust.TestHelperGrantManagerCreateGetDeleteGrant(t2.GrantManager(), t2.KeyManager(), parallel)) }) - t.Run("parallel-boundary", func(t *testing.T) { - t.Run("case=errors", trust.TestHelperGrantManagerErrors(t1.GrantManager(), t1.KeyManager(), parallel)) - t.Run("case=errors", trust.TestHelperGrantManagerErrors(t2.GrantManager(), t2.KeyManager(), parallel)) + t.Run("parallel boundary", func(t *testing.T) { + t.Run("case=errors", trust.TestHelperGrantManagerErrors(t1.GrantManager(), t1.KeyManager())) + t.Run("case=errors", trust.TestHelperGrantManagerErrors(t2.GrantManager(), t2.KeyManager())) }) }) } func TestManagersNextGen(t *testing.T) { - regs := map[string]driver.Registry{ - "memory": internal.NewRegistrySQLFromURL(t, dbal.NewSQLiteTestDatabase(t), true, &contextx.Default{}), - } + t.Parallel() - if !testing.Short() { - regs["postgres"], regs["mysql"], regs["cockroach"], _ = internal.ConnectDatabases(t, true, &contextx.Default{}) - } + regs := testhelpers.ConnectDatabases(t, true, driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.TestContextualizer{}))) - ctx := context.Background() + ctx := t.Context() networks := make([]uuid.UUID, 5) for k := range networks { nid := uuid.Must(uuid.NewV4()) @@ -137,11 +161,6 @@ func TestManagersNextGen(t *testing.T) { } for k := range regs { - regs[k].WithContextualizer(new(contextx.TestContextualizer)) - } - - for k := range regs { - k := k t.Run("database="+k, func(t *testing.T) { t.Parallel() client.TestHelperCreateGetUpdateDeleteClientNext(t, regs[k].Persister(), networks) @@ -150,43 +169,47 @@ func TestManagersNextGen(t *testing.T) { } func TestManagers(t *testing.T) { - ctx := context.TODO() - t1registries := map[string]driver.Registry{ - "memory": internal.NewRegistrySQLFromURL(t, dbal.NewSQLiteTestDatabase(t), true, &contextx.Default{}), - } + t.Parallel() - t2registries := map[string]driver.Registry{ - "memory": internal.NewRegistrySQLFromURL(t, dbal.NewSQLiteTestDatabase(t), false, &contextx.Default{}), + dsns := map[string]string{ + "sqlite": dbal.NewSQLiteTestDatabase(t), } - if !testing.Short() { - t2registries["postgres"], t2registries["mysql"], t2registries["cockroach"], _ = internal.ConnectDatabases(t, false, &contextx.Default{}) - t1registries["postgres"], t1registries["mysql"], t1registries["cockroach"], _ = internal.ConnectDatabases(t, true, &contextx.Default{}) + dsns["postgres"], dsns["mysql"], dsns["cockroach"] = testhelpers.ConnectDatabasesURLs(t) } + 
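// Editor's aside, not part of the patch: the per-database fan-out used by TestManagers
// just below, reduced to its core. The DSN values are placeholders and connectAndRunSuite
// is an assumed helper; the real test builds full registries per database instead.
package example

import "testing"

func TestPerDatabase(t *testing.T) {
	t.Parallel()
	dsns := map[string]string{
		"sqlite":   "sqlite://file::memory:?_fk=true",             // placeholder DSN
		"postgres": "postgres://hydra:secret@127.0.0.1:5432/hydra", // placeholder DSN
	}
	for db, dsn := range dsns {
		t.Run(db, func(t *testing.T) {
			t.Parallel() // each database gets its own parallel subtest
			t.Logf("Testing database %s: %q", db, dsn)
			// connectAndRunSuite(t, dsn) would go here in a real suite.
		})
	}
}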
network1NID, network2NID, invalidNID := uuid.Must(uuid.NewV4()), uuid.Must(uuid.NewV4()), uuid.Must(uuid.NewV4()) - network1NID, _ := uuid.NewV4() - network2NID, _ := uuid.NewV4() + for db, dsn := range dsns { + t.Run(db, func(t *testing.T) { + t.Parallel() + t.Logf("Testing database %s: %q", db, dsn) - for k, t1 := range t1registries { - t2 := t2registries[k] - require.NoError(t, t1.Persister().Connection(ctx).Create(&networkx.Network{ID: network1NID})) - require.NoError(t, t2.Persister().Connection(ctx).Create(&networkx.Network{ID: network2NID})) - t1.WithContextualizer(&contextx.Static{NID: network1NID, C: t1.Config().Source(context.Background())}) - t2.WithContextualizer(&contextx.Static{NID: network2NID, C: t2.Config().Source(context.Background())}) - t.Run("parallel-boundary", func(t *testing.T) { testRegistry(t, ctx, k, t1, t2) }) - } + r1 := testhelpers.NewRegistrySQLFromURL(t, dsn, true, true, driver.DisableValidation(), driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.Static{NID: network1NID}))) + r2 := testhelpers.NewRegistrySQLFromURL(t, dsn, false, true, driver.DisableValidation(), driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.Static{NID: network2NID}))) + rInv := testhelpers.NewRegistrySQLFromURL(t, dsn, false, true, driver.DisableValidation(), driver.SkipNetworkInit(), driver.WithServiceLocatorOptions(servicelocatorx.WithContextualizer(&contextx.Static{NID: invalidNID}))) + + require.NoError(t, r1.Persister().Connection(t.Context()).Create(&networkx.Network{ID: network1NID})) + require.NoError(t, r1.Persister().Connection(t.Context()).Create(&networkx.Network{ID: network2NID})) + + require.Equal(t, network1NID, r1.Persister().NetworkID(t.Context())) + require.Equal(t, network2NID, r2.Persister().NetworkID(t.Context())) + require.Equal(t, invalidNID, rInv.Persister().NetworkID(t.Context())) - for k, t1 := range t1registries { - t2 := t2registries[k] - t2.WithContextualizer(&contextx.Static{NID: uuid.Nil, C: t2.Config().Source(context.Background())}) + t.Run("parallel boundary", func(t *testing.T) { testRegistry(t, db, r1, r2) }) - if !t1.Config().HSMEnabled() { // We don't support NID isolation with HSM at the moment - t.Run("package=jwk/manager="+k+"/case=nid", - jwk.TestHelperNID(t1.KeyManager(), t2.KeyManager()), + if db == "sqlite" { + // The following tests rely on foreign key constraints, which some of them are not correctly created in the SQLite schema. 
+ return + } + + // if !r1.Config().HSMEnabled() { + t.Run("jwk nid", + jwk.TestHelperNID(r1.KeyManager(), rInv.KeyManager()), ) - } - t.Run("package=consent/manager="+k+"/case=nid", - consent.TestHelperNID(t1.ClientManager(), t1.ConsentManager(), t2.ConsentManager()), - ) + // } + t.Run("login nid", + test.LoginNIDTest(r1.Persister(), rInv.Persister()), + ) + }) } } diff --git a/persistence/sql/src/20150101000001_networks/20150101000001000000_networks.postgres.up.sql b/persistence/sql/src/20150101000001_networks/20150101000001000000_networks.postgres.up.sql index b8cf1372617..d3f33a36598 100644 --- a/persistence/sql/src/20150101000001_networks/20150101000001000000_networks.postgres.up.sql +++ b/persistence/sql/src/20150101000001_networks/20150101000001000000_networks.postgres.up.sql @@ -4,15 +4,5 @@ CREATE TABLE "networks" ( "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL ); - -INSERT INTO networks (id, created_at, updated_at) VALUES (uuid_in( - overlay( - overlay( - md5(random()::text || ':' || clock_timestamp()::text) - placing '4' - from 13 - ) - placing to_hex(floor(random()*(11-8+1) + 8)::int)::text - from 17 - )::cstring -), '2013-10-07 08:23:19', '2013-10-07 08:23:19'); +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +INSERT INTO networks (id, created_at, updated_at) VALUES (uuid_generate_v4(), '2013-10-07 08:23:19', '2013-10-07 08:23:19'); diff --git a/persistence/sql/src/20220210000001_nid/20220210000001000000_nid.cockroach.up.sql b/persistence/sql/src/20220210000001_nid/20220210000001000000_nid.cockroach.up.sql index 65a12bef989..70afb860736 100644 --- a/persistence/sql/src/20220210000001_nid/20220210000001000000_nid.cockroach.up.sql +++ b/persistence/sql/src/20220210000001_nid/20220210000001000000_nid.cockroach.up.sql @@ -103,8 +103,9 @@ DROP INDEX hydra_oauth2_jti_blacklist_expires_at_idx; --split CREATE INDEX hydra_oauth2_jti_blacklist_expires_at_idx ON hydra_oauth2_jti_blacklist (expires_at ASC, nid ASC); --split -ALTER TABLE hydra_oauth2_jti_blacklist DROP CONSTRAINT "primary"; -ALTER TABLE hydra_oauth2_jti_blacklist ADD CONSTRAINT hydra_oauth2_jti_blacklist_pkey PRIMARY KEY (signature ASC, nid ASC); +ALTER TABLE hydra_oauth2_jti_blacklist + DROP CONSTRAINT "primary", + ADD CONSTRAINT hydra_oauth2_jti_blacklist_pkey PRIMARY KEY (signature ASC, nid ASC); --split -- hydra_oauth2_logout_request @@ -132,8 +133,9 @@ ALTER TABLE hydra_oauth2_obfuscated_authentication_session ALTER nid SET NOT NUL --split ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT hydra_oauth2_obfuscated_authentication_session_client_id_fk FOREIGN KEY (client_id, nid) REFERENCES hydra_client(id, nid) ON DELETE CASCADE; --split -ALTER TABLE hydra_oauth2_obfuscated_authentication_session DROP CONSTRAINT "primary"; -ALTER TABLE hydra_oauth2_obfuscated_authentication_session ADD CONSTRAINT "hydra_oauth2_obfuscated_authentication_session_pkey" PRIMARY KEY (subject ASC, client_id ASC, nid ASC); +ALTER TABLE hydra_oauth2_obfuscated_authentication_session + DROP CONSTRAINT "primary", + ADD CONSTRAINT "hydra_oauth2_obfuscated_authentication_session_pkey" PRIMARY KEY (subject ASC, client_id ASC, nid ASC); --split DROP INDEX hydra_oauth2_obfuscated_authentication_session_client_id_subject_obfuscated_idx; --split diff --git a/quickstart-cockroach.yml b/quickstart-cockroach.yml index 339f317866b..81cc3b4dde2 100644 --- a/quickstart-cockroach.yml +++ b/quickstart-cockroach.yml @@ -9,7 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial.
# # # ########################################################################### -version: "3.7" services: hydra-migrate: environment: @@ -18,7 +17,7 @@ services: environment: - DSN=cockroach://root@cockroachd:26257/defaultdb?sslmode=disable&max_conns=20&max_idle_conns=4 cockroachd: - image: cockroachdb/cockroach:v22.1.10 + image: cockroachdb/cockroach:latest-v25.4 ports: - "26257:26257" command: start-single-node --insecure diff --git a/quickstart-cors.yml b/quickstart-cors.yml index 0bde8012573..8bc137897ff 100644 --- a/quickstart-cors.yml +++ b/quickstart-cors.yml @@ -9,9 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### - -version: "3.7" - services: hydra: environment: diff --git a/quickstart-debug.yml b/quickstart-debug.yml index a64b28efc2e..40780f7260d 100644 --- a/quickstart-debug.yml +++ b/quickstart-debug.yml @@ -9,9 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### - -version: "3.7" - services: hydra: environment: diff --git a/quickstart-hsm.yml b/quickstart-hsm.yml index 709ab55e81b..c5822f9ae64 100644 --- a/quickstart-hsm.yml +++ b/quickstart-hsm.yml @@ -9,9 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### - -version: "3.7" - services: hydra: build: diff --git a/quickstart-jwt.yml b/quickstart-jwt.yml index 0262dd29c0c..8227be78998 100644 --- a/quickstart-jwt.yml +++ b/quickstart-jwt.yml @@ -9,9 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### - -version: "3.7" - services: hydra: environment: diff --git a/quickstart-mysql.yml b/quickstart-mysql.yml index b693d1812b2..28a97b92b1b 100644 --- a/quickstart-mysql.yml +++ b/quickstart-mysql.yml @@ -9,7 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### -version: "3.7" services: hydra-migrate: environment: @@ -18,7 +17,7 @@ services: environment: - DSN=mysql://root:secret@tcp(mysqld:3306)/mysql?max_conns=20&max_idle_conns=4 mysqld: - image: mysql:8.0.26 + image: mysql:8.0 ports: - "3306:3306" environment: diff --git a/quickstart-postgres.yml b/quickstart-postgres.yml index a5e845cb948..331ca81c57c 100644 --- a/quickstart-postgres.yml +++ b/quickstart-postgres.yml @@ -9,7 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### -version: "3.7" services: hydra-migrate: environment: @@ -18,7 +17,7 @@ services: environment: - DSN=postgres://hydra:secret@postgresd:5432/hydra?sslmode=disable&max_conns=20&max_idle_conns=4 postgresd: - image: postgres:11.8 + image: postgres:16 ports: - "5432:5432" environment: diff --git a/quickstart-prometheus.yml b/quickstart-prometheus.yml index 89b2455b755..ba944436aad 100644 --- a/quickstart-prometheus.yml +++ b/quickstart-prometheus.yml @@ -9,9 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. 
# # # ########################################################################### - -version: "3.7" - services: prometheus: image: prom/prometheus:v2.12.0 diff --git a/quickstart-tracing.yml b/quickstart-tracing.yml index 19bb8657c06..ae7758e4a23 100644 --- a/quickstart-tracing.yml +++ b/quickstart-tracing.yml @@ -9,9 +9,6 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### - -version: "3.7" - services: hydra: depends_on: @@ -22,18 +19,19 @@ services: # - TRACING_SERVICE_NAME="Ory Hydra" - TRACING_PROVIDER=jaeger # - TRACING_PROVIDER=zipkin - # - TRACING_PROVIDER=datadog + # - TRACING_PROVIDER=otel # datadog # - TRACING_PROVIDER=elastic-apm ### Jaeger ### - TRACING_PROVIDERS_JAEGER_SAMPLING_SERVER_URL=http://jaeger:5778/sampling - TRACING_PROVIDERS_JAEGER_LOCAL_AGENT_ADDRESS=jaeger:6831 - - TRACING_PROVIDERS_JAEGER_SAMPLING_TYPE=const - - TRACING_PROVIDERS_JAEGER_SAMPLING_VALUE=1 + - TRACING_PROVIDERS_JAEGER_SAMPLING_TRACE_ID_RATIO=1 ### Zipkin ### # - TRACING_PROVIDERS_ZIPKIN_SERVER_URL=http://zipkin:9411/api/v2/spans ### DataDog ### ### See env vars here: https://docs.datadoghq.com/tracing/setup/go/#configuration) ### - # - DD_SERVICE=Hydra + # - TRACING_PROVIDERS_OTLP_INSECURE=true + # - TRACING_PROVIDERS_OTLP_SAMPLING_SAMPLING_RATIO=1.0 + # - TRACING_PROVIDERS_OTLP_SERVER_URL=localhost:4318 ### Elastic APM ### ### See env vars here: https://www.elastic.co/guide/en/apm/agent/go/1.x/configuration.html) ### # - ELASTIC_APM_SERVER_URL="http://apm-server:8200" @@ -61,3 +59,4 @@ services: # - DD_API_KEY= # Replace it with your DataDog API key # - DD_APM_ENABLED=true # - DD_APM_NON_LOCAL_TRAFFIC=true +# - DD_OTLP_CONFIG_RECEIVER_PROTOCOLS_HTTP_ENDPOINT=0.0.0.0:4318 diff --git a/quickstart.yml b/quickstart.yml index ad860c40d50..88a90cbea3b 100644 --- a/quickstart.yml +++ b/quickstart.yml @@ -9,50 +9,59 @@ # endpoint can only be used if you follow the steps in the tutorial. # # # ########################################################################### -version: "3.7" services: + sqlite: + image: busybox + volumes: + - hydra-sqlite:/mnt/sqlite + command: "chmod -R 777 /mnt/sqlite" hydra: - image: oryd/hydra:v2.0.3 + image: oryd/hydra:v25.4.0 + build: + context: . + dockerfile: .docker/Dockerfile-local-build ports: - "4444:4444" # Public port - "4445:4445" # Admin port - "5555:5555" # Port for hydra token user command: serve -c /etc/config/hydra/hydra.yml all --dev volumes: - - type: volume - source: hydra-sqlite - target: /var/lib/sqlite - read_only: false + - hydra-sqlite:/mnt/sqlite:rw - type: bind source: ./contrib/quickstart/5-min target: /etc/config/hydra + pull_policy: missing environment: - - DSN=sqlite:///var/lib/sqlite/db.sqlite?_fk=true + - DSN=sqlite:///mnt/sqlite/db.sqlite?_fk=true&mode=rwc restart: unless-stopped depends_on: - hydra-migrate + - sqlite networks: - intranet hydra-migrate: - image: oryd/hydra:v2.0.3 + image: oryd/hydra:v25.4.0 + build: + context: . 
+ dockerfile: .docker/Dockerfile-local-build environment: - - DSN=sqlite:///var/lib/sqlite/db.sqlite?_fk=true - command: migrate -c /etc/config/hydra/hydra.yml sql -e --yes + - DSN=sqlite:///mnt/sqlite/db.sqlite?_fk=true&mode=rwc + command: migrate -c /etc/config/hydra/hydra.yml sql up -e --yes + pull_policy: missing volumes: - - type: volume - source: hydra-sqlite - target: /var/lib/sqlite - read_only: false + - hydra-sqlite:/mnt/sqlite:rw - type: bind source: ./contrib/quickstart/5-min target: /etc/config/hydra restart: on-failure networks: - intranet + depends_on: + - sqlite consent: environment: - HYDRA_ADMIN_URL=http://hydra:4445 - image: oryd/hydra-login-consent-node:v2.0.3 + image: oryd/hydra-login-consent-node:v25.4.0 ports: - "3000:3000" restart: unless-stopped diff --git a/scripts/5min-tutorial.sh b/scripts/5min-tutorial.sh index f6ec624e3a2..d5b6c8bb242 100755 --- a/scripts/5min-tutorial.sh +++ b/scripts/5min-tutorial.sh @@ -4,7 +4,7 @@ DB=${DB:-postgres} TRACING=${TRACING:-false} PROMETHEUS=${PROMETHEUS:-false} -DC="docker-compose -f quickstart.yml" +DC="docker compose -f quickstart.yml" if [[ $DB == "mysql" ]]; then DC+=" -f quickstart-mysql.yml" fi @@ -20,4 +20,3 @@ fi DC+=" up --build" $DC - diff --git a/scripts/db-diff.sh b/scripts/db-diff.sh index eee37fec8d4..c1b22106829 100755 --- a/scripts/db-diff.sh +++ b/scripts/db-diff.sh @@ -81,7 +81,7 @@ function dump_pg { make test-resetdb >/dev/null 2>&1 sleep 4 - go run . migrate sql "$TEST_DATABASE_POSTGRESQL" --yes >&2 || true + go run . migrate sql up "$TEST_DATABASE_POSTGRESQL" --yes >&2 || true sleep 1 pg_dump -s "$TEST_DATABASE_POSTGRESQL" | sed '/^--/d' } @@ -94,9 +94,9 @@ function dump_cockroach { make test-resetdb >/dev/null 2>&1 sleep 4 - go run . migrate sql "$TEST_DATABASE_COCKROACHDB" --yes > /dev/null || true + go run . migrate sql up "$TEST_DATABASE_COCKROACHDB" --yes > /dev/null || true hydra::util::parse-connection-url "${TEST_DATABASE_COCKROACHDB}" - docker run --rm --net=host -it cockroachdb/cockroach:v20.2.6 dump --dump-all --dump-mode=schema --insecure --user="${DB_USER}" --host="${DB_HOST}" --port="${DB_PORT}" + docker run --rm --net=host -it cockroachdb/cockroach:latest-v25.4 dump --dump-all --dump-mode=schema --insecure --user="${DB_USER}" --host="${DB_HOST}" --port="${DB_PORT}" } function dump_sqlite { @@ -107,7 +107,7 @@ function dump_sqlite { hydra::util::ensure-sqlite rm "$SQLITE_PATH" > /dev/null 2>&1 || true - go run -tags sqlite,json1 . migrate sql "sqlite://$SQLITE_PATH?_fk=true" --yes > /dev/null 2>&1 || true + go run -tags sqlite,sqlite_omit_load_extension . migrate sql up "sqlite://$SQLITE_PATH?_fk=true" --yes > /dev/null 2>&1 || true echo '.dump' | sqlite3 "$SQLITE_PATH" } @@ -120,7 +120,7 @@ function dump_mysql { hydra::util::ensure-mysqldump make test-resetdb >/dev/null 2>&1 sleep 10 - go run . migrate sql "$TEST_DATABASE_MYSQL" --yes > /dev/null || true + go run . 
migrate sql up "$TEST_DATABASE_MYSQL" --yes > /dev/null || true hydra::util::parse-connection-url "${TEST_DATABASE_MYSQL}" mysqldump --user="$DB_USER" --password="$DB_PASSWORD" --host="$DB_HOST" --port="$DB_PORT" "$DB_DB" --no-data } diff --git a/scripts/render-schemas.sh b/scripts/render-schemas.sh new file mode 100755 index 00000000000..dc6079a7b02 --- /dev/null +++ b/scripts/render-schemas.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +schema_version="${1:-$(git rev-parse --short HEAD)}" + +sed "s!ory://tracing-config!https://raw.githubusercontent.com/ory/hydra/$schema_version/oryx/otelx/config.schema.json!g;" spec/config.json > .schema/config.schema.json + +git commit --author="ory-bot <60093411+ory-bot@users.noreply.github.com>" -m "autogen: render config schema" .schema/config.schema.json || true diff --git a/spec/api.json b/spec/api.json index d439715e270..bc8aa128263 100644 --- a/spec/api.json +++ b/spec/api.json @@ -2,7 +2,7 @@ "components": { "responses": { "emptyResponse": { - "description": "Empty responses are sent when, for example, resources are deleted. The HTTP status code for empty responses is\ntypically 201." + "description": "Empty responses are sent when, for example, resources are deleted. The HTTP status code for empty responses is\ntypically 204." }, "errorOAuth2BadRequest": { "content": { @@ -49,6 +49,54 @@ } }, "schemas": { + "CreateVerifiableCredentialRequestBody": { + "properties": { + "format": { + "type": "string" + }, + "proof": { + "$ref": "#/components/schemas/VerifiableCredentialProof" + }, + "types": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "title": "CreateVerifiableCredentialRequestBody contains the request body to request a verifiable credential.", + "type": "object" + }, + "DefaultError": {}, + "DeviceUserAuthRequest": { + "properties": { + "challenge": { + "description": "ID is the identifier (\"device challenge\") of the device grant request. It is used to\nidentify the session.", + "type": "string" + }, + "client": { + "$ref": "#/components/schemas/oAuth2Client" + }, + "handled_at": { + "$ref": "#/components/schemas/nullTime" + }, + "request_url": { + "description": "RequestURL is the original Device Authorization URL requested.", + "type": "string" + }, + "requested_access_token_audience": { + "$ref": "#/components/schemas/StringSliceJSONFormat" + }, + "requested_scope": { + "$ref": "#/components/schemas/StringSliceJSONFormat" + } + }, + "required": [ + "challenge" + ], + "title": "Contains information on an ongoing device grant request.", + "type": "object" + }, "JSONRawMessage": { "title": "JSONRawMessage represents a json.RawMessage that works well with JSON, SQL, and Swagger." 
}, @@ -80,6 +128,28 @@ "nullable": true, "type": "string" }, + "RFC6749ErrorJson": { + "properties": { + "error": { + "type": "string" + }, + "error_debug": { + "type": "string" + }, + "error_description": { + "type": "string" + }, + "error_hint": { + "type": "string" + }, + "status_code": { + "format": "int64", + "type": "integer" + } + }, + "title": "RFC6749ErrorJson is a helper struct for JSON encoding/decoding of RFC6749Error.", + "type": "object" + }, "StringSliceJSONFormat": { "items": { "type": "string" @@ -95,17 +165,38 @@ "format": "uuid4", "type": "string" }, + "VerifiableCredentialProof": { + "properties": { + "jwt": { + "type": "string" + }, + "proof_type": { + "type": "string" + } + }, + "title": "VerifiableCredentialProof contains the proof of a verifiable credential.", + "type": "object" + }, + "acceptDeviceUserCodeRequest": { + "description": "Contains information on an device verification", + "properties": { + "user_code": { + "type": "string" + } + }, + "type": "object" + }, "acceptOAuth2ConsentRequest": { "properties": { + "context": { + "$ref": "#/components/schemas/JSONRawMessage" + }, "grant_access_token_audience": { "$ref": "#/components/schemas/StringSliceJSONFormat" }, "grant_scope": { "$ref": "#/components/schemas/StringSliceJSONFormat" }, - "handled_at": { - "$ref": "#/components/schemas/nullTime" - }, "remember": { "description": "Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same\nclient asks the same user for the same, or a subset of, scope.", "type": "boolean" @@ -137,7 +228,7 @@ "acceptOAuth2LoginRequest": { "properties": { "acr": { - "description": "ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it\nto express that, for example, a user authenticated using two factor authentication.", + "description": "ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it\nto express that, for example, a user authenticated using two-factor authentication.", "type": "string" }, "amr": { @@ -146,12 +237,20 @@ "context": { "$ref": "#/components/schemas/JSONRawMessage" }, + "extend_session_lifespan": { + "description": "Extend OAuth2 authentication session lifespan\n\nIf set to `true`, the OAuth2 authentication cookie lifespan is extended. This is for example useful if you want the user to be able to use `prompt=none` continuously.\n\nThis value can only be set to `true` if the user has an authentication, which is the case if the `skip` value is `true`.", + "type": "boolean" + }, "force_subject_identifier": { "description": "ForceSubjectIdentifier forces the \"pairwise\" user ID of the end-user that authenticated. The \"pairwise\" user ID refers to the\n(Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID\nConnect specification. It allows you to set an obfuscated subject (\"user\") identifier that is unique to the client.\n\nPlease note that this changes the user ID on endpoint /userinfo and sub claim of the ID Token. It does not change the\nsub claim in the OAuth 2.0 Introspection.\n\nPer default, ORY Hydra handles this value with its own algorithm. In case you want to set this yourself\nyou can use this field. 
Please note that setting this field has no effect if `pairwise` is not configured in\nORY Hydra or the OAuth 2.0 Client does not expect a pairwise identifier (set via `subject_type` key in the client's\nconfiguration).\n\nPlease also be aware that ORY Hydra is unable to properly compute this value during authentication. This implies\nthat you have to compute this value on every authentication process (probably depending on the client ID or some\nother unique value).\n\nIf you fail to compute the proper value, then authentication processes which have id_token_hint set might fail.", "type": "string" }, + "identity_provider_session_id": { + "description": "IdentityProviderSessionID is the session ID of the end-user that authenticated.\nIf specified, we will use this value to propagate the logout.", + "type": "string" + }, "remember": { - "description": "Remember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store\na cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she\nwill not be asked to log in again.", + "description": "Remember, if set to true, tells Ory Hydra to remember this user by telling the user agent (browser) to store\na cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, they\nwill not be asked to log in again.", "type": "boolean" }, "remember_for": { @@ -193,6 +292,77 @@ ], "type": "object" }, + "credentialSupportedDraft00": { + "description": "Includes information about the supported verifiable credentials.", + "properties": { + "cryptographic_binding_methods_supported": { + "description": "OpenID Connect Verifiable Credentials Cryptographic Binding Methods Supported\n\nContains a list of cryptographic binding methods supported for signing the proof.", + "items": { + "type": "string" + }, + "type": "array" + }, + "cryptographic_suites_supported": { + "description": "OpenID Connect Verifiable Credentials Cryptographic Suites Supported\n\nContains a list of cryptographic suites methods supported for signing the proof.", + "items": { + "type": "string" + }, + "type": "array" + }, + "format": { + "description": "OpenID Connect Verifiable Credentials Format\n\nContains the format that is supported by this authorization server.", + "type": "string" + }, + "types": { + "description": "OpenID Connect Verifiable Credentials Types\n\nContains the types of verifiable credentials supported.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "title": "Verifiable Credentials Metadata (Draft 00)", + "type": "object" + }, + "deviceAuthorization": { + "description": "# Ory's OAuth 2.0 Device Authorization API", + "properties": { + "device_code": { + "description": "The device verification code.", + "example": "ory_dc_smldfksmdfkl.mslkmlkmlk", + "type": "string" + }, + "expires_in": { + "description": "The lifetime in seconds of the \"device_code\" and \"user_code\".", + "example": 16830, + "format": "int64", + "type": "integer" + }, + "interval": { + "description": "The minimum amount of time in seconds that the client\nSHOULD wait between polling requests to the token endpoint. If no\nvalue is provided, clients MUST use 5 as the default.", + "example": 5, + "format": "int64", + "type": "integer" + }, + "user_code": { + "description": "The end-user verification code.", + "example": "AAAAAA", + "type": "string" + }, + "verification_uri": { + "description": "The end-user verification URI on the authorization\nserver. 
The URI should be short and easy to remember as end users\nwill be asked to manually type it into their user agent.", + "example": "https://auth.ory.sh/tv", + "type": "string" + }, + "verification_uri_complete": { + "description": "A verification URI that includes the \"user_code\" (or\nother information with the same function as the \"user_code\"),\nwhich is designed for non-textual transmission.", + "example": "https://auth.ory.sh/tv?user_code=AAAAAA", + "type": "string" + } + }, + "title": "OAuth2 Device Flow", + "type": "object" + }, "errorOAuth2": { "description": "Error", "properties": { @@ -278,6 +448,7 @@ "type": "object" } }, + "title": "The not ready status of the service.", "type": "object" }, "healthStatus": { @@ -287,6 +458,7 @@ "type": "string" } }, + "title": "The health status of the service.", "type": "object" }, "introspectedOAuth2Token": { @@ -304,7 +476,7 @@ "type": "array" }, "client_id": { - "description": "ID is aclient identifier for the OAuth 2.0 client that\nrequested this token.", + "description": "ID is a client identifier for the OAuth 2.0 client that\nrequested this token.", "type": "string" }, "exp": { @@ -496,6 +668,36 @@ }, "type": "object" }, + "keysetPaginationRequestParameters": { + "description": "For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "properties": { + "page_size": { + "default": 250, + "description": "Items per Page\n\nThis is the number of items per page to return.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "format": "int64", + "maximum": 1000, + "minimum": 1, + "type": "integer" + }, + "page_token": { + "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "type": "string" + } + }, + "title": "Pagination Request Parameters", + "type": "object" + }, + "keysetPaginationResponseHeaders": { + "description": "The `Link` HTTP header contains multiple links (`first`, `next`) formatted as:\n`\u003chttps://{project-slug}.projects.oryapis.com/admin/sessions?page_size=250\u0026page_token=\u003e; rel=\"first\"`\n\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "properties": { + "link": { + "description": "The Link HTTP Header\n\nThe `Link` header contains a comma-delimited list of links to the following pages:\n\nfirst: The first page of results.\nnext: The next page of results.\n\nPages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples:\n\n\u003c/admin/sessions?page_size=250\u0026page_token={last_item_uuid}; rel=\"first\",/admin/sessions?page_size=250\u0026page_token=\u003e; rel=\"next\"", + "type": "string" + } + }, + "title": "Pagination Response Header", + "type": "object" + }, "nullDuration": { "nullable": true, "pattern": "^[0-9]+(ns|us|ms|s|m|h)$", @@ -513,6 +715,10 @@ "oAuth2Client": { "description": "OAuth 2.0 Clients are used to perform OAuth 2.0 and OpenID Connect flows. 
Usually, OAuth 2.0 clients are\ngenerated for applications which want to consume your OAuth 2.0 or OpenID Connect capabilities.", "properties": { + "access_token_strategy": { + "description": "OAuth 2.0 Access Token Strategy\n\nAccessTokenStrategy is the strategy used to generate access tokens.\nValid options are `jwt` and `opaque`. `jwt` is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token\nSetting the strategy here overrides the global setting in `strategies.access_token`.", + "type": "string" + }, "allowed_cors_origins": { "$ref": "#/components/schemas/StringSliceJSONFormat" }, @@ -540,7 +746,7 @@ "$ref": "#/components/schemas/NullDuration" }, "client_id": { - "description": "OAuth 2.0 Client ID\n\nThe ID is autogenerated and immutable.", + "description": "OAuth 2.0 Client ID\n\nThe ID is immutable. If no ID is provided, a UUID4 will be generated.", "type": "string" }, "client_name": { @@ -568,6 +774,15 @@ "format": "date-time", "type": "string" }, + "device_authorization_grant_access_token_lifespan": { + "$ref": "#/components/schemas/NullDuration" + }, + "device_authorization_grant_id_token_lifespan": { + "$ref": "#/components/schemas/NullDuration" + }, + "device_authorization_grant_refresh_token_lifespan": { + "$ref": "#/components/schemas/NullDuration" + }, "frontchannel_logout_session_required": { "description": "OpenID Connect Front-Channel Logout Session Required\n\nBoolean value specifying whether the RP requires that iss (issuer) and sid (session ID) query parameters be\nincluded to identify the RP session with the OP when the frontchannel_logout_uri is used.\nIf omitted, the default value is false.", "type": "boolean" @@ -586,7 +801,7 @@ "$ref": "#/components/schemas/NullDuration" }, "jwks": { - "description": "OAuth 2.0 Client JSON Web Key Set\n\nClient's JSON Web Key Set [JWK] document, passed by value. The semantics of the jwks parameter are the same as\nthe jwks_uri parameter, other than that the JWK Set is passed by value, rather than by reference. This parameter\nis intended only to be used by Clients that, for some reason, are unable to use the jwks_uri parameter, for\ninstance, by native applications that might not have a location to host the contents of the JWK Set. If a Client\ncan use jwks_uri, it MUST NOT use jwks. One significant downside of jwks is that it does not enable key rotation\n(which jwks_uri does, as described in Section 10 of OpenID Connect Core 1.0 [OpenID.Core]). The jwks_uri and jwks\nparameters MUST NOT be used together." + "$ref": "#/components/schemas/jsonWebKeySet" }, "jwks_uri": { "description": "OAuth 2.0 Client JSON Web Key Set URL\n\nURL for the Client's JSON Web Key Set [JWK] document. If the Client signs requests to the Server, it contains\nthe signing key(s) the Server uses to validate signatures from the Client. The JWK Set MAY also contain the\nClient's encryption keys(s), which are used by the Server to encrypt responses to the Client. When both signing\nand encryption keys are made available, a use (Key Use) parameter value is REQUIRED for all keys in the referenced\nJWK Set to indicate each key's intended usage. Although some algorithms allow the same key to be used for both\nsignatures and encryption, doing so is NOT RECOMMENDED, as it is less secure. The JWK x5c parameter MAY be used\nto provide X.509 representations of keys provided. 
When used, the bare key values MUST still be present and MUST\nmatch those in the certificate.", @@ -652,12 +867,21 @@ "description": "OpenID Connect Sector Identifier URI\n\nURL using the https scheme to be used in calculating Pseudonymous Identifiers by the OP. The URL references a\nfile with a single JSON array of redirect_uri values.", "type": "string" }, + "skip_consent": { + "description": "SkipConsent skips the consent screen for this client. This field can only\nbe set from the admin API.", + "type": "boolean" + }, + "skip_logout_consent": { + "description": "SkipLogoutConsent skips the logout consent screen for this client. This field can only\nbe set from the admin API.", + "type": "boolean" + }, "subject_type": { "description": "OpenID Connect Subject Type\n\nThe `subject_types_supported` Discovery parameter contains a\nlist of the supported subject_type values for this server. Valid types include `pairwise` and `public`.", "type": "string" }, "token_endpoint_auth_method": { - "description": "OAuth 2.0 Token Endpoint Authentication Method\n\nRequested Client Authentication method for the Token Endpoint. The options are:\n\n`client_secret_post`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body.\n`client_secret_basic`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header.\n`private_key_jwt`: Use JSON Web Tokens to authenticate the client.\n`none`: Used for public clients (native apps, mobile apps) which can not have secrets.", + "default": "client_secret_basic", + "description": "OAuth 2.0 Token Endpoint Authentication Method\n\nRequested Client Authentication method for the Token Endpoint. The options are:\n\n`client_secret_basic`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header.\n`client_secret_post`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body.\n`private_key_jwt`: Use JSON Web Tokens to authenticate the client.\n`none`: Used for public clients (native apps, mobile apps) which can not have secrets.", "type": "string" }, "token_endpoint_auth_signing_alg": { @@ -696,6 +920,15 @@ "client_credentials_grant_access_token_lifespan": { "$ref": "#/components/schemas/NullDuration" }, + "device_authorization_grant_access_token_lifespan": { + "$ref": "#/components/schemas/NullDuration" + }, + "device_authorization_grant_id_token_lifespan": { + "$ref": "#/components/schemas/NullDuration" + }, + "device_authorization_grant_refresh_token_lifespan": { + "$ref": "#/components/schemas/NullDuration" + }, "implicit_grant_access_token_lifespan": { "$ref": "#/components/schemas/NullDuration" }, @@ -728,12 +961,16 @@ "$ref": "#/components/schemas/StringSliceJSONFormat" }, "challenge": { - "description": "ID is the identifier (\"authorization challenge\") of the consent authorization request. 
It is used to\nidentify the session.", + "description": "Challenge is used to retrieve/accept/deny the consent request.", "type": "string" }, "client": { "$ref": "#/components/schemas/oAuth2Client" }, + "consent_request_id": { + "description": "ConsentRequestID is the ID of the consent request.", + "type": "string" + }, "context": { "$ref": "#/components/schemas/JSONRawMessage" }, @@ -812,30 +1049,12 @@ "consent_request": { "$ref": "#/components/schemas/oAuth2ConsentRequest" }, - "expires_at": { - "properties": { - "access_token": { - "format": "date-time", - "type": "string" - }, - "authorize_code": { - "format": "date-time", - "type": "string" - }, - "id_token": { - "format": "date-time", - "type": "string" - }, - "par_context": { - "format": "date-time", - "type": "string" - }, - "refresh_token": { - "format": "date-time", - "type": "string" - } - }, - "type": "object" + "consent_request_id": { + "description": "ConsentRequestID is the identifier of the consent request that initiated this consent session.", + "type": "string" + }, + "context": { + "$ref": "#/components/schemas/JSONRawMessage" }, "grant_access_token_audience": { "$ref": "#/components/schemas/StringSliceJSONFormat" @@ -872,7 +1091,7 @@ "oAuth2LoginRequest": { "properties": { "challenge": { - "description": "ID is the identifier (\"login challenge\") of the login request. It is used to\nidentify the session.", + "description": "ID is the identifier of the login request.", "type": "string" }, "client": { @@ -906,8 +1125,6 @@ }, "required": [ "challenge", - "requested_scope", - "requested_access_token_audience", "skip", "subject", "client", @@ -919,16 +1136,22 @@ "oAuth2LogoutRequest": { "properties": { "challenge": { - "description": "Challenge is the identifier (\"logout challenge\") of the logout authentication request. It is used to\nidentify the session.", + "description": "Challenge is the identifier of the logout authentication request.", "type": "string" }, "client": { "$ref": "#/components/schemas/oAuth2Client" }, + "expires_at": { + "$ref": "#/components/schemas/nullTime" + }, "request_url": { "description": "RequestURL is the original Logout URL requested.", "type": "string" }, + "requested_at": { + "$ref": "#/components/schemas/nullTime" + }, "rp_initiated": { "description": "RPInitiated is set to true if the request was initiated by a Relying Party (RP), also known as an OAuth 2.0 Client.", "type": "boolean" @@ -973,8 +1196,7 @@ }, "id_token": { "description": "To retrieve a refresh token request the id_token scope.", - "format": "int64", - "type": "integer" + "type": "string" }, "refresh_token": { "description": "The refresh token, which can be used to obtain new\naccess tokens. 
To retrieve it add the scope \"offline\" to your access token request.", @@ -1025,6 +1247,22 @@ }, "type": "array" }, + "credentials_endpoint_draft_00": { + "description": "OpenID Connect Verifiable Credentials Endpoint\n\nContains the URL of the Verifiable Credentials Endpoint.", + "type": "string" + }, + "credentials_supported_draft_00": { + "description": "OpenID Connect Verifiable Credentials Supported\n\nJSON array containing a list of the Verifiable Credentials supported by this authorization server.", + "items": { + "$ref": "#/components/schemas/credentialSupportedDraft00" + }, + "type": "array" + }, + "device_authorization_endpoint": { + "description": "OAuth 2.0 Device Authorization Endpoint URL", + "example": "https://playground.ory.sh/ory-hydra/public/oauth2/device/oauth", + "type": "string" + }, "end_session_endpoint": { "description": "OpenID Connect End-Session Endpoint\n\nURL at the OP to which an RP can perform a redirect to request that the End-User be logged out at the OP.", "type": "string" @@ -1158,6 +1396,7 @@ "required": [ "issuer", "authorization_endpoint", + "device_authorization_endpoint", "token_endpoint", "jwks_uri", "subject_types_supported", @@ -1252,38 +1491,6 @@ }, "type": "object" }, - "pagination": { - "properties": { - "page_size": { - "default": 250, - "description": "Items per page\n\nThis is the number of items per page to return.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", - "format": "int64", - "maximum": 1000, - "minimum": 1, - "type": "integer" - }, - "page_token": { - "default": "1", - "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", - "minimum": 1, - "type": "string" - } - }, - "type": "object" - }, - "paginationHeaders": { - "properties": { - "link": { - "description": "The link header contains pagination links.\n\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).\n\nin: header", - "type": "string" - }, - "x-total-count": { - "description": "The total number of clients.\n\nin: header", - "type": "string" - } - }, - "type": "object" - }, "rejectOAuth2Request": { "properties": { "error": { @@ -1496,6 +1703,75 @@ }, "type": "object" }, + "unexpectedError": { + "type": "string" + }, + "verifiableCredentialPrimingResponse": { + "properties": { + "c_nonce": { + "type": "string" + }, + "c_nonce_expires_in": { + "format": "int64", + "type": "integer" + }, + "error": { + "type": "string" + }, + "error_debug": { + "type": "string" + }, + "error_description": { + "type": "string" + }, + "error_hint": { + "type": "string" + }, + "format": { + "type": "string" + }, + "status_code": { + "format": "int64", + "type": "integer" + } + }, + "title": "VerifiableCredentialPrimingResponse contains the nonce to include in the proof-of-possession JWT.", + "type": "object" + }, + "verifiableCredentialResponse": { + "properties": { + "credential_draft_00": { + "type": "string" + }, + "format": { + "type": "string" + } + }, + "title": "VerifiableCredentialResponse contains the verifiable credential.", + "type": "object" + }, + "verifyUserCodeRequest": { + "properties": { + "client": { + "$ref": "#/components/schemas/oAuth2Client" + }, + "device_code_request_id": { + "type": "string" + }, + "request_url": { + "description": "RequestURL is the original 
Device Authorization URL requested.", + "type": "string" + }, + "requested_access_token_audience": { + "$ref": "#/components/schemas/StringSliceJSONFormat" + }, + "requested_scope": { + "$ref": "#/components/schemas/StringSliceJSONFormat" + } + }, + "title": "HandledDeviceUserAuthRequest is the request payload used to accept a device user_code.", + "type": "object" + }, "version": { "properties": { "version": { @@ -1546,7 +1822,7 @@ "paths": { "/.well-known/jwks.json": { "get": { - "description": "This endpoint returns JSON Web Keys required to verifying OpenID Connect ID Tokens and,\nif enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like\n[node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others.", + "description": "This endpoint returns JSON Web Keys required to verifying OpenID Connect ID Tokens and,\nif enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like\n[node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others.\n\nAdding custom keys requires first creating a keyset via the createJsonWebKeySet operation,\nand then configuring the webfinger.jwks.broadcast_keys configuration value to include the keyset name.", "operationId": "discoverJsonWebKeys", "responses": { "200": { @@ -1620,7 +1896,7 @@ "schema": { "default": 250, "format": "int64", - "maximum": 500, + "maximum": 1000, "minimum": 1, "type": "integer" } @@ -1630,8 +1906,6 @@ "in": "query", "name": "page_token", "schema": { - "default": "1", - "minimum": 1, "type": "string" } }, @@ -2005,7 +2279,7 @@ ] }, "post": { - "description": "This endpoint is capable of generating JSON Web Key Sets for you. There a different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created.\n\nA JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well.", + "description": "This endpoint is capable of generating JSON Web Key Sets for you. There are different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymmetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created.\n\nA JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. 
ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well.", "operationId": "createJsonWebKeySet", "parameters": [ { @@ -2422,6 +2696,58 @@ ] } }, + "/admin/oauth2/auth/requests/device/accept": { + "put": { + "description": "Accepts a device grant user_code request", + "operationId": "acceptUserCodeRequest", + "parameters": [ + { + "in": "query", + "name": "device_challenge", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/acceptDeviceUserCodeRequest" + } + } + }, + "x-originalParamName": "Body" + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/oAuth2RedirectTo" + } + } + }, + "description": "oAuth2RedirectTo" + }, + "default": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/errorOAuth2" + } + } + }, + "description": "errorOAuth2" + } + }, + "summary": "Accepts a device grant user_code request", + "tags": [ + "oAuth2" + ] + } + }, "/admin/oauth2/auth/requests/login": { "get": { "description": "When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider\nto authenticate the subject and then tell the Ory OAuth2 Service about it.\n\nPer default, the login provider is Ory itself. You may use a different login provider which needs to be a web-app\nyou write and host, and it must be able to authenticate (\"show the subject a login screen\")\na subject (in OAuth2 the proper name for subject is \"resource owner\").\n\nThe authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login\nprovider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process.", @@ -2720,7 +3046,6 @@ "description": "OAuth 2.0 Consent Subject\n\nThe subject whose consent sessions should be deleted.", "in": "query", "name": "subject", - "required": true, "schema": { "type": "string" } @@ -2733,6 +3058,14 @@ "type": "string" } }, + { + "description": "Consent Request ID\n\nIf set, revoke all token chains derived from this particular consent request ID.", + "in": "query", + "name": "consent_request_id", + "schema": { + "type": "string" + } + }, { "description": "Revoke All Consent Sessions\n\nIf set to `true` deletes all consent sessions by the Subject that have been granted.", "in": "query", @@ -2836,14 +3169,21 @@ }, "/admin/oauth2/auth/sessions/login": { "delete": { - "description": "This endpoint invalidates a subject's authentication session. After revoking the authentication session, the subject\nhas to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens and\ndoes not work with OpenID Connect Front- or Back-channel logout.", + "description": "This endpoint invalidates authentication sessions. After revoking the authentication session(s), the subject\nhas to re-authenticate at the Ory OAuth2 Provider. 
This endpoint does not invalidate any tokens.\n\nIf you send the subject in a query param, all authentication sessions that belong to that subject are revoked.\nNo OpenID Connect Front- or Back-channel logout is performed in this case.\n\nAlternatively, you can send a SessionID via `sid` query param, in which case, only the session that is connected\nto that SessionID is revoked. OpenID Connect Back-channel logout is performed in this case.\n\nWhen using Ory for the identity provider, the login provider will also invalidate the session cookie.", "operationId": "revokeOAuth2LoginSessions", "parameters": [ { "description": "OAuth 2.0 Subject\n\nThe subject to revoke authentication sessions for.", "in": "query", "name": "subject", - "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Login Session ID\n\nThe login session to revoke.", + "in": "query", + "name": "sid", "schema": { "type": "string" } @@ -2864,7 +3204,7 @@ "description": "errorOAuth2" } }, - "summary": "Revokes All OAuth 2.0 Login Sessions of a Subject", + "summary": "Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID", "tags": [ "oAuth2" ] @@ -2971,19 +3311,23 @@ "operationId": "listTrustedOAuth2JwtGrantIssuers", "parameters": [ { + "description": "Items per Page\n\nThis is the number of items per page to return.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", "in": "query", - "name": "MaxItems", + "name": "page_size", "schema": { + "default": 250, "format": "int64", + "maximum": 1000, + "minimum": 1, "type": "integer" } }, { + "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", "in": "query", - "name": "DefaultItems", + "name": "page_token", "schema": { - "format": "int64", - "type": "integer" + "type": "string" } }, { @@ -3140,6 +3484,58 @@ ] } }, + "/credentials": { + "post": { + "description": "This endpoint creates a verifiable credential that attests that the user\nauthenticated with the provided access token owns a certain public/private key\npair.\n\nMore information can be found at\nhttps://openid.net/specs/openid-connect-userinfo-vc-1_0.html.", + "operationId": "createVerifiableCredential", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateVerifiableCredentialRequestBody" + } + } + }, + "x-originalParamName": "Body" + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/verifiableCredentialResponse" + } + } + }, + "description": "verifiableCredentialResponse" + }, + "400": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/verifiableCredentialPrimingResponse" + } + } + }, + "description": "verifiableCredentialPrimingResponse" + }, + "default": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/errorOAuth2" + } + } + }, + "description": "errorOAuth2" + } + }, + "summary": "Issues a Verifiable Credential", + "tags": [ + "oidc" + ] + } + }, "/health/alive": { "get": { "description": "This endpoint returns a HTTP 200 status code when Ory Hydra is accepting incoming\nHTTP requests. 
This status does currently not include checks whether the database connection is working.\n\nIf the service supports TLS Edge Termination, this endpoint does not require the\n`X-Forwarded-Proto` header to be set.\n\nBe aware that if you are running multiple nodes of this service, the health status will never\nrefer to the cluster state, only to a single instance.", @@ -3221,7 +3617,7 @@ }, "/oauth2/auth": { "get": { - "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. You can find a list of libraries at https://oauth.net/code/\n\nThe Ory SDK is not yet able to this endpoint properly.", + "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. You can find a list of libraries at https://oauth.net/code/\n\nThis endpoint should not be used via the Ory SDK and is only included for technical reasons.\nInstead, use one of the libraries linked above.", "operationId": "oAuth2Authorize", "responses": { "302": { @@ -3244,6 +3640,63 @@ ] } }, + "/oauth2/device/auth": { + "post": { + "description": "This endpoint is not documented here because you should never use your own implementation to perform OAuth2 flows.\nOAuth2 is a very popular protocol and a library for your programming language will exist.\n\nTo learn more about this flow please refer to the specification: https://tools.ietf.org/html/rfc8628", + "operationId": "oAuth2DeviceFlow", + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/deviceAuthorization" + } + } + }, + "description": "deviceAuthorization" + }, + "default": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/errorOAuth2" + } + } + }, + "description": "errorOAuth2" + } + }, + "summary": "The OAuth 2.0 Device Authorize Endpoint", + "tags": [ + "oAuth2" + ] + } + }, + "/oauth2/device/verify": { + "get": { + "description": "This is the device user verification endpoint. The user is redirected here when trying to log in using the device flow.", + "operationId": "performOAuth2DeviceVerificationFlow", + "responses": { + "302": { + "$ref": "#/components/responses/emptyResponse" + }, + "default": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/errorOAuth2" + } + } + }, + "description": "errorOAuth2" + } + }, + "summary": "OAuth 2.0 Device Verification Endpoint", + "tags": [ + "oAuth2" + ] + } + }, "/oauth2/register": { "post": { "description": "This endpoint behaves like the administrative counterpart (`createOAuth2Client`) but is capable of facing the\npublic internet directly and can be used in self-service. It implements the OpenID Connect\nDynamic Client Registration Protocol. This feature needs to be enabled in the configuration. This endpoint\nis disabled by default. It can be enabled by an administrator.\n\nPlease note that using this endpoint you are not able to choose the `client_secret` nor the `client_id` as those\nvalues will be server generated when specifying `token_endpoint_auth_method` as `client_secret_basic` or\n`client_secret_post`.\n\nThe `client_secret` will be returned in the response and you will not be able to retrieve it later on.\nWrite the secret down and keep it somewhere safe.", @@ -3497,7 +3950,7 @@ }, "/oauth2/token": { "post": { - "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. 
You can find a list of libraries here https://oauth.net/code/\n\nThe Ory SDK is not yet able to this endpoint properly.", + "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. You can find a list of libraries here https://oauth.net/code/\n\nThis endpoint should not be used via the Ory SDK and is only included for technical reasons.\nInstead, use one of the libraries linked above.", "operationId": "oauth2TokenExchange", "requestBody": { "content": { diff --git a/spec/config.go b/spec/config.go index 36fda6188dd..6aa4ec0c1bd 100644 --- a/spec/config.go +++ b/spec/config.go @@ -12,8 +12,9 @@ import ( "github.com/pkg/errors" "github.com/tidwall/gjson" + "github.com/ory/x/configx" "github.com/ory/x/logrusx" - "github.com/ory/x/tracing" + "github.com/ory/x/otelx" ) //go:embed config.json @@ -32,12 +33,15 @@ func init() { func AddConfigSchema(compiler interface { AddResource(url string, r io.Reader) error }) error { - if err := tracing.AddConfigSchema(compiler); err != nil { + if err := otelx.AddConfigSchema(compiler); err != nil { return err } if err := logrusx.AddConfigSchema(compiler); err != nil { return err } + if err := configx.AddSchemaResources(compiler); err != nil { + return err + } return errors.WithStack(compiler.AddResource(ConfigSchemaID, bytes.NewReader(ConfigValidationSchema))) } diff --git a/spec/config.json b/spec/config.json index d0a02bbc6cd..c298f099563 100644 --- a/spec/config.json +++ b/spec/config.json @@ -1,5 +1,5 @@ { - "$id": "https://github.com/ory/hydra/docs/config.schema.json", + "$id": "https://github.com/ory/hydra/spec/config.json", "$schema": "http://json-schema.org/draft-07/schema#", "title": "Ory Hydra Configuration", "type": "object", @@ -18,196 +18,6 @@ "TRACE" ] }, - "portNumber": { - "description": "The port to listen on.", - "minimum": 1, - "maximum": 65535 - }, - "socket": { - "type": "object", - "additionalProperties": false, - "description": "Sets the permissions of the unix socket", - "properties": { - "owner": { - "type": "string", - "description": "Owner of unix socket. If empty, the owner will be the user running hydra.", - "default": "" - }, - "group": { - "type": "string", - "description": "Group of unix socket. If empty, the group will be the primary group of the user running hydra.", - "default": "" - }, - "mode": { - "type": "integer", - "description": "Mode of unix socket in numeric form", - "default": 493, - "minimum": 0, - "maximum": 511 - } - } - }, - "cors": { - "type": "object", - "additionalProperties": false, - "description": "Configures Cross Origin Resource Sharing for public endpoints.", - "properties": { - "enabled": { - "type": "boolean", - "description": "Sets whether CORS is enabled.", - "default": false - }, - "allowed_origins": { - "type": "array", - "description": "A list of origins a cross-domain request can be executed from. If the special * value is present in the list, all origins will be allowed. An origin may contain a wildcard (*) to replace 0 or more characters (i.e.: http://*.domain.com). 
Only one wildcard can be used per origin.", - "items": { - "type": "string", - "minLength": 1, - "not": { - "type": "string", - "description": "does match all strings that contain two or more (*)", - "pattern": ".*\\*.*\\*.*" - }, - "anyOf": [ - { - "format": "uri" - }, - { - "const": "*" - } - ] - }, - "uniqueItems": true, - "default": [], - "examples": [ - [ - "*", - "https://example.com", - "https://*.example.com", - "https://*.foo.example.com" - ] - ] - }, - "allowed_methods": { - "type": "array", - "description": "A list of HTTP methods the user agent is allowed to use with cross-domain requests.", - "default": [ - "POST", - "GET", - "PUT", - "PATCH", - "DELETE", - "CONNECT", - "HEAD", - "OPTIONS", - "TRACE" - ], - "items": { - "type": "string", - "enum": [ - "POST", - "GET", - "PUT", - "PATCH", - "DELETE", - "CONNECT", - "HEAD", - "OPTIONS", - "TRACE" - ] - } - }, - "allowed_headers": { - "type": "array", - "description": "A list of non simple headers the client is allowed to use with cross-domain requests.", - "default": [ - "Accept", - "Content-Type", - "Content-Length", - "Accept-Language", - "Content-Language", - "Authorization" - ], - "items": { - "type": "string" - } - }, - "exposed_headers": { - "type": "array", - "description": "Sets which headers are safe to expose to the API of a CORS API specification.", - "default": [ - "Cache-Control", - "Expires", - "Last-Modified", - "Pragma", - "Content-Length", - "Content-Language", - "Content-Type" - ], - "items": { - "type": "string" - } - }, - "allow_credentials": { - "type": "boolean", - "description": "Sets whether the request can include user credentials like cookies, HTTP authentication or client side SSL certificates.", - "default": true - }, - "max_age": { - "type": "integer", - "description": "Sets how long (in seconds) the results of a preflight request can be cached. 
If set to 0, every request is preceded by a preflight request.", - "default": 0, - "minimum": 0 - }, - "debug": { - "type": "boolean", - "description": "Adds additional log output to debug server side CORS issues.", - "default": false - } - } - }, - "cidr": { - "description": "CIDR address range.", - "type": "string", - "oneOf": [ - { - "pattern": "^(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))/([0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8])$" - }, - { - "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$" - } - ], - "examples": ["127.0.0.1/32"] - }, - "pem_file": { - "type": "object", - "oneOf": [ - { - "properties": { - "path": { - "type": "string", - "description": "The path to the pem file.", - "examples": ["/path/to/file.pem"] - } - }, - "additionalProperties": false, - "required": ["path"] - }, - { - "properties": { - "base64": { - "type": "string", - "description": "The base64 encoded string (without padding).", - "contentEncoding": "base64", - "contentMediaType": "application/x-pem-file", - "examples": ["b3J5IGh5ZHJhIGlzIGF3ZXNvbWUK"] - } - }, - "additionalProperties": false, - "required": ["base64"] - } - ] - }, "duration": { "type": "string", "pattern": "^(\\d+(ns|us|ms|s|m|h))+$", @@ -216,35 +26,45 @@ "1h5m1s" ] }, - "tls_config": { + "webhook_config": { "type": "object", - "description": "Configures HTTPS (HTTP over TLS). If configured, the server automatically supports HTTP/2.", + "additionalProperties": false, + "description": "Configures a webhook.", + "required": ["url"], "properties": { - "enabled": { - "type": "boolean", - "description": "Setting enabled to false drops the TLS requirement for the admin endpoint, even if TLS is enabled on the public endpoint." - }, - "key": { - "description": "Configures the private key (pem encoded).", - "allOf": [ - { - "$ref": "#/definitions/pem_file" - } - ] + "url": { + "type": "string", + "format": "uri", + "description": "The URL to send the webhook to." }, - "cert": { - "description": "Configures the public certificate (pem encoded).", - "allOf": [ - { - "$ref": "#/definitions/pem_file" + "auth": { + "type": "object", + "additionalProperties": false, + "required": ["type", "config"], + "properties": { + "type": { + "type": "string", + "const": "api_key" + }, + "config": { + "type": "object", + "additionalProperties": false, + "required": ["name", "value"], + "properties": { + "in": { + "type": "string", + "enum": ["header", "cookie"] + }, + "name": { + "description": "The header or cookie name.", + "type": "string" + }, + "value": { + "description": "The header or cookie value.", + "type": "string" + } + } } - ] - }, - "allow_termination_from": { - "type": "array", - "description": "Whitelist one or multiple CIDR address ranges and allow them to terminate TLS connections. Be aware that the X-Forwarded-Proto header must be set and must never be modifiable by anyone but your proxy / gateway / load balancer. 
Supports ipv4 and ipv6. Hydra serves http instead of https when this option is set.", - "items": { - "$ref": "#/definitions/cidr" } } } @@ -298,96 +118,35 @@ "description": "Controls the configuration for the http(s) daemon(s).", "properties": { "public": { - "type": "object", - "additionalProperties": false, - "description": "Controls the public daemon serving public API endpoints like /oauth2/auth, /oauth2/token, /.well-known/jwks.json", - "properties": { - "port": { - "default": 4444, - "type": "integer", - "allOf": [ - { - "$ref": "#/definitions/portNumber" - } - ] - }, - "host": { - "type": "string", - "description": "The interface or unix socket Ory Hydra should listen and handle public API requests on. Use the prefix `unix:` to specify a path to a unix socket. Leave empty to listen on all interfaces.", - "default": "", - "examples": ["localhost"] - }, - "cors": { - "$ref": "#/definitions/cors" - }, - "socket": { - "$ref": "#/definitions/socket" + "allOf": [ + { + "$ref": "ory://serve-config" }, - "request_log": { - "type": "object", - "additionalProperties": false, - "description": "Access Log configuration for public server.", + { "properties": { - "disable_for_health": { - "type": "boolean", - "description": "Disable access log for health endpoints.", - "default": false + "cors": { + "$ref": "ory://cors-config" } } - }, - "tls": { - "$ref": "#/definitions/tls_config" } - } + ] }, "admin": { - "type": "object", - "additionalProperties": false, - "properties": { - "port": { - "default": 4445, - "type": "integer", - "allOf": [ - { - "$ref": "#/definitions/portNumber" - } - ] - }, - "host": { - "type": "string", - "description": "The interface or unix socket Ory Hydra should listen and handle administrative API requests on. Use the prefix `unix:` to specify a path to a unix socket. Leave empty to listen on all interfaces.", - "default": "", - "examples": ["localhost"] - }, - "cors": { - "$ref": "#/definitions/cors" - }, - "socket": { - "$ref": "#/definitions/socket" + "allOf": [ + { + "$ref": "ory://serve-config" }, - "request_log": { - "type": "object", - "additionalProperties": false, - "description": "Access Log configuration for admin server.", + { "properties": { - "disable_for_health": { - "type": "boolean", - "description": "Disable access log for health endpoints.", - "default": false + "cors": { + "$ref": "ory://cors-config" } } - }, - "tls": { - "allOf": [ - { - "$ref": "#/definitions/tls_config" - } - ] } - } + ] }, "tls": { - "$ref": "#/definitions/tls_config" + "$ref": "ory://tls-config" }, "cookies": { "type": "object", @@ -423,6 +182,11 @@ "description": "Sets the session cookie name. Use with care!", "type": "object", "properties": { + "device_csrf": { + "type": "string", + "title": "CSRF Cookie Name", + "default": "ory_hydra_device_csrf" + }, "login_csrf": { "type": "string", "title": "CSRF Cookie Name", @@ -439,6 +203,18 @@ "default": "ory_hydra_session" } } + }, + "paths": { + "title": "Cookie Paths", + "description": "Sets the path for which session cookie is scoped. Use with care!", + "type": "object", + "properties": { + "session": { + "type": "string", + "title": "Session Cookie Path", + "default": "/" + } + } } } } @@ -465,6 +241,16 @@ "description": "Disallow all outgoing HTTP calls to private IP ranges. 
This feature can help protect against SSRF attacks.", "type": "boolean", "default": false + }, + "private_ip_exception_urls": { + "title": "Add exempt URLs to private IP ranges", + "description": "Allows the given URLs to be called despite them being in the private IP range. URLs need to have an exact and case-sensitive match to be excempt.", + "type": "array", + "items": { + "type": "string", + "format": "uri-reference" + }, + "default": [] } } } @@ -551,6 +337,14 @@ "https://my-service.com/oauth2/auth" ] }, + "device_authorization_url": { + "type": "string", + "description": "Overwrites the OAuth2 Device Auth URL", + "format": "uri-reference", + "examples": [ + "https://my-service.com/oauth2/device/auth" + ] + }, "client_registration_url": { "description": "Sets the OpenID Connect Dynamic Client Registration Endpoint", "type": "string", @@ -624,6 +418,7 @@ "properties": { "supported_types": { "contains": { + "type": "string", "const": "pairwise" } } @@ -713,6 +508,15 @@ "/ui/login" ] }, + "registration": { + "type": "string", + "description": "Sets the OAuth2 Registration Endpoint URL of the OAuth2 User Login & Consent flow. Defaults to the same value as `login`. The registration URL is used if the authorization request was started with the `prompt=registration` parameter.", + "format": "uri-reference", + "examples": [ + "https://my-login.app/registration", + "/ui/registration" + ] + }, "consent": { "type": "string", "description": "Sets the consent endpoint of the User Login & Consent flow. Defaults to an internal fallback URL showing an error.", @@ -731,6 +535,30 @@ "/ui/logout" ] }, + "device": { + "type": "object", + "description": "Configure URLs for the OAuth 2.0 Device Code Flow.", + "properties": { + "verification": { + "type": "string", + "description": "Sets the device user code verification endpoint. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-logout.app/device_verification", + "/ui/device_verification" + ] + }, + "success": { + "type": "string", + "description": "Sets the post device authentication endpoint. Defaults to an internal fallback URL showing an error.", + "format": "uri-reference", + "examples": [ + "https://my-logout.app/device_done", + "/ui/device_done" + ] + } + } + }, "error": { "type": "string", "description": "Sets the error endpoint. The error ui will be shown when an OAuth2 error occurs that which can not be sent back to the client. 
Defaults to an internal fallback URL showing an error.", @@ -748,6 +576,42 @@ "https://my-example.app/logout-successful", "/ui" ] + }, + "identity_provider": { + "type": "object", + "additionalProperties": false, + "properties": { + "url": { + "title": "The admin URL of the ORY Kratos instance.", + "description": "If set, ORY Hydra will use this URL to log out the user in addition to removing the Hydra session.", + "type": "string", + "format": "uri", + "examples": [ + "https://kratos.example.com/admin" + ] + }, + "publicUrl": { + "title": "The public URL of the ORY Kratos instance.", + "type": "string", + "format": "uri", + "examples": [ + "https://kratos.example.com/public" + ] + }, + "headers": { + "title": "HTTP Request Headers", + "description": "These headers will be passed in HTTP requests to the Identity Provider.", + "type": "object", + "additionalProperties": { + "type": "string" + }, + "examples": [ + { + "Authorization": "Bearer some-token" + } + ] + } + } } } }, @@ -766,9 +630,21 @@ }, "access_token": { "type": "string", - "description": "Defines access token type. jwt is a bad idea, see https://www.ory.sh/docs/hydra/advanced#json-web-tokens", + "description": "Defines access token type. jwt is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token", "enum": ["opaque", "jwt"], "default": "opaque" + }, + "jwt": { + "type": "object", + "additionalProperties": false, + "properties": { + "scope_claim": { + "type": "string", + "description": "Defines how the scope claim is represented within a JWT access token", + "enum": ["list", "string", "both"], + "default": "list" + } + } } } }, @@ -780,6 +656,17 @@ "login_consent_request": { "description": "Configures how long a user login and consent flow may take.", "default": "30m", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "authentication_session": { + "description": "Configures how long the authentication session cookie will be valid after login has been remembered. The larger this value is, the more database storage is needed. Defaults to 30 days.", + "default": "720h", + "type": "string", "allOf": [ { "$ref": "#/definitions/duration" @@ -787,8 +674,9 @@ ] }, "access_token": { - "description": "Configures how long access tokens are valid.", + "description": "Configures how long access tokens are valid. The larger this value is, the more database storage is needed.", "default": "1h", + "type": "string", "allOf": [ { "$ref": "#/definitions/duration" @@ -796,7 +684,7 @@ ] }, "refresh_token": { - "description": "Configures how long refresh tokens are valid. Set to -1 for refresh tokens to never expire.", + "description": "Configures how long refresh tokens are valid. The larger this value is, the more database storage is needed. Set to -1 for refresh tokens to never expire, which is not recommended as the database can not be cleaned from stale tokens.", "default": "720h", "oneOf": [ { @@ -813,6 +701,7 @@ "id_token": { "description": "Configures how long id tokens are valid.", "default": "1h", + "type": "string", "allOf": [ { "$ref": "#/definitions/duration" @@ -820,7 +709,17 @@ ] }, "auth_code": { - "description": "Configures how long auth codes are valid.", + "description": "Configures how long auth codes are valid. The larger this value is, the more database storage is needed.", + "default": "10m", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "device_user_code": { + "description": "Configures how long device & user codes are valid. 
The larger this value is, the more database storage is needed.", "default": "10m", "allOf": [ { @@ -865,6 +764,12 @@ }, "examples": [["username", "email", "user_uuid"]] }, + "mirror_top_level_claims": { + "type": "boolean", + "description": "Set to false if you don't want to mirror custom claims under 'ext'", + "default": true, + "examples": [false] + }, "hashers": { "type": "object", "additionalProperties": false, @@ -942,6 +847,29 @@ "type": "object", "additionalProperties": false, "properties": { + "refresh_token": { + "type": "object", + "properties": { + "rotation_grace_period": { + "title": "Refresh Token Rotation Grace Period", + "description": "Configures how long a Refresh Token remains valid after it has been used. The maximum value is 5 minutes, unless also a reuse count is configured, in which case the maximum is 180 days.", + "default": "0s", + "type": "string", + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ] + }, + "rotation_grace_reuse_count": { + "title": "Refresh Token Rotation Grace Period Reuse Count", + "description": "Configures how many times a Refresh Token can be reused during the grace period. This is only effective if combined with a rotation grace period.", + "default": 0, + "type": "integer", + "minimum": 0 + } + } + }, "jwt": { "type": "object", "additionalProperties": false, @@ -960,6 +888,7 @@ "max_ttl": { "description": "Configures what the maximum age of a JWT assertion used in the JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants (RFC7523) can be. This feature uses the `exp` claim and `iat` claim to calculate assertion age. Assertions exceeding the max age will be denied. Useful as a safety measure and recommended to keep below 720h. This governs the `grant.jwt.max_ttl` setting.", "default": "720h", + "type": "string", "allOf": [ { "$ref": "#/definitions/duration" @@ -971,13 +900,83 @@ } }, "refresh_token_hook": { - "type": "string", "description": "Sets the refresh token hook endpoint. If set it will be called during token refresh to receive updated token claims.", - "format": "uri", - "examples": ["https://my-example.app/token-refresh-hook"] + "examples": ["https://my-example.app/token-refresh-hook"], + "oneOf": [ + { + "type": "string", + "format": "uri" + }, + { + "$ref": "#/definitions/webhook_config" + } + ] + }, + "device_authorization": { + "type": "object", + "additionalProperties": false, + "properties": { + "token_polling_interval": { + "allOf": [ + { + "$ref": "#/definitions/duration" + } + ], + "default": "5s", + "description": "Configures how often a non-interactive device should poll the device token endpoint, this is a purely informational configuration and does not enforce rate-limiting.", + "examples": ["5s", "15s", "1m"] + }, + "user_code": { + "type": "object", + "description": "Configures the user code settings.", + "oneOf": [ + { + "properties": { + "entropy_preset": { + "type": "string", + "description": "Presets for the user-code length and character set.", + "enum": ["high", "medium", "low"] + } + }, + "required": ["entropy_preset"], + "additionalProperties": false + }, + { + "properties": { + "length": { + "type": "integer", + "description": "The length of the user code.", + "minimum": 6 + }, + "character_set": { + "type": "string", + "description": "The character set to use for the user code. 
Provide the raw characters that should be used.", + "examples": ["ABCDEFGHJKLMNPQRSTUVWXYZ23456789"], + "minLength": 8 + } + }, + "required": ["length", "character_set"], + "additionalProperties": false + } + ] + } + } + }, + "token_hook": { + "description": "Sets the token hook endpoint for all grant types. If set it will be called while providing token to customize claims.", + "examples": ["https://my-example.app/token-hook"], + "oneOf": [ + { + "type": "string", + "format": "uri" + }, + { + "$ref": "#/definitions/webhook_config" + } + ] } - } - }, + } + }, "secrets": { "type": "object", "additionalProperties": false, @@ -1000,7 +999,22 @@ }, "cookie": { "type": "array", - "description": "A secret that is used to encrypt cookie sessions. Defaults to secrets.system. It is recommended to use a separate secret in production. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "description": "Secrets that are used for cookie session encryption. Defaults to secrets.system. It is recommended to use a separate secret in production. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", + "items": { + "type": "string", + "minLength": 16 + }, + "examples": [ + [ + "this-is-the-primary-secret", + "this-is-an-old-secret", + "this-is-another-old-secret" + ] + ] + }, + "pagination": { + "type": "array", + "description": "Secrets that are used for pagination token encryption. Defaults to secrets.system. It is recommended to use a separate secret in production. The first item in the list is used for signing and encryption. The whole list is used for verifying signatures and decryption.", "items": { "type": "string", "minLength": 16 @@ -1022,154 +1036,7 @@ "examples": ["cpu"] }, "tracing": { - "type": "object", - "additionalProperties": false, - "description": "Ory Hydra supports distributed tracing.", - "properties": { - "provider": { - "type": "string", - "description": "Set this to the tracing backend you wish to use. Supports Jaeger, Zipkin DataDog, Elastic APM and Instana. If omitted or empty, tracing will be disabled. 
Use environment variables to configure DataDog (see https://docs.datadoghq.com/tracing/setup/go/#configuration).", - "enum": [ - "jaeger", - "zipkin", - "datadog", - "elastic-apm", - "instana", - "otel" - ], - "examples": ["jaeger"] - }, - "service_name": { - "type": "string", - "description": "Specifies the service name to use on the tracer.", - "examples": ["Ory Hydra"] - }, - "providers": { - "type": "object", - "additionalProperties": false, - "properties": { - "jaeger": { - "type": "object", - "additionalProperties": false, - "description": "Configures the jaeger tracing backend.", - "properties": { - "local_agent_address": { - "type": "string", - "description": "The address of the jaeger-agent where spans should be sent to.", - "oneOf": [ - { - "pattern": "^\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))]:([0-9]*)$" - }, - { - "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}:([0-9]*)$" - }, - { - "format": "uri" - } - ], - "examples": ["127.0.0.1:6831"] - }, - "propagation": { - "type": "string", - "description": "The tracing header format", - "examples": ["jaeger"] - }, - "max_tag_value_length": { - "type": "integer", - "description": "The value passed to the max tag value length that has been configured.", - "minimum": 0 - }, - "sampling": { - "type": "object", - "propertyNames": { - "enum": ["type", "value", "server_url"] - }, - "allOf": [ - { - "oneOf": [ - { - "properties": { - "type": { - "description": "The type of the sampler you want to use.", - "const": "const" - }, - "value": { - "type": "integer", - "description": "The value passed to the sampler type that has been configured.", - "minimum": 0, - "maximum": 1 - } - } - }, - { - "properties": { - "type": { - "description": "The type of the sampler you want to use.", - "const": "rateLimiting" - }, - "value": { - "type": "integer", - "description": "The value passed to the sampler type that has been configured.", - "minimum": 0 - } - } - }, - { - "properties": { - "type": { - "description": "The type of the sampler you want to use.", - "const": "probabilistic" - }, - "value": { - "type": "number", - "description": "The value passed to the sampler type that has been configured.", - "minimum": 0, - "maximum": 1 - } - } - } - ] - }, - { - "properties": { - "server_url": { - "type": "string", - "description": "The address of jaeger-agent's HTTP sampling server", - "format": "uri" - } - } - } - ], - "examples": [ - { - "type": "const", - "value": 1, - "server_url": "http://localhost:5778/sampling" - } - ] - } - } - }, - "zipkin": { - "type": "object", - "additionalProperties": false, - "description": "Configures the zipkin tracing backend.", - "properties": { - "server_url": { - "type": "string", - "description": "The address of Zipkin server where spans should be sent to.", - "format": "uri" - } - }, - "examples": [ - { - "server_url": "http://localhost:9411/api/v2/spans" - } - ] - } - } - } - } + "$ref": "ory://tracing-config" }, 
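The schema hunks above introduce several new configuration keys, among them ttl.device_user_code, secrets.pagination, a refresh-token rotation_grace_period, and the device_authorization user-code settings. Below is a minimal sketch of starting Hydra locally with a couple of these values supplied through environment variables; SECRETS_SYSTEM and DSN are documented settings, while TTL_DEVICE_USER_CODE and STRATEGIES_ACCESS_TOKEN assume the usual dot-to-underscore mapping of configuration keys, and all values are illustrative rather than part of this diff.

#!/bin/sh
# Illustrative sketch only: exercises a few of the configuration keys described
# in the schema above via environment variables instead of a config file.
export DSN=memory                          # throw-away in-memory storage
export SECRETS_SYSTEM=this-is-the-primary-secret
export STRATEGIES_ACCESS_TOKEN=opaque      # strategies.access_token
export TTL_DEVICE_USER_CODE=10m            # ttl.device_user_code (assumed env mapping)
hydra serve all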
"sqa": { "type": "object", @@ -1220,6 +1087,12 @@ "title": "Enable development mode", "description": "If true, disables critical security measures to allow easier local development. Do not use in production.", "default": false + }, + "feature_flags": { + "title": "Feature flags", + "type": "object", + "additionalProperties": true, + "properties": {} } }, "additionalProperties": false diff --git a/spec/swagger.json b/spec/swagger.json index a1921533f02..f7e50979509 100755 --- a/spec/swagger.json +++ b/spec/swagger.json @@ -20,7 +20,7 @@ "paths": { "/.well-known/jwks.json": { "get": { - "description": "This endpoint returns JSON Web Keys required to verifying OpenID Connect ID Tokens and,\nif enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like\n[node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others.", + "description": "This endpoint returns JSON Web Keys required to verifying OpenID Connect ID Tokens and,\nif enabled, OAuth 2.0 JWT Access Tokens. This endpoint can be used with client libraries like\n[node-jwks-rsa](https://github.com/auth0/node-jwks-rsa) among others.\n\nAdding custom keys requires first creating a keyset via the createJsonWebKeySet operation,\nand then configuring the webfinger.jwks.broadcast_keys configuration value to include the keyset name.", "consumes": [ "application/json" ], @@ -103,7 +103,7 @@ "operationId": "listOAuth2Clients", "parameters": [ { - "maximum": 500, + "maximum": 1000, "minimum": 1, "type": "integer", "format": "int64", @@ -113,9 +113,7 @@ "in": "query" }, { - "minimum": 1, "type": "string", - "default": "1", "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", "name": "page_token", "in": "query" @@ -505,7 +503,7 @@ } }, "post": { - "description": "This endpoint is capable of generating JSON Web Key Sets for you. There a different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created.\n\nA JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well.", + "description": "This endpoint is capable of generating JSON Web Key Sets for you. There are different strategies available, such as symmetric cryptographic keys (HS256, HS512) and asymmetric cryptographic keys (RS256, ECDSA). If the specified JSON Web Key Set does not exist, it will be created.\n\nA JSON Web Key (JWK) is a JavaScript Object Notation (JSON) data structure that represents a cryptographic key. A JWK Set is a JSON data structure that represents a set of JWKs. A JSON Web Key is identified by its set and key id. 
ORY Hydra uses this functionality to store cryptographic keys used for TLS and JSON Web Tokens (such as OpenID Connect ID tokens), and allows storing user-defined keys as well.", "consumes": [ "application/json" ], @@ -891,6 +889,55 @@ } } }, + "/admin/oauth2/auth/requests/device/accept": { + "put": { + "description": "Accepts a device grant user_code request", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "http", + "https" + ], + "tags": [ + "oAuth2" + ], + "summary": "Accepts a device grant user_code request", + "operationId": "acceptUserCodeRequest", + "parameters": [ + { + "type": "string", + "name": "device_challenge", + "in": "query", + "required": true + }, + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/acceptDeviceUserCodeRequest" + } + } + ], + "responses": { + "200": { + "description": "oAuth2RedirectTo", + "schema": { + "$ref": "#/definitions/oAuth2RedirectTo" + } + }, + "default": { + "description": "errorOAuth2", + "schema": { + "$ref": "#/definitions/errorOAuth2" + } + } + } + } + }, "/admin/oauth2/auth/requests/login": { "get": { "description": "When an authorization code, hybrid, or implicit OAuth 2.0 Flow is initiated, Ory asks the login provider\nto authenticate the subject and then tell the Ory OAuth2 Service about it.\n\nPer default, the login provider is Ory itself. You may use a different login provider which needs to be a web-app\nyou write and host, and it must be able to authenticate (\"show the subject a login screen\")\na subject (in OAuth2 the proper name for subject is \"resource owner\").\n\nThe authentication challenge is appended to the login provider URL to which the subject's user-agent (browser) is redirected to. The login\nprovider uses that challenge to fetch information on the OAuth2 request and then accept or reject the requested authentication process.", @@ -1249,8 +1296,7 @@ "type": "string", "description": "OAuth 2.0 Consent Subject\n\nThe subject whose consent sessions should be deleted.", "name": "subject", - "in": "query", - "required": true + "in": "query" }, { "type": "string", @@ -1258,6 +1304,12 @@ "name": "client", "in": "query" }, + { + "type": "string", + "description": "Consent Request ID\n\nIf set, revoke all token chains derived from this particular consent request ID.", + "name": "consent_request_id", + "in": "query" + }, { "type": "boolean", "description": "Revoke All Consent Sessions\n\nIf set to `true` deletes all consent sessions by the Subject that have been granted.", @@ -1280,7 +1332,7 @@ }, "/admin/oauth2/auth/sessions/login": { "delete": { - "description": "This endpoint invalidates a subject's authentication session. After revoking the authentication session, the subject\nhas to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens and\ndoes not work with OpenID Connect Front- or Back-channel logout.", + "description": "This endpoint invalidates authentication sessions. After revoking the authentication session(s), the subject\nhas to re-authenticate at the Ory OAuth2 Provider. This endpoint does not invalidate any tokens.\n\nIf you send the subject in a query param, all authentication sessions that belong to that subject are revoked.\nNo OpenID Connect Front- or Back-channel logout is performed in this case.\n\nAlternatively, you can send a SessionID via `sid` query param, in which case, only the session that is connected\nto that SessionID is revoked. 
OpenID Connect Back-channel logout is performed in this case.\n\nWhen using Ory for the identity provider, the login provider will also invalidate the session cookie.", "consumes": [ "application/json" ], @@ -1294,15 +1346,20 @@ "tags": [ "oAuth2" ], - "summary": "Revokes All OAuth 2.0 Login Sessions of a Subject", + "summary": "Revokes OAuth 2.0 Login Sessions by either a Subject or a SessionID", "operationId": "revokeOAuth2LoginSessions", "parameters": [ { "type": "string", "description": "OAuth 2.0 Subject\n\nThe subject to revoke authentication sessions for.", "name": "subject", - "in": "query", - "required": true + "in": "query" + }, + { + "type": "string", + "description": "Login Session ID\n\nThe login session to revoke.", + "name": "sid", + "in": "query" } ], "responses": { @@ -1424,15 +1481,19 @@ "operationId": "listTrustedOAuth2JwtGrantIssuers", "parameters": [ { + "maximum": 1000, + "minimum": 1, "type": "integer", "format": "int64", - "name": "MaxItems", + "default": 250, + "description": "Items per Page\n\nThis is the number of items per page to return.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "name": "page_size", "in": "query" }, { - "type": "integer", - "format": "int64", - "name": "DefaultItems", + "type": "string", + "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "name": "page_token", "in": "query" }, { @@ -1580,6 +1641,52 @@ } } }, + "/credentials": { + "post": { + "description": "This endpoint creates a verifiable credential that attests that the user\nauthenticated with the provided access token owns a certain public/private key\npair.\n\nMore information can be found at\nhttps://openid.net/specs/openid-connect-userinfo-vc-1_0.html.", + "consumes": [ + "application/json" + ], + "schemes": [ + "http", + "https" + ], + "tags": [ + "oidc" + ], + "summary": "Issues a Verifiable Credential", + "operationId": "createVerifiableCredential", + "parameters": [ + { + "name": "Body", + "in": "body", + "schema": { + "$ref": "#/definitions/CreateVerifiableCredentialRequestBody" + } + } + ], + "responses": { + "200": { + "description": "verifiableCredentialResponse", + "schema": { + "$ref": "#/definitions/verifiableCredentialResponse" + } + }, + "400": { + "description": "verifiableCredentialPrimingResponse", + "schema": { + "$ref": "#/definitions/verifiableCredentialPrimingResponse" + } + }, + "default": { + "description": "errorOAuth2", + "schema": { + "$ref": "#/definitions/errorOAuth2" + } + } + } + } + }, "/health/alive": { "get": { "description": "This endpoint returns a 200 status code when the HTTP server is up running.\nThis status does currently not include checks whether the database connection is working.\n\nIf the service supports TLS Edge Termination, this endpoint does not require the\n`X-Forwarded-Proto` header to be set.\n\nBe aware that if you are running multiple nodes of this service, the health status will never\nrefer to the cluster state, only to a single instance.", @@ -1636,7 +1743,7 @@ }, "/oauth2/auth": { "get": { - "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. 
You can find a list of libraries at https://oauth.net/code/\n\nThe Ory SDK is not yet able to this endpoint properly.", + "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. You can find a list of libraries at https://oauth.net/code/\n\nThis endpoint should not be used via the Ory SDK and is only included for technical reasons.\nInstead, use one of the libraries linked above.", "consumes": [ "application/x-www-form-urlencoded" ], @@ -1662,6 +1769,65 @@ } } }, + "/oauth2/device/auth": { + "post": { + "description": "This endpoint is not documented here because you should never use your own implementation to perform OAuth2 flows.\nOAuth2 is a very popular protocol and a library for your programming language will exist.\n\nTo learn more about this flow please refer to the specification: https://tools.ietf.org/html/rfc8628", + "consumes": [ + "application/x-www-form-urlencoded" + ], + "schemes": [ + "http", + "https" + ], + "tags": [ + "oAuth2" + ], + "summary": "The OAuth 2.0 Device Authorize Endpoint", + "operationId": "oAuth2DeviceFlow", + "responses": { + "200": { + "description": "deviceAuthorization", + "schema": { + "$ref": "#/definitions/deviceAuthorization" + } + }, + "default": { + "description": "errorOAuth2", + "schema": { + "$ref": "#/definitions/errorOAuth2" + } + } + } + } + }, + "/oauth2/device/verify": { + "get": { + "description": "This is the device user verification endpoint. The user is redirected here when trying to log in using the device flow.", + "consumes": [ + "application/x-www-form-urlencoded" + ], + "schemes": [ + "http", + "https" + ], + "tags": [ + "oAuth2" + ], + "summary": "OAuth 2.0 Device Verification Endpoint", + "operationId": "performOAuth2DeviceVerificationFlow", + "responses": { + "302": { + "$ref": "#/responses/emptyResponse" + }, + "default": { + "description": "errorOAuth2", + "schema": { + "$ref": "#/definitions/errorOAuth2" + } + } + } + } + }, "/oauth2/register": { "post": { "description": "This endpoint behaves like the administrative counterpart (`createOAuth2Client`) but is capable of facing the\npublic internet directly and can be used in self-service. It implements the OpenID Connect\nDynamic Client Registration Protocol. This feature needs to be enabled in the configuration. This endpoint\nis disabled by default. It can be enabled by an administrator.\n\nPlease note that using this endpoint you are not able to choose the `client_secret` nor the `client_id` as those\nvalues will be server generated when specifying `token_endpoint_auth_method` as `client_secret_basic` or\n`client_secret_post`.\n\nThe `client_secret` will be returned in the response and you will not be able to retrieve it later on.\nWrite the secret down and keep it somewhere safe.", @@ -1930,7 +2096,7 @@ "oauth2": [] } ], - "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. You can find a list of libraries here https://oauth.net/code/\n\nThe Ory SDK is not yet able to this endpoint properly.", + "description": "Use open source libraries to perform OAuth 2.0 and OpenID Connect\navailable for any programming language. 
You can find a list of libraries here https://oauth.net/code/\n\nThis endpoint should not be used via the Ory SDK and is only included for technical reasons.\nInstead, use one of the libraries linked above.", "consumes": [ "application/x-www-form-urlencoded" ], @@ -2049,6 +2215,54 @@ } }, "definitions": { + "CreateVerifiableCredentialRequestBody": { + "type": "object", + "title": "CreateVerifiableCredentialRequestBody contains the request body to request a verifiable credential.", + "properties": { + "format": { + "type": "string" + }, + "proof": { + "$ref": "#/definitions/VerifiableCredentialProof" + }, + "types": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "DefaultError": {}, + "DeviceUserAuthRequest": { + "type": "object", + "title": "Contains information on an ongoing device grant request.", + "required": [ + "challenge" + ], + "properties": { + "challenge": { + "description": "ID is the identifier (\"device challenge\") of the device grant request. It is used to\nidentify the session.", + "type": "string" + }, + "client": { + "$ref": "#/definitions/oAuth2Client" + }, + "handled_at": { + "$ref": "#/definitions/nullTime" + }, + "request_url": { + "description": "RequestURL is the original Device Authorization URL requested.", + "type": "string" + }, + "requested_access_token_audience": { + "$ref": "#/definitions/StringSliceJSONFormat" + }, + "requested_scope": { + "$ref": "#/definitions/StringSliceJSONFormat" + } + } + }, "JSONRawMessage": { "type": "object", "title": "JSONRawMessage represents a json.RawMessage that works well with JSON, SQL, and Swagger." @@ -2058,6 +2272,28 @@ "type": "string", "title": "NullDuration represents a nullable JSON and SQL compatible time.Duration." }, + "RFC6749ErrorJson": { + "type": "object", + "title": "RFC6749ErrorJson is a helper struct for JSON encoding/decoding of RFC6749Error.", + "properties": { + "error": { + "type": "string" + }, + "error_debug": { + "type": "string" + }, + "error_description": { + "type": "string" + }, + "error_hint": { + "type": "string" + }, + "status_code": { + "type": "integer", + "format": "int64" + } + } + }, "StringSliceJSONFormat": { "type": "array", "title": "StringSliceJSONFormat represents []string{} which is encoded to/from JSON for SQL storage.", @@ -2065,19 +2301,40 @@ "type": "string" } }, + "VerifiableCredentialProof": { + "type": "object", + "title": "VerifiableCredentialProof contains the proof of a verifiable credential.", + "properties": { + "jwt": { + "type": "string" + }, + "proof_type": { + "type": "string" + } + } + }, + "acceptDeviceUserCodeRequest": { + "description": "Contains information on an device verification", + "type": "object", + "properties": { + "user_code": { + "type": "string" + } + } + }, "acceptOAuth2ConsentRequest": { "type": "object", "title": "The request payload used to accept a consent request.", "properties": { + "context": { + "$ref": "#/definitions/JSONRawMessage" + }, "grant_access_token_audience": { "$ref": "#/definitions/StringSliceJSONFormat" }, "grant_scope": { "$ref": "#/definitions/StringSliceJSONFormat" }, - "handled_at": { - "$ref": "#/definitions/nullTime" - }, "remember": { "description": "Remember, if set to true, tells ORY Hydra to remember this consent authorization and reuse it if the same\nclient asks the same user for the same, or a subset of, scope.", "type": "boolean" @@ -2116,7 +2373,7 @@ ], "properties": { "acr": { - "description": "ACR sets the Authentication AuthorizationContext Class Reference value for this authentication 
session. You can use it\nto express that, for example, a user authenticated using two factor authentication.", + "description": "ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it\nto express that, for example, a user authenticated using two-factor authentication.", "type": "string" }, "amr": { @@ -2125,12 +2382,20 @@ "context": { "$ref": "#/definitions/JSONRawMessage" }, + "extend_session_lifespan": { + "description": "Extend OAuth2 authentication session lifespan\n\nIf set to `true`, the OAuth2 authentication cookie lifespan is extended. This is for example useful if you want the user to be able to use `prompt=none` continuously.\n\nThis value can only be set to `true` if the user has an authentication, which is the case if the `skip` value is `true`.", + "type": "boolean" + }, "force_subject_identifier": { "description": "ForceSubjectIdentifier forces the \"pairwise\" user ID of the end-user that authenticated. The \"pairwise\" user ID refers to the\n(Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg] of the OpenID\nConnect specification. It allows you to set an obfuscated subject (\"user\") identifier that is unique to the client.\n\nPlease note that this changes the user ID on endpoint /userinfo and sub claim of the ID Token. It does not change the\nsub claim in the OAuth 2.0 Introspection.\n\nPer default, ORY Hydra handles this value with its own algorithm. In case you want to set this yourself\nyou can use this field. Please note that setting this field has no effect if `pairwise` is not configured in\nORY Hydra or the OAuth 2.0 Client does not expect a pairwise identifier (set via `subject_type` key in the client's\nconfiguration).\n\nPlease also be aware that ORY Hydra is unable to properly compute this value during authentication. This implies\nthat you have to compute this value on every authentication process (probably depending on the client ID or some\nother unique value).\n\nIf you fail to compute the proper value, then authentication processes which have id_token_hint set might fail.", "type": "string" }, + "identity_provider_session_id": { + "description": "IdentityProviderSessionID is the session ID of the end-user that authenticated.\nIf specified, we will use this value to propagate the logout.", + "type": "string" + }, "remember": { - "description": "Remember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store\na cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she\nwill not be asked to log in again.", + "description": "Remember, if set to true, tells Ory Hydra to remember this user by telling the user agent (browser) to store\na cookie with authentication data. 
If the same user performs another OAuth 2.0 Authorization Request, they\nwill not be asked to log in again.", "type": "boolean" }, "remember_for": { @@ -2167,6 +2432,77 @@ } } }, + "credentialSupportedDraft00": { + "description": "Includes information about the supported verifiable credentials.", + "type": "object", + "title": "Verifiable Credentials Metadata (Draft 00)", + "properties": { + "cryptographic_binding_methods_supported": { + "description": "OpenID Connect Verifiable Credentials Cryptographic Binding Methods Supported\n\nContains a list of cryptographic binding methods supported for signing the proof.", + "type": "array", + "items": { + "type": "string" + } + }, + "cryptographic_suites_supported": { + "description": "OpenID Connect Verifiable Credentials Cryptographic Suites Supported\n\nContains a list of cryptographic suites methods supported for signing the proof.", + "type": "array", + "items": { + "type": "string" + } + }, + "format": { + "description": "OpenID Connect Verifiable Credentials Format\n\nContains the format that is supported by this authorization server.", + "type": "string" + }, + "types": { + "description": "OpenID Connect Verifiable Credentials Types\n\nContains the types of verifiable credentials supported.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "deviceAuthorization": { + "description": "# Ory's OAuth 2.0 Device Authorization API", + "type": "object", + "title": "OAuth2 Device Flow", + "properties": { + "device_code": { + "description": "The device verification code.", + "type": "string", + "example": "ory_dc_smldfksmdfkl.mslkmlkmlk" + }, + "expires_in": { + "description": "The lifetime in seconds of the \"device_code\" and \"user_code\".", + "type": "integer", + "format": "int64", + "example": 16830 + }, + "interval": { + "description": "The minimum amount of time in seconds that the client\nSHOULD wait between polling requests to the token endpoint. If no\nvalue is provided, clients MUST use 5 as the default.", + "type": "integer", + "format": "int64", + "example": 5 + }, + "user_code": { + "description": "The end-user verification code.", + "type": "string", + "example": "AAAAAA" + }, + "verification_uri": { + "description": "The end-user verification URI on the authorization\nserver. 
The URI should be short and easy to remember as end users\nwill be asked to manually type it into their user agent.", + "type": "string", + "example": "https://auth.ory.sh/tv" + }, + "verification_uri_complete": { + "description": "A verification URI that includes the \"user_code\" (or\nother information with the same function as the \"user_code\"),\nwhich is designed for non-textual transmission.", + "type": "string", + "example": "https://auth.ory.sh/tv?user_code=AAAAAA" + } + } + }, "errorOAuth2": { "description": "Error", "type": "object", @@ -2246,6 +2582,7 @@ }, "healthNotReadyStatus": { "type": "object", + "title": "The not ready status of the service.", "properties": { "errors": { "description": "Errors contains a list of errors that caused the not ready status.", @@ -2258,6 +2595,7 @@ }, "healthStatus": { "type": "object", + "title": "The health status of the service.", "properties": { "status": { "description": "Status always contains \"ok\".", @@ -2284,7 +2622,7 @@ } }, "client_id": { - "description": "ID is aclient identifier for the OAuth 2.0 client that\nrequested this token.", + "description": "ID is a client identifier for the OAuth 2.0 client that\nrequested this token.", "type": "string" }, "exp": { @@ -2472,6 +2810,36 @@ } } }, + "keysetPaginationRequestParameters": { + "description": "For details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "type": "object", + "title": "Pagination Request Parameters", + "properties": { + "page_size": { + "description": "Items per Page\n\nThis is the number of items per page to return.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "type": "integer", + "format": "int64", + "default": 250, + "maximum": 1000, + "minimum": 1 + }, + "page_token": { + "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "type": "string" + } + } + }, + "keysetPaginationResponseHeaders": { + "description": "The `Link` HTTP header contains multiple links (`first`, `next`) formatted as:\n`\u003chttps://{project-slug}.projects.oryapis.com/admin/sessions?page_size=250\u0026page_token=\u003e; rel=\"first\"`\n\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", + "type": "object", + "title": "Pagination Response Header", + "properties": { + "link": { + "description": "The Link HTTP Header\n\nThe `Link` header contains a comma-delimited list of links to the following pages:\n\nfirst: The first page of results.\nnext: The next page of results.\n\nPages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples:\n\n\u003c/admin/sessions?page_size=250\u0026page_token={last_item_uuid}; rel=\"first\",/admin/sessions?page_size=250\u0026page_token=\u003e; rel=\"next\"", + "type": "string" + } + } + }, "nullTime": { "type": "string", "format": "date-time", @@ -2482,6 +2850,10 @@ "type": "object", "title": "OAuth 2.0 Client", "properties": { + "access_token_strategy": { + "description": "OAuth 2.0 Access Token Strategy\n\nAccessTokenStrategy is the strategy used to generate access tokens.\nValid options are `jwt` and `opaque`. 
`jwt` is a bad idea, see https://www.ory.sh/docs/oauth2-oidc/jwt-access-token\nSetting the strategy here overrides the global setting in `strategies.access_token`.", + "type": "string" + }, "allowed_cors_origins": { "$ref": "#/definitions/StringSliceJSONFormat" }, @@ -2509,7 +2881,7 @@ "$ref": "#/definitions/NullDuration" }, "client_id": { - "description": "OAuth 2.0 Client ID\n\nThe ID is autogenerated and immutable.", + "description": "OAuth 2.0 Client ID\n\nThe ID is immutable. If no ID is provided, a UUID4 will be generated.", "type": "string" }, "client_name": { @@ -2537,6 +2909,15 @@ "type": "string", "format": "date-time" }, + "device_authorization_grant_access_token_lifespan": { + "$ref": "#/definitions/NullDuration" + }, + "device_authorization_grant_id_token_lifespan": { + "$ref": "#/definitions/NullDuration" + }, + "device_authorization_grant_refresh_token_lifespan": { + "$ref": "#/definitions/NullDuration" + }, "frontchannel_logout_session_required": { "description": "OpenID Connect Front-Channel Logout Session Required\n\nBoolean value specifying whether the RP requires that iss (issuer) and sid (session ID) query parameters be\nincluded to identify the RP session with the OP when the frontchannel_logout_uri is used.\nIf omitted, the default value is false.", "type": "boolean" @@ -2621,13 +3002,22 @@ "description": "OpenID Connect Sector Identifier URI\n\nURL using the https scheme to be used in calculating Pseudonymous Identifiers by the OP. The URL references a\nfile with a single JSON array of redirect_uri values.", "type": "string" }, + "skip_consent": { + "description": "SkipConsent skips the consent screen for this client. This field can only\nbe set from the admin API.", + "type": "boolean" + }, + "skip_logout_consent": { + "description": "SkipLogoutConsent skips the logout consent screen for this client. This field can only\nbe set from the admin API.", + "type": "boolean" + }, "subject_type": { "description": "OpenID Connect Subject Type\n\nThe `subject_types_supported` Discovery parameter contains a\nlist of the supported subject_type values for this server. Valid types include `pairwise` and `public`.", "type": "string" }, "token_endpoint_auth_method": { - "description": "OAuth 2.0 Token Endpoint Authentication Method\n\nRequested Client Authentication method for the Token Endpoint. The options are:\n\n`client_secret_post`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body.\n`client_secret_basic`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header.\n`private_key_jwt`: Use JSON Web Tokens to authenticate the client.\n`none`: Used for public clients (native apps, mobile apps) which can not have secrets.", - "type": "string" + "description": "OAuth 2.0 Token Endpoint Authentication Method\n\nRequested Client Authentication method for the Token Endpoint. 
The options are:\n\n`client_secret_basic`: (default) Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` encoded in the HTTP Authorization header.\n`client_secret_post`: Send `client_id` and `client_secret` as `application/x-www-form-urlencoded` in the HTTP body.\n`private_key_jwt`: Use JSON Web Tokens to authenticate the client.\n`none`: Used for public clients (native apps, mobile apps) which can not have secrets.", + "type": "string", + "default": "client_secret_basic" }, "token_endpoint_auth_signing_alg": { "description": "OAuth 2.0 Token Endpoint Signing Algorithm\n\nRequested Client Authentication signing algorithm for the Token Endpoint.", @@ -2665,6 +3055,15 @@ "client_credentials_grant_access_token_lifespan": { "$ref": "#/definitions/NullDuration" }, + "device_authorization_grant_access_token_lifespan": { + "$ref": "#/definitions/NullDuration" + }, + "device_authorization_grant_id_token_lifespan": { + "$ref": "#/definitions/NullDuration" + }, + "device_authorization_grant_refresh_token_lifespan": { + "$ref": "#/definitions/NullDuration" + }, "implicit_grant_access_token_lifespan": { "$ref": "#/definitions/NullDuration" }, @@ -2700,12 +3099,16 @@ "$ref": "#/definitions/StringSliceJSONFormat" }, "challenge": { - "description": "ID is the identifier (\"authorization challenge\") of the consent authorization request. It is used to\nidentify the session.", + "description": "Challenge is used to retrieve/accept/deny the consent request.", "type": "string" }, "client": { "$ref": "#/definitions/oAuth2Client" }, + "consent_request_id": { + "description": "ConsentRequestID is the ID of the consent request.", + "type": "string" + }, "context": { "$ref": "#/definitions/JSONRawMessage" }, @@ -2781,6 +3184,13 @@ "consent_request": { "$ref": "#/definitions/oAuth2ConsentRequest" }, + "consent_request_id": { + "description": "ConsentRequestID is the identifier of the consent request that initiated this consent session.", + "type": "string" + }, + "context": { + "$ref": "#/definitions/JSONRawMessage" + }, "grant_access_token_audience": { "$ref": "#/definitions/StringSliceJSONFormat" }, @@ -2816,8 +3226,6 @@ "title": "Contains information on an ongoing login request.", "required": [ "challenge", - "requested_scope", - "requested_access_token_audience", "skip", "subject", "client", @@ -2825,7 +3233,7 @@ ], "properties": { "challenge": { - "description": "ID is the identifier (\"login challenge\") of the login request. It is used to\nidentify the session.", + "description": "ID is the identifier of the login request.", "type": "string" }, "client": { @@ -2863,16 +3271,22 @@ "title": "Contains information about an ongoing logout request.", "properties": { "challenge": { - "description": "Challenge is the identifier (\"logout challenge\") of the logout authentication request. 
It is used to\nidentify the session.", + "description": "Challenge is the identifier of the logout authentication request.", "type": "string" }, "client": { "$ref": "#/definitions/oAuth2Client" }, + "expires_at": { + "$ref": "#/definitions/nullTime" + }, "request_url": { "description": "RequestURL is the original Logout URL requested.", "type": "string" }, + "requested_at": { + "$ref": "#/definitions/nullTime" + }, "rp_initiated": { "description": "RPInitiated is set to true if the request was initiated by a Relying Party (RP), also known as an OAuth 2.0 Client.", "type": "boolean" @@ -2916,8 +3330,7 @@ }, "id_token": { "description": "To retrieve a refresh token request the id_token scope.", - "type": "integer", - "format": "int64" + "type": "string" }, "refresh_token": { "description": "The refresh token, which can be used to obtain new\naccess tokens. To retrieve it add the scope \"offline\" to your access token request.", @@ -2940,6 +3353,7 @@ "required": [ "issuer", "authorization_endpoint", + "device_authorization_endpoint", "token_endpoint", "jwks_uri", "subject_types_supported", @@ -2980,6 +3394,22 @@ "type": "string" } }, + "credentials_endpoint_draft_00": { + "description": "OpenID Connect Verifiable Credentials Endpoint\n\nContains the URL of the Verifiable Credentials Endpoint.", + "type": "string" + }, + "credentials_supported_draft_00": { + "description": "OpenID Connect Verifiable Credentials Supported\n\nJSON array containing a list of the Verifiable Credentials supported by this authorization server.", + "type": "array", + "items": { + "$ref": "#/definitions/credentialSupportedDraft00" + } + }, + "device_authorization_endpoint": { + "description": "OAuth 2.0 Device Authorization Endpoint URL", + "type": "string", + "example": "https://playground.ory.sh/ory-hydra/public/oauth2/device/oauth" + }, "end_session_endpoint": { "description": "OpenID Connect End-Session Endpoint\n\nURL at the OP to which an RP can perform a redirect to request that the End-User be logged out at the OP.", "type": "string" @@ -3194,38 +3624,6 @@ } } }, - "pagination": { - "type": "object", - "properties": { - "page_size": { - "description": "Items per page\n\nThis is the number of items per page to return.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", - "type": "integer", - "format": "int64", - "default": 250, - "maximum": 1000, - "minimum": 1 - }, - "page_token": { - "description": "Next Page Token\n\nThe next page token.\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).", - "type": "string", - "default": "1", - "minimum": 1 - } - } - }, - "paginationHeaders": { - "type": "object", - "properties": { - "link": { - "description": "The link header contains pagination links.\n\nFor details on pagination please head over to the [pagination documentation](https://www.ory.sh/docs/ecosystem/api-design#pagination).\n\nin: header", - "type": "string" - }, - "x-total-count": { - "description": "The total number of clients.\n\nin: header", - "type": "string" - } - } - }, "rejectOAuth2Request": { "type": "object", "title": "The request payload used to accept a login or consent request.", @@ -3438,6 +3836,75 @@ } } }, + "unexpectedError": { + "type": "string" + }, + "verifiableCredentialPrimingResponse": { + "type": "object", + "title": "VerifiableCredentialPrimingResponse contains the nonce to include in the proof-of-possession JWT.", + 
"properties": { + "c_nonce": { + "type": "string" + }, + "c_nonce_expires_in": { + "type": "integer", + "format": "int64" + }, + "error": { + "type": "string" + }, + "error_debug": { + "type": "string" + }, + "error_description": { + "type": "string" + }, + "error_hint": { + "type": "string" + }, + "format": { + "type": "string" + }, + "status_code": { + "type": "integer", + "format": "int64" + } + } + }, + "verifiableCredentialResponse": { + "type": "object", + "title": "VerifiableCredentialResponse contains the verifiable credential.", + "properties": { + "credential_draft_00": { + "type": "string" + }, + "format": { + "type": "string" + } + } + }, + "verifyUserCodeRequest": { + "type": "object", + "title": "HandledDeviceUserAuthRequest is the request payload used to accept a device user_code.", + "properties": { + "client": { + "$ref": "#/definitions/oAuth2Client" + }, + "device_code_request_id": { + "type": "string" + }, + "request_url": { + "description": "RequestURL is the original Device Authorization URL requested.", + "type": "string" + }, + "requested_access_token_audience": { + "$ref": "#/definitions/StringSliceJSONFormat" + }, + "requested_scope": { + "$ref": "#/definitions/StringSliceJSONFormat" + } + } + }, "version": { "type": "object", "properties": { @@ -3450,7 +3917,7 @@ ,"UUID":{"type": "string", "format": "uuid4"}}, "responses": { "emptyResponse": { - "description": "Empty responses are sent when, for example, resources are deleted. The HTTP status code for empty responses is\ntypically 201." + "description": "Empty responses are sent when, for example, resources are deleted. The HTTP status code for empty responses is\ntypically 204." }, "errorOAuth2BadRequest": { "description": "Bad Request Error Response", @@ -3481,12 +3948,7 @@ "headers": { "link": { "type": "string", - "description": "The Link HTTP Header\n\nThe `Link` header contains a comma-delimited list of links to the following pages:\n\nfirst: The first page of results.\nnext: The next page of results.\nprev: The previous page of results.\nlast: The last page of results.\n\nPages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. Examples:\n\n\u003c/clients?page_size=5\u0026page_token=0\u003e; rel=\"first\",\u003c/clients?page_size=5\u0026page_token=15\u003e; rel=\"next\",\u003c/clients?page_size=5\u0026page_token=5\u003e; rel=\"prev\",\u003c/clients?page_size=5\u0026page_token=20\u003e; rel=\"last\"" - }, - "x-total-count": { - "type": "integer", - "format": "int64", - "description": "The X-Total-Count HTTP Header\n\nThe `X-Total-Count` header contains the total number of items in the collection." + "description": "The Link HTTP Header\n\nThe `Link` header contains a comma-delimited list of links to the following pages:\n\nfirst: The first page of results.\nnext: The next page of results.\n\nPages are omitted if they do not exist. For example, if there is no next page, the `next` link is omitted. 
Examples:\n\n\u003c/admin/sessions?page_size=250\u0026page_token={last_item_uuid}; rel=\"first\",/admin/sessions?page_size=250\u0026page_token=\u003e; rel=\"next\"" } } } diff --git a/test/conformance/Dockerfile b/test/conformance/Dockerfile index fcb1130a8ad..ea6f61052c2 100644 --- a/test/conformance/Dockerfile +++ b/test/conformance/Dockerfile @@ -10,7 +10,7 @@ RUN wget https://gitlab.com/openid/conformance-suite/-/archive/release-v4.1.4/co RUN mvn -B clean package -DskipTests && \ apt-get update && apt-get install -y \ - redir ca-certificates && \ + redir ca-certificates COPY ssl/ory-conformity.crt /etc/ssl/certs/ COPY ssl/ory-conformity.key /etc/ssl/private/ diff --git a/test/conformance/httpd/Dockerfile b/test/conformance/httpd/Dockerfile index 35d9df0f9af..1c7305cc155 100644 --- a/test/conformance/httpd/Dockerfile +++ b/test/conformance/httpd/Dockerfile @@ -1,4 +1,4 @@ -FROM debian:stretch +FROM debian:12-slim RUN apt-get update \ && apt-get install -y apache2 ssl-cert ca-certificates \ && apt-get clean diff --git a/test/conformance/hydra/Dockerfile b/test/conformance/hydra/Dockerfile index df86aefa45b..7d642e74616 100644 --- a/test/conformance/hydra/Dockerfile +++ b/test/conformance/hydra/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.19-buster AS builder +FROM golang:1.25 AS builder RUN apt-get update && \ apt-get install --no-install-recommends -y \ @@ -8,6 +8,10 @@ RUN apt-get update && \ WORKDIR /go/src/github.com/ory/hydra RUN mkdir -p ./internal/httpclient +COPY oryx/go.mod oryx/go.mod +COPY oryx/go.sum oryx/go.sum + + COPY go.mod go.sum ./ COPY internal/httpclient/go.* ./internal/httpclient/ @@ -18,9 +22,9 @@ RUN go mod download COPY . . -RUN go build -tags sqlite,json1 -o /usr/bin/hydra +RUN go build -tags sqlite -o /usr/bin/hydra -VOLUME /var/lib/sqlite +VOLUME /mnt/sqlite # Exposing the ory home directory VOLUME /home/ory diff --git a/test/conformance/publish.sh b/test/conformance/publish.sh index f39ba5f6e95..8f1600d7118 100755 --- a/test/conformance/publish.sh +++ b/test/conformance/publish.sh @@ -3,8 +3,9 @@ set -euxo pipefail cd "$( dirname "${BASH_SOURCE[0]}" )" -docker build -t oryd/hydra-oidc-server:latest . -docker build -t oryd/hydra-oidc-httpd:latest -f httpd/Dockerfile . + +docker buildx build --output type=docker --platform linux/amd64 -t oryd/hydra-oidc-server:latest . +docker buildx build --output type=docker --platform linux/amd64 -t oryd/hydra-oidc-httpd:latest -f httpd/Dockerfile . docker push oryd/hydra-oidc-server:latest docker push oryd/hydra-oidc-httpd:latest diff --git a/test/conformance/purge.sh b/test/conformance/purge.sh index caf4491106f..da436588b1d 100755 --- a/test/conformance/purge.sh +++ b/test/conformance/purge.sh @@ -3,4 +3,4 @@ set -euxo pipefail cd "$( dirname "${BASH_SOURCE[0]}" )/../.." 
-docker-compose -f quickstart.yml -f quickstart-postgres.yml -f test/conformance/docker-compose.yml down -v +docker compose -f quickstart.yml -f quickstart-postgres.yml -f test/conformance/docker-compose.yml down -v diff --git a/test/conformance/run_test.go b/test/conformance/run_test.go index b53de72197b..4a239321f44 100644 --- a/test/conformance/run_test.go +++ b/test/conformance/run_test.go @@ -2,7 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 //go:build conformity -// +build conformity package main @@ -107,24 +106,19 @@ var ( } server = urlx.ParseOrPanic("https://127.0.0.1:8443") config, _ = os.ReadFile("./config.json") - httpClient = httpx.NewResilientClient( - httpx.ResilientClientWithMinxRetryWait(time.Second*5), - httpx.ResilientClientWithClient(&http.Client{ - Timeout: time.Second * 5, - Transport: &http.Transport{ - TLSClientConfig: &tls.Config{ - InsecureSkipVerify: true, - }, - }, - })) - - workdir string + httpClient = httpx.NewResilientClient(httpx.ResilientClientWithMinxRetryWait(time.Second * 5)) + workdir string hydra = hydrac.NewAPIClient(hydrac.NewConfiguration()) ) func init() { - rand.Seed(time.Now().UnixNano()) + httpClient.HTTPClient.Timeout = 5 * time.Second + httpClient.HTTPClient.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: true, + }, + } hydra.GetConfig().HTTPClient = httpClient.HTTPClient hydra.GetConfig().Servers = hydrac.ServerConfigurations{{URL: "https://127.0.0.1:4445"}} } @@ -185,8 +179,10 @@ func TestPlans(t *testing.T) { func makePost(t *testing.T, href string, payload io.Reader, esc int) []byte { res, err := httpClient.Post(href, "application/json", payload) - require.NoError(t, err) - defer res.Body.Close() + if err != nil { + require.FailNowf(t, "Failed to make POST request. 
Check that the server is live and that the certificate in test/conformance/ssl is not expired.", "Error: %s\nURL: %s", err, href) + } + defer res.Body.Close() //nolint:errcheck body, err := io.ReadAll(res.Body) require.NoError(t, err) require.Equal(t, esc, res.StatusCode, "%s\n%s", href, body) @@ -201,8 +197,8 @@ func createPlan(t *testing.T, extra url.Values, isParallel bool) { } // https://localhost:8443/api/plan?planName=oidcc-formpost-basic-certification-test-plan&variant={"server_metadata":"discovery","client_registration":"dynamic_client"}&variant={"server_metadata":"discovery","client_registration":"dynamic_client"} - //planConfig, err := sjson.SetBytes(config, "alias", uuid.New()) - //require.NoError(t, err) + // planConfig, err := sjson.SetBytes(config, "alias", uuid.New()) + // require.NoError(t, err) body := makePost(t, urlx.CopyWithQuery(urlx.AppendPaths(server, "/api/plan"), extra).String(), bytes.NewReader(config), 201) @@ -224,7 +220,7 @@ func createPlan(t *testing.T, extra url.Values, isParallel bool) { t.Skipf("Test module 'oidcc-server-rotate-keys' can not run in parallel tests and was skipped...") return } else if module != "oidcc-server-rotate-keys" && !isParallel { - t.Skipf("Without paralleism only test module 'oidcc-server-rotate-keys' will be executed.") + t.Skipf("Without parallelism only test module 'oidcc-server-rotate-keys' will be executed.") return } @@ -277,7 +273,7 @@ func createPlan(t *testing.T, extra url.Values, isParallel bool) { bo := conf.NextBackOff() require.NotEqual(t, backoff.Stop, bo, "%+v", err) - _, _, err = hydra.JwkApi.CreateJsonWebKeySet(context.Background(), "hydra.openid.id-token").CreateJsonWebKeySet(hydrac.CreateJsonWebKeySet{ + _, _, err = hydra.JwkAPI.CreateJsonWebKeySet(context.Background(), "hydra.openid.id-token").CreateJsonWebKeySet(hydrac.CreateJsonWebKeySet{ Alg: "RS256", }).Execute() if err == nil { @@ -302,7 +298,7 @@ func createPlan(t *testing.T, extra url.Values, isParallel bool) { func checkStatus(t *testing.T, testID string) (string, status) { res, err := httpClient.Get(urlx.AppendPaths(server, "/api/info", testID).String()) require.NoError(t, err) - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck body, err := io.ReadAll(res.Body) require.NoError(t, err) require.Equal(t, 200, res.StatusCode, "%s", body) diff --git a/test/conformance/ssl/generate.sh b/test/conformance/ssl/generate.sh index 39779e4c431..6a925d33b0c 100755 --- a/test/conformance/ssl/generate.sh +++ b/test/conformance/ssl/generate.sh @@ -4,11 +4,11 @@ set -euxo pipefail cd "$( dirname "${BASH_SOURCE[0]}" )" subj="/C=GB/ST=London/L=London/O=Global Security/OU=IT Department/CN=ory.sh.local" -openssl genrsa -out ory-ca.key 2048 -subj "$subj" -openssl req -x509 -new -nodes -key ory-ca.key -sha256 -days 4096 -out ory-ca.pem -subj "$subj" +openssl genrsa -out ory-ca.key 2048 +openssl req -x509 -new -nodes -key ory-ca.key -sha256 -days 16384 -out ory-ca.pem -subj "$subj" NAME=ory-conformity -openssl genrsa -out $NAME.key 2048 -subj "$subj" +openssl genrsa -out $NAME.key 2048 openssl req -new -key $NAME.key -out $NAME.csr -subj "$subj" cat > $NAME.ext << EOF @@ -24,4 +24,4 @@ IP.1 = 127.0.0.1 EOF openssl x509 -req -in $NAME.csr -CA ory-ca.pem -CAkey ory-ca.key -CAcreateserial \ - -out $NAME.crt -days 825 -sha256 -extfile $NAME.ext + -out $NAME.crt -days 16384 -sha256 -extfile $NAME.ext diff --git a/test/conformance/ssl/ory-ca.key b/test/conformance/ssl/ory-ca.key index ac95905420e..09112b4a3fc 100644 --- a/test/conformance/ssl/ory-ca.key +++ 
b/test/conformance/ssl/ory-ca.key @@ -1,27 +1,28 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEogIBAAKCAQEAuSTkfDNMsW3lYTQVVtzGR0Uf1b1lrNJuRNT3nDbTGmhR4cUD -Aks3W5cJbZMBm5XS/ym7pLBbCgTj+5N17Mfc6dgTIyzdd1gpFEqTRfmgKZcOfEbQ -EHChMzYPSZS7c2/NTHQX8P4wlvVUjeUi4zA2wZgUqduDwxS+d/BPj/Kqi2samaFm -FVpBxSHgW4MwYKN9WR1KbijIhxaDsqVoypkRPgqmgw3L7HRX0oy2mrd/QFaSfdpi -j2tZPiwL4G1MjHzI3KpWOWWm6RIlkFoI0ihCkS+lIuOZnclW0NIpi0I+xER8X9BR -kzJATJUPTwfxTrVNqFdogjItsqVIYgff4neWzwIDAQABAoIBAHzA3zTRXFwyMN3O -upJNMU3Uxh7AAELziFnZJv+b8aNZp9L5bY2eIS8WFsQ+ylaMHiDRfh76tl2J7hUW -x99EzRbFAiAxmwlP3l5N4e7ExadAogIbtRdezUA0rRhnn+eo7i0WDQvWO+d8Z/00 -6M0tL7hDQakywRYyZCVtSC821LQb0CZ82aA0oRvVO7ZvqCgnD4kCx5LXLkwK+/xe -wVuemjplBVTGZpNhqv73L37eOb1nYxmyYq1Ma9qASP44p1ArR49D+hlvU5w7GLLT -hP//MK00wDLvKEQnVjWv9RSFHHHm5b/Yjg5ibWHvfXmrxqFiVt4dnt3nF8hWvM+a -dC/HxAECgYEA8vbM8j4pMj4l6QveHKX9z686xPKK3xDhtMGJTC/N6w2riFMwor+W -iZaM6agMqJ7iVf9hyePVlrpDLGBY1NMB0mKxJ3cw0xKQNH5l5RRXueEB6gxHgZqU -smASEnoSmxuJlhSXea/EAdpT91lSX1Eg8/VDBSkKOn/ZER+vzO7OX6UCgYEAwxPp -+H7Ou/QeVXssUevnZWh66YgNlKqO6xinQV4iBoH/xM72C7c1AN/KGusq4ZnXetF0 -KkuXGRtmEfeJfZwP+RrtQYKF0Yt6Nuz3hrBvC8UcB6iD0fbfJ/t5WXPo3NjxTTYV -hC81IdpXixtQSBUWQBjN6a50cFOMXxtArZsDEmMCgYA3jbj01sPCqjUu4TjLIiUz -vKRabf7U3F5PX0VOd0vFKB4FkMf5HoeENDlKH9OtJo8oDLnGWOL4kORVHD6Tgx/S -hCBvnrA6Qu9YIWKZC5q3dLxkUKR0/OxtEMxxDxp7/sLp3xF3kHnn1o0GbJUKGFnJ -jlXhsy2UjLEMWktvN1dWxQKBgDgI21c2zZltmN0DAYsSkSgu58d2/jeq+lyLe1eq -e2mVI+vbaw+Yc5ApuDsdO/cLorqzuYOArQlYNmyIxySelHQAiRAdNuUnBFcMoFk/ -Cta9qryEakEAgb9RW90XD1eItV9xXqLWkJOFzUm28cuSyw5kUZmDNA0j4plbEawM -b8hzAoGABhGhbKg7bIWrQM/Y465RL2lKqOt3P+Hs9yXLuVS/q8tTzf6LSus2XD2z -hSeZFEBIE0ufPv1hQD1uW2cLpwckcKts3clFNLaQa4lAgSWRRMkz39IqnmxsgYpx -ZYQtAJrovA79o03ylsgm4LZoLa1LNL09VyqCMMnQhJsGJ1Z9v5I= ------END RSA PRIVATE KEY----- +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC/qQP+sySNr1ug +nCg+72eVtQiyMlhV5FgjyXoFGnwBXjzVDRCxTWAZGCKQlQXXa/Dv95p8iXi9UjyP +HKJOo/lAr/ClgkIiMAD6RxvMvHo1/IBiladDbplU63EkLlOcp9bs0fm24DgV+pCn +osIDe8ORy+kNfIAvYaSzf6d9Ik5qldvacp5YnvEt4ABtuAAg/juW5sORgjkrsrac +0Q6XITH6gEC/RhX24pmzSQtGw40XkZa+rBboMTEY9i+kYT4OrAuj4PPONsd4098H +ScQSczVIbCBbDbkpW2i5r/Thdic+BUhUlyeiTolzdjZ2JnfNrNfStMY9+RryDXyq +KKgJmtxrAgMBAAECggEANiRXShCuP04tyqnCXsD/mJV6/ufk92KZmYZ1b1KjfDl0 +YtUdhZCvmHcucHyAmXHXLtrZ69tj3cuI6v6yRRfiNildiEvLReAcrpb+3e/ScETD +udW/2fc43nniTM+/c+4f82d/rO8bOO4/BfQe7Z/9R8b8L4Cfr4LcZY6msF1MHKR6 +ui3IvzJ2xRaqVjCnh4de/48Foda37/UZrU4scCeedKYJvG9hsT+BpepgqYD+biDx +UWP7ZswWa+HQNjS2EHmy9t6gWGtdFClxIdCXlPnaDIAciQvbdx7MZAm3mu2iHiTb +2PX0pCZM+wuRxhZ0skf5oaCto1lSE4VGA6pPpdBKpQKBgQDh9KnB+ZKx4Ui68o68 +FR4xk4Lvv6NRDMoDrz8VoOoNYicyvjm8SC4d8scq7CYac09Zu1DSoCPRw8ZMFxsH +xzrtln3Bsn/AIaUTfTLCsUNTjz3INaWnYJ3cRpA7g+LiuQcj5STO0qQI1uNe8YXq +zcGW42ZiI12y29Fg40ZMuVjIHwKBgQDZJPaje/OrPOPwvWAf1f1jXLlNDndKpd4F +pZsTIFHyTorX1ZJ6SxugQiDWzrs706Xv560WoBKqV3/94S6eP1B93SJh0f1LXaWg +bYhPtn+MydkhXYVgkWs7XbXNMw04iK9hLRxbj7v3DGQXvuv4ZYJQ6xIVpRl0Vya3 +bECtp/DSNQKBgQDPm1PKNPwS0fJYSnrY1vJqDKXWkJaG5qBKKF9nCPVjnJZ1ahB0 +LUGqJKHYjycnUGFNCe1lyNuAZUeyK9ybFfPPzmD3coEsT/1zXaog9mvsv4S9ioUM +t5nzhSYKeNAjDmxAD8709NbghEheuL0ZkzlW4NL2QpZZutyzD79MyVOIcwKBgF// +HF5oFPBWCftK7W0FoHDF9FGhnJFOxNnAPc910WSK3ozOqWQRQlRWB8iPOm0Xg1uN +E5NWDwpphncJjx98pCwQj/uo5w7jhq5m3gVHjBoEvk4m1yk7AKoNIqtl72lX3Bjc +5ZWA9LsiKjnpOM7mqXb2hVc2Vwt83+vlGq7q+OclAoGAFrd2iy6PAATydoZgLcmW +tFje7vmZdLcWuzXdtUDeBF5kYEoy9YheAqgaRQLQko27i2FKK1bw6f4tH47OHxfP +eFqhhu2723eQ2HiUEZUATE6VTwAjUWLF965pUgXQBxNYUBe+Am6ad7PIOt7a6TMM +hsn4hDKKhbOlD0/s0l9xEQA= +-----END PRIVATE KEY----- diff --git a/test/conformance/ssl/ory-ca.pem 
b/test/conformance/ssl/ory-ca.pem index ff1f45ee072..0db0a18a1b4 100644 --- a/test/conformance/ssl/ory-ca.pem +++ b/test/conformance/ssl/ory-ca.pem @@ -1,21 +1,23 @@ -----BEGIN CERTIFICATE----- -MIIDbDCCAlQCCQCdoz0sAj5xlzANBgkqhkiG9w0BAQsFADB4MQswCQYDVQQGEwJH -QjEPMA0GA1UECAwGTG9uZG9uMQ8wDQYDVQQHDAZMb25kb24xGDAWBgNVBAoMD0ds -b2JhbCBTZWN1cml0eTEWMBQGA1UECwwNSVQgRGVwYXJ0bWVudDEVMBMGA1UEAwwM -b3J5LnNoLmxvY2FsMB4XDTIwMTExMDA5MjY1NFoXDTMyMDEyODA5MjY1NFoweDEL -MAkGA1UEBhMCR0IxDzANBgNVBAgMBkxvbmRvbjEPMA0GA1UEBwwGTG9uZG9uMRgw -FgYDVQQKDA9HbG9iYWwgU2VjdXJpdHkxFjAUBgNVBAsMDUlUIERlcGFydG1lbnQx -FTATBgNVBAMMDG9yeS5zaC5sb2NhbDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBALkk5HwzTLFt5WE0FVbcxkdFH9W9ZazSbkTU95w20xpoUeHFAwJLN1uX -CW2TAZuV0v8pu6SwWwoE4/uTdezH3OnYEyMs3XdYKRRKk0X5oCmXDnxG0BBwoTM2 -D0mUu3NvzUx0F/D+MJb1VI3lIuMwNsGYFKnbg8MUvnfwT4/yqotrGpmhZhVaQcUh -4FuDMGCjfVkdSm4oyIcWg7KlaMqZET4KpoMNy+x0V9KMtpq3f0BWkn3aYo9rWT4s -C+BtTIx8yNyqVjllpukSJZBaCNIoQpEvpSLjmZ3JVtDSKYtCPsREfF/QUZMyQEyV -D08H8U61TahXaIIyLbKlSGIH3+J3ls8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA -ad4IllTfGig/Pcee+hCvYKuFX9n5QbVBUebNzSRIg8LC38jCNTK3dLBeiNel9D0q -TR53s83k9uFfPkmajV1THNIc+phDrrDEAqE7SlUPitdmdZvaFf7Mho5YWca+MpgL -2mawCZZ3ALZ9s4l9Q1v5eA5woZ/AdSJ1ESbVaw1IYV92xvKLRE2Wtr/XMav0eJDQ -ZVBHiXLdYPUcLna5gkfBCCFTliY2FPX6XTAN9u2E/dmva762H15pV2KQmfNmgLVO -wMRygU53w8IoJE79yegLUmMZoGN1KfcxOoCc4Xz58hv1kQo38CE3yCboVtLNoHmv -CuGY6J36+9mQ8bBs26Oinw== +MIID0zCCArugAwIBAgIUY1TU9vVzyEsItktoDoH+sX8i1JswDQYJKoZIhvcNAQEL +BQAweDELMAkGA1UEBhMCR0IxDzANBgNVBAgMBkxvbmRvbjEPMA0GA1UEBwwGTG9u +ZG9uMRgwFgYDVQQKDA9HbG9iYWwgU2VjdXJpdHkxFjAUBgNVBAsMDUlUIERlcGFy +dG1lbnQxFTATBgNVBAMMDG9yeS5zaC5sb2NhbDAgFw0yNTA2MDQxMzQ1MTFaGA8y +MDcwMDQxMzEzNDUxMVoweDELMAkGA1UEBhMCR0IxDzANBgNVBAgMBkxvbmRvbjEP +MA0GA1UEBwwGTG9uZG9uMRgwFgYDVQQKDA9HbG9iYWwgU2VjdXJpdHkxFjAUBgNV +BAsMDUlUIERlcGFydG1lbnQxFTATBgNVBAMMDG9yeS5zaC5sb2NhbDCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBAL+pA/6zJI2vW6CcKD7vZ5W1CLIyWFXk +WCPJegUafAFePNUNELFNYBkYIpCVBddr8O/3mnyJeL1SPI8cok6j+UCv8KWCQiIw +APpHG8y8ejX8gGKVp0NumVTrcSQuU5yn1uzR+bbgOBX6kKeiwgN7w5HL6Q18gC9h +pLN/p30iTmqV29pynlie8S3gAG24ACD+O5bmw5GCOSuytpzRDpchMfqAQL9GFfbi +mbNJC0bDjReRlr6sFugxMRj2L6RhPg6sC6Pg8842x3jT3wdJxBJzNUhsIFsNuSlb +aLmv9OF2Jz4FSFSXJ6JOiXN2NnYmd82s19K0xj35GvINfKooqAma3GsCAwEAAaNT +MFEwHQYDVR0OBBYEFEG6sr2rlH4blBTY10yemUmkDdRXMB8GA1UdIwQYMBaAFEG6 +sr2rlH4blBTY10yemUmkDdRXMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAIsvd/UG58rz4e381UHxUBqCYdiiMUWIoaBZKXpguwbxfg16Ih+62815 +F9uDGr0abrN5vXRIJa0wZ5+yCSmf81lYeJ0g+El6J7U8kRwmJ3mA1hfOtd0OyxoK +p8JNFuHkOeXWnxRZYa3zrOXRBp7D16Jm8sHqpnIDx7/Y8us6zotuMhAFv+Oz7D6v +nRQ0K8/KUZM7yVTRGk+RHElNvz4Tp/P89NbkzYftH8hIS6QZjbOKmac+g21herNo +D5fZqic6uwwJ1XN0Mlq1rQkY0etOsWmomLsbiK2tvDgWNgaghi9JN9SR/QEl4cDH +vXsZ4cT8BW1BXsmIhCf6vkn5pXsv628= -----END CERTIFICATE----- diff --git a/test/conformance/ssl/ory-ca.srl b/test/conformance/ssl/ory-ca.srl index 8277e9f8e4d..43a3ed5021b 100644 --- a/test/conformance/ssl/ory-ca.srl +++ b/test/conformance/ssl/ory-ca.srl @@ -1 +1 @@ -DD6B7209999175BD +DD6B7209999175C2 diff --git a/test/conformance/ssl/ory-conformity.crt b/test/conformance/ssl/ory-conformity.crt index b926d15bb22..d056e59d8f2 100644 --- a/test/conformance/ssl/ory-conformity.crt +++ b/test/conformance/ssl/ory-conformity.crt @@ -1,26 +1,24 @@ -----BEGIN CERTIFICATE----- -MIIETjCCAzagAwIBAgIJAN1rcgmZkXW9MA0GCSqGSIb3DQEBCwUAMHgxCzAJBgNV +MIID+TCCAuGgAwIBAgIJAN1rcgmZkXXCMA0GCSqGSIb3DQEBCwUAMHgxCzAJBgNV BAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNVBAcMBkxvbmRvbjEYMBYGA1UE 
CgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1JVCBEZXBhcnRtZW50MRUwEwYD -VQQDDAxvcnkuc2gubG9jYWwwHhcNMjAxMTEwMDkyNjU0WhcNMjMwMjEzMDkyNjU0 -WjB4MQswCQYDVQQGEwJHQjEPMA0GA1UECAwGTG9uZG9uMQ8wDQYDVQQHDAZMb25k -b24xGDAWBgNVBAoMD0dsb2JhbCBTZWN1cml0eTEWMBQGA1UECwwNSVQgRGVwYXJ0 -bWVudDEVMBMGA1UEAwwMb3J5LnNoLmxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEAweQbhpjij22E/gQo27/mIJIzA5s7Y/PbfRfQXSWQME/UrsKP -6gauioHHN/y+acWinrvM7qjm5t90METdrpGOl1uLSXQcev0SAQWxc6enjIqnBQ7u -dNzg4oEl3SE4IkeXWggdubq6pbch5PxtLnIeO5cXSUaXkRfzEcb7Vk0mY7QGnHTm -kiwQa6B0JKzTps4lT909ueUoTlkRl+xV48vt0igcOAbmm/ONq4GeLucaYOhPOZ2F -Cp20Bx8RzSIOt5+54ziVQWJ2cz6+qamoZTVRqd18sabR5pVAp5z5TB+C8F6CuDxU -NsPX0X12TnDH+MEWzhnEe7Y2cvZU7ez+kxauSwIDAQABo4HaMIHXMIGUBgNVHSME -gYwwgYmhfKR6MHgxCzAJBgNVBAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNV -BAcMBkxvbmRvbjEYMBYGA1UECgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1J -VCBEZXBhcnRtZW50MRUwEwYDVQQDDAxvcnkuc2gubG9jYWyCCQCdoz0sAj5xlzAJ -BgNVHRMEAjAAMAsGA1UdDwQEAwIE8DAmBgNVHREEHzAdggVodHRwZIIFaHlkcmGC -B2NvbnNlbnSHBH8AAAEwDQYJKoZIhvcNAQELBQADggEBACFr6UqN/9Mx3NuLgFeb -VMfq929BOdbXeRz5lYUsdoBktexDHIRk/zORu6nnsWoGptnk1Swbdq9hj2WLElCS -73GwlDJISCRe65qlhcDPNixOp7R/8ZZMrV4VvhyADXclBsdBVwtYNI42G9MHTxhx -D69JDzbeNRY+aZ5i8DvGh29V+1ZkG9bpwsMIXn1OoF7b3MTQ+s/eajQTnNfvmWXg -jgvFIZZdj5O8apEkO/e6XXAWz5uyVAFzIcWijT+FjyjDdZzVCA1Qx0y2nxAoz2BI -HvdIhX6wXW9p6UBmsuxkVtxLNn/SdGzhl7OA4sWFdJgDmo5/EoKaYvRULxX6Z+sB -qIc= +VQQDDAxvcnkuc2gubG9jYWwwIBcNMjUwNjA0MTM0NTExWhgPMjA3MDA0MTMxMzQ1 +MTFaMHgxCzAJBgNVBAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNVBAcMBkxv +bmRvbjEYMBYGA1UECgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1JVCBEZXBh +cnRtZW50MRUwEwYDVQQDDAxvcnkuc2gubG9jYWwwggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQDMYTTfT8T7rAM5PdkJkXHc3HKLLghD5NeAyedurx0j2TJ2 +Q1gXII1nP7WGYn0YS2z/jlY2ncOBYDmhsYYVzg5L2z7AynyezGNLcuKZNvxc3BSs +Y54xWU3Jav36UlgdFxzsQWbrefe+4G0c/R0hEFSM6jBYCaJn+Z93z/T+gQEpUfCe +/BMQiWQ5NF0BCgxeRYqUDS+WEJWqqDBF6Wh+EAj82CNvABK21B/JP4+r5QVFGryV +GqRGtadbYdbk0dFlRYTtNsftgCgg/rWnNt6cZr5GVGl70a02DbJ6vE7HmCM8r+ay +WdOjg+5u86OsaognnygmcFXA1ELfo6lNVQ5gzRoHAgMBAAGjgYMwgYAwHwYDVR0j +BBgwFoAUQbqyvauUfhuUFNjXTJ6ZSaQN1FcwCQYDVR0TBAIwADALBgNVHQ8EBAMC +BPAwJgYDVR0RBB8wHYIFaHR0cGSCBWh5ZHJhggdjb25zZW50hwR/AAABMB0GA1Ud +DgQWBBSjIOAFRaBVkpck741o52NObH+tMzANBgkqhkiG9w0BAQsFAAOCAQEAb1Aq +gEmDlFICyKz7Eb2DiIOJ2fr5TSwYsdMQBvXFhCLwWlYYAkfJwTnsBF4CIcL4SSsL +ty0vEW/RBI97BCewNz3OkjxaJyfp86UwdMwl1Jv2IKydDjlNighsHUPJCVex+bHm +E6HNOQcTjNfp9T9VZtPiQKbLy8skfuMBbxT31zqFwhnx6WoHNGHzAalsYu/2CRWg +DquQpBI6SOjT+pP4PQ/u/soGHaG04lU10xWecFeo8i1II42+cHumsL8BkHh8R8eS +9K8Uw+iA3/ToEPLOvlmiZKLey/JbYloA3dkvJKPPCjRIe3ouXFhqXrU1n8Lil05I +sqacE/FoNVM0fRdCQA== -----END CERTIFICATE----- diff --git a/test/conformance/ssl/ory-conformity.csr b/test/conformance/ssl/ory-conformity.csr index f788d77fd1d..52c1f10043e 100644 --- a/test/conformance/ssl/ory-conformity.csr +++ b/test/conformance/ssl/ory-conformity.csr @@ -2,16 +2,16 @@ MIICvTCCAaUCAQAweDELMAkGA1UEBhMCR0IxDzANBgNVBAgMBkxvbmRvbjEPMA0G A1UEBwwGTG9uZG9uMRgwFgYDVQQKDA9HbG9iYWwgU2VjdXJpdHkxFjAUBgNVBAsM DUlUIERlcGFydG1lbnQxFTATBgNVBAMMDG9yeS5zaC5sb2NhbDCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMHkG4aY4o9thP4EKNu/5iCSMwObO2Pz230X -0F0lkDBP1K7Cj+oGroqBxzf8vmnFop67zO6o5ubfdDBE3a6Rjpdbi0l0HHr9EgEF -sXOnp4yKpwUO7nTc4OKBJd0hOCJHl1oIHbm6uqW3IeT8bS5yHjuXF0lGl5EX8xHG -+1ZNJmO0Bpx05pIsEGugdCSs06bOJU/dPbnlKE5ZEZfsVePL7dIoHDgG5pvzjauB -ni7nGmDoTzmdhQqdtAcfEc0iDrefueM4lUFidnM+vqmpqGU1UandfLGm0eaVQKec -+UwfgvBegrg8VDbD19F9dk5wx/jBFs4ZxHu2NnL2VO3s/pMWrksCAwEAAaAAMA0G -CSqGSIb3DQEBCwUAA4IBAQC9WyDMh1HO+DH+/KvBgZy1LecI0ScM3K5sNtikDCaJ 
-KJrbRljUYdJPkFmTO9XCgRYAY3CzlZLj9uURjqRIk7lDUr39bPZzIUBmxYhlkiQ/ -Ivgt52QJYo9RsonQ3g7NoW7IneLJdFn2rBx0Z81qjQV0UNdx/dg5lZVTdeco1JbC -Tj6vO0tKLahBcBwVIcOnHmvf+sp5mwmf12BT6XO13QzOASUssPOGSfXCvA+b6F9Y -bMA3pFgkgzGnAPjPDJdx1oImlg6pRga77scUQNJiVXGP/ZW0lF9GcL44iqcidFnv -MQTqARfG+1hBM87/1JK++On490wvYcy/hKCMb3nwaA3Y +hvcNAQEBBQADggEPADCCAQoCggEBAMxhNN9PxPusAzk92QmRcdzccosuCEPk14DJ +526vHSPZMnZDWBcgjWc/tYZifRhLbP+OVjadw4FgOaGxhhXODkvbPsDKfJ7MY0ty +4pk2/FzcFKxjnjFZTclq/fpSWB0XHOxBZut5977gbRz9HSEQVIzqMFgJomf5n3fP +9P6BASlR8J78ExCJZDk0XQEKDF5FipQNL5YQlaqoMEXpaH4QCPzYI28AErbUH8k/ +j6vlBUUavJUapEa1p1th1uTR0WVFhO02x+2AKCD+tac23pxmvkZUaXvRrTYNsnq8 +TseYIzyv5rJZ06OD7m7zo6xqiCefKCZwVcDUQt+jqU1VDmDNGgcCAwEAAaAAMA0G +CSqGSIb3DQEBCwUAA4IBAQB6+LCK+G/OalrEaSKaMdFMUCuTYUBToTwgjnaWbCsj +eC1Lsx+wFeoKAqMiZOvzkLYI1cNDKMtEhqtvNGtCrVkcoOpV6xE5PbVUtp9EwqMK +NPd3aMdaUs7/b7sAHzthrNGUfNV6RWUuGWaYCfRoGpYnKJAbWzrhXtdnIUUZPynE +QMRHbJHUIBmP0dv1rSOj1S62rRJ2iVUHVaTa5iuS37/QP6FzjhnEwXXaQ81Bs6Iq ++FaAQClo4ENdNxZc6IZjWXOTRwx0VDeDDjJ40RrVpceetMEg+XkEmibXJxuh6sek +XbnY5qBRY3JnU/eRdwwkNxTzEV7pRCUsWEQvahCpVF9L -----END CERTIFICATE REQUEST----- diff --git a/test/conformance/ssl/ory-conformity.key b/test/conformance/ssl/ory-conformity.key index 1728cdca4cd..5955893aef4 100644 --- a/test/conformance/ssl/ory-conformity.key +++ b/test/conformance/ssl/ory-conformity.key @@ -1,27 +1,28 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpQIBAAKCAQEAweQbhpjij22E/gQo27/mIJIzA5s7Y/PbfRfQXSWQME/UrsKP -6gauioHHN/y+acWinrvM7qjm5t90METdrpGOl1uLSXQcev0SAQWxc6enjIqnBQ7u -dNzg4oEl3SE4IkeXWggdubq6pbch5PxtLnIeO5cXSUaXkRfzEcb7Vk0mY7QGnHTm -kiwQa6B0JKzTps4lT909ueUoTlkRl+xV48vt0igcOAbmm/ONq4GeLucaYOhPOZ2F -Cp20Bx8RzSIOt5+54ziVQWJ2cz6+qamoZTVRqd18sabR5pVAp5z5TB+C8F6CuDxU -NsPX0X12TnDH+MEWzhnEe7Y2cvZU7ez+kxauSwIDAQABAoIBAE4pGVuJ+BLfkHQh -0yK62hcZOI1kn21smXd1CR7zjIudMtx8PC2YIfZd2KReM5hJ/oNEq5kt5178h0cK -C99aeAPV+HFU6EJAEGjvFZjM5gMGxcuJOGFNxS4p58ybnphbCc1WZj0/5Av3pic8 -BvtpL3zQLKfyWBk6cVciQqGImkgoT57t1HDSkQJFVIAjjx4myxcP7vQvxhj4HouW -1uk40p9sw7I7/VWOycR+Y2A5UKsculVSWNWV9U+pnFWDuDWeorNVBSqanmVznZOi -IiZjP3ipxitFF9OwlPth7mD0DDspTotHa0+ID70tNSosxBxiUhOaFwQr+SarKuEY -bsLZLlECgYEA5yYkOLAPdJW7wvlapHOD0LP1biXhZJuIhlcL/Tf4/KqlXa2ThsI1 -x5oRDTkuAU5Sdkov499l6NbjBxeZTLsh5ViNbn6aSwDVOLsD5tem7/NcvvllpEtI -RdhAgfxg+pzDLiLhU4ewIy0ktcEUekEmrsK1Rwdq46TknRjBG0m50SMCgYEA1ryG -RWFIfxFHr/GY3tMbup3zKtja5iOkkNZyiNRycNs8rL2FHviBf6/cJVY6jDH/GIe9 -Gvt/KVmLMw6aqGk+bO1G03gkV0il8KVOhOXxyHTwIRSd+nIQ2Mdx/+In7cdgq4as -7QX7ohBfDB7qJxTsXqBnV7WOc97lzDfIk5LuBLkCgYEAwxtIu6iwgwHYSMzdG659 -du5PnbfPtnIqHOrgrc5PFkab6qSWv8qyRlbfScAX+vY3WK1qGvf4Nz8axmO+/CuA -guvqBXhLmD+NuqLwuisRN7y9bl1o0/LPgFT2rkFvQlqLzSeX9j+0Bx0VvWzKch8I -Hb8v8FJsgNYwOgIpDXV/qO0CgYEAnVtvZX4hmwP1rWf0pyy6QCmo3PgmiMkLuht7 -uOs9pZfywX7sA0MF2qTrRO0IYF3u2nH10MUFbS6yA8sq6EifN0E1SpfawZ96AnuQ -q3C5mjSfFw69qZ5W3BRjZKi8q9ET9P3u0I421I78p6wr2FHL2JfgwvRHnH91lA44 -zHO6OfkCgYEAnkHD4gOuhJJyb/y1kcOKHduReNZ4Mn49bv6+zI6j5QawrGxf3c6D -u6Xla6bivwjICnsN+cAAPfg86X0yXzlu0YFQYJ2y0idYo3GY8cgCU4Bo5ltF10En -qtR2HQ9oN3HRYZ7hObDgpOj0D2v1lWdGOi4iOofIIE809pLQPDLcBKY= ------END RSA PRIVATE KEY----- +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDMYTTfT8T7rAM5 +PdkJkXHc3HKLLghD5NeAyedurx0j2TJ2Q1gXII1nP7WGYn0YS2z/jlY2ncOBYDmh +sYYVzg5L2z7AynyezGNLcuKZNvxc3BSsY54xWU3Jav36UlgdFxzsQWbrefe+4G0c +/R0hEFSM6jBYCaJn+Z93z/T+gQEpUfCe/BMQiWQ5NF0BCgxeRYqUDS+WEJWqqDBF +6Wh+EAj82CNvABK21B/JP4+r5QVFGryVGqRGtadbYdbk0dFlRYTtNsftgCgg/rWn +Nt6cZr5GVGl70a02DbJ6vE7HmCM8r+ayWdOjg+5u86OsaognnygmcFXA1ELfo6lN 
+VQ5gzRoHAgMBAAECggEAVnCOchxEzcSUoHMeBkPzilvaPi8746s30gqSzWdWxcCo +7QfSjAWCiLaTScEB83KDKRrEL9lNxLwz48U3VqWNuluCEPwS9P6wyEpQuEhfN88F +F1wSngCNO7D6thJ5neQWoX2Pz+wH8jztKSOMXF2RxP+Pbsy0WZFj9sKHddSN7zEt +H/O48TXq9hwRKIbW71JrtNXmuU4+0XzzsTdIGWXl3cXITddlg2ury5Wzox6wDS54 +nd0jwd/23rk2kQ5DVgFLRjQv3GeF5Xv6KW7Fb5epd+gbXopjr+ZRgpLr2EVFQ3Hu +qDWdQU5xHfPh1A3CxmXXwShwjZ2gTGrjHy46bjRmIQKBgQDsuZ5hllV1mq31VZ68 +It76RE8gaOkR3sA4XeWtfP+cJc9aHoua/Mbb4JUV51nQHmtcRRAMj2y0CfL910d8 +jkUpY6fdrOHx46fr4HPSBf9+izYwsbZYPm/H0mNpSFv5fxJtcoHz/6aJAObqUqW8 +93VWvaIisZvtrrcO2PgOhI2N6QKBgQDdBV6lQkfqvWsz7Lw+PTLHJHw2KLsMLQhg +GpmGNKMnL6DKKt4hzp5QDTnlw7WlF1SItRDNelSsUyZtOxWDLtjj3Q44xTbEbQQJ +uyctL9HNfLS3xtujosHxJsORJNxkLb7/uPEJfROzAfGcCV1vxR0lHTee0rkxSBhn +dBLgO5LCbwKBgQCWIBlbVhWYYaIUxlnGgm7aVFvB+AZnONa6aIHyBiWshyX6xwhO +ICSU6NZjb7a7j68ScxH8hTCw9OZPKSnCyg7+cp7QrIUV/ARQSKXq+bbQL8UMGIfF +asNE/B3jdOovTfEHy+iyLt0UYagPzp4hOuYFPnoGepCiqXyaQ+r8ef+zkQKBgGVI +IKb6SJp5UrbotRqQUMSRI5pt78kUmdHwXonxdRJv8O6z1jcHx+RHK1Du7j5JAqPi +1WBTvpl5QRKtEJ6cySciFXJObVp12nPbH4jFcQ8JJodQyTWjT9yCNvw8OAJwNCZb +SZlfh7dwHJe0cuzVFxX63ZIaXmMG9qfxuPSoWAhhAoGAc/QduUR+2lGli7T9w0Oc +jlULgRbslMCgXsvIV1rKqGeL8agSPRm6+EwlcE69dbQdXrAVcbyD3aT+IiubfqKI +uPyvZ/jUnIsZ1ftMv2NZFA+WCSyOfXTQehFvpR5QdIeG54F+cZpyHeyrOKD2Hpcp +Nx7ytv5oREXwvWvDU0yjWM8= +-----END PRIVATE KEY----- diff --git a/test/conformance/start.sh b/test/conformance/start.sh index 65080fe2c02..342a4f59d24 100755 --- a/test/conformance/start.sh +++ b/test/conformance/start.sh @@ -4,4 +4,6 @@ set -euxo pipefail cd "$( dirname "${BASH_SOURCE[0]}" )/../.." # shellcheck disable=SC2086 -docker-compose -f quickstart.yml -f quickstart-postgres.yml -f test/conformance/docker-compose.yml up ${1:-} -d --force-recreate --build +docker compose -f quickstart.yml -f quickstart-postgres.yml -f test/conformance/docker-compose.yml up ${1:-} -d --force-recreate --build +docker ps -a +docker images diff --git a/test/e2e/circle-ci.bash b/test/e2e/circle-ci.bash index f90ffd15251..498ec93eef5 100755 --- a/test/e2e/circle-ci.bash +++ b/test/e2e/circle-ci.bash @@ -12,7 +12,6 @@ function catch { trap catch ERR killall hydra || true -killall node || true # Check if any ports that we need are open already ! nc -zv 127.0.0.1 5004 @@ -26,7 +25,7 @@ if [[ ! -d "../../node_modules/" ]]; then (cd ../..; npm ci) fi -(cd ../../; go build -tags sqlite,json1 -o test/e2e/hydra . ) +(cd ../../; go build -tags sqlite -o test/e2e/hydra . ) # Install oauth2-client if [[ ! 
-d "./oauth2-client/node_modules/" ]]; then @@ -38,6 +37,8 @@ fi (cd oauth2-client; PORT=5002 HYDRA_ADMIN_URL=http://127.0.0.1:5001 npm run consent > ../login-consent-logout.e2e.log 2>&1 &) export URLS_SELF_ISSUER=http://127.0.0.1:5004/ +export URLS_DEVICE_VERIFICATION=http://127.0.0.1:5002/device/verify +export URLS_DEVICE_SUCCESS=http://127.0.0.1:5002/oauth2/device/success export URLS_CONSENT=http://127.0.0.1:5002/consent export URLS_LOGIN=http://127.0.0.1:5002/login export URLS_LOGOUT=http://127.0.0.1:5002/logout @@ -92,7 +93,7 @@ case $i in esac done -./hydra migrate sql --yes $TEST_DATABASE > ./hydra-migrate.e2e.log 2>&1 +./hydra migrate sql up --yes $TEST_DATABASE > ./hydra-migrate.e2e.log 2>&1 DSN=$TEST_DATABASE \ ./hydra serve all --dev --sqa-opt-out > ./hydra.e2e.log 2>&1 & diff --git a/test/e2e/docker-compose.cockroach.yml b/test/e2e/docker-compose.cockroach.yml index 08fd1cd8c2c..d982e416fea 100644 --- a/test/e2e/docker-compose.cockroach.yml +++ b/test/e2e/docker-compose.cockroach.yml @@ -5,7 +5,7 @@ services: image: oryd/hydra:e2e environment: - DSN=cockroach://root@cockroachd:26257/defaultdb?sslmode=disable&max_conns=20&max_idle_conns=4 - command: migrate sql -e --yes + command: migrate sql up -e --yes restart: on-failure hydra: @@ -15,7 +15,7 @@ services: - DSN=cockroach://root@cockroachd:26257/defaultdb?sslmode=disable&max_conns=20&max_idle_conns=4 cockroachd: - image: cockroachdb/cockroach:v22.1.10 + image: cockroachdb/cockroach:latest-v25.4 ports: - "26257:26257" command: start-single-node --insecure diff --git a/test/e2e/docker-compose.mysql.yml b/test/e2e/docker-compose.mysql.yml index 703e8f3cfef..e8a03d4eb67 100644 --- a/test/e2e/docker-compose.mysql.yml +++ b/test/e2e/docker-compose.mysql.yml @@ -5,7 +5,7 @@ services: image: oryd/hydra:e2e environment: - DSN=mysql://root:secret@tcp(mysqld:3306)/mysql?max_conns=20&max_idle_conns=4 - command: migrate sql -e --yes + command: migrate sql up -e --yes restart: on-failure hydra: @@ -15,8 +15,7 @@ services: - DSN=mysql://root:secret@tcp(mysqld:3306)/mysql?max_conns=20&max_idle_conns=4 mysqld: - image: mysql:8.0.26 - platform: linux/amd64 + image: mysql:8.0 ports: - "3306:3306" environment: diff --git a/test/e2e/docker-compose.postgres.yml b/test/e2e/docker-compose.postgres.yml index 7c1a4f6bee2..72e3ed7443c 100644 --- a/test/e2e/docker-compose.postgres.yml +++ b/test/e2e/docker-compose.postgres.yml @@ -5,7 +5,7 @@ services: image: oryd/hydra:e2e environment: - DSN=postgres://hydra:secret@postgresd:5432/hydra?sslmode=disable&max_conns=20&max_idle_conns=4 - command: migrate sql -e --yes + command: migrate sql up -e --yes restart: on-failure hydra: diff --git a/test/e2e/oauth2-client/package-lock.json b/test/e2e/oauth2-client/package-lock.json index 76f26b2d1a5..cb18ebf78ac 100644 --- a/test/e2e/oauth2-client/package-lock.json +++ b/test/e2e/oauth2-client/package-lock.json @@ -10,10 +10,10 @@ "dependencies": { "body-parser": "^1.20.1", "dotenv": "^7.0.0", - "express": "^4.18.2", + "express": "^4.21.2", "express-session": "^1.17.0", "express-winston": "^3.4.0", - "hydra-login-consent-logout": "1.4.3", + "hydra-login-consent-logout": "2.4.0-pre.3", "jsonwebtoken": "^8.5.1", "jwks-rsa": "^2.1.4", "node-fetch": "^2.6.0", @@ -24,7 +24,53 @@ }, "devDependencies": { "cross-env": "^5.2.1", - "nodemon": "^1.19.4" + "nodemon": "^2.0.22" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": 
"sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz", + "integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.26.9" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/types": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz", + "integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" } }, "node_modules/@hapi/address": { @@ -66,6 +112,12 @@ "@hapi/hoek": "^8.3.0" } }, + "node_modules/@ory/hydra-client-fetch": { + "version": "2.4.0-alpha.1", + "resolved": "https://registry.npmjs.org/@ory/hydra-client-fetch/-/hydra-client-fetch-2.4.0-alpha.1.tgz", + "integrity": "sha512-TTuw+1DdIFskz4JU7yP2OSOHP3pVr7HjLnr8YI4S1pQJ91JaGwBQxCPQLrqdNuffs7d8JU1VRkcgdSWOo+89eA==", + "license": "Apache-2.0" + }, "node_modules/@panva/asn1.js": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", @@ -82,19 +134,6 @@ "node": ">=4" } }, - "node_modules/@types/babel-types": { - "version": "7.0.7", - "resolved": "https://registry.npmjs.org/@types/babel-types/-/babel-types-7.0.7.tgz", - "integrity": "sha512-dBtBbrc+qTHy1WdfHYjBwRln4+LWqASWakLHsWHR2NWHIFkv4W3O070IGoGLEBrJBvct3r0L1BUPuvURi7kYUQ==" - }, - "node_modules/@types/babylon": { - "version": "6.16.5", - "resolved": "https://registry.npmjs.org/@types/babylon/-/babylon-6.16.5.tgz", - "integrity": "sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w==", - "dependencies": { - "@types/babel-types": "*" - } - }, "node_modules/@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -112,6 +151,22 @@ "@types/node": "*" } }, + "node_modules/@types/cookie-parser": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.3.tgz", + "integrity": "sha512-CqSKwFwefj4PzZ5n/iwad/bow2hTCh0FlNAeWLtQM3JA/NX/iYagIpWG2cf1bQKQ2c9gU2log5VUCrn7LDOs0w==", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/csurf": { + "version": "1.11.2", + "resolved": "https://registry.npmjs.org/@types/csurf/-/csurf-1.11.2.tgz", + "integrity": "sha512-9bc98EnwmC1S0aSJiA8rWwXtgXtXHHOQOsGHptImxFgqm6CeH+mIOunHRg6+/eg2tlmDMX3tY7XrWxo2M/nUNQ==", + "dependencies": { + "@types/express-serve-static-core": "*" + } + }, "node_modules/@types/express": { "version": "4.17.13", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", @@ -146,6 +201,14 @@ "resolved": 
"https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" }, + "node_modules/@types/morgan": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.4.tgz", + "integrity": "sha512-cXoc4k+6+YAllH3ZHmx4hf7La1dzUk6keTR4bF4b4Sc0mZxU/zK4wO7l+ZzezXm/jkYj/qC+uYGZrarZdIVvyQ==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/node": { "version": "17.0.42", "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", @@ -170,6 +233,11 @@ "@types/node": "*" } }, + "node_modules/@types/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-wDXw9LEEUHyV+7UWy7U315nrJGJ7p1BzaCxDpEoLr789Dk1WDVMMlf3iBfbG2F8NdWnYyFbtTxUn2ZNbm1Q4LQ==" + }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -189,28 +257,10 @@ } }, "node_modules/acorn": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", - "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-globals": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-3.1.0.tgz", - "integrity": "sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8=", - "dependencies": { - "acorn": "^4.0.4" - } - }, - "node_modules/acorn-globals/node_modules/acorn": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", - "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -230,48 +280,6 @@ "node": ">=4" } }, - "node_modules/align-text": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", - "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", - "dependencies": { - "kind-of": "^3.0.2", - "longest": "^1.0.1", - "repeat-string": "^1.5.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/align-text/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ansi-align": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-2.0.0.tgz", - "integrity": "sha1-w2rsy6VjuJzrVW82kPCx2eNUf38=", - "dev": true, - "dependencies": { - "string-width": "^2.0.0" - } - }, - "node_modules/ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -284,52 +292,16 @@ } }, "node_modules/anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - 
"dependencies": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - } - }, - "node_modules/anymatch/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "dev": true, "dependencies": { - "remove-trailing-separator": "^1.0.1" + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">= 8" } }, "node_modules/array-flatten": { @@ -337,28 +309,17 @@ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, - "node_modules/array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "license": "MIT" }, - "node_modules/assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "node_modules/assert-never": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.4.0.tgz", + "integrity": "sha512-5oJg84os6NMQNl27T9LnZkvvqzvAnHu03ShCnoj6bsJwS7L8AO4lf+C/XjK/nvzEqQB744moC6V128RucQd1jA==", + "license": "MIT" }, "node_modules/async": { "version": "2.6.4", @@ -368,50 +329,16 @@ "lodash": "^4.17.14" } }, - "node_modules/async-each": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.3.tgz", - "integrity": "sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==", - "dev": true - }, - "node_modules/atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "dev": true, - "bin": { - "atob": "bin/atob.js" + "node_modules/babel-walk": { + "version": "3.0.0-canary-5", + "resolved": "https://registry.npmjs.org/babel-walk/-/babel-walk-3.0.0-canary-5.tgz", + "integrity": 
"sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.9.6" }, "engines": { - "node": ">= 4.5.0" - } - }, - "node_modules/babel-runtime": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", - "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=", - "dependencies": { - "core-js": "^2.4.0", - "regenerator-runtime": "^0.11.0" - } - }, - "node_modules/babel-types": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz", - "integrity": "sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=", - "dependencies": { - "babel-runtime": "^6.26.0", - "esutils": "^2.0.2", - "lodash": "^4.17.4", - "to-fast-properties": "^1.0.3" - } - }, - "node_modules/babylon": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", - "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==", - "bin": { - "babylon": "bin/babylon.js" + "node": ">= 10.0.0" } }, "node_modules/balanced-match": { @@ -420,74 +347,6 @@ "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", "dev": true }, - "node_modules/base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "dev": true, - "dependencies": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/base64-js": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", @@ -513,39 +372,29 @@ } }, "node_modules/binary-extensions": { - "version": "1.13.1", - "resolved": 
"https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "dev": true, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "dev": true, - "optional": true, - "dependencies": { - "file-uri-to-path": "1.0.0" + "node": ">=8" } }, "node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", - "raw-body": "2.5.1", + "qs": "6.13.0", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -588,24 +437,6 @@ "integrity": "sha512-b2dgVkTZhkQirNMohgC00rWfpVqEi9y5tKM1k3JvoNx05ODtfQoPPd4js9CYFQoY0IM8LAmnJulEuWv74zjUOg==", "deprecated": "This module has moved and is now available at @hapi/bourne. Please update your dependencies as this version is no longer maintained an may contain bugs and security issues." 
}, - "node_modules/boxen": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-1.3.0.tgz", - "integrity": "sha512-TNPjfTr432qx7yOjQyaXm3dSR0MH9vXp7eT1BFSl/C51g+EFnOR9hTg1IreahGBmDNCehscshe45f+C1TBZbLw==", - "dev": true, - "dependencies": { - "ansi-align": "^2.0.0", - "camelcase": "^4.0.0", - "chalk": "^2.0.1", - "cli-boxes": "^1.0.0", - "string-width": "^2.0.0", - "term-size": "^1.2.0", - "widest-line": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -617,36 +448,15 @@ } }, "node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "is-extendable": "^0.1.0" + "fill-range": "^7.1.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/browserify-zlib": { @@ -669,7 +479,8 @@ "node_modules/buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" }, "node_modules/bytes": { "version": "3.1.2", @@ -679,26 +490,6 @@ "node": ">= 0.8" } }, - "node_modules/cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", - "dev": true, - "dependencies": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/cacheable-request": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", @@ -722,45 +513,50 @@ } }, "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dependencies": { - 
"function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/capture-stack-trace": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz", - "integrity": "sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw==", - "dev": true, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, - "node_modules/center-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", - "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "license": "MIT", "dependencies": { - "align-text": "^0.1.3", - "lazy-cache": "^1.0.3" + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/chalk": { @@ -779,113 +575,47 @@ "node_modules/character-parser": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz", - "integrity": "sha1-x84o821LzZdE5f/CxfzeHHMmH8A=", + "integrity": "sha512-+UqJQjFEFaTAs3bNsF2j2kEN1baG/zghZbdqoYEDxGZtJo9LBzl1A+m0D4n3qKx8N2FNv8/Xp6yV9mQmBuptaw==", + "license": "MIT", "dependencies": { "is-regex": "^1.0.3" } }, "node_modules/chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "deprecated": "Chokidar 2 does not receive security updates since 2019. 
Upgrade to chokidar 3 with 15x fewer dependencies", + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], "dependencies": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" }, "optionalDependencies": { - "fsevents": "^1.2.7" + "fsevents": "~2.3.2" } }, - "node_modules/ci-info": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", - "integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==", - "dev": true - }, - "node_modules/class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "dev": true, - "dependencies": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/clean-css": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.3.tgz", - "integrity": "sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA==", - "dependencies": { - "source-map": "~0.6.0" - }, - "engines": { - "node": ">= 4.0" - } - }, - "node_modules/clean-css/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/clean-stack": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-1.3.0.tgz", - "integrity": "sha1-noIVAa6XmYbEax1m0tQy2y/UrjE=", + "node_modules/clean-stack": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-1.3.0.tgz", + "integrity": "sha1-noIVAa6XmYbEax1m0tQy2y/UrjE=", "engines": { "node": ">=4" } }, - "node_modules/cli-boxes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-1.0.0.tgz", - "integrity": "sha1-T6kXw+WclKAEzWH47lCdplFocUM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/cliui": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", - "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", - "dependencies": { - "center-align": "^0.1.1", - "right-align": "^0.1.1", - "wordwrap": 
"0.0.2" - } - }, "node_modules/clone-response": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", @@ -894,19 +624,6 @@ "mimic-response": "^1.0.0" } }, - "node_modules/collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "dev": true, - "dependencies": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/color": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz", @@ -960,44 +677,20 @@ "text-hex": "1.0.x" } }, - "node_modules/component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", - "dev": true - }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, - "node_modules/configstore": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-3.1.2.tgz", - "integrity": "sha512-vtv5HtGjcYUgFrXc6Kx747B83MRRVS5R1VTEQoXvuP+kMI+if6uywV0nDGoiydJRy4yk7h9od5Og0kxx4zUXmw==", - "dev": true, - "dependencies": { - "dot-prop": "^4.1.0", - "graceful-fs": "^4.1.2", - "make-dir": "^1.0.0", - "unique-string": "^1.0.0", - "write-file-atomic": "^2.0.0", - "xdg-basedir": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/constantinople": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-3.1.2.tgz", - "integrity": "sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz", + "integrity": "sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==", + "license": "MIT", "dependencies": { - "@types/babel-types": "^7.0.0", - "@types/babylon": "^6.16.2", - "babel-types": "^6.26.0", - "babylon": "^6.18.0" + "@babel/parser": "^7.6.0", + "@babel/types": "^7.6.1" } }, "node_modules/content-disposition": { @@ -1031,9 +724,9 @@ ] }, "node_modules/content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", "engines": { "node": ">= 0.6" } @@ -1047,55 +740,37 @@ } }, "node_modules/cookie-parser": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.5.tgz", - "integrity": "sha512-f13bPUj/gG/5mDr+xLmSxxDsB9DQiTIfhJS/sqjrmfAWiAN+x2O4i/XguTL9yDZ+/IFDanJ+5x7hC4CXT9Tdzw==", + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.7.tgz", + "integrity": "sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==", + "license": "MIT", "dependencies": { - "cookie": "0.4.0", + "cookie": "0.7.2", "cookie-signature": "1.0.6" }, "engines": { "node": 
">= 0.8.0" } }, + "node_modules/cookie-parser/node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/cookie-signature": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, - "node_modules/copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/core-js": { - "version": "2.6.11", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.11.tgz", - "integrity": "sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg==", - "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Please, upgrade your dependencies to the actual version of core-js.", - "hasInstallScript": true - }, "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, - "node_modules/create-error-class": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", - "integrity": "sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y=", - "dev": true, - "dependencies": { - "capture-stack-trace": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/cross-env": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-5.2.1.tgz", @@ -1128,15 +803,6 @@ "node": ">=4.8" } }, - "node_modules/crypto-random-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-1.0.0.tgz", - "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/csrf": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/csrf/-/csrf-3.1.0.tgz", @@ -1196,14 +862,6 @@ "ms": "2.0.0" } }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/decode-uri-component": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", @@ -1223,64 +881,20 @@ "node": ">=4" } }, - "node_modules/deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - 
}, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dependencies": { - "kind-of": "^6.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/depd": { @@ -1313,19 +927,8 @@ "node_modules/doctypes": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz", - "integrity": "sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk=" - }, - "node_modules/dot-prop": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", - "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", - "dev": true, - "dependencies": { - "is-obj": "^1.0.0" - }, - "engines": { - "node": ">=4" - } + "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==", + "license": "MIT" }, "node_modules/dotenv": { "version": "7.0.0", @@ -1335,6 +938,20 @@ "node": ">=6" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/duplexer3": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", @@ -1344,6 +961,7 @@ "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", "dependencies": { "safe-buffer": "^5.0.1" } @@ -1362,9 +980,9 @@ } }, "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "engines": { "node": ">= 0.8" } @@ -1374,6 +992,35 @@ "resolved": "https://registry.npmjs.org/env-variable/-/env-variable-0.0.5.tgz", "integrity": "sha512-zoB603vQReOFvTg5xMl9I1P2PnHsHQQKTEowsKKD7nseUfJq6UWzK+4YtlWUO1nhiQUxe6XMkk+JleSZD1NZFA==" }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -1392,14 +1039,6 @@ "node": ">=0.8.0" } }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", @@ -1408,108 +1047,37 @@ "node": ">= 0.6" } }, - "node_modules/execa": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", - "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", - "dev": true, - "dependencies": { - "cross-spawn": "^5.0.1", - "get-stream": "^3.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/execa/node_modules/cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", - "dev": true, - "dependencies": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "node_modules/expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", - "dev": true, - "dependencies": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-brackets/node_modules/define-property": { - "version": "0.2.5", 
- "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-brackets/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.2.0", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -1518,6 +1086,10 @@ }, "engines": { "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/express-session": { @@ -1567,9 +1139,9 @@ } }, "node_modules/express/node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "engines": { "node": ">= 0.6" } @@ -1625,201 +1197,64 @@ "node": ">= 0.8" } }, - "node_modules/extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "dev": true, - "dependencies": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/fast-safe-stringify": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.6.tgz", + "integrity": "sha512-q8BZ89jjc+mz08rSxROs8VsrBBcn1SIw1kq9NjolL509tkABRk9io01RAjSaEv1Xb2uFLt8VtRiZbGp5H8iDtg==" }, - "node_modules/extend-shallow/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": 
"sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "node_modules/fecha": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-2.3.3.tgz", + "integrity": "sha512-lUGBnIamTAwk4znq5BcqsDaxSmZ9nDVJaij6NvRt/Tg4R69gERA+otPKbS86ROw9nxVMw2/mp1fnaiWqbs6Sdg==" + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { - "is-plain-object": "^2.0.4" + "to-regex-range": "^5.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "dev": true, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", "dependencies": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/extglob/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, + "node_modules/finalhandler/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "dependencies": { - "is-descriptor": "^1.0.0" + "ee-first": "1.1.1" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/extglob/node_modules/extend-shallow": { + "node_modules/finalhandler/node_modules/statuses": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/extglob/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fast-safe-stringify": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.6.tgz", - "integrity": "sha512-q8BZ89jjc+mz08rSxROs8VsrBBcn1SIw1kq9NjolL509tkABRk9io01RAjSaEv1Xb2uFLt8VtRiZbGp5H8iDtg==" - }, - "node_modules/fecha": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fecha/-/fecha-2.3.3.tgz", - "integrity": "sha512-lUGBnIamTAwk4znq5BcqsDaxSmZ9nDVJaij6NvRt/Tg4R69gERA+otPKbS86ROw9nxVMw2/mp1fnaiWqbs6Sdg==" - }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", - "dev": true, - "optional": true - }, - "node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/finalhandler/node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "dependencies": { - "ee-first": "1.1.1" - }, + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", "engines": { "node": ">= 0.8" } }, - "node_modules/finalhandler/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, 
"node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -1828,18 +1263,6 @@ "node": ">= 0.6" } }, - "node_modules/fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "dev": true, - "dependencies": { - "map-cache": "^0.2.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", @@ -1880,1856 +1303,1571 @@ } }, "node_modules/fsevents": { - "version": "1.2.12", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.12.tgz", - "integrity": "sha512-Ggd/Ktt7E7I8pxZRbGIs7vwqAPscSESMrCSkx2FtWeqmheJgCo2R74fTsZFCifr0VTPwqRpPv17+6b8Zp7th0Q==", - "bundleDependencies": [ - "node-pre-gyp" - ], - "deprecated": "fsevents 1 will break on node v14+ and could be using insecure binaries. Upgrade to fsevents 2.", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", "dev": true, "hasInstallScript": true, "optional": true, "os": [ "darwin" ], - "dependencies": { - "bindings": "^1.5.0", - "nan": "^2.12.1", - "node-pre-gyp": "*" - }, "engines": { - "node": ">= 4.0" + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/fsevents/node_modules/abbrev": { - "version": "1.1.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/fsevents/node_modules/ansi-regex": { - "version": "2.1.1", - "dev": true, - "inBundle": true, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "license": "MIT", - "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/aproba": { - "version": "1.2.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/are-we-there-yet": { - "version": "1.1.5", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" } }, - 
"node_modules/fsevents/node_modules/balanced-match": { - "version": "1.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true + "node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", + "engines": { + "node": ">=4" + } }, - "node_modules/fsevents/node_modules/brace-expansion": { - "version": "1.1.11", + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" } }, - "node_modules/fsevents/node_modules/chownr": { - "version": "1.1.4", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/code-point-at": { - "version": "1.1.0", - "dev": true, - "inBundle": true, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", "license": "MIT", - "optional": true, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/concat-map": { - "version": "0.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/console-control-strings": { - "version": "1.1.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "engines": { + "node": ">=4" + } }, - "node_modules/fsevents/node_modules/core-util-is": { + "node_modules/has-property-descriptors": { "version": "1.0.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/debug": { - "version": "3.2.6", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "ms": "^2.1.1" + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/deep-extend": { - "version": "0.6.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "node_modules/has-symbol-support-x": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", + "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==", "engines": { - "node": ">=4.0.0" + "node": "*" } }, - "node_modules/fsevents/node_modules/delegates": { - "version": "1.0.0", - "dev": true, - "inBundle": true, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + 
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/detect-libc": { - "version": "1.0.3", - "dev": true, - "inBundle": true, - "license": "Apache-2.0", - "optional": true, - "bin": { - "detect-libc": "bin/detect-libc.js" - }, "engines": { - "node": ">=0.10" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/fs-minipass": { - "version": "1.2.7", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/has-to-string-tag-x": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", + "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", "dependencies": { - "minipass": "^2.6.0" + "has-symbol-support-x": "^1.4.1" + }, + "engines": { + "node": "*" } }, - "node_modules/fsevents/node_modules/fs.realpath": { - "version": "1.0.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/gauge": { - "version": "2.7.4", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", "dependencies": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/glob": { - "version": "7.1.6", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "function-bind": "^1.1.2" }, "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "node": ">= 0.4" } }, - "node_modules/fsevents/node_modules/has-unicode": { - "version": "2.0.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true + "node_modules/hoek": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-6.1.3.tgz", + "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==", + "deprecated": "This module has moved and is now available at @hapi/hoek. Please update your dependencies as this version is no longer maintained an may contain bugs and security issues." 
}, - "node_modules/fsevents/node_modules/iconv-lite": { - "version": "0.4.24", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "node_modules/http-cache-semantics": { + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", + "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fsevents/node_modules/ignore-walk": { - "version": "3.0.3", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "minimatch": "^3.0.4" + "node": ">= 0.8" } }, - "node_modules/fsevents/node_modules/inflight": { - "version": "1.0.6", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" + "node_modules/http-errors/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" } }, - "node_modules/fsevents/node_modules/inherits": { - "version": "2.0.4", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true + "node_modules/http-errors/node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, - "node_modules/fsevents/node_modules/ini": { - "version": "1.3.5", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", "engines": { - "node": "*" + "node": ">= 0.8" } }, - "node_modules/fsevents/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "number-is-nan": "^1.0.0" - }, + "node_modules/http-errors/node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", "engines": { - "node": ">=0.10.0" + "node": ">=0.6" } }, - "node_modules/fsevents/node_modules/isarray": { - "version": "1.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/minimatch": { - "version": "3.0.4", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "brace-expansion": "^1.1.7" + "node_modules/hydra-login-consent-logout": { + "version": "2.4.0-pre.3", + "resolved": 
"https://registry.npmjs.org/hydra-login-consent-logout/-/hydra-login-consent-logout-2.4.0-pre.3.tgz", + "integrity": "sha512-Dtoop55BOQ/z+DQunVblxcFC5IMLw9NOw4vqTm2WmvMn7TjWwCfq+gA03ifGPAiFuqbyprxUnUaILCSJn9Hqdg==", + "dependencies": { + "@ory/hydra-client-fetch": "^2.4.0-alpha.1", + "@types/cookie-parser": "^1.4.2", + "@types/csurf": "^1.9.36", + "@types/express": "^4.17.7", + "@types/morgan": "^1.9.1", + "@types/url-join": "^4.0.0", + "body-parser": "^1.20.3", + "cookie-parser": "^1.4.7", + "csurf": "^1.11.0", + "debug": "^4.1.1", + "express": "^4.21.2", + "morgan": "^1.10.0", + "node-fetch": "^2.6.7", + "pug": "^3.0.3", + "querystring": "^0.2.0", + "serve-favicon": "^2.5.0", + "typescript": "^5.7.3", + "url-join": "^4.0.1" }, - "engines": { - "node": "*" + "bin": { + "hydra-login-consent-logout": "lib/app.js" } }, - "node_modules/fsevents/node_modules/minimist": { - "version": "1.2.5", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/minipass": { - "version": "2.9.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/hydra-login-consent-logout/node_modules/debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)", "dependencies": { - "safe-buffer": "^5.1.2", - "yallist": "^3.0.0" + "ms": "^2.1.1" } }, - "node_modules/fsevents/node_modules/minizlib": { - "version": "1.3.3", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "minipass": "^2.9.0" - } + "node_modules/hydra-login-consent-logout/node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, - "node_modules/fsevents/node_modules/mkdirp": { - "version": "0.5.3", - "deprecated": "Legacy versions of mkdirp are no longer supported. Please update to mkdirp 1.x. 
(Note that the API surface has changed to use Promises in 1.x.)", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dependencies": { - "minimist": "^1.2.5" + "safer-buffer": ">= 2.1.2 < 3" }, - "bin": { - "mkdirp": "bin/cmd.js" + "engines": { + "node": ">=0.10.0" } }, - "node_modules/fsevents/node_modules/ms": { - "version": "2.1.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true + "node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" }, - "node_modules/fsevents/node_modules/needle": { - "version": "2.3.3", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "debug": "^3.2.6", - "iconv-lite": "^0.4.4", - "sax": "^1.2.4" - }, - "bin": { - "needle": "bin/needle" - }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha1-SMptcvbGo68Aqa1K5odr44ieKwk=", + "dev": true + }, + "node_modules/indent-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", + "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", "engines": { - "node": ">= 4.4.x" + "node": ">=4" } }, - "node_modules/fsevents/node_modules/node-pre-gyp": { - "version": "0.14.0", - "dev": true, - "inBundle": true, - "license": "BSD-3-Clause", - "optional": true, - "dependencies": { - "detect-libc": "^1.0.2", - "mkdirp": "^0.5.1", - "needle": "^2.2.1", - "nopt": "^4.0.1", - "npm-packlist": "^1.1.6", - "npmlog": "^4.0.2", - "rc": "^1.2.7", - "rimraf": "^2.6.1", - "semver": "^5.3.0", - "tar": "^4.4.2" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, - "node_modules/fsevents/node_modules/nopt": { - "version": "4.0.3", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/into-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", + "integrity": "sha1-lvsKk2wSur1v8XUqF9BWFqvQlMY=", "dependencies": { - "abbrev": "1", - "osenv": "^0.1.4" + "from2": "^2.1.1", + "p-is-promise": "^1.1.0" }, - "bin": { - "nopt": "bin/nopt.js" + "engines": { + "node": ">=4" } }, - "node_modules/fsevents/node_modules/npm-bundled": { - "version": "1.1.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "npm-normalize-package-bin": "^1.0.1" + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" } }, - "node_modules/fsevents/node_modules/npm-normalize-package-bin": { - "version": "1.0.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - 
"node_modules/fsevents/node_modules/npm-packlist": { - "version": "1.4.8", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "ignore-walk": "^3.0.1", - "npm-bundled": "^1.0.1", - "npm-normalize-package-bin": "^1.0.1" - } + "node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" }, - "node_modules/fsevents/node_modules/npmlog": { - "version": "4.1.2", + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "node_modules/fsevents/node_modules/number-is-nan": { - "version": "1.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "binary-extensions": "^2.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/fsevents/node_modules/object-assign": { - "version": "4.1.1", - "dev": true, - "inBundle": true, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", "license": "MIT", - "optional": true, + "dependencies": { + "hasown": "^2.0.2" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/once": { - "version": "1.4.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/is-expression": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-4.0.0.tgz", + "integrity": "sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==", + "license": "MIT", "dependencies": { - "wrappy": "1" + "acorn": "^7.1.1", + "object-assign": "^4.1.1" } }, - "node_modules/fsevents/node_modules/os-homedir": { - "version": "1.0.2", + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, "engines": { "node": ">=0.10.0" } }, - "node_modules/fsevents/node_modules/os-tmpdir": { - "version": "1.0.2", + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, "engines": { "node": ">=0.10.0" } }, - "node_modules/fsevents/node_modules/osenv": { - "version": "0.1.5", + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.0" + "engines": { + "node": ">=0.12.0" } }, - "node_modules/fsevents/node_modules/path-is-absolute": { + "node_modules/is-object": { "version": "1.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.1.tgz", + "integrity": "sha1-iVJojF7C/9awPsyF52ngKQMINHA=" + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", "engines": { "node": ">=0.10.0" } }, - "node_modules/fsevents/node_modules/process-nextick-args": { - "version": "2.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/rc": { - "version": "1.2.8", - "dev": true, - "inBundle": true, - "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", - "optional": true, - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" - } - }, - "node_modules/fsevents/node_modules/readable-stream": { - "version": "2.3.7", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/fsevents/node_modules/rimraf": { - "version": "2.7.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, - "node_modules/fsevents/node_modules/safe-buffer": { - "version": "5.1.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/safer-buffer": { - "version": "2.1.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/sax": { - "version": "1.2.4", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/semver": { - "version": "5.7.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/fsevents/node_modules/set-blocking": { - "version": "2.0.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/signal-exit": { - "version": "3.0.2", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/string_decoder": { - "version": "1.1.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } + "node_modules/is-promise": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", + "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==", + "license": "MIT" }, - "node_modules/fsevents/node_modules/string-width": { - "version": "1.0.2", - "dev": true, - "inBundle": true, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", "license": "MIT", - "optional": true, "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fsevents/node_modules/strip-ansi": { - "version": "3.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, - "dependencies": { - "ansi-regex": "^2.0.0" - }, + "node_modules/is-retry-allowed": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", + "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", "engines": { "node": ">=0.10.0" } }, - "node_modules/fsevents/node_modules/strip-json-comments": { - "version": "2.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true, + "node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", "engines": { "node": ">=0.10.0" } }, - "node_modules/fsevents/node_modules/tar": { - "version": "4.4.13", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/isurl": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", + "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", "dependencies": { - "chownr": "^1.1.1", - "fs-minipass": "^1.2.5", - "minipass": "^2.8.6", - "minizlib": "^1.2.1", - "mkdirp": "^0.5.0", - "safe-buffer": "^5.1.2", - "yallist": "^3.0.3" + "has-to-string-tag-x": "^1.2.0", + "is-object": "^1.0.1" }, "engines": { - "node": ">=4.5" + "node": ">= 4" } }, - "node_modules/fsevents/node_modules/util-deprecate": { - "version": "1.0.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "optional": true - }, - "node_modules/fsevents/node_modules/wide-align": { - "version": "1.1.3", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true, + "node_modules/jose": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/jose/-/jose-2.0.7.tgz", + "integrity": "sha512-5hFWIigKqC+e/lRyQhfnirrAqUdIPMB7SJRqflJaO29dW7q5DFvH1XCSTmv6PQ6pb++0k6MJlLRoS0Wv4s38Wg==", "dependencies": { - "string-width": "^1.0.2 || 2" + "@panva/asn1.js": "^1.0.0" + }, + "engines": { + "node": ">=10.13.0 < 13 || >=13.7.0" + }, + "funding": { + "url": "https://github.com/sponsors/panva" } }, - "node_modules/fsevents/node_modules/wrappy": { + "node_modules/js-stringify": { "version": "1.0.2", - "dev": true, - "inBundle": true, - "license": "ISC", - "optional": true - }, - "node_modules/fsevents/node_modules/yallist": { - "version": "3.1.1", - "dev": true, - "inBundle": true, - "license": "ISC", - 
"optional": true + "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz", + "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==", + "license": "MIT" }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + "node_modules/json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=" }, - "node_modules/get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "node_modules/jsonwebtoken": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", + "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^5.6.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", "engines": { - "node": ">=4" + "node": ">=4", + "npm": ">=1.4.28" } }, - "node_modules/get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "node_modules/jsonwebtoken/node_modules/ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" }, - "node_modules/glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", - "dev": true, + "node_modules/jstransformer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz", + "integrity": "sha512-C9YK3Rf8q6VAPDCCU9fnqo3mAfOH6vUGnMcP4AQAYIEpWtfGLpwOTmZ+igtdK5y+VvI2n3CyYSzy4Qh34eq24A==", + "license": "MIT", "dependencies": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" + "is-promise": "^2.0.0", + "promise": "^7.0.1" } }, - "node_modules/glob-parent/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "dev": true, + "node_modules/jwa": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.2.tgz", + "integrity": "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==", + "license": "MIT", "dependencies": { - "is-extglob": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" + "buffer-equal-constant-time": 
"^1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" } }, - "node_modules/global-dirs": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz", - "integrity": "sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU=", - "dev": true, + "node_modules/jwks-rsa": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-2.1.4.tgz", + "integrity": "sha512-mpArfgPkUpX11lNtGxsF/szkasUcbWHGplZl/uFvFO2NuMHmt0dQXIihh0rkPU2yQd5niQtuUHbXnG/WKiXF6Q==", "dependencies": { - "ini": "^1.3.4" + "@types/express": "^4.17.13", + "@types/jsonwebtoken": "^8.5.8", + "debug": "^4.3.4", + "jose": "^2.0.5", + "limiter": "^1.1.5", + "lru-memoizer": "^2.1.4" }, "engines": { - "node": ">=4" + "node": ">=10 < 13 || >=14" } }, - "node_modules/got": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/got/-/got-6.7.1.tgz", - "integrity": "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA=", - "dev": true, + "node_modules/jwks-rsa/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dependencies": { - "create-error-class": "^3.0.0", - "duplexer3": "^0.1.4", - "get-stream": "^3.0.0", - "is-redirect": "^1.0.0", - "is-retry-allowed": "^1.0.0", - "is-stream": "^1.0.0", - "lowercase-keys": "^1.0.0", - "safe-buffer": "^5.0.1", - "timed-out": "^4.0.0", - "unzip-response": "^2.0.1", - "url-parse-lax": "^1.0.0" + "ms": "2.1.2" }, "engines": { - "node": ">=4" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", - "dev": true + "node_modules/jwks-rsa/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "node_modules/jws": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz", + "integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==", + "license": "MIT", "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" + "jwa": "^1.4.2", + "safe-buffer": "^5.0.1" } }, - "node_modules/has-flag": { + "node_modules/keyv": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "engines": { - "node": ">=4" + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", + "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", + "dependencies": { + "json-buffer": "3.0.0" } }, - "node_modules/has-symbol-support-x": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", - "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==", - "engines": { - "node": 
"*" + "node_modules/kuler": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-1.0.1.tgz", + "integrity": "sha512-J9nVUucG1p/skKul6DU3PUZrhs0LPulNaeUOox0IyXDi8S4CztTHs1gQphhuZmzXG7VOQSf6NJfKuzteQLv9gQ==", + "dependencies": { + "colornames": "^1.1.1" } }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "node_modules/limiter": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", + "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" }, - "node_modules/has-to-string-tag-x": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", - "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", + "node_modules/lodash": { + "version": "4.17.14", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.14.tgz", + "integrity": "sha512-mmKYbW3GLuJeX+iGP+Y7Gp1AiGHGbXHCOh/jZmrawMmsE7MS4znI3RL2FsjbqOyMayHInjOeykW7PEajUk1/xw==" + }, + "node_modules/lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==" + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" + }, + "node_modules/logform": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.1.2.tgz", + "integrity": "sha512-+lZh4OpERDBLqjiwDLpAWNQu6KMjnlXH2ByZwCuSqVPJletw0kTWJf5CgSNAUKn1KUkv3m2cUz/LK8zyEy7wzQ==", "dependencies": { - "has-symbol-support-x": "^1.4.1" - }, - "engines": { - "node": "*" + "colors": "^1.2.1", + "fast-safe-stringify": "^2.0.4", + "fecha": "^2.3.3", + "ms": "^2.1.1", + "triple-beam": "^1.3.0" } }, - 
"node_modules/has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "dev": true, - "dependencies": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - }, + "node_modules/logform/node_modules/ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + }, + "node_modules/long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + }, + "node_modules/lowercase-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", "engines": { "node": ">=0.10.0" } }, - "node_modules/has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "dev": true, + "node_modules/lru-cache": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.2.tgz", + "integrity": "sha1-HRdnnAac2l0ECZGgnbwsDbN35V4=", "dependencies": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" + "pseudomap": "^1.0.1", + "yallist": "^2.0.0" } }, - "node_modules/has-values/node_modules/kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "dev": true, + "node_modules/lru-memoizer": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.1.4.tgz", + "integrity": "sha512-IXAq50s4qwrOBrXJklY+KhgZF+5y98PDaNo0gi/v2KQBFLyWr+JyFvijZXkGKjQj/h9c0OwoE+JZbwUXce76hQ==", "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" + "lodash.clonedeep": "^4.5.0", + "lru-cache": "~4.0.0" } }, - "node_modules/hoek": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-6.1.3.tgz", - "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==", - "deprecated": "This module has moved and is now available at @hapi/hoek. Please update your dependencies as this version is no longer maintained an may contain bugs and security issues." 
- }, - "node_modules/http-cache-semantics": { - "version": "3.8.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", - "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 0.4" } }, - "node_modules/http-errors/node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, - "node_modules/http-errors/node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - }, - "node_modules/http-errors/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "engines": { - "node": ">= 0.8" + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/http-errors/node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", "engines": { - "node": ">=0.6" + "node": ">= 0.6" } }, - "node_modules/hydra-login-consent-logout": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/hydra-login-consent-logout/-/hydra-login-consent-logout-1.4.3.tgz", - "integrity": "sha512-q2Y1SBpNnKf869TvloCr0CYL3tIljORDiMsBg/3xG6W+wQXGi7VQ4WJWdQR2hTtkSvYU2dXhLvxsQuFb0+mwlw==", - "dependencies": { - "body-parser": "^1.19.0", - "cookie-parser": "^1.4.5", - "csurf": "^1.11.0", - "debug": "^4.1.1", - "express": "^4.17.1", - "morgan": "^1.10.0", - "node-fetch": "^2.6.0", - "pug": "^2.0.4", - "querystring": "^0.2.0", - "serve-favicon": "^2.5.0", - "url-join": "^4.0.1" - }, + 
"node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", "bin": { - "hydra-login-consent-logout": "bin/www" + "mime": "cli.js" + }, + "engines": { + "node": ">=4" } }, - "node_modules/hydra-login-consent-logout/node_modules/debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)", - "dependencies": { - "ms": "^2.1.1" + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" } }, - "node_modules/hydra-login-consent-logout/node_modules/debug/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" + "mime-db": "1.52.0" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.6" } }, - "node_modules/ieee754": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", - "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" - }, - "node_modules/ignore-by-default": { + "node_modules/mimic-response": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", - "integrity": "sha1-SMptcvbGo68Aqa1K5odr44ieKwk=", - "dev": true - }, - "node_modules/import-lazy": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", - "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=", - "dev": true, + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", "engines": { "node": ">=4" } }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, "engines": { - "node": ">=0.8.19" + "node": 
"*" } }, - "node_modules/indent-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", - "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, "engines": { - "node": ">=4" + "node": ">= 0.8.0" } }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", - "deprecated": "Please update to ini >=1.3.6 to avoid a prototype pollution issue", - "dev": true, + "node_modules/morgan/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", "engines": { - "node": "*" + "node": ">= 0.8" } }, - "node_modules/into-stream": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", - "integrity": "sha1-lvsKk2wSur1v8XUqF9BWFqvQlMY=", - "dependencies": { - "from2": "^2.1.1", - "p-is-promise": "^1.1.0" - }, - "engines": { - "node": ">=4" - } + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", "engines": { - "node": ">= 0.10" + "node": ">= 0.6" } }, - "node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "dev": true, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node_modules/node-fetch": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz", + "integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==", "dependencies": { - "kind-of": "^3.0.2" + "whatwg-url": "^5.0.0" }, "engines": { - "node": ">=0.10.0" + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } } }, - "node_modules/is-accessor-descriptor/node_modules/kind-of": 
{ - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, + "node_modules/node-forge": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", + "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==", "engines": { - "node": ">=0.10.0" + "node": ">= 4.5.0" } }, - "node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - }, - "node_modules/is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", - "dev": true, + "node_modules/node-jose": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/node-jose/-/node-jose-1.1.4.tgz", + "integrity": "sha512-L31IFwL3pWWcMHxxidCY51ezqrDXMkvlT/5pLTfNw5sXmmOLJuN6ug7txzF/iuZN55cRpyOmoJrotwBQIoo5Lw==", "dependencies": { - "binary-extensions": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" + "base64url": "^3.0.1", + "browserify-zlib": "^0.2.0", + "buffer": "^5.5.0", + "es6-promise": "^4.2.8", + "lodash": "^4.17.15", + "long": "^4.0.0", + "node-forge": "^0.8.5", + "process": "^0.11.10", + "react-zlib-js": "^1.0.4", + "uuid": "^3.3.3" } }, - "node_modules/is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + "node_modules/node-jose/node_modules/lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" }, - "node_modules/is-ci": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-1.2.1.tgz", - "integrity": "sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg==", - "dev": true, - "dependencies": { - "ci-info": "^1.5.0" - }, + "node_modules/node-jose/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", "bin": { - "is-ci": "bin.js" + "uuid": "bin/uuid" } }, - "node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" + "node_modules/node-uuid": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", + "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=", + "deprecated": "Use uuid module instead", + "bin": { + "uuid": "bin/uuid" } }, - "node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "node_modules/nodemon": { + "version": "2.0.22", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz", + "integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==", "dev": true, "dependencies": { - "is-buffer": "^1.1.5" + "chokidar": "^3.5.2", + "debug": "^3.2.7", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^5.7.1", + "simple-update-notifier": "^1.0.7", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" } }, - "node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "node_modules/nodemon/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" + "ms": "^2.1.1" } }, - "node_modules/is-descriptor/node_modules/kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "node_modules/nodemon/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true }, - "node_modules/is-expression": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-3.0.0.tgz", - "integrity": "sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8=", + "node_modules/nopt": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", + "dev": true, "dependencies": { - "acorn": "~4.0.2", - "object-assign": "^4.0.1" - } - }, - "node_modules/is-expression/node_modules/acorn": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", - "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=", 
+ "abbrev": "1" + }, "bin": { - "acorn": "bin/acorn" + "nopt": "bin/nopt.js" }, "engines": { - "node": ">=0.4.0" + "node": "*" } }, - "node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true, "engines": { "node": ">=0.10.0" } }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true, + "node_modules/normalize-url": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", + "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", + "dependencies": { + "prepend-http": "^2.0.0", + "query-string": "^5.0.1", + "sort-keys": "^2.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/is-fullwidth-code-point": { + "node_modules/normalize-url/node_modules/prepend-http": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true, + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", "engines": { "node": ">=4" } }, - "node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" - }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", "engines": { "node": ">=0.10.0" } }, - "node_modules/is-installed-globally": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.1.0.tgz", - "integrity": "sha1-Df2Y9akRFxbdU13aZJL2e/PSWoA=", - "dev": true, - "dependencies": { - "global-dirs": "^0.1.0", - "is-path-inside": "^1.0.0" - }, + "node_modules/object-hash": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-1.3.1.tgz", + "integrity": "sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA==", "engines": { - "node": ">=4" + "node": ">= 0.10.0" } }, - "node_modules/is-npm": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-1.0.0.tgz", - "integrity": "sha1-8vtjpl5JBbQGyGBydloaTceTufQ=", - "dev": true, + "node_modules/object-inspect": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-number": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, + "node_modules/oidc-token-hash": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-3.0.2.tgz", + "integrity": "sha512-dTzp80/y/da+um+i+sOucNqiPpwRL7M/xPwj7pH1TFA2/bqQ+OK2sJahSXbemEoLtPkHcFLyhLhLWZa9yW5+RA==", "engines": { - "node": ">=0.10.0" + "node": ">=6.9.0" } }, - "node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", "dependencies": { - "is-buffer": "^1.1.5" + "ee-first": "1.1.1" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", - "dev": true, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/is-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.1.tgz", - "integrity": "sha1-iVJojF7C/9awPsyF52ngKQMINHA=" + "node_modules/one-time": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-0.0.4.tgz", + "integrity": "sha1-+M33eISCb+Tf+T46nMN7HkSAdC4=" }, - "node_modules/is-path-inside": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz", - "integrity": "sha1-jvW33lBDej/cprToZe96pVy0gDY=", - "dev": true, + "node_modules/openid-client": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/openid-client/-/openid-client-2.5.0.tgz", + "integrity": "sha512-t3hFD7xEoW1U25RyBcRFaL19fGGs6hNVTysq9pgmiltH0IVUPzH/bQV9w24pM5Q7MunnGv2/5XjIru6BQcWdxg==", "dependencies": { - "path-is-inside": "^1.0.1" + "base64url": "^3.0.0", + "got": "^8.3.2", + "lodash": "^4.17.11", + "lru-cache": "^5.1.1", + "node-jose": "^1.1.0", + "object-hash": "^1.3.1", + "oidc-token-hash": "^3.0.1", + "p-any": "^1.1.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", - "engines": { - "node": ">=0.10.0" + "node": ">=6.9.0" } }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, + "node_modules/openid-client/node_modules/got": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", + "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", "dependencies": { - "isobject": "^3.0.1" + "@sindresorhus/is": "^0.7.0", + "cacheable-request": "^2.1.1", + "decompress-response": "^3.3.0", + "duplexer3": 
"^0.1.4", + "get-stream": "^3.0.0", + "into-stream": "^3.1.0", + "is-retry-allowed": "^1.1.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "mimic-response": "^1.0.0", + "p-cancelable": "^0.4.0", + "p-timeout": "^2.0.1", + "pify": "^3.0.0", + "safe-buffer": "^5.1.1", + "timed-out": "^4.0.1", + "url-parse-lax": "^3.0.0", + "url-to-options": "^1.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=" + "node_modules/openid-client/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dependencies": { + "yallist": "^3.0.2" + } }, - "node_modules/is-redirect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz", - "integrity": "sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ=", - "dev": true, + "node_modules/openid-client/node_modules/prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/is-regex": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", - "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "node_modules/openid-client/node_modules/url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", "dependencies": { - "has": "^1.0.3" + "prepend-http": "^2.0.0" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=4" } }, - "node_modules/is-retry-allowed": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", - "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", - "engines": { - "node": ">=0.10.0" - } + "node_modules/openid-client/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, - "node_modules/is-stream": { + "node_modules/p-any": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "resolved": "https://registry.npmjs.org/p-any/-/p-any-1.1.0.tgz", + "integrity": "sha512-Ef0tVa4CZ5pTAmKn+Cg3w8ABBXh+hHO1aV8281dKOoUHfX+3tjG2EaFcC+aZyagg9b4EYGsHEjz21DnEE8Og2g==", + "dependencies": { + "p-some": "^2.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", - "dev": true, + "node_modules/p-cancelable": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", + "integrity": 
"sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/isarray": { + "node_modules/p-finally": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", + "engines": { + "node": ">=4" + } }, - "node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, + "node_modules/p-is-promise": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", + "integrity": "sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4=", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/isurl": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", - "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", + "node_modules/p-some": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/p-some/-/p-some-2.0.1.tgz", + "integrity": "sha1-Zdh8ixVO289SIdFnd4ttLhUPbwY=", "dependencies": { - "has-to-string-tag-x": "^1.2.0", - "is-object": "^1.0.1" + "aggregate-error": "^1.0.0" }, "engines": { - "node": ">= 4" + "node": ">=4" } }, - "node_modules/jose": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/jose/-/jose-2.0.5.tgz", - "integrity": "sha512-BAiDNeDKTMgk4tvD0BbxJ8xHEHBZgpeRZ1zGPPsitSyMgjoMWiLGYAE7H7NpP5h0lPppQajQs871E8NHUrzVPA==", + "node_modules/p-timeout": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", + "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", "dependencies": { - "@panva/asn1.js": "^1.0.0" + "p-finally": "^1.0.0" }, "engines": { - "node": ">=10.13.0 < 13 || >=13.7.0" + "node": ">=4" + } + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": 
"sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" }, "funding": { - "url": "https://github.com/sponsors/panva" + "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/js-stringify": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz", - "integrity": "sha1-Fzb939lyTyijaCrcYjCufk6Weds=" - }, - "node_modules/json-buffer": { + "node_modules/pify": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", - "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=" + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "engines": { + "node": ">=4" + } }, - "node_modules/jsonwebtoken": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", - "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", - "dependencies": { - "jws": "^3.2.2", - "lodash.includes": "^4.3.0", - "lodash.isboolean": "^3.0.3", - "lodash.isinteger": "^4.0.4", - "lodash.isnumber": "^3.0.3", - "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1", - "lodash.once": "^4.0.0", - "ms": "^2.1.1", - "semver": "^5.6.0" - }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", "engines": { - "node": ">=4", - "npm": ">=1.4.28" + "node": ">= 0.6.0" } }, - "node_modules/jsonwebtoken/node_modules/ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + "node_modules/process-nextick-args": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" }, - "node_modules/jstransformer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz", - "integrity": "sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM=", + "node_modules/promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "license": "MIT", "dependencies": { - "is-promise": "^2.0.0", - "promise": "^7.0.1" + "asap": "~2.0.3" } }, - "node_modules/jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", "dependencies": { - "buffer-equal-constant-time": "1.0.1", - "ecdsa-sig-formatter": "1.0.11", - "safe-buffer": "^5.0.1" 
+ "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" } }, - "node_modules/jwks-rsa": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-2.1.4.tgz", - "integrity": "sha512-mpArfgPkUpX11lNtGxsF/szkasUcbWHGplZl/uFvFO2NuMHmt0dQXIihh0rkPU2yQd5niQtuUHbXnG/WKiXF6Q==", - "dependencies": { - "@types/express": "^4.17.13", - "@types/jsonwebtoken": "^8.5.8", - "debug": "^4.3.4", - "jose": "^2.0.5", - "limiter": "^1.1.5", - "lru-memoizer": "^2.1.4" - }, - "engines": { - "node": ">=10 < 13 || >=14" - } - }, - "node_modules/jwks-rsa/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" }, - "node_modules/jwks-rsa/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", + "dev": true }, - "node_modules/jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "node_modules/pug": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pug/-/pug-3.0.3.tgz", + "integrity": "sha512-uBi6kmc9f3SZ3PXxqcHiUZLmIXgfgWooKWXcwSGwQd2Zi5Rb0bT14+8CJjJgI8AB+nndLaNgHGrcc6bPIB665g==", + "license": "MIT", "dependencies": { - "jwa": "^1.4.1", - "safe-buffer": "^5.0.1" + "pug-code-gen": "^3.0.3", + "pug-filters": "^4.0.0", + "pug-lexer": "^5.0.1", + "pug-linker": "^4.0.0", + "pug-load": "^3.0.0", + "pug-parser": "^6.0.0", + "pug-runtime": "^3.0.1", + "pug-strip-comments": "^2.0.0" } }, - "node_modules/keyv": { + "node_modules/pug-attrs": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", - "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", + "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-3.0.0.tgz", + "integrity": "sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==", + "license": "MIT", "dependencies": { - "json-buffer": "3.0.0" - } - }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "constantinople": "^4.0.1", + "js-stringify": "^1.0.2", + "pug-runtime": "^3.0.0" } }, - "node_modules/kuler": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/kuler/-/kuler-1.0.1.tgz", - "integrity": "sha512-J9nVUucG1p/skKul6DU3PUZrhs0LPulNaeUOox0IyXDi8S4CztTHs1gQphhuZmzXG7VOQSf6NJfKuzteQLv9gQ==", + 
"node_modules/pug-code-gen": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-3.0.3.tgz", + "integrity": "sha512-cYQg0JW0w32Ux+XTeZnBEeuWrAY7/HNE6TWnhiHGnnRYlCgyAUPoyh9KzCMa9WhcJlJ1AtQqpEYHc+vbCzA+Aw==", + "license": "MIT", "dependencies": { - "colornames": "^1.1.1" + "constantinople": "^4.0.1", + "doctypes": "^1.1.0", + "js-stringify": "^1.0.2", + "pug-attrs": "^3.0.0", + "pug-error": "^2.1.0", + "pug-runtime": "^3.0.1", + "void-elements": "^3.1.0", + "with": "^7.0.0" } }, - "node_modules/latest-version": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-3.1.0.tgz", - "integrity": "sha1-ogU4P+oyKzO1rjsYq+4NwvNW7hU=", - "dev": true, - "dependencies": { - "package-json": "^4.0.0" - }, - "engines": { - "node": ">=4" - } + "node_modules/pug-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-2.1.0.tgz", + "integrity": "sha512-lv7sU9e5Jk8IeUheHata6/UThZ7RK2jnaaNztxfPYUY+VxZyk/ePVaNZ/vwmH8WqGvDz3LrNYt/+gA55NDg6Pg==", + "license": "MIT" }, - "node_modules/lazy-cache": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", - "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=", - "engines": { - "node": ">=0.10.0" + "node_modules/pug-filters": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-4.0.0.tgz", + "integrity": "sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==", + "license": "MIT", + "dependencies": { + "constantinople": "^4.0.1", + "jstransformer": "1.0.0", + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0", + "resolve": "^1.15.1" } }, - "node_modules/limiter": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", - "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" - }, - "node_modules/lodash": { - "version": "4.17.14", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.14.tgz", - "integrity": "sha512-mmKYbW3GLuJeX+iGP+Y7Gp1AiGHGbXHCOh/jZmrawMmsE7MS4znI3RL2FsjbqOyMayHInjOeykW7PEajUk1/xw==" - }, - "node_modules/lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==" - }, - "node_modules/lodash.includes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", - "integrity": "sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=" - }, - "node_modules/lodash.isboolean": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", - "integrity": "sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=" - }, - "node_modules/lodash.isinteger": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", - "integrity": "sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=" - }, - "node_modules/lodash.isnumber": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", - "integrity": "sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=" - }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" - }, - "node_modules/lodash.isstring": { - "version": 
"4.0.1", - "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" - }, - "node_modules/lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" - }, - "node_modules/logform": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/logform/-/logform-2.1.2.tgz", - "integrity": "sha512-+lZh4OpERDBLqjiwDLpAWNQu6KMjnlXH2ByZwCuSqVPJletw0kTWJf5CgSNAUKn1KUkv3m2cUz/LK8zyEy7wzQ==", + "node_modules/pug-lexer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-5.0.1.tgz", + "integrity": "sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==", + "license": "MIT", "dependencies": { - "colors": "^1.2.1", - "fast-safe-stringify": "^2.0.4", - "fecha": "^2.3.3", - "ms": "^2.1.1", - "triple-beam": "^1.3.0" + "character-parser": "^2.2.0", + "is-expression": "^4.0.0", + "pug-error": "^2.0.0" } }, - "node_modules/logform/node_modules/ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" - }, - "node_modules/long": { + "node_modules/pug-linker": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", - "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" - }, - "node_modules/longest": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", - "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", - "engines": { - "node": ">=0.10.0" + "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-4.0.0.tgz", + "integrity": "sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==", + "license": "MIT", + "dependencies": { + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0" } }, - "node_modules/lowercase-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", - "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", - "engines": { - "node": ">=0.10.0" + "node_modules/pug-load": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-3.0.0.tgz", + "integrity": "sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==", + "license": "MIT", + "dependencies": { + "object-assign": "^4.1.1", + "pug-walk": "^2.0.0" } }, - "node_modules/lru-cache": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.2.tgz", - "integrity": "sha1-HRdnnAac2l0ECZGgnbwsDbN35V4=", + "node_modules/pug-parser": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-6.0.0.tgz", + "integrity": "sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==", + "license": "MIT", "dependencies": { - "pseudomap": "^1.0.1", - "yallist": "^2.0.0" + "pug-error": "^2.0.0", + "token-stream": "1.0.0" } }, - "node_modules/lru-memoizer": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.1.4.tgz", - "integrity": "sha512-IXAq50s4qwrOBrXJklY+KhgZF+5y98PDaNo0gi/v2KQBFLyWr+JyFvijZXkGKjQj/h9c0OwoE+JZbwUXce76hQ==", + "node_modules/pug-runtime": { + 
"version": "3.0.1", + "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-3.0.1.tgz", + "integrity": "sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==", + "license": "MIT" + }, + "node_modules/pug-strip-comments": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz", + "integrity": "sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==", + "license": "MIT", "dependencies": { - "lodash.clonedeep": "^4.5.0", - "lru-cache": "~4.0.0" + "pug-error": "^2.0.0" } }, - "node_modules/make-dir": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", - "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", - "dev": true, + "node_modules/pug-walk": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-2.0.0.tgz", + "integrity": "sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==", + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dependencies": { - "pify": "^3.0.0" + "side-channel": "^1.0.6" }, "engines": { - "node": ">=4" - } - }, - "node_modules/map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "dev": true, + "node_modules/query-string": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", + "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", "dependencies": { - "object-visit": "^1.0.0" + "decode-uri-component": "^0.2.0", + "object-assign": "^4.1.0", + "strict-uri-encode": "^1.0.0" }, "engines": { "node": ">=0.10.0" } }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", + "node_modules/querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", "engines": { - "node": ">= 0.6" + "node": ">=0.4.x" } }, - "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + "node_modules/random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=", + "engines": { + "node": ">= 0.8" + } }, - "node_modules/methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "engines": { "node": ">= 0.6" } }, - "node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "engines": { - "node": ">= 0.6" - } + "node_modules/react-zlib-js": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/react-zlib-js/-/react-zlib-js-1.0.4.tgz", + "integrity": "sha512-ynXD9DFxpE7vtGoa3ZwBtPmZrkZYw2plzHGbanUjBOSN4RtuXdektSfABykHtTiWEHMh7WdYj45LHtp228ZF1A==" }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "node_modules/readable-stream": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.3.0.tgz", + "integrity": "sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw==", "dependencies": { - "mime-db": "1.52.0" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" }, "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-response": 
{ - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", - "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", - "engines": { - "node": ">=4" + "node": ">= 6" } }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dev": true, "dependencies": { - "brace-expansion": "^1.1.7" + "picomatch": "^2.2.1" }, "engines": { - "node": "*" + "node": ">=8.10.0" } }, - "node_modules/minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, - "node_modules/mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dev": true, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "license": "MIT", "dependencies": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/mixin-deep/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, + "node_modules/responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" + "lowercase-keys": "^1.0.0" } }, - "node_modules/morgan": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", - "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "node_modules/rndm": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", + "integrity": "sha1-8z/pz7Urv9UgqhgyO8ZdsRCht2w=" + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + 
"node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dependencies": { - "basic-auth": "~2.0.1", "debug": "2.6.9", - "depd": "~2.0.0", - "on-finished": "~2.3.0", - "on-headers": "~1.0.2" + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" }, "engines": { "node": ">= 0.8.0" } }, - "node_modules/morgan/node_modules/depd": { + "node_modules/send/node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", @@ -3737,3268 +2875,1365 @@ "node": ">= 0.8" } }, - "node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } }, - "node_modules/nan": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", - "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==", - "dev": true, - "optional": true + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, - "node_modules/nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "dev": true, + "node_modules/send/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" + "ee-first": "1.1.1" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "node_modules/send/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + 
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "node_modules/node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "node_modules/serve-favicon": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.5.0.tgz", + "integrity": "sha1-k10kDN/g9YBTB/3+ln2IlCosvPA=", + "dependencies": { + "etag": "~1.8.1", + "fresh": "0.5.2", + "ms": "2.1.1", + "parseurl": "~1.3.2", + "safe-buffer": "5.1.1" + }, "engines": { - "node": "4.x || >=6.0.0" + "node": ">= 0.8.0" } }, - "node_modules/node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==", - "engines": { - "node": ">= 4.5.0" - } + "node_modules/serve-favicon/node_modules/ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" }, - "node_modules/node-jose": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/node-jose/-/node-jose-1.1.4.tgz", - "integrity": "sha512-L31IFwL3pWWcMHxxidCY51ezqrDXMkvlT/5pLTfNw5sXmmOLJuN6ug7txzF/iuZN55cRpyOmoJrotwBQIoo5Lw==", + "node_modules/serve-favicon/node_modules/safe-buffer": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "dependencies": { - "base64url": "^3.0.1", - "browserify-zlib": "^0.2.0", - "buffer": "^5.5.0", - "es6-promise": "^4.2.8", - "lodash": "^4.17.15", - "long": "^4.0.0", - "node-forge": "^0.8.5", - "process": "^0.11.10", - "react-zlib-js": "^1.0.4", - "uuid": "^3.3.3" + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" } }, - "node_modules/node-jose/node_modules/lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" - }, - "node_modules/node-jose/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" } }, - "node_modules/node-uuid": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.8.tgz", - "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=", - "deprecated": "Use uuid module instead", - "bin": { - "uuid": "bin/uuid" - } + "node_modules/setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, - "node_modules/nodemon": { - "version": "1.19.4", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-1.19.4.tgz", - "integrity": "sha512-VGPaqQBNk193lrJFotBU8nvWZPqEZY2eIzymy2jjY0fJ9qIsxA0sxQ8ATPl0gZC645gijYEc1jtZvpS8QWzJGQ==", + "node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", "dev": true, - "hasInstallScript": true, "dependencies": { - "chokidar": "^2.1.8", - "debug": "^3.2.6", - "ignore-by-default": "^1.0.1", - "minimatch": "^3.0.4", - "pstree.remy": "^1.1.7", - "semver": "^5.7.1", - "supports-color": "^5.5.0", - "touch": "^3.1.0", - "undefsafe": "^2.0.2", - "update-notifier": "^2.5.0" - }, - "bin": { - "nodemon": "bin/nodemon.js" + "shebang-regex": "^1.0.0" }, "engines": { - "node": ">=4" + "node": ">=0.10.0" } }, - "node_modules/nodemon/node_modules/debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. 
(https://github.com/visionmedia/debug/issues/797)", + "node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", "dev": true, - "dependencies": { - "ms": "^2.1.1" + "engines": { + "node": ">=0.10.0" } }, - "node_modules/nodemon/node_modules/ms": { + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/simple-oauth2": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/simple-oauth2/-/simple-oauth2-2.5.2.tgz", + "integrity": "sha512-8qjf+nHRdSUllFjjfpnonrU1oF/HNVbDle5HIbvXRYiy38C7KUvYe6w0ZZ//g4AFB6VNWuiZ80HmnycR8ZFDyQ==", + "deprecated": "simple-oauth2 v2 is no longer supported. Please upgrade to v3 for further support", + "dependencies": { + "@hapi/joi": "^15.1.1", + "date-fns": "^2.2.1", + "debug": "^4.1.1", + "wreck": "^14.0.2" + } + }, + "node_modules/simple-oauth2/node_modules/debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. 
(https://github.com/visionmedia/debug/issues/797)", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/simple-oauth2/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, - "node_modules/nodemon/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "dependencies": { + "is-arrayish": "^0.3.1" } }, - "node_modules/nopt": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", - "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", + "node_modules/simple-update-notifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz", + "integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==", "dev": true, "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" + "semver": "~7.0.0" }, "engines": { - "node": "*" + "node": ">=8.10.0" } }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "node_modules/simple-update-notifier/node_modules/semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", "dev": true, - "engines": { - "node": ">=0.10.0" + "bin": { + "semver": "bin/semver.js" } }, - "node_modules/normalize-url": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", - "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", + "node_modules/sort-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", + "integrity": "sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg=", "dependencies": { - "prepend-http": "^2.0.0", - "query-string": "^5.0.1", - "sort-keys": "^2.0.0" + "is-plain-obj": "^1.0.0" }, "engines": { "node": ">=4" } }, - "node_modules/normalize-url/node_modules/prepend-http": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", - "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", + "node_modules/stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=", "engines": { - "node": ">=4" + "node": "*" } }, - "node_modules/npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "dev": true, - "dependencies": { - "path-key": 
"^2.0.0" - }, + "node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", "engines": { - "node": ">=4" + "node": ">= 0.6" } }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "node_modules/strict-uri-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", + "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=", "engines": { "node": ">=0.10.0" } }, - "node_modules/object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "dev": true, + "node_modules/string_decoder": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz", + "integrity": "sha512-6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dependencies": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" + "has-flag": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/object-copy/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + }, + "node_modules/timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=", "engines": { "node": ">=0.10.0" } }, - "node_modules/object-copy/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, "dependencies": { - "is-buffer": "^1.1.5" + "is-number": "^7.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.0" } }, - "node_modules/object-hash": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-1.3.1.tgz", 
- "integrity": "sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA==", + "node_modules/toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", "engines": { - "node": ">= 0.10.0" + "node": ">=0.6" } }, - "node_modules/object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "node_modules/token-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz", + "integrity": "sha512-VSsyNPPW74RpHwR8Fc21uubwHY7wMDeJLys2IX5zJNih+OnAnaifKHo+1LHT7DAdloQ7apeaaWg8l7qnf/TnEg==", + "license": "MIT" }, - "node_modules/object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", + "node_modules/touch": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz", + "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==", "dev": true, "dependencies": { - "isobject": "^3.0.0" + "nopt": "~1.0.10" }, - "engines": { - "node": ">=0.10.0" + "bin": { + "nodetouch": "bin/nodetouch.js" } }, - "node_modules/object.pick": { + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/triple-beam": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dev": true, + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", + "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" + }, + "node_modules/tsscmp": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/tsscmp/-/tsscmp-1.0.6.tgz", + "integrity": "sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA==", + "engines": { + "node": ">=0.6.x" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "dependencies": { - "isobject": "^3.0.1" + "media-typer": "0.3.0", + "mime-types": "~2.1.24" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.6" } }, - "node_modules/oidc-token-hash": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-3.0.2.tgz", - "integrity": "sha512-dTzp80/y/da+um+i+sOucNqiPpwRL7M/xPwj7pH1TFA2/bqQ+OK2sJahSXbemEoLtPkHcFLyhLhLWZa9yW5+RA==", + "node_modules/typescript": { + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", + "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, "engines": { 
- "node": ">=6.9.0" + "node": ">=14.17" } }, - "node_modules/on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "node_modules/uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", "dependencies": { - "ee-first": "1.1.1" + "random-bytes": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, - "node_modules/on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", + "dev": true + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", "engines": { "node": ">= 0.8" } }, - "node_modules/one-time": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/one-time/-/one-time-0.0.4.tgz", - "integrity": "sha1-+M33eISCb+Tf+T46nMN7HkSAdC4=" + "node_modules/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==" }, - "node_modules/openid-client": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/openid-client/-/openid-client-2.5.0.tgz", - "integrity": "sha512-t3hFD7xEoW1U25RyBcRFaL19fGGs6hNVTysq9pgmiltH0IVUPzH/bQV9w24pM5Q7MunnGv2/5XjIru6BQcWdxg==", - "dependencies": { - "base64url": "^3.0.0", - "got": "^8.3.2", - "lodash": "^4.17.11", - "lru-cache": "^5.1.1", - "node-jose": "^1.1.0", - "object-hash": "^1.3.1", - "oidc-token-hash": "^3.0.1", - "p-any": "^1.1.0" - }, + "node_modules/url-to-options": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", + "integrity": "sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k=", "engines": { - "node": ">=6.9.0" + "node": ">= 4" } }, - "node_modules/openid-client/node_modules/got": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", - "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", - "dependencies": { - "@sindresorhus/is": "^0.7.0", - "cacheable-request": "^2.1.1", - "decompress-response": "^3.3.0", - "duplexer3": "^0.1.4", - "get-stream": "^3.0.0", - "into-stream": "^3.1.0", - "is-retry-allowed": "^1.1.0", - "isurl": "^1.0.0-alpha5", - "lowercase-keys": "^1.0.0", - "mimic-response": "^1.0.0", - "p-cancelable": "^0.4.0", - "p-timeout": "^2.0.1", - "pify": "^3.0.0", - "safe-buffer": "^5.1.1", - "timed-out": "^4.0.1", - "url-parse-lax": "^3.0.0", - "url-to-options": "^1.0.1" - }, - "engines": { - "node": ">=4" - } + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, - "node_modules/openid-client/node_modules/lru-cache": { - "version": "5.1.1", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dependencies": { - "yallist": "^3.0.2" + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", + "engines": { + "node": ">= 0.4.0" } }, - "node_modules/openid-client/node_modules/prepend-http": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", - "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", "engines": { - "node": ">=4" + "node": ">= 0.8" } }, - "node_modules/openid-client/node_modules/url-parse-lax": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", - "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", - "dependencies": { - "prepend-http": "^2.0.0" - }, + "node_modules/void-elements": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", + "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=0.10.0" } }, - "node_modules/openid-client/node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, - "node_modules/p-any": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/p-any/-/p-any-1.1.0.tgz", - "integrity": "sha512-Ef0tVa4CZ5pTAmKn+Cg3w8ABBXh+hHO1aV8281dKOoUHfX+3tjG2EaFcC+aZyagg9b4EYGsHEjz21DnEE8Og2g==", + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "dependencies": { - "p-some": "^2.0.0" - }, - "engines": { - "node": ">=4" + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" } }, - "node_modules/p-cancelable": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", - "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==", - "engines": { - "node": ">=4" + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" } }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", + "node_modules/winston": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.2.1.tgz", + "integrity": 
"sha512-zU6vgnS9dAWCEKg/QYigd6cgMVVNwyTzKs81XZtTFuRwJOcDdBg7AU0mXVyNbs7O5RH2zdv+BdNZUlx7mXPuOw==", + "peer": true, + "dependencies": { + "async": "^2.6.1", + "diagnostics": "^1.1.1", + "is-stream": "^1.1.0", + "logform": "^2.1.1", + "one-time": "0.0.4", + "readable-stream": "^3.1.1", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.3.0" + }, "engines": { - "node": ">=4" + "node": ">= 6.4.0" } }, - "node_modules/p-is-promise": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", - "integrity": "sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4=", + "node_modules/winston-transport": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.3.0.tgz", + "integrity": "sha512-B2wPuwUi3vhzn/51Uukcao4dIduEiPOcOt9HJ3QeaXgkJ5Z7UwpBzxS4ZGNHtrxrUvTwemsQiSys0ihOf8Mp1A==", + "dependencies": { + "readable-stream": "^2.3.6", + "triple-beam": "^1.2.0" + }, "engines": { - "node": ">=4" + "node": ">= 6.4.0" } }, - "node_modules/p-some": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/p-some/-/p-some-2.0.1.tgz", - "integrity": "sha1-Zdh8ixVO289SIdFnd4ttLhUPbwY=", + "node_modules/winston-transport/node_modules/readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", "dependencies": { - "aggregate-error": "^1.0.0" - }, - "engines": { - "node": ">=4" + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" } }, - "node_modules/p-timeout": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", - "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", + "node_modules/winston-transport/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dependencies": { - "p-finally": "^1.0.0" - }, - "engines": { - "node": ">=4" + "safe-buffer": "~5.1.0" } }, - "node_modules/package-json": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-4.0.1.tgz", - "integrity": "sha1-iGmgQBJTZhxMTKPabCEh7VVfXu0=", - "dev": true, + "node_modules/with": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/with/-/with-7.0.2.tgz", + "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==", + "license": "MIT", "dependencies": { - "got": "^6.7.1", - "registry-auth-token": "^3.0.1", - "registry-url": "^3.0.3", - "semver": "^5.1.0" + "@babel/parser": "^7.9.6", + "@babel/types": "^7.9.6", + "assert-never": "^1.2.1", + "babel-walk": "3.0.0-canary-5" }, "engines": { - "node": ">=4" + "node": ">= 10.0.0" } }, - "node_modules/pako": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", - "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": 
"sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "engines": { - "node": ">= 0.8" + "node_modules/wreck": { + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/wreck/-/wreck-14.2.0.tgz", + "integrity": "sha512-NFFft3SMgqrJbXEVfYifh+QDWFxni+98/I7ut7rLbz3F0XOypluHsdo3mdEYssGSirMobM3fGlqhyikbWKDn2Q==", + "deprecated": "This module has moved and is now available at @hapi/wreck. Please update your dependencies as this version is no longer maintained an may contain bugs and security issues.", + "dependencies": { + "boom": "7.x.x", + "bourne": "1.x.x", + "hoek": "6.x.x" } }, - "node_modules/pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + } + }, + "dependencies": { + "@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==" }, - "node_modules/path-dirname": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", - "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=", - "dev": true + "@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==" }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "@babel/parser": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz", + "integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==", + "requires": { + "@babel/types": "^7.26.9" } }, - "node_modules/path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true - }, - "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, - "engines": { - "node": ">=4" + "@babel/types": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz", + "integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==", + "requires": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" } }, - "node_modules/path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "@hapi/address": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", + 
"integrity": "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==" }, - "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + "@hapi/bourne": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", + "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==" }, - "node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "engines": { - "node": ">=4" - } + "@hapi/hoek": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", + "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==" }, - "node_modules/posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "@hapi/joi": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.1.tgz", + "integrity": "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==", + "requires": { + "@hapi/address": "2.x.x", + "@hapi/bourne": "1.x.x", + "@hapi/hoek": "8.x.x", + "@hapi/topo": "3.x.x" } }, - "node_modules/prepend-http": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", - "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "@hapi/topo": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.6.tgz", + "integrity": "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==", + "requires": { + "@hapi/hoek": "^8.3.0" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", - "engines": { - "node": ">= 0.6.0" - } + "@ory/hydra-client-fetch": { + "version": "2.4.0-alpha.1", + "resolved": "https://registry.npmjs.org/@ory/hydra-client-fetch/-/hydra-client-fetch-2.4.0-alpha.1.tgz", + "integrity": "sha512-TTuw+1DdIFskz4JU7yP2OSOHP3pVr7HjLnr8YI4S1pQJ91JaGwBQxCPQLrqdNuffs7d8JU1VRkcgdSWOo+89eA==" }, - "node_modules/process-nextick-args": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" + "@panva/asn1.js": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", + "integrity": "sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==" }, - "node_modules/promise": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", - "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", - "dependencies": { - "asap": "~2.0.3" - } + "@sindresorhus/is": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", + "integrity": 
"sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==" }, - "node_modules/proxy-addr": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" + "@types/body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", + "requires": { + "@types/connect": "*", + "@types/node": "*" } }, - "node_modules/pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" - }, - "node_modules/pstree.remy": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.7.tgz", - "integrity": "sha512-xsMgrUwRpuGskEzBFkH8NmTimbZ5PcPup0LA8JJkHIm2IMUbQcpo3yeLNWVrufEYjh8YwtSVh0xz6UeWc5Oh5A==", - "dev": true - }, - "node_modules/pug": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pug/-/pug-2.0.4.tgz", - "integrity": "sha512-XhoaDlvi6NIzL49nu094R2NA6P37ijtgMDuWE+ofekDChvfKnzFal60bhSdiy8y2PBO6fmz3oMEIcfpBVRUdvw==", - "dependencies": { - "pug-code-gen": "^2.0.2", - "pug-filters": "^3.1.1", - "pug-lexer": "^4.1.0", - "pug-linker": "^3.0.6", - "pug-load": "^2.0.12", - "pug-parser": "^5.0.1", - "pug-runtime": "^2.0.5", - "pug-strip-comments": "^1.0.4" + "@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "requires": { + "@types/node": "*" } }, - "node_modules/pug-attrs": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-2.0.4.tgz", - "integrity": "sha512-TaZ4Z2TWUPDJcV3wjU3RtUXMrd3kM4Wzjbe3EWnSsZPsJ3LDI0F3yCnf2/W7PPFF+edUFQ0HgDL1IoxSz5K8EQ==", - "dependencies": { - "constantinople": "^3.0.1", - "js-stringify": "^1.0.1", - "pug-runtime": "^2.0.5" + "@types/cookie-parser": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.3.tgz", + "integrity": "sha512-CqSKwFwefj4PzZ5n/iwad/bow2hTCh0FlNAeWLtQM3JA/NX/iYagIpWG2cf1bQKQ2c9gU2log5VUCrn7LDOs0w==", + "requires": { + "@types/express": "*" } }, - "node_modules/pug-code-gen": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-2.0.3.tgz", - "integrity": "sha512-r9sezXdDuZJfW9J91TN/2LFbiqDhmltTFmGpHTsGdrNGp3p4SxAjjXEfnuK2e4ywYsRIVP0NeLbSAMHUcaX1EA==", - "dependencies": { - "constantinople": "^3.1.2", - "doctypes": "^1.1.0", - "js-stringify": "^1.0.1", - "pug-attrs": "^2.0.4", - "pug-error": "^1.3.3", - "pug-runtime": "^2.0.5", - "void-elements": "^2.0.1", - "with": "^5.0.0" + "@types/csurf": { + "version": "1.11.2", + "resolved": "https://registry.npmjs.org/@types/csurf/-/csurf-1.11.2.tgz", + "integrity": "sha512-9bc98EnwmC1S0aSJiA8rWwXtgXtXHHOQOsGHptImxFgqm6CeH+mIOunHRg6+/eg2tlmDMX3tY7XrWxo2M/nUNQ==", + "requires": { + "@types/express-serve-static-core": "*" } }, - "node_modules/pug-error": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-1.3.3.tgz", - "integrity": 
"sha512-qE3YhESP2mRAWMFJgKdtT5D7ckThRScXRwkfo+Erqga7dyJdY3ZquspprMCj/9sJ2ijm5hXFWQE/A3l4poMWiQ==" - }, - "node_modules/pug-filters": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-3.1.1.tgz", - "integrity": "sha512-lFfjNyGEyVWC4BwX0WyvkoWLapI5xHSM3xZJFUhx4JM4XyyRdO8Aucc6pCygnqV2uSgJFaJWW3Ft1wCWSoQkQg==", - "dependencies": { - "clean-css": "^4.1.11", - "constantinople": "^3.0.1", - "jstransformer": "1.0.0", - "pug-error": "^1.3.3", - "pug-walk": "^1.1.8", - "resolve": "^1.1.6", - "uglify-js": "^2.6.1" + "@types/express": { + "version": "4.17.13", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", + "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", + "requires": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.18", + "@types/qs": "*", + "@types/serve-static": "*" } }, - "node_modules/pug-lexer": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-4.1.0.tgz", - "integrity": "sha512-i55yzEBtjm0mlplW4LoANq7k3S8gDdfC6+LThGEvsK4FuobcKfDAwt6V4jKPH9RtiE3a2Akfg5UpafZ1OksaPA==", - "dependencies": { - "character-parser": "^2.1.1", - "is-expression": "^3.0.0", - "pug-error": "^1.3.3" + "@types/express-serve-static-core": { + "version": "4.17.28", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz", + "integrity": "sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig==", + "requires": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*" } }, - "node_modules/pug-linker": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-3.0.6.tgz", - "integrity": "sha512-bagfuHttfQOpANGy1Y6NJ+0mNb7dD2MswFG2ZKj22s8g0wVsojpRlqveEQHmgXXcfROB2RT6oqbPYr9EN2ZWzg==", - "dependencies": { - "pug-error": "^1.3.3", - "pug-walk": "^1.1.8" + "@types/jsonwebtoken": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-8.5.8.tgz", + "integrity": "sha512-zm6xBQpFDIDM6o9r6HSgDeIcLy82TKWctCXEPbJJcXb5AKmi5BNNdLXneixK4lplX3PqIVcwLBCGE/kAGnlD4A==", + "requires": { + "@types/node": "*" } }, - "node_modules/pug-load": { - "version": "2.0.12", - "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-2.0.12.tgz", - "integrity": "sha512-UqpgGpyyXRYgJs/X60sE6SIf8UBsmcHYKNaOccyVLEuT6OPBIMo6xMPhoJnqtB3Q3BbO4Z3Bjz5qDsUWh4rXsg==", - "dependencies": { - "object-assign": "^4.1.0", - "pug-walk": "^1.1.8" - } + "@types/mime": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", + "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" }, - "node_modules/pug-parser": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-5.0.1.tgz", - "integrity": "sha512-nGHqK+w07p5/PsPIyzkTQfzlYfuqoiGjaoqHv1LjOv2ZLXmGX1O+4Vcvps+P4LhxZ3drYSljjq4b+Naid126wA==", - "dependencies": { - "pug-error": "^1.3.3", - "token-stream": "0.0.1" + "@types/morgan": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.4.tgz", + "integrity": "sha512-cXoc4k+6+YAllH3ZHmx4hf7La1dzUk6keTR4bF4b4Sc0mZxU/zK4wO7l+ZzezXm/jkYj/qC+uYGZrarZdIVvyQ==", + "requires": { + "@types/node": "*" } }, - "node_modules/pug-runtime": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-2.0.5.tgz", 
- "integrity": "sha512-P+rXKn9un4fQY77wtpcuFyvFaBww7/91f3jHa154qU26qFAnOe6SW1CbIDcxiG5lLK9HazYrMCCuDvNgDQNptw==" + "@types/node": { + "version": "17.0.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", + "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" }, - "node_modules/pug-strip-comments": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-1.0.4.tgz", - "integrity": "sha512-i5j/9CS4yFhSxHp5iKPHwigaig/VV9g+FgReLJWWHEHbvKsbqL0oP/K5ubuLco6Wu3Kan5p7u7qk8A4oLLh6vw==", - "dependencies": { - "pug-error": "^1.3.3" - } + "@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, - "node_modules/pug-walk": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-1.1.8.tgz", - "integrity": "sha512-GMu3M5nUL3fju4/egXwZO0XLi6fW/K3T3VTgFQ14GxNi8btlxgT5qZL//JwZFm/2Fa64J/PNS8AZeys3wiMkVA==" + "@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, - "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "@types/serve-static": { + "version": "1.13.10", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", + "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", + "requires": { + "@types/mime": "^1", + "@types/node": "*" } }, - "node_modules/query-string": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", - "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", - "dependencies": { - "decode-uri-component": "^0.2.0", - "object-assign": "^4.1.0", - "strict-uri-encode": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } + "@types/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-wDXw9LEEUHyV+7UWy7U315nrJGJ7p1BzaCxDpEoLr789Dk1WDVMMlf3iBfbG2F8NdWnYyFbtTxUn2ZNbm1Q4LQ==" }, - "node_modules/querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", - "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", - "engines": { - "node": ">=0.4.x" - } + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true }, - "node_modules/random-bytes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", - "integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=", - "engines": { - "node": ">= 0.8" + "accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "requires": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" } }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "engines": { - "node": ">= 0.6" + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + }, + "aggregate-error": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-1.0.0.tgz", + "integrity": "sha1-iINE2tAiCnLjr1CQYRf0h3GSX6w=", + "requires": { + "clean-stack": "^1.0.0", + "indent-string": "^3.0.0" } }, - "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" } }, - "node_modules/rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "dev": true, - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" } }, - "node_modules/react-zlib-js": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/react-zlib-js/-/react-zlib-js-1.0.4.tgz", - "integrity": "sha512-ynXD9DFxpE7vtGoa3ZwBtPmZrkZYw2plzHGbanUjBOSN4RtuXdektSfABykHtTiWEHMh7WdYj45LHtp228ZF1A==" + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, - "node_modules/readable-stream": { - "version": "3.3.0", - "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.3.0.tgz", - "integrity": "sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" }, - "node_modules/readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - }, - "engines": { - "node": ">=0.10" + "assert-never": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.4.0.tgz", + "integrity": "sha512-5oJg84os6NMQNl27T9LnZkvvqzvAnHu03ShCnoj6bsJwS7L8AO4lf+C/XjK/nvzEqQB744moC6V128RucQd1jA==" + }, + "async": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", + "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", + "requires": { + "lodash": "^4.17.14" } }, - "node_modules/readdirp/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "babel-walk": { + "version": "3.0.0-canary-5", + "resolved": "https://registry.npmjs.org/babel-walk/-/babel-walk-3.0.0-canary-5.tgz", + "integrity": "sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==", + "requires": { + "@babel/types": "^7.9.6" } }, - "node_modules/readdirp/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "base64-js": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", + "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" + }, + "base64url": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/base64url/-/base64url-3.0.1.tgz", + "integrity": "sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A==" + }, + "basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "requires": { + "safe-buffer": "5.1.2" } }, - 
"node_modules/regenerator-runtime": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", - "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true }, - "node_modules/regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", - "dev": true, - "dependencies": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" + "body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "requires": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" }, - "engines": { - "node": ">=0.10.0" + "dependencies": { + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "requires": { + "ee-first": "1.1.1" + } + } } }, - "node_modules/registry-auth-token": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-3.4.0.tgz", - "integrity": "sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A==", - "dev": true, - "dependencies": { - "rc": "^1.1.6", - "safe-buffer": "^5.0.1" + "boom": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/boom/-/boom-7.3.0.tgz", + "integrity": "sha512-Swpoyi2t5+GhOEGw8rEsKvTxFLIDiiKoUc2gsoV6Lyr43LHBIzch3k2MvYUs8RTROrIkVJ3Al0TkaOGjnb+B6A==", + "requires": { + "hoek": "6.x.x" } }, - "node_modules/registry-url": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-3.1.0.tgz", - "integrity": "sha1-PU74cPc93h138M+aOBQyRE4XSUI=", + "bourne": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/bourne/-/bourne-1.1.2.tgz", + "integrity": "sha512-b2dgVkTZhkQirNMohgC00rWfpVqEi9y5tKM1k3JvoNx05ODtfQoPPd4js9CYFQoY0IM8LAmnJulEuWv74zjUOg==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, - "dependencies": { - "rc": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" } }, - "node_modules/remove-trailing-separator": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", - "dev": true - }, - "node_modules/repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", + "braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, - "engines": { - "node": ">=0.10.0" + "requires": { + "fill-range": "^7.1.1" } }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "engines": { - "node": ">=0.10" + "browserify-zlib": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", + "requires": { + "pako": "~1.0.5" } }, - "node_modules/resolve": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.16.0.tgz", - "integrity": "sha512-LarL/PIKJvc09k1jaeT4kQb/8/7P+qV4qSnN2K80AES+OHdfZELAKVOBjxsvtToT/uLOfFbvYvKfZmV8cee7nA==", - "dependencies": { - "path-parse": "^1.0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "buffer": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.6.0.tgz", + "integrity": "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4" } }, - "node_modules/resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", - "deprecated": "https://github.com/lydell/resolve-url#deprecated", - "dev": true + "buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" }, - "node_modules/responselike": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", - "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", + "bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" + }, + "cacheable-request": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", + "integrity": "sha1-DYCIAbY0KtM8kd+dC0TcCbkeXD0=", + "requires": { + "clone-response": "1.0.2", + "get-stream": "3.0.0", + "http-cache-semantics": "3.8.1", + "keyv": "3.0.0", + "lowercase-keys": "1.0.0", + "normalize-url": "2.0.1", + "responselike": "1.0.2" + }, "dependencies": { - "lowercase-keys": "^1.0.0" + "lowercase-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", + "integrity": "sha1-TjNms55/VFfjXxMkvfb4jQv8cwY=" + } } }, - "node_modules/ret": { - "version": 
"0.1.15", - "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", - "dev": true, - "engines": { - "node": ">=0.12" + "call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "requires": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" } }, - "node_modules/right-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", - "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", - "dependencies": { - "align-text": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" + "call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "requires": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" } }, - "node_modules/rndm": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", - "integrity": "sha1-8z/pz7Urv9UgqhgyO8ZdsRCht2w=" - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "node_modules/safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "dev": true, - "dependencies": { - "ret": "~0.1.10" + "call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "requires": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" } }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } }, - "node_modules/semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==", - "bin": { - "semver": "bin/semver" + "character-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz", + "integrity": "sha512-+UqJQjFEFaTAs3bNsF2j2kEN1baG/zghZbdqoYEDxGZtJo9LBzl1A+m0D4n3qKx8N2FNv8/Xp6yV9mQmBuptaw==", + "requires": { + "is-regex": "^1.0.3" } }, - "node_modules/semver-diff": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-2.1.0.tgz", - "integrity": 
"sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY=", + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "dev": true, - "dependencies": { - "semver": "^5.0.3" - }, - "engines": { - "node": ">=0.10.0" + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" } }, - "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", - "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - }, - "engines": { - "node": ">= 0.8.0" - } + "clean-stack": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-1.3.0.tgz", + "integrity": "sha1-noIVAa6XmYbEax1m0tQy2y/UrjE=" }, - "node_modules/send/node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "engines": { - "node": ">= 0.8" + "clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", + "requires": { + "mimic-response": "^1.0.0" } }, - "node_modules/send/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" - }, - "node_modules/send/node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/send/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/serve-favicon": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/serve-favicon/-/serve-favicon-2.5.0.tgz", - "integrity": "sha1-k10kDN/g9YBTB/3+ln2IlCosvPA=", - "dependencies": { - "etag": "~1.8.1", - "fresh": "0.5.2", - "ms": "2.1.1", - "parseurl": "~1.3.2", - "safe-buffer": "5.1.1" - }, - "engines": { - "node": ">= 0.8.0" + "color": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz", + "integrity": "sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==", + "requires": { + "color-convert": "^1.9.1", + "color-string": "^1.5.2" } }, - "node_modules/serve-favicon/node_modules/ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": 
"sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" - }, - "node_modules/serve-favicon/node_modules/safe-buffer": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" - }, - "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", - "dependencies": { - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.18.0" - }, - "engines": { - "node": ">= 0.8.0" + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" } }, - "node_modules/set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" }, - "node_modules/set-value/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" + "color-string": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.5.tgz", + "integrity": "sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg==", + "requires": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" } }, - "node_modules/setprototypeof": { + "colornames": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", - "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" - }, - "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } + "resolved": "https://registry.npmjs.org/colornames/-/colornames-1.1.1.tgz", + "integrity": "sha1-+IiQMGhcfE/54qVZ9Qd+t2qBb5Y=" }, - "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "colors": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.3.3.tgz", + "integrity": "sha512-mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg==" }, - "node_modules/side-channel": { - "version": 
"1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "colorspace": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.1.tgz", + "integrity": "sha512-pI3btWyiuz7Ken0BWh9Elzsmv2bM9AhA7psXib4anUXy/orfZ/E0MbQwhSOG/9L8hLlalqrU0UhOuqxW1YjmVw==", + "requires": { + "color": "3.0.x", + "text-hex": "1.0.x" } }, - "node_modules/signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, - "node_modules/simple-oauth2": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/simple-oauth2/-/simple-oauth2-2.5.2.tgz", - "integrity": "sha512-8qjf+nHRdSUllFjjfpnonrU1oF/HNVbDle5HIbvXRYiy38C7KUvYe6w0ZZ//g4AFB6VNWuiZ80HmnycR8ZFDyQ==", - "deprecated": "simple-oauth2 v2 is no longer supported. Please upgrade to v3 for further support", - "dependencies": { - "@hapi/joi": "^15.1.1", - "date-fns": "^2.2.1", - "debug": "^4.1.1", - "wreck": "^14.0.2" + "constantinople": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz", + "integrity": "sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==", + "requires": { + "@babel/parser": "^7.6.0", + "@babel/types": "^7.6.1" } }, - "node_modules/simple-oauth2/node_modules/debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. 
(https://github.com/visionmedia/debug/issues/797)", + "content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "requires": { + "safe-buffer": "5.2.1" + }, "dependencies": { - "ms": "^2.1.1" + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } } }, - "node_modules/simple-oauth2/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==" }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "dependencies": { - "is-arrayish": "^0.3.1" - } + "cookie": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" }, - "node_modules/snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "dev": true, - "dependencies": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" + "cookie-parser": { + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.7.tgz", + "integrity": "sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==", + "requires": { + "cookie": "0.7.2", + "cookie-signature": "1.0.6" }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "dev": true, "dependencies": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" + "cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==" + } } }, - "node_modules/snapdragon-node/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } + "cookie-signature": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, - "node_modules/snapdragon-node/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "cross-env": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-5.2.1.tgz", + "integrity": "sha512-1yHhtcfAd1r4nwQgknowuUNfIT9E8dOMMspC36g45dN+iD1blloi7xp8X/xAIDnjHWyt1uQ8PHk2fkNaym7soQ==", "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" + "requires": { + "cross-spawn": "^6.0.5" } }, - "node_modules/snapdragon-node/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" } }, - "node_modules/snapdragon-node/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" + "csrf": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/csrf/-/csrf-3.1.0.tgz", + "integrity": "sha512-uTqEnCvWRk042asU6JtapDTcJeeailFy4ydOQS28bj1hcLnYRiqi8SsD2jS412AY1I/4qdOwWZun774iqywf9w==", + "requires": { + "rndm": "1.2.0", + "tsscmp": "1.0.6", + "uid-safe": "2.1.5" } }, - "node_modules/snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", - "dev": true, - "dependencies": { - "kind-of": "^3.2.0" + "csurf": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/csurf/-/csurf-1.11.0.tgz", + "integrity": "sha512-UCtehyEExKTxgiu8UHdGvHj4tnpE/Qctue03Giq5gPgMQ9cg/ciod5blZQ5a4uCEenNQjxyGuzygLdKUmee/bQ==", + "requires": { + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "csrf": "3.1.0", + "http-errors": "~1.7.3" }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-util/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" + "http-errors": { + "version": "1.7.3", + 
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.3.tgz", + "integrity": "sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + } + } } }, - "node_modules/snapdragon/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } + "date-fns": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.12.0.tgz", + "integrity": "sha512-qJgn99xxKnFgB1qL4jpxU7Q2t0LOn1p8KMIveef3UZD7kqjT3tpFNNdXJelEHhE+rUgffriXriw/sOSU+cS1Hw==" }, - "node_modules/snapdragon/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" } }, - "node_modules/sort-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", - "integrity": "sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg=", - "dependencies": { - "is-plain-obj": "^1.0.0" - }, - "engines": { - "node": ">=4" - } + "decode-uri-component": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", + "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" }, - "node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "engines": { - "node": ">=0.10.0" + "decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", + "requires": { + "mimic-response": "^1.0.0" } }, - "node_modules/source-map-resolve": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", - "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", - "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", - "dev": true, - "dependencies": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" + "define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "requires": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" } }, - "node_modules/source-map-url": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", - "integrity": 
"sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=", - "deprecated": "See https://github.com/lydell/source-map-url#deprecated", - "dev": true - }, - "node_modules/split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "dev": true, - "dependencies": { - "extend-shallow": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" }, - "node_modules/stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=", - "engines": { - "node": "*" - } + "destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, - "node_modules/static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", - "dev": true, - "dependencies": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" + "diagnostics": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/diagnostics/-/diagnostics-1.1.1.tgz", + "integrity": "sha512-8wn1PmdunLJ9Tqbx+Fx/ZEuHfJf4NKSN2ZBj7SJC/OWRWha843+WsTjqMe1B5E3p28jqBlp+mJ2fPVxPyNgYKQ==", + "requires": { + "colorspace": "1.1.x", + "enabled": "1.0.x", + "kuler": "1.0.x" } }, - "node_modules/static-extend/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } + "doctypes": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz", + "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==" }, - "node_modules/statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", - "engines": { - "node": ">= 0.6" - } + "dotenv": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-7.0.0.tgz", + "integrity": "sha512-M3NhsLbV1i6HuGzBUH8vXrtxOk+tWmzWKDMbAVSUp3Zsjm7ywFeuwrUXhmhQyRK1q5B5GGy7hcXPbj3bnfZg2g==" }, - "node_modules/strict-uri-encode": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", - "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=", - "engines": { - "node": ">=0.10.0" + "dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "requires": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" } }, - "node_modules/string_decoder": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz", - "integrity": 
"sha512-6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w==", - "dependencies": { - "safe-buffer": "~5.1.0" - } + "duplexer3": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", + "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=" }, - "node_modules/string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "dependencies": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "engines": { - "node": ">=4" + "ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "requires": { + "safe-buffer": "^5.0.1" } }, - "node_modules/strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "dependencies": { - "ansi-regex": "^3.0.0" - }, - "engines": { - "node": ">=4" - } + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, - "node_modules/strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", - "dev": true, - "engines": { - "node": ">=0.10.0" + "enabled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-1.0.2.tgz", + "integrity": "sha1-ll9lE9LC0cX0ZStkouM5ZGf8L5M=", + "requires": { + "env-variable": "0.0.x" } }, - "node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==" }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } + "env-variable": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/env-variable/-/env-variable-0.0.5.tgz", + "integrity": "sha512-zoB603vQReOFvTg5xMl9I1P2PnHsHQQKTEowsKKD7nseUfJq6UWzK+4YtlWUO1nhiQUxe6XMkk+JleSZD1NZFA==" }, - "node_modules/term-size": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz", - "integrity": "sha1-RYuDiH8oj8Vtb/+/rSYuJmOO+mk=", - "dev": true, - "dependencies": { - "execa": "^0.7.0" - }, - "engines": { - "node": ">=4" - } + "es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==" }, - "node_modules/text-hex": { - "version": 
"1.0.0", - "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", - "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + "es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" }, - "node_modules/timed-out": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", - "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=", - "engines": { - "node": ">=0.10.0" + "es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "requires": { + "es-errors": "^1.3.0" } }, - "node_modules/to-fast-properties": { + "es6-promise": { + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" + }, + "escape-html": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz", - "integrity": "sha1-uDVx+k2MJbguIxsG46MFXeTKGkc=", - "engines": { - "node": ">=0.10.0" - } + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, - "node_modules/to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-object-path/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", - "dev": true, - "dependencies": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", - "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/token-stream": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-0.0.1.tgz", - "integrity": "sha1-zu78cXp2xDFvEm0LnbqlXX598Bo=" - }, - "node_modules/touch": { - "version": "3.1.0", - 
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz", - "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==", - "dev": true, - "dependencies": { - "nopt": "~1.0.10" - }, - "bin": { - "nodetouch": "bin/nodetouch.js" - } - }, - "node_modules/triple-beam": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", - "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" - }, - "node_modules/tsscmp": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/tsscmp/-/tsscmp-1.0.6.tgz", - "integrity": "sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA==", - "engines": { - "node": ">=0.6.x" - } - }, - "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/uglify-js": { - "version": "2.8.29", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", - "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", - "dependencies": { - "source-map": "~0.5.1", - "yargs": "~3.10.0" - }, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - }, - "optionalDependencies": { - "uglify-to-browserify": "~1.0.0" - } - }, - "node_modules/uglify-to-browserify": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", - "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", - "optional": true - }, - "node_modules/uid-safe": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", - "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", - "dependencies": { - "random-bytes": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/undefsafe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.3.tgz", - "integrity": "sha512-nrXZwwXrD/T/JXeygJqdCO6NZZ1L66HrxM/Z7mIq2oPanoN0F1nLx3lwJMu6AwJY69hdixaFQOuoYsMjE5/C2A==", - "dev": true, - "dependencies": { - "debug": "^2.2.0" - } - }, - "node_modules/union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", - "dev": true, - "dependencies": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unique-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-1.0.0.tgz", - "integrity": "sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=", - "dev": true, - "dependencies": { - "crypto-random-string": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/unset-value": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", - "dev": true, - "dependencies": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", - "dev": true, - "dependencies": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dev": true, - "dependencies": { - "isarray": "1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unzip-response": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unzip-response/-/unzip-response-2.0.1.tgz", - "integrity": "sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/upath": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", - "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", - "dev": true, - "engines": { - "node": ">=4", - "yarn": "*" - } - }, - "node_modules/update-notifier": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-2.5.0.tgz", - "integrity": "sha512-gwMdhgJHGuj/+wHJJs9e6PcCszpxR1b236igrOkUofGhqJuG+amlIKwApH1IW1WWl7ovZxsX49lMBWLxSdm5Dw==", - "dev": true, - "dependencies": { - "boxen": "^1.2.1", - "chalk": "^2.0.1", - "configstore": "^3.0.0", - "import-lazy": "^2.1.0", - "is-ci": "^1.0.10", - "is-installed-globally": "^0.1.0", - "is-npm": "^1.0.0", - "latest-version": "^3.0.0", - "semver-diff": "^2.0.0", - "xdg-basedir": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", - "deprecated": "Please see https://github.com/lydell/urix#deprecated", - "dev": true - }, - "node_modules/url-join": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", - "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==" - }, - "node_modules/url-parse-lax": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", - "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", - "dev": true, - "dependencies": { - "prepend-http": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/url-to-options": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", - "integrity": "sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k=", - "engines": { - "node": ">= 4" - } - }, - "node_modules/use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": 
"sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/void-elements": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-2.0.1.tgz", - "integrity": "sha1-wGavtYK7HLQSjWDqkjkulNXp2+w=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/widest-line": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-2.0.1.tgz", - "integrity": "sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA==", - "dev": true, - "dependencies": { - "string-width": "^2.1.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/window-size": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", - "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=", - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/winston": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.2.1.tgz", - "integrity": "sha512-zU6vgnS9dAWCEKg/QYigd6cgMVVNwyTzKs81XZtTFuRwJOcDdBg7AU0mXVyNbs7O5RH2zdv+BdNZUlx7mXPuOw==", - "dependencies": { - "async": "^2.6.1", - "diagnostics": "^1.1.1", - "is-stream": "^1.1.0", - "logform": "^2.1.1", - "one-time": "0.0.4", - "readable-stream": "^3.1.1", - "stack-trace": "0.0.x", - "triple-beam": "^1.3.0", - "winston-transport": "^4.3.0" - }, - "engines": { - "node": ">= 6.4.0" - } - }, - "node_modules/winston-transport": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.3.0.tgz", - "integrity": "sha512-B2wPuwUi3vhzn/51Uukcao4dIduEiPOcOt9HJ3QeaXgkJ5Z7UwpBzxS4ZGNHtrxrUvTwemsQiSys0ihOf8Mp1A==", - "dependencies": { - "readable-stream": "^2.3.6", - "triple-beam": "^1.2.0" - }, - "engines": { - "node": ">= 6.4.0" - } - }, - "node_modules/winston-transport/node_modules/readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/winston-transport/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/with": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/with/-/with-5.1.1.tgz", - "integrity": "sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4=", - "dependencies": { - "acorn": "^3.1.0", - "acorn-globals": "^3.0.0" - } - }, - "node_modules/wordwrap": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", - "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/wreck": { - "version": "14.2.0", - "resolved": "https://registry.npmjs.org/wreck/-/wreck-14.2.0.tgz", - "integrity": "sha512-NFFft3SMgqrJbXEVfYifh+QDWFxni+98/I7ut7rLbz3F0XOypluHsdo3mdEYssGSirMobM3fGlqhyikbWKDn2Q==", - "deprecated": "This module has moved and is now available at @hapi/wreck. Please update your dependencies as this version is no longer maintained an may contain bugs and security issues.", - "dependencies": { - "boom": "7.x.x", - "bourne": "1.x.x", - "hoek": "6.x.x" - } - }, - "node_modules/write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" - } - }, - "node_modules/xdg-basedir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-3.0.0.tgz", - "integrity": "sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" - }, - "node_modules/yargs": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", - "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", - "dependencies": { - "camelcase": "^1.0.2", - "cliui": "^2.1.0", - "decamelize": "^1.0.0", - "window-size": "0.1.0" - } - }, - "node_modules/yargs/node_modules/camelcase": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", - "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=", - "engines": { - "node": ">=0.10.0" - } - } - }, - "dependencies": { - "@hapi/address": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", - "integrity": "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==" - }, - "@hapi/bourne": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", - "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==" - }, - "@hapi/hoek": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", - "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==" - }, - "@hapi/joi": { - "version": "15.1.1", - "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.1.tgz", - "integrity": "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==", - "requires": { - "@hapi/address": 
"2.x.x", - "@hapi/bourne": "1.x.x", - "@hapi/hoek": "8.x.x", - "@hapi/topo": "3.x.x" - } - }, - "@hapi/topo": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.6.tgz", - "integrity": "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==", - "requires": { - "@hapi/hoek": "^8.3.0" - } - }, - "@panva/asn1.js": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", - "integrity": "sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==" - }, - "@sindresorhus/is": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", - "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==" - }, - "@types/babel-types": { - "version": "7.0.7", - "resolved": "https://registry.npmjs.org/@types/babel-types/-/babel-types-7.0.7.tgz", - "integrity": "sha512-dBtBbrc+qTHy1WdfHYjBwRln4+LWqASWakLHsWHR2NWHIFkv4W3O070IGoGLEBrJBvct3r0L1BUPuvURi7kYUQ==" - }, - "@types/babylon": { - "version": "6.16.5", - "resolved": "https://registry.npmjs.org/@types/babylon/-/babylon-6.16.5.tgz", - "integrity": "sha512-xH2e58elpj1X4ynnKp9qSnWlsRTIs6n3tgLGNfwAGHwePw0mulHQllV34n0T25uYSu1k0hRKkWXF890B1yS47w==", - "requires": { - "@types/babel-types": "*" - } - }, - "@types/body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", - "requires": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "@types/connect": { - "version": "3.4.35", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", - "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", - "requires": { - "@types/node": "*" - } - }, - "@types/express": { - "version": "4.17.13", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", - "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", - "requires": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.18", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "@types/express-serve-static-core": { - "version": "4.17.28", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz", - "integrity": "sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig==", - "requires": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*" - } - }, - "@types/jsonwebtoken": { - "version": "8.5.8", - "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-8.5.8.tgz", - "integrity": "sha512-zm6xBQpFDIDM6o9r6HSgDeIcLy82TKWctCXEPbJJcXb5AKmi5BNNdLXneixK4lplX3PqIVcwLBCGE/kAGnlD4A==", - "requires": { - "@types/node": "*" - } - }, - "@types/mime": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", - "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" - }, - "@types/node": { - "version": "17.0.42", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.42.tgz", - "integrity": "sha512-Q5BPGyGKcvQgAMbsr7qEGN/kIPN6zZecYYABeTDBizOsau+2NMdSVTar9UQw21A2+JyA2KRNDYaYrPB0Rpk2oQ==" - }, 
- "@types/qs": { - "version": "6.9.7", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", - "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" - }, - "@types/range-parser": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", - "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" - }, - "@types/serve-static": { - "version": "1.13.10", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", - "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", - "requires": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true - }, - "accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "requires": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - } - }, - "acorn": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", - "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=" - }, - "acorn-globals": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-3.1.0.tgz", - "integrity": "sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8=", - "requires": { - "acorn": "^4.0.4" - }, - "dependencies": { - "acorn": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", - "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=" - } - } - }, - "aggregate-error": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-1.0.0.tgz", - "integrity": "sha1-iINE2tAiCnLjr1CQYRf0h3GSX6w=", - "requires": { - "clean-stack": "^1.0.0", - "indent-string": "^3.0.0" - } - }, - "align-text": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", - "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", - "requires": { - "kind-of": "^3.0.2", - "longest": "^1.0.1", - "repeat-string": "^1.5.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "ansi-align": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-2.0.0.tgz", - "integrity": "sha1-w2rsy6VjuJzrVW82kPCx2eNUf38=", - "dev": true, - "requires": { - "string-width": "^2.0.0" - } - }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": 
"sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - "requires": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - }, - "dependencies": { - "normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", - "dev": true, - "requires": { - "remove-trailing-separator": "^1.0.1" - } - } - } - }, - "arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", - "dev": true - }, - "arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "dev": true - }, - "arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", - "dev": true - }, - "array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" - }, - "array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", - "dev": true - }, - "asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" - }, - "assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", - "dev": true - }, - "async": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", - "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", - "requires": { - "lodash": "^4.17.14" - } - }, - "async-each": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.3.tgz", - "integrity": "sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==", - "dev": true - }, - "atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "dev": true - }, - "babel-runtime": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", - "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=", - "requires": { - "core-js": "^2.4.0", - "regenerator-runtime": "^0.11.0" - } - }, - "babel-types": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz", - "integrity": "sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=", - "requires": { - "babel-runtime": "^6.26.0", - "esutils": "^2.0.2", - "lodash": "^4.17.4", - "to-fast-properties": "^1.0.3" - } - }, - "babylon": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", - "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==" - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": 
"sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "dev": true, - "requires": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "base64-js": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", - "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" - }, - "base64url": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/base64url/-/base64url-3.0.1.tgz", - "integrity": "sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A==" - }, - "basic-auth": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", - "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", - "requires": { - "safe-buffer": "5.1.2" - } - }, - "binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", - "dev": true - }, - "bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "dev": true, - "optional": true, - "requires": { - "file-uri-to-path": "1.0.0" - } - }, - "body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", - "requires": { - "bytes": "3.1.2", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": 
"0.4.24", - "on-finished": "2.4.1", - "qs": "6.11.0", - "raw-body": "2.5.1", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "dependencies": { - "depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" - }, - "on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "requires": { - "ee-first": "1.1.1" - } - } - } - }, - "boom": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-7.3.0.tgz", - "integrity": "sha512-Swpoyi2t5+GhOEGw8rEsKvTxFLIDiiKoUc2gsoV6Lyr43LHBIzch3k2MvYUs8RTROrIkVJ3Al0TkaOGjnb+B6A==", - "requires": { - "hoek": "6.x.x" - } - }, - "bourne": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/bourne/-/bourne-1.1.2.tgz", - "integrity": "sha512-b2dgVkTZhkQirNMohgC00rWfpVqEi9y5tKM1k3JvoNx05ODtfQoPPd4js9CYFQoY0IM8LAmnJulEuWv74zjUOg==" - }, - "boxen": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-1.3.0.tgz", - "integrity": "sha512-TNPjfTr432qx7yOjQyaXm3dSR0MH9vXp7eT1BFSl/C51g+EFnOR9hTg1IreahGBmDNCehscshe45f+C1TBZbLw==", - "dev": true, - "requires": { - "ansi-align": "^2.0.0", - "camelcase": "^4.0.0", - "chalk": "^2.0.1", - "cli-boxes": "^1.0.0", - "string-width": "^2.0.0", - "term-size": "^1.2.0", - "widest-line": "^2.0.0" - } - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "browserify-zlib": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", - "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", - "requires": { - "pako": "~1.0.5" - } - }, - "buffer": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.6.0.tgz", - "integrity": "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==", - "requires": { - "base64-js": "^1.0.2", - "ieee754": "^1.1.4" - } - }, - "buffer-equal-constant-time": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" - }, - "bytes": { - "version": 
"3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" - }, - "cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", - "dev": true, - "requires": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - } - }, - "cacheable-request": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", - "integrity": "sha1-DYCIAbY0KtM8kd+dC0TcCbkeXD0=", - "requires": { - "clone-response": "1.0.2", - "get-stream": "3.0.0", - "http-cache-semantics": "3.8.1", - "keyv": "3.0.0", - "lowercase-keys": "1.0.0", - "normalize-url": "2.0.1", - "responselike": "1.0.2" - }, - "dependencies": { - "lowercase-keys": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", - "integrity": "sha1-TjNms55/VFfjXxMkvfb4jQv8cwY=" - } - } - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", - "dev": true - }, - "capture-stack-trace": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz", - "integrity": "sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw==", - "dev": true - }, - "center-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", - "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", - "requires": { - "align-text": "^0.1.3", - "lazy-cache": "^1.0.3" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "character-parser": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz", - "integrity": "sha1-x84o821LzZdE5f/CxfzeHHMmH8A=", - "requires": { - "is-regex": "^1.0.3" - } - }, - "chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "dev": true, - "requires": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "fsevents": "^1.2.7", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" - } - }, - "ci-info": { - "version": "1.6.0", - "resolved": 
"https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", - "integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==", - "dev": true - }, - "class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "dev": true, - "requires": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, - "clean-css": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.3.tgz", - "integrity": "sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA==", - "requires": { - "source-map": "~0.6.0" - }, - "dependencies": { - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - } - } - }, - "clean-stack": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-1.3.0.tgz", - "integrity": "sha1-noIVAa6XmYbEax1m0tQy2y/UrjE=" - }, - "cli-boxes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-1.0.0.tgz", - "integrity": "sha1-T6kXw+WclKAEzWH47lCdplFocUM=", - "dev": true - }, - "cliui": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", - "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", - "requires": { - "center-align": "^0.1.1", - "right-align": "^0.1.1", - "wordwrap": "0.0.2" - } - }, - "clone-response": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", - "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", - "requires": { - "mimic-response": "^1.0.0" - } - }, - "collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "dev": true, - "requires": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - } - }, - "color": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz", - "integrity": "sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==", - "requires": { - "color-convert": "^1.9.1", - "color-string": "^1.5.2" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "color-string": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.5.tgz", - "integrity": "sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg==", - "requires": { - 
"color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, - "colornames": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/colornames/-/colornames-1.1.1.tgz", - "integrity": "sha1-+IiQMGhcfE/54qVZ9Qd+t2qBb5Y=" - }, - "colors": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.3.3.tgz", - "integrity": "sha512-mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg==" - }, - "colorspace": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.1.tgz", - "integrity": "sha512-pI3btWyiuz7Ken0BWh9Elzsmv2bM9AhA7psXib4anUXy/orfZ/E0MbQwhSOG/9L8hLlalqrU0UhOuqxW1YjmVw==", - "requires": { - "color": "3.0.x", - "text-hex": "1.0.x" - } - }, - "component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "configstore": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-3.1.2.tgz", - "integrity": "sha512-vtv5HtGjcYUgFrXc6Kx747B83MRRVS5R1VTEQoXvuP+kMI+if6uywV0nDGoiydJRy4yk7h9od5Og0kxx4zUXmw==", - "dev": true, - "requires": { - "dot-prop": "^4.1.0", - "graceful-fs": "^4.1.2", - "make-dir": "^1.0.0", - "unique-string": "^1.0.0", - "write-file-atomic": "^2.0.0", - "xdg-basedir": "^3.0.0" - } - }, - "constantinople": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-3.1.2.tgz", - "integrity": "sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw==", - "requires": { - "@types/babel-types": "^7.0.0", - "@types/babylon": "^6.16.2", - "babel-types": "^6.26.0", - "babylon": "^6.18.0" - } - }, - "content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "requires": { - "safe-buffer": "5.2.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - } - } - }, - "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" - }, - "cookie": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", - "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" - }, - "cookie-parser": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.5.tgz", - "integrity": "sha512-f13bPUj/gG/5mDr+xLmSxxDsB9DQiTIfhJS/sqjrmfAWiAN+x2O4i/XguTL9yDZ+/IFDanJ+5x7hC4CXT9Tdzw==", - "requires": { - "cookie": "0.4.0", - "cookie-signature": "1.0.6" - } - }, - "cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": 
"sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" - }, - "copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", - "dev": true - }, - "core-js": { - "version": "2.6.11", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.11.tgz", - "integrity": "sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg==" - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" - }, - "create-error-class": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", - "integrity": "sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y=", - "dev": true, - "requires": { - "capture-stack-trace": "^1.0.0" - } - }, - "cross-env": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-5.2.1.tgz", - "integrity": "sha512-1yHhtcfAd1r4nwQgknowuUNfIT9E8dOMMspC36g45dN+iD1blloi7xp8X/xAIDnjHWyt1uQ8PHk2fkNaym7soQ==", - "dev": true, - "requires": { - "cross-spawn": "^6.0.5" - } - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "crypto-random-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-1.0.0.tgz", - "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=", - "dev": true - }, - "csrf": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/csrf/-/csrf-3.1.0.tgz", - "integrity": "sha512-uTqEnCvWRk042asU6JtapDTcJeeailFy4ydOQS28bj1hcLnYRiqi8SsD2jS412AY1I/4qdOwWZun774iqywf9w==", - "requires": { - "rndm": "1.2.0", - "tsscmp": "1.0.6", - "uid-safe": "2.1.5" - } - }, - "csurf": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/csurf/-/csurf-1.11.0.tgz", - "integrity": "sha512-UCtehyEExKTxgiu8UHdGvHj4tnpE/Qctue03Giq5gPgMQ9cg/ciod5blZQ5a4uCEenNQjxyGuzygLdKUmee/bQ==", - "requires": { - "cookie": "0.4.0", - "cookie-signature": "1.0.6", - "csrf": "3.1.0", - "http-errors": "~1.7.3" - }, - "dependencies": { - "http-errors": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.3.tgz", - "integrity": "sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==", - "requires": { - "depd": "~1.1.2", - "inherits": "2.0.4", - "setprototypeof": "1.1.1", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.0" - } - } - } - }, - "date-fns": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.12.0.tgz", - "integrity": "sha512-qJgn99xxKnFgB1qL4jpxU7Q2t0LOn1p8KMIveef3UZD7kqjT3tpFNNdXJelEHhE+rUgffriXriw/sOSU+cS1Hw==" - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" - }, - 
"decode-uri-component": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", - "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" - }, - "decompress-response": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", - "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", - "requires": { - "mimic-response": "^1.0.0" - } - }, - "deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "dev": true - }, - "define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "dev": true, - "requires": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "dependencies": { - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" - }, - "destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" - }, - "diagnostics": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/diagnostics/-/diagnostics-1.1.1.tgz", - "integrity": "sha512-8wn1PmdunLJ9Tqbx+Fx/ZEuHfJf4NKSN2ZBj7SJC/OWRWha843+WsTjqMe1B5E3p28jqBlp+mJ2fPVxPyNgYKQ==", - "requires": { - "colorspace": "1.1.x", - "enabled": "1.0.x", - "kuler": "1.0.x" - } - }, - "doctypes": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz", - "integrity": "sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk=" - }, - "dot-prop": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", - "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", - "dev": true, - "requires": { - "is-obj": "^1.0.0" - } - }, - "dotenv": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-7.0.0.tgz", - "integrity": "sha512-M3NhsLbV1i6HuGzBUH8vXrtxOk+tWmzWKDMbAVSUp3Zsjm7ywFeuwrUXhmhQyRK1q5B5GGy7hcXPbj3bnfZg2g==" - }, - 
"duplexer3": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", - "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=" - }, - "ecdsa-sig-formatter": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", - "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" - }, - "enabled": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/enabled/-/enabled-1.0.2.tgz", - "integrity": "sha1-ll9lE9LC0cX0ZStkouM5ZGf8L5M=", - "requires": { - "env-variable": "0.0.x" - } - }, - "encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" - }, - "env-variable": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/env-variable/-/env-variable-0.0.5.tgz", - "integrity": "sha512-zoB603vQReOFvTg5xMl9I1P2PnHsHQQKTEowsKKD7nseUfJq6UWzK+4YtlWUO1nhiQUxe6XMkk+JleSZD1NZFA==" - }, - "es6-promise": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" - }, - "escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" }, - "execa": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", - "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", - "dev": true, - "requires": { - "cross-spawn": "^5.0.1", - "get-stream": "^3.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "dependencies": { - "cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", - "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - } - } - }, - "expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", - "dev": true, - "requires": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - 
"extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, "express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.2.0", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -7007,216 +4242,15 @@ }, "dependencies": { "cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==" - }, - "depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" - }, - "on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "requires": { - "ee-first": "1.1.1" - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - }, - "setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - }, - "statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" - } - } - }, - 
"express-session": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.17.0.tgz", - "integrity": "sha512-t4oX2z7uoSqATbMfsxWMbNjAL0T5zpvcJCk3Z9wnPPN7ibddhnmDZXHfEcoBMG2ojKXZoCyPMc5FbtK+G7SoDg==", - "requires": { - "cookie": "0.4.0", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "~2.0.0", - "on-headers": "~1.0.2", - "parseurl": "~1.3.3", - "safe-buffer": "5.2.0", - "uid-safe": "~2.1.5" - }, - "dependencies": { - "depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" - }, - "safe-buffer": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", - "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" - } - } - }, - "express-winston": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/express-winston/-/express-winston-3.4.0.tgz", - "integrity": "sha512-CKo4ESwIV4BpNIsGVNiq2GcAwuomL4dVJRIIH/2K/jMpoRI2DakhkVTtaJACzV7n2I1v+knDJkkjZRCymJ7nmA==", - "requires": { - "chalk": "^2.4.1", - "lodash": "^4.17.10" - } - }, - "extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "dev": true, - "requires": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, - "extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "dev": true, - "requires": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - 
"version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "fast-safe-stringify": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.6.tgz", - "integrity": "sha512-q8BZ89jjc+mz08rSxROs8VsrBBcn1SIw1kq9NjolL509tkABRk9io01RAjSaEv1Xb2uFLt8VtRiZbGp5H8iDtg==" - }, - "fecha": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fecha/-/fecha-2.3.3.tgz", - "integrity": "sha512-lUGBnIamTAwk4znq5BcqsDaxSmZ9nDVJaij6NvRt/Tg4R69gERA+otPKbS86ROw9nxVMw2/mp1fnaiWqbs6Sdg==" - }, - "file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", - "dev": true, - "optional": true - }, - "fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", - "requires": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - }, - "dependencies": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==" + }, + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, "on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", @@ -7225,6 +4259,16 @@ "ee-first": "1.1.1" } }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, "statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -7232,481 +4276,113 @@ } } }, - "for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - 
"dev": true - }, - "forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" - }, - "fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "dev": true, - "requires": { - "map-cache": "^0.2.2" - } - }, - "fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" - }, - "from2": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", - "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", - "requires": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "fsevents": { - "version": "1.2.12", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.12.tgz", - "integrity": "sha512-Ggd/Ktt7E7I8pxZRbGIs7vwqAPscSESMrCSkx2FtWeqmheJgCo2R74fTsZFCifr0VTPwqRpPv17+6b8Zp7th0Q==", - "dev": true, - "optional": true, + "express-session": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.17.0.tgz", + "integrity": "sha512-t4oX2z7uoSqATbMfsxWMbNjAL0T5zpvcJCk3Z9wnPPN7ibddhnmDZXHfEcoBMG2ojKXZoCyPMc5FbtK+G7SoDg==", "requires": { - "bindings": "^1.5.0", - "nan": "^2.12.1", - "node-pre-gyp": "*" + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.0.2", + "parseurl": "~1.3.3", + "safe-buffer": "5.2.0", + "uid-safe": "~2.1.5" }, "dependencies": { - "abbrev": { - "version": "1.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "ansi-regex": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "aproba": { - "version": "1.2.0", - "bundled": true, - "dev": true, - "optional": true - }, - "are-we-there-yet": { - "version": "1.1.5", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" - } - }, - "balanced-match": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "brace-expansion": { - "version": "1.1.11", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "chownr": { - "version": "1.1.4", - "bundled": true, - "dev": true, - "optional": true - }, - "code-point-at": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "optional": true - }, - "concat-map": { - "version": "0.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "console-control-strings": { - 
"version": "1.1.0", - "bundled": true, - "dev": true, - "optional": true - }, - "core-util-is": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "debug": { - "version": "3.2.6", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "ms": "^2.1.1" - } - }, - "deep-extend": { - "version": "0.6.0", - "bundled": true, - "dev": true, - "optional": true - }, - "delegates": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "detect-libc": { - "version": "1.0.3", - "bundled": true, - "dev": true, - "optional": true - }, - "fs-minipass": { - "version": "1.2.7", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minipass": "^2.6.0" - } - }, - "fs.realpath": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "gauge": { - "version": "2.7.4", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "glob": { - "version": "7.1.6", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "has-unicode": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "iconv-lite": { - "version": "0.4.24", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - }, - "ignore-walk": { - "version": "3.0.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minimatch": "^3.0.4" - } - }, - "inflight": { - "version": "1.0.6", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "bundled": true, - "dev": true, - "optional": true - }, - "ini": { - "version": "1.3.5", - "bundled": true, - "dev": true, - "optional": true - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "isarray": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "minimatch": { - "version": "3.0.4", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.5", - "bundled": true, - "dev": true, - "optional": true - }, - "minipass": { - "version": "2.9.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "safe-buffer": "^5.1.2", - "yallist": "^3.0.0" - } - }, - "minizlib": { - "version": "1.3.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minipass": "^2.9.0" - } - }, - "mkdirp": { - "version": "0.5.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minimist": "^1.2.5" - } - }, - "ms": { - "version": "2.1.2", - "bundled": true, - "dev": true, - "optional": true - }, - "needle": { - "version": "2.3.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "debug": "^3.2.6", - "iconv-lite": "^0.4.4", - "sax": "^1.2.4" - } - }, - "node-pre-gyp": { - "version": "0.14.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "detect-libc": "^1.0.2", - "mkdirp": "^0.5.1", - 
"needle": "^2.2.1", - "nopt": "^4.0.1", - "npm-packlist": "^1.1.6", - "npmlog": "^4.0.2", - "rc": "^1.2.7", - "rimraf": "^2.6.1", - "semver": "^5.3.0", - "tar": "^4.4.2" - } - }, - "nopt": { - "version": "4.0.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "abbrev": "1", - "osenv": "^0.1.4" - } - }, - "npm-bundled": { - "version": "1.1.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "npm-normalize-package-bin": "^1.0.1" - } - }, - "npm-normalize-package-bin": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "npm-packlist": { - "version": "1.4.8", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "ignore-walk": "^3.0.1", - "npm-bundled": "^1.0.1", - "npm-normalize-package-bin": "^1.0.1" - } - }, - "npmlog": { - "version": "4.1.2", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "number-is-nan": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "object-assign": { - "version": "4.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "once": { - "version": "1.4.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "wrappy": "1" - } - }, - "os-homedir": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "os-tmpdir": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" }, - "osenv": { - "version": "0.1.5", - "bundled": true, - "dev": true, - "optional": true, + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + } + } + }, + "express-winston": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/express-winston/-/express-winston-3.4.0.tgz", + "integrity": "sha512-CKo4ESwIV4BpNIsGVNiq2GcAwuomL4dVJRIIH/2K/jMpoRI2DakhkVTtaJACzV7n2I1v+knDJkkjZRCymJ7nmA==", + "requires": { + "chalk": "^2.4.1", + "lodash": "^4.17.10" + } + }, + "fast-safe-stringify": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.6.tgz", + "integrity": "sha512-q8BZ89jjc+mz08rSxROs8VsrBBcn1SIw1kq9NjolL509tkABRk9io01RAjSaEv1Xb2uFLt8VtRiZbGp5H8iDtg==" + }, + "fecha": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-2.3.3.tgz", + "integrity": "sha512-lUGBnIamTAwk4znq5BcqsDaxSmZ9nDVJaij6NvRt/Tg4R69gERA+otPKbS86ROw9nxVMw2/mp1fnaiWqbs6Sdg==" + }, + "fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "requires": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + 
"on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "dependencies": { + "on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "requires": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.0" + "ee-first": "1.1.1" } }, - "path-is-absolute": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "process-nextick-args": { + "statuses": { "version": "2.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "rc": { - "version": "1.2.8", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - } - }, + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" + } + } + }, + "forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + }, + "from2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + }, + "dependencies": { "readable-stream": { "version": "2.3.7", - "bundled": true, - "dev": true, - "optional": true, + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -7717,143 +4393,52 @@ "util-deprecate": "~1.0.1" } }, - "rimraf": { - "version": "2.7.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "glob": "^7.1.3" - } - }, - "safe-buffer": { - "version": "5.1.2", - "bundled": true, - "dev": true, - "optional": true - }, - "safer-buffer": { - "version": "2.1.2", - "bundled": true, - "dev": true, - "optional": true - }, - "sax": { - "version": "1.2.4", - "bundled": true, - "dev": true, - "optional": true - }, - "semver": { - "version": "5.7.1", - "bundled": true, - "dev": true, - "optional": true - }, - "set-blocking": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "signal-exit": { - "version": "3.0.2", - "bundled": true, - "dev": true, - "optional": true - }, "string_decoder": { "version": "1.1.1", - "bundled": true, - "dev": true, - "optional": true, + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } - }, - "string-width": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "ansi-regex": "^2.0.0" - } - 
}, - "strip-json-comments": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "tar": { - "version": "4.4.13", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "chownr": "^1.1.1", - "fs-minipass": "^1.2.5", - "minipass": "^2.8.6", - "minizlib": "^1.2.1", - "mkdirp": "^0.5.0", - "safe-buffer": "^5.1.2", - "yallist": "^3.0.3" - } - }, - "util-deprecate": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "wide-align": { - "version": "1.1.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "string-width": "^1.0.2 || 2" - } - }, - "wrappy": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "yallist": { - "version": "3.1.1", - "bundled": true, - "dev": true, - "optional": true } } }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" }, "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "requires": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + } + }, + "get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" } }, "get-stream": { @@ -7861,89 +4446,42 @@ "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" }, - "get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", - "dev": true - }, "glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", - "dev": true, - "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - }, - "dependencies": { - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "dev": true, - "requires": { - "is-extglob": "^2.1.0" 
- } - } - } - }, - "global-dirs": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz", - "integrity": "sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU=", - "dev": true, - "requires": { - "ini": "^1.3.4" - } - }, - "got": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/got/-/got-6.7.1.tgz", - "integrity": "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA=", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "requires": { - "create-error-class": "^3.0.0", - "duplexer3": "^0.1.4", - "get-stream": "^3.0.0", - "is-redirect": "^1.0.0", - "is-retry-allowed": "^1.0.0", - "is-stream": "^1.0.0", - "lowercase-keys": "^1.0.0", - "safe-buffer": "^5.0.1", - "timed-out": "^4.0.0", - "unzip-response": "^2.0.1", - "url-parse-lax": "^1.0.0" + "is-glob": "^4.0.1" } }, - "graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } + "gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==" }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" }, + "has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "requires": { + "es-define-property": "^1.0.0" + } + }, "has-symbol-support-x": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==" }, "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==" }, "has-to-string-tag-x": { "version": "1.4.1", @@ -7953,36 +4491,20 @@ "has-symbol-support-x": "^1.4.1" } }, - "has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "dev": true, + "has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "requires": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" + "has-symbols": "^1.0.3" } 
}, - "has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "dev": true, + "hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "requires": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "dependencies": { - "kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } + "function-bind": "^1.1.2" } }, "hoek": { @@ -8030,20 +4552,27 @@ } }, "hydra-login-consent-logout": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/hydra-login-consent-logout/-/hydra-login-consent-logout-1.4.3.tgz", - "integrity": "sha512-q2Y1SBpNnKf869TvloCr0CYL3tIljORDiMsBg/3xG6W+wQXGi7VQ4WJWdQR2hTtkSvYU2dXhLvxsQuFb0+mwlw==", - "requires": { - "body-parser": "^1.19.0", - "cookie-parser": "^1.4.5", + "version": "2.4.0-pre.3", + "resolved": "https://registry.npmjs.org/hydra-login-consent-logout/-/hydra-login-consent-logout-2.4.0-pre.3.tgz", + "integrity": "sha512-Dtoop55BOQ/z+DQunVblxcFC5IMLw9NOw4vqTm2WmvMn7TjWwCfq+gA03ifGPAiFuqbyprxUnUaILCSJn9Hqdg==", + "requires": { + "@ory/hydra-client-fetch": "^2.4.0-alpha.1", + "@types/cookie-parser": "^1.4.2", + "@types/csurf": "^1.9.36", + "@types/express": "^4.17.7", + "@types/morgan": "^1.9.1", + "@types/url-join": "^4.0.0", + "body-parser": "^1.20.3", + "cookie-parser": "^1.4.7", "csurf": "^1.11.0", "debug": "^4.1.1", - "express": "^4.17.1", + "express": "^4.21.2", "morgan": "^1.10.0", - "node-fetch": "^2.6.0", - "pug": "^2.0.4", + "node-fetch": "^2.6.7", + "pug": "^3.0.3", "querystring": "^0.2.0", "serve-favicon": "^2.5.0", + "typescript": "^5.7.3", "url-join": "^4.0.1" }, "dependencies": { @@ -8083,18 +4612,6 @@ "integrity": "sha1-SMptcvbGo68Aqa1K5odr44ieKwk=", "dev": true }, - "import-lazy": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", - "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=", - "dev": true - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "dev": true - }, "indent-string": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", @@ -8105,12 +4622,6 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, - "ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", - "dev": true - }, "into-stream": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", @@ -8125,176 +4636,56 @@ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" }, - "is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": 
"sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "is-arrayish": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" }, - "is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", - "dev": true, - "requires": { - "binary-extensions": "^1.0.0" - } - }, - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - }, - "is-ci": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-1.2.1.tgz", - "integrity": "sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg==", - "dev": true, - "requires": { - "ci-info": "^1.5.0" - } - }, - "is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dev": true, "requires": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "dependencies": { - "kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "dev": true - } + "binary-extensions": "^2.0.0" } }, - "is-expression": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-3.0.0.tgz", - "integrity": "sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8=", + "is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", "requires": { - "acorn": "~4.0.2", - "object-assign": "^4.0.1" - }, - "dependencies": { - "acorn": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", - "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=" - } + "hasown": "^2.0.2" } }, - "is-extendable": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "dev": true + "is-expression": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-4.0.0.tgz", + "integrity": "sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==", + "requires": { + "acorn": "^7.1.1", + "object-assign": "^4.1.1" + } }, "is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true }, "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "requires": { "is-extglob": "^2.1.1" } }, - "is-installed-globally": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.1.0.tgz", - "integrity": "sha1-Df2Y9akRFxbdU13aZJL2e/PSWoA=", - "dev": true, - "requires": { - "global-dirs": "^0.1.0", - "is-path-inside": "^1.0.0" - } - }, - "is-npm": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-1.0.0.tgz", - "integrity": "sha1-8vtjpl5JBbQGyGBydloaTceTufQ=", - "dev": true - }, "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true }, "is-object": { @@ -8302,46 +4693,25 @@ "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.1.tgz", "integrity": "sha1-iVJojF7C/9awPsyF52ngKQMINHA=" }, - "is-path-inside": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz", - "integrity": "sha1-jvW33lBDej/cprToZe96pVy0gDY=", - "dev": true, - "requires": { - "path-is-inside": "^1.0.1" - } - }, "is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=" }, - "is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": 
"sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "requires": { - "isobject": "^3.0.1" - } - }, "is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=" - }, - "is-redirect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz", - "integrity": "sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ=", - "dev": true + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", + "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" }, "is-regex": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", - "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", "requires": { - "has": "^1.0.3" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" } }, "is-retry-allowed": { @@ -8354,12 +4724,6 @@ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" }, - "is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", - "dev": true - }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", @@ -8371,12 +4735,6 @@ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", "dev": true }, - "isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true - }, "isurl": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", @@ -8387,9 +4745,9 @@ } }, "jose": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/jose/-/jose-2.0.5.tgz", - "integrity": "sha512-BAiDNeDKTMgk4tvD0BbxJ8xHEHBZgpeRZ1zGPPsitSyMgjoMWiLGYAE7H7NpP5h0lPppQajQs871E8NHUrzVPA==", + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/jose/-/jose-2.0.7.tgz", + "integrity": "sha512-5hFWIigKqC+e/lRyQhfnirrAqUdIPMB7SJRqflJaO29dW7q5DFvH1XCSTmv6PQ6pb++0k6MJlLRoS0Wv4s38Wg==", "requires": { "@panva/asn1.js": "^1.0.0" } @@ -8397,7 +4755,7 @@ "js-stringify": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz", - "integrity": "sha1-Fzb939lyTyijaCrcYjCufk6Weds=" + "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==" }, "json-buffer": { "version": "3.0.0", @@ -8431,18 +4789,18 @@ "jstransformer": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz", - "integrity": "sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM=", + "integrity": "sha512-C9YK3Rf8q6VAPDCCU9fnqo3mAfOH6vUGnMcP4AQAYIEpWtfGLpwOTmZ+igtdK5y+VvI2n3CyYSzy4Qh34eq24A==", "requires": { "is-promise": "^2.0.0", "promise": "^7.0.1" } }, "jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": 
"sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.2.tgz", + "integrity": "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==", "requires": { - "buffer-equal-constant-time": "1.0.1", + "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } @@ -8476,11 +4834,11 @@ } }, "jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz", + "integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==", "requires": { - "jwa": "^1.4.1", + "jwa": "^1.4.2", "safe-buffer": "^5.0.1" } }, @@ -8492,12 +4850,6 @@ "json-buffer": "3.0.0" } }, - "kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true - }, "kuler": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/kuler/-/kuler-1.0.1.tgz", @@ -8506,20 +4858,6 @@ "colornames": "^1.1.1" } }, - "latest-version": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-3.1.0.tgz", - "integrity": "sha1-ogU4P+oyKzO1rjsYq+4NwvNW7hU=", - "dev": true, - "requires": { - "package-json": "^4.0.0" - } - }, - "lazy-cache": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", - "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=" - }, "limiter": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", @@ -8594,11 +4932,6 @@ "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" }, - "longest": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", - "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=" - }, "lowercase-keys": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", @@ -8622,29 +4955,10 @@ "lru-cache": "~4.0.0" } }, - "make-dir": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", - "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, - "map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", - "dev": true - }, - "map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "dev": true, - "requires": { - "object-visit": "^1.0.0" - } + "math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==" }, "media-typer": { "version": "0.3.0", @@ -8652,36 +4966,15 @@ "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" }, 
"merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==" }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, - "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - } - }, "mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", @@ -8714,33 +5007,6 @@ "brace-expansion": "^1.1.7" } }, - "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, - "mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dev": true, - "requires": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, "morgan": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", @@ -8765,32 +5031,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "nan": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", - "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==", - "dev": true, - "optional": true - }, - "nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "dev": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - } - }, "negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -8803,9 +5043,12 @@ "dev": true }, "node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", 
- "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz", + "integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==", + "requires": { + "whatwg-url": "^5.0.0" + } }, "node-forge": { "version": "0.8.5", @@ -8847,42 +5090,36 @@ "integrity": "sha1-sEDrCSOWivq/jTL7HxfxFn/auQc=" }, "nodemon": { - "version": "1.19.4", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-1.19.4.tgz", - "integrity": "sha512-VGPaqQBNk193lrJFotBU8nvWZPqEZY2eIzymy2jjY0fJ9qIsxA0sxQ8ATPl0gZC645gijYEc1jtZvpS8QWzJGQ==", + "version": "2.0.22", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz", + "integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==", "dev": true, "requires": { - "chokidar": "^2.1.8", - "debug": "^3.2.6", + "chokidar": "^3.5.2", + "debug": "^3.2.7", "ignore-by-default": "^1.0.1", - "minimatch": "^3.0.4", - "pstree.remy": "^1.1.7", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", "semver": "^5.7.1", + "simple-update-notifier": "^1.0.7", "supports-color": "^5.5.0", "touch": "^3.1.0", - "undefsafe": "^2.0.2", - "update-notifier": "^2.5.0" + "undefsafe": "^2.0.5" }, "dependencies": { "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "requires": { "ms": "^2.1.1" } }, "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true } } @@ -8919,78 +5156,20 @@ } } }, - "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "dev": true, - "requires": { - "path-key": "^2.0.0" - } - }, "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, - "object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "dev": true, - "requires": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "kind-of": { - "version": "3.2.2", - 
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "object-hash": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-1.3.1.tgz", "integrity": "sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA==" }, "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", - "dev": true, - "requires": { - "isobject": "^3.0.0" - } - }, - "object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dev": true, - "requires": { - "isobject": "^3.0.1" - } + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==" }, "oidc-token-hash": { "version": "3.0.2", @@ -9121,18 +5300,6 @@ "p-finally": "^1.0.0" } }, - "package-json": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-4.0.1.tgz", - "integrity": "sha1-iGmgQBJTZhxMTKPabCEh7VVfXu0=", - "dev": true, - "requires": { - "got": "^6.7.1", - "registry-auth-token": "^3.0.1", - "registry-url": "^3.0.3", - "semver": "^5.1.0" - } - }, "pako": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", @@ -9143,30 +5310,6 @@ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, - "pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", - "dev": true - }, - "path-dirname": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", - "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=", - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", - "dev": true - }, "path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", @@ -9174,32 +5317,26 @@ "dev": true }, "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "path-to-regexp": { - "version": "0.1.7", - "resolved": 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true }, "pify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" }, - "posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", - "dev": true - }, - "prepend-http": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", - "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=", - "dev": true - }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -9233,131 +5370,129 @@ "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" }, "pstree.remy": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.7.tgz", - "integrity": "sha512-xsMgrUwRpuGskEzBFkH8NmTimbZ5PcPup0LA8JJkHIm2IMUbQcpo3yeLNWVrufEYjh8YwtSVh0xz6UeWc5Oh5A==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", "dev": true }, "pug": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pug/-/pug-2.0.4.tgz", - "integrity": "sha512-XhoaDlvi6NIzL49nu094R2NA6P37ijtgMDuWE+ofekDChvfKnzFal60bhSdiy8y2PBO6fmz3oMEIcfpBVRUdvw==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pug/-/pug-3.0.3.tgz", + "integrity": "sha512-uBi6kmc9f3SZ3PXxqcHiUZLmIXgfgWooKWXcwSGwQd2Zi5Rb0bT14+8CJjJgI8AB+nndLaNgHGrcc6bPIB665g==", "requires": { - "pug-code-gen": "^2.0.2", - "pug-filters": "^3.1.1", - "pug-lexer": "^4.1.0", - "pug-linker": "^3.0.6", - "pug-load": "^2.0.12", - "pug-parser": "^5.0.1", - "pug-runtime": "^2.0.5", - "pug-strip-comments": "^1.0.4" + "pug-code-gen": "^3.0.3", + "pug-filters": "^4.0.0", + "pug-lexer": "^5.0.1", + "pug-linker": "^4.0.0", + "pug-load": "^3.0.0", + "pug-parser": "^6.0.0", + "pug-runtime": "^3.0.1", + "pug-strip-comments": "^2.0.0" } }, "pug-attrs": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-2.0.4.tgz", - "integrity": "sha512-TaZ4Z2TWUPDJcV3wjU3RtUXMrd3kM4Wzjbe3EWnSsZPsJ3LDI0F3yCnf2/W7PPFF+edUFQ0HgDL1IoxSz5K8EQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-3.0.0.tgz", + "integrity": "sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==", "requires": { - "constantinople": "^3.0.1", - "js-stringify": "^1.0.1", - "pug-runtime": "^2.0.5" + "constantinople": "^4.0.1", + "js-stringify": "^1.0.2", + "pug-runtime": "^3.0.0" } }, "pug-code-gen": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-2.0.3.tgz", - "integrity": "sha512-r9sezXdDuZJfW9J91TN/2LFbiqDhmltTFmGpHTsGdrNGp3p4SxAjjXEfnuK2e4ywYsRIVP0NeLbSAMHUcaX1EA==", + "version": 
"3.0.3", + "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-3.0.3.tgz", + "integrity": "sha512-cYQg0JW0w32Ux+XTeZnBEeuWrAY7/HNE6TWnhiHGnnRYlCgyAUPoyh9KzCMa9WhcJlJ1AtQqpEYHc+vbCzA+Aw==", "requires": { - "constantinople": "^3.1.2", + "constantinople": "^4.0.1", "doctypes": "^1.1.0", - "js-stringify": "^1.0.1", - "pug-attrs": "^2.0.4", - "pug-error": "^1.3.3", - "pug-runtime": "^2.0.5", - "void-elements": "^2.0.1", - "with": "^5.0.0" + "js-stringify": "^1.0.2", + "pug-attrs": "^3.0.0", + "pug-error": "^2.1.0", + "pug-runtime": "^3.0.1", + "void-elements": "^3.1.0", + "with": "^7.0.0" } }, "pug-error": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-1.3.3.tgz", - "integrity": "sha512-qE3YhESP2mRAWMFJgKdtT5D7ckThRScXRwkfo+Erqga7dyJdY3ZquspprMCj/9sJ2ijm5hXFWQE/A3l4poMWiQ==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-2.1.0.tgz", + "integrity": "sha512-lv7sU9e5Jk8IeUheHata6/UThZ7RK2jnaaNztxfPYUY+VxZyk/ePVaNZ/vwmH8WqGvDz3LrNYt/+gA55NDg6Pg==" }, "pug-filters": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-3.1.1.tgz", - "integrity": "sha512-lFfjNyGEyVWC4BwX0WyvkoWLapI5xHSM3xZJFUhx4JM4XyyRdO8Aucc6pCygnqV2uSgJFaJWW3Ft1wCWSoQkQg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-4.0.0.tgz", + "integrity": "sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==", "requires": { - "clean-css": "^4.1.11", - "constantinople": "^3.0.1", + "constantinople": "^4.0.1", "jstransformer": "1.0.0", - "pug-error": "^1.3.3", - "pug-walk": "^1.1.8", - "resolve": "^1.1.6", - "uglify-js": "^2.6.1" + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0", + "resolve": "^1.15.1" } }, "pug-lexer": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-4.1.0.tgz", - "integrity": "sha512-i55yzEBtjm0mlplW4LoANq7k3S8gDdfC6+LThGEvsK4FuobcKfDAwt6V4jKPH9RtiE3a2Akfg5UpafZ1OksaPA==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-5.0.1.tgz", + "integrity": "sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==", "requires": { - "character-parser": "^2.1.1", - "is-expression": "^3.0.0", - "pug-error": "^1.3.3" + "character-parser": "^2.2.0", + "is-expression": "^4.0.0", + "pug-error": "^2.0.0" } }, "pug-linker": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-3.0.6.tgz", - "integrity": "sha512-bagfuHttfQOpANGy1Y6NJ+0mNb7dD2MswFG2ZKj22s8g0wVsojpRlqveEQHmgXXcfROB2RT6oqbPYr9EN2ZWzg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-4.0.0.tgz", + "integrity": "sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==", "requires": { - "pug-error": "^1.3.3", - "pug-walk": "^1.1.8" + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0" } }, "pug-load": { - "version": "2.0.12", - "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-2.0.12.tgz", - "integrity": "sha512-UqpgGpyyXRYgJs/X60sE6SIf8UBsmcHYKNaOccyVLEuT6OPBIMo6xMPhoJnqtB3Q3BbO4Z3Bjz5qDsUWh4rXsg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-3.0.0.tgz", + "integrity": "sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==", "requires": { - "object-assign": "^4.1.0", - "pug-walk": "^1.1.8" + "object-assign": "^4.1.1", + "pug-walk": "^2.0.0" } }, 
"pug-parser": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-5.0.1.tgz", - "integrity": "sha512-nGHqK+w07p5/PsPIyzkTQfzlYfuqoiGjaoqHv1LjOv2ZLXmGX1O+4Vcvps+P4LhxZ3drYSljjq4b+Naid126wA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-6.0.0.tgz", + "integrity": "sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==", "requires": { - "pug-error": "^1.3.3", - "token-stream": "0.0.1" + "pug-error": "^2.0.0", + "token-stream": "1.0.0" } }, "pug-runtime": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-2.0.5.tgz", - "integrity": "sha512-P+rXKn9un4fQY77wtpcuFyvFaBww7/91f3jHa154qU26qFAnOe6SW1CbIDcxiG5lLK9HazYrMCCuDvNgDQNptw==" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-3.0.1.tgz", + "integrity": "sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==" }, "pug-strip-comments": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-1.0.4.tgz", - "integrity": "sha512-i5j/9CS4yFhSxHp5iKPHwigaig/VV9g+FgReLJWWHEHbvKsbqL0oP/K5ubuLco6Wu3Kan5p7u7qk8A4oLLh6vw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz", + "integrity": "sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==", "requires": { - "pug-error": "^1.3.3" + "pug-error": "^2.0.0" } }, "pug-walk": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-1.1.8.tgz", - "integrity": "sha512-GMu3M5nUL3fju4/egXwZO0XLi6fW/K3T3VTgFQ14GxNi8btlxgT5qZL//JwZFm/2Fa64J/PNS8AZeys3wiMkVA==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-2.0.0.tgz", + "integrity": "sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==" }, "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "requires": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" } }, "query-string": { @@ -9386,9 +5521,9 @@ "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "requires": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -9396,135 +5531,40 @@ "unpipe": "1.0.0" } }, - "rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "dev": true, - "requires": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - } - }, "react-zlib-js": { - 
"version": "1.0.4", - "resolved": "https://registry.npmjs.org/react-zlib-js/-/react-zlib-js-1.0.4.tgz", - "integrity": "sha512-ynXD9DFxpE7vtGoa3ZwBtPmZrkZYw2plzHGbanUjBOSN4RtuXdektSfABykHtTiWEHMh7WdYj45LHtp228ZF1A==" - }, - "readable-stream": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.3.0.tgz", - "integrity": "sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "regenerator-runtime": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", - "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" - }, - "regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", - "dev": true, - "requires": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - } + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/react-zlib-js/-/react-zlib-js-1.0.4.tgz", + "integrity": "sha512-ynXD9DFxpE7vtGoa3ZwBtPmZrkZYw2plzHGbanUjBOSN4RtuXdektSfABykHtTiWEHMh7WdYj45LHtp228ZF1A==" }, - "registry-auth-token": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-3.4.0.tgz", - "integrity": "sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A==", - "dev": true, + "readable-stream": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.3.0.tgz", + "integrity": "sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw==", "requires": { - "rc": "^1.1.6", - "safe-buffer": "^5.0.1" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" } }, - "registry-url": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-3.1.0.tgz", - "integrity": "sha1-PU74cPc93h138M+aOBQyRE4XSUI=", + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": 
"sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dev": true, "requires": { - "rc": "^1.0.1" + "picomatch": "^2.2.1" } }, - "remove-trailing-separator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", - "dev": true - }, - "repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", - "dev": true - }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=" - }, "resolve": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.16.0.tgz", - "integrity": "sha512-LarL/PIKJvc09k1jaeT4kQb/8/7P+qV4qSnN2K80AES+OHdfZELAKVOBjxsvtToT/uLOfFbvYvKfZmV8cee7nA==", + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", "requires": { - "path-parse": "^1.0.6" + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, - "resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", - "dev": true - }, "responselike": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", @@ -9533,20 +5573,6 @@ "lowercase-keys": "^1.0.0" } }, - "ret": { - "version": "0.1.15", - "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", - "dev": true - }, - "right-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", - "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", - "requires": { - "align-text": "^0.1.1" - } - }, "rndm": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", @@ -9557,38 +5583,20 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, - "safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "dev": true, - "requires": { - "ret": "~0.1.10" - } - }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==" - }, - "semver-diff": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-2.1.0.tgz", - "integrity": "sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY=", - "dev": true, - "requires": { - "semver": "^5.0.3" - } + "version": "5.7.2", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" }, "send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "requires": { "debug": "2.6.9", "depd": "2.0.0", @@ -9610,6 +5618,11 @@ "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" + }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -9655,37 +5668,27 @@ } }, "serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", "requires": { - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.18.0" + "send": "0.19.0" } }, - "set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "dev": true, + "set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" } }, "setprototypeof": { @@ -9709,21 +5712,16 @@ "dev": true }, "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + 
"get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" } }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", - "dev": true - }, "simple-oauth2": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/simple-oauth2/-/simple-oauth2-2.5.2.tgz", @@ -9758,110 +5756,20 @@ "is-arrayish": "^0.3.1" } }, - "snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "dev": true, - "requires": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "dev": true, - "requires": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", + "simple-update-notifier": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz", + "integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==", "dev": true, "requires": { - "kind-of": "^3.2.0" + "semver": "~7.0.0" }, "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } + "semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "dev": true } } }, @@ -9873,65 +5781,11 @@ "is-plain-obj": "^1.0.0" } }, - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" - }, - "source-map-resolve": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", - "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", - "dev": true, - "requires": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, - "source-map-url": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", - "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=", - "dev": true - }, - "split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "dev": true, - "requires": { - "extend-shallow": "^3.0.0" - } - }, "stack-trace": { "version": "0.0.10", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" }, - "static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", - "dev": true, - "requires": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -9950,37 +5804,6 @@ "safe-buffer": "~5.1.0" } }, - "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", - "dev": 
true - }, - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true - }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -9989,14 +5812,10 @@ "has-flag": "^3.0.0" } }, - "term-size": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz", - "integrity": "sha1-RYuDiH8oj8Vtb/+/rSYuJmOO+mk=", - "dev": true, - "requires": { - "execa": "^0.7.0" - } + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, "text-hex": { "version": "1.0.0", @@ -10008,51 +5827,13 @@ "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" }, - "to-fast-properties": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz", - "integrity": "sha1-uDVx+k2MJbguIxsG46MFXeTKGkc=" - }, - "to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", - "dev": true, - "requires": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" - } - }, "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" + "is-number": "^7.0.0" } }, "toidentifier": { @@ -10061,9 +5842,9 @@ "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, "token-stream": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-0.0.1.tgz", - "integrity": "sha1-zu78cXp2xDFvEm0LnbqlXX598Bo=" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz", + "integrity": "sha512-VSsyNPPW74RpHwR8Fc21uubwHY7wMDeJLys2IX5zJNih+OnAnaifKHo+1LHT7DAdloQ7apeaaWg8l7qnf/TnEg==" }, "touch": { "version": "3.1.0", @@ -10074,6 +5855,11 @@ "nopt": "~1.0.10" } }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "triple-beam": { "version": 
"1.3.0", "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", @@ -10093,21 +5879,10 @@ "mime-types": "~2.1.24" } }, - "uglify-js": { - "version": "2.8.29", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", - "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", - "requires": { - "source-map": "~0.5.1", - "uglify-to-browserify": "~1.0.0", - "yargs": "~3.10.0" - } - }, - "uglify-to-browserify": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", - "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", - "optional": true + "typescript": { + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", + "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==" }, "uid-safe": { "version": "2.1.5", @@ -10118,141 +5893,26 @@ } }, "undefsafe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.3.tgz", - "integrity": "sha512-nrXZwwXrD/T/JXeygJqdCO6NZZ1L66HrxM/Z7mIq2oPanoN0F1nLx3lwJMu6AwJY69hdixaFQOuoYsMjE5/C2A==", - "dev": true, - "requires": { - "debug": "^2.2.0" - } - }, - "union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", - "dev": true, - "requires": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" - } - }, - "unique-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-1.0.0.tgz", - "integrity": "sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=", - "dev": true, - "requires": { - "crypto-random-string": "^1.0.0" - } + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", + "dev": true }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" }, - "unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", - "dev": true, - "requires": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "dependencies": { - "has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", - "dev": true, - "requires": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "dependencies": { - "isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dev": true, - "requires": { - "isarray": "1.0.0" - } - } - } - }, - "has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", - "dev": true - } - } - }, - "unzip-response": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unzip-response/-/unzip-response-2.0.1.tgz", - "integrity": "sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c=", - "dev": true - }, - "upath": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", - "integrity": 
"sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", - "dev": true - }, - "update-notifier": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-2.5.0.tgz", - "integrity": "sha512-gwMdhgJHGuj/+wHJJs9e6PcCszpxR1b236igrOkUofGhqJuG+amlIKwApH1IW1WWl7ovZxsX49lMBWLxSdm5Dw==", - "dev": true, - "requires": { - "boxen": "^1.2.1", - "chalk": "^2.0.1", - "configstore": "^3.0.0", - "import-lazy": "^2.1.0", - "is-ci": "^1.0.10", - "is-installed-globally": "^0.1.0", - "is-npm": "^1.0.0", - "latest-version": "^3.0.0", - "semver-diff": "^2.0.0", - "xdg-basedir": "^3.0.0" - } - }, - "urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", - "dev": true - }, "url-join": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==" }, - "url-parse-lax": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", - "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", - "dev": true, - "requires": { - "prepend-http": "^1.0.1" - } - }, "url-to-options": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", "integrity": "sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k=" }, - "use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", - "dev": true - }, "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -10269,9 +5929,23 @@ "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" }, "void-elements": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-2.0.1.tgz", - "integrity": "sha1-wGavtYK7HLQSjWDqkjkulNXp2+w=" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", + "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==" + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } }, "which": { "version": "1.3.1", @@ -10282,24 +5956,11 @@ "isexe": "^2.0.0" } }, - "widest-line": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-2.0.1.tgz", - "integrity": "sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA==", - "dev": true, - "requires": { - "string-width": "^2.1.1" - } - }, - "window-size": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", - "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=" - }, "winston": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/winston/-/winston-3.2.1.tgz", "integrity": 
"sha512-zU6vgnS9dAWCEKg/QYigd6cgMVVNwyTzKs81XZtTFuRwJOcDdBg7AU0mXVyNbs7O5RH2zdv+BdNZUlx7mXPuOw==", + "peer": true, "requires": { "async": "^2.6.1", "diagnostics": "^1.1.1", @@ -10346,19 +6007,16 @@ } }, "with": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/with/-/with-5.1.1.tgz", - "integrity": "sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4=", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/with/-/with-7.0.2.tgz", + "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==", "requires": { - "acorn": "^3.1.0", - "acorn-globals": "^3.0.0" + "@babel/parser": "^7.9.6", + "@babel/types": "^7.9.6", + "assert-never": "^1.2.1", + "babel-walk": "3.0.0-canary-5" } }, - "wordwrap": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", - "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=" - }, "wreck": { "version": "14.2.0", "resolved": "https://registry.npmjs.org/wreck/-/wreck-14.2.0.tgz", @@ -10369,45 +6027,10 @@ "hoek": "6.x.x" } }, - "write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" - } - }, - "xdg-basedir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-3.0.0.tgz", - "integrity": "sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=", - "dev": true - }, "yallist": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" - }, - "yargs": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", - "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", - "requires": { - "camelcase": "^1.0.2", - "cliui": "^2.1.0", - "decamelize": "^1.0.0", - "window-size": "0.1.0" - }, - "dependencies": { - "camelcase": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", - "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=" - } - } } } } diff --git a/test/e2e/oauth2-client/package.json b/test/e2e/oauth2-client/package.json index af9bf036ba6..0ddd96f7002 100644 --- a/test/e2e/oauth2-client/package.json +++ b/test/e2e/oauth2-client/package.json @@ -4,17 +4,17 @@ "private": true, "main": "./src/index.js", "scripts": { - "consent": "hydra-login-consent-logout", + "consent": "node node_modules/hydra-login-consent-logout/lib/app.js", "start": "node ./src/index.js", "watch": "nodemon ./src/index.js" }, "dependencies": { "body-parser": "^1.20.1", "dotenv": "^7.0.0", - "express": "^4.18.2", + "express": "^4.21.2", "express-session": "^1.17.0", "express-winston": "^3.4.0", - "hydra-login-consent-logout": "1.4.3", + "hydra-login-consent-logout": "2.4.0-pre.3", "jsonwebtoken": "^8.5.1", "jwks-rsa": "^2.1.4", "node-fetch": "^2.6.0", @@ -25,6 +25,6 @@ }, "devDependencies": { "cross-env": "^5.2.1", - "nodemon": "^1.19.4" + "nodemon": "^2.0.22" } } diff --git a/test/e2e/oauth2-client/src/index.js b/test/e2e/oauth2-client/src/index.js index b27512bedeb..d868fbc0d84 100644 --- a/test/e2e/oauth2-client/src/index.js +++ b/test/e2e/oauth2-client/src/index.js @@ -152,6 +152,87 @@ app.get("/oauth2/callback", async (req, res) => { }) }) +app.get("/oauth2/device", async (req, res) => { + const client = { + id: req.query.client_id, + 
secret: req.query.client_secret, + } + + const state = uuid.v4() + const scope = req.query.scope || "" + + req.session.client = client + req.session.scope = scope.split(" ") + + const params = new URLSearchParams() + params.append("client_id", req.query.client_id) + params.append("scope", scope) + + let headers = new Headers() + headers.set( + "Authorization", + "Basic " + + Buffer.from(req.query.client_id + ":" + req.query.client_secret).toString( + "base64", + ), + ) + + fetch(new URL("/oauth2/device/auth", config.public).toString(), { + method: "POST", + body: params, + headers: headers, + }) + .then(isStatusOk) + .then((res) => res.json()) + .then((body) => { + // Store the device_code to use after authentication to get the tokens + req.session.device_code = body?.device_code + res.redirect(body?.verification_uri_complete) + }) + .catch((err) => { + res.send(JSON.stringify({ error: err.toString() })) + }) +}) + +app.get("/oauth2/device/success", async (req, res) => { + const clientId = req.session?.client?.id + const clientSecret = req.session?.client?.secret + + if (clientId === undefined || clientSecret === undefined) { + res.send( + JSON.stringify({ + result: "error", + error: "no client credentials in session", + }), + ) + return + } + + const params = new URLSearchParams() + params.append("client_id", clientId) + params.append("device_code", req.session?.device_code) + params.append("grant_type", "urn:ietf:params:oauth:grant-type:device_code") + let headers = new Headers() + headers.set( + "Authorization", + "Basic " + Buffer.from(clientId + ":" + clientSecret).toString("base64"), + ) + + fetch(new URL("/oauth2/token", config.public).toString(), { + method: "POST", + body: params, + headers: headers, + }) + .then(isStatusOk) + .then((resp) => resp.json()) + .then((data) => { + res.send({ result: "success", token: data }) + }) + .catch((err) => { + res.send(JSON.stringify({ error: err.toString() })) + }) +}) + app.get("/oauth2/refresh", function (req, res) { oauth2 .create(req.session.credentials) diff --git a/test/main.go b/test/main.go new file mode 100644 index 00000000000..f9aa66ee161 --- /dev/null +++ b/test/main.go @@ -0,0 +1,18 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package main + +import ( + "context" + "fmt" + + "github.com/ory/hydra/v2/flow" + "github.com/ory/pop/v6" +) + +func main() { + var session flow.LoginSession + + fmt.Printf("%+v", pop.NewModel(&session, context.Background()).Columns().Readable().SelectString()) +} diff --git a/test/mock-client/main.go b/test/mock-client/main.go index c09edaca14c..476c1f2d7ae 100644 --- a/test/mock-client/main.go +++ b/test/mock-client/main.go @@ -6,6 +6,7 @@ package main import ( "bytes" "context" + "encoding/base64" "encoding/json" "flag" "fmt" @@ -22,7 +23,6 @@ import ( "golang.org/x/oauth2" - "github.com/ory/hydra/x" "github.com/ory/x/cmdx" "github.com/ory/x/urlx" ) @@ -84,7 +84,7 @@ func main() { }, }).Get(au) cmdx.CheckResponse(err, http.StatusOK, resp) - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck out, err := io.ReadAll(resp.Body) if err != nil { @@ -118,7 +118,7 @@ func main() { // refreshing the same token twice does not work resp, err = refreshTokenRequest(token) cmdx.CheckResponse(err, http.StatusBadRequest, resp) - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck } func refreshTokenRequest(token oauth2token) (*http.Response, error) { @@ -135,7 +135,7 @@ func 
refreshTokenRequest(token oauth2token) (*http.Response, error) { func refreshToken(token oauth2token) (result oauth2token) { resp, err := refreshTokenRequest(token) cmdx.CheckResponse(err, http.StatusOK, resp) - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck err = json.NewDecoder(resp.Body).Decode(&result) cmdx.Must(err, "Unable to decode refresh token: %s", err) @@ -161,7 +161,7 @@ func checkTokenResponse(token oauth2token) { log.Fatalf("JWT Access Token does not seem to have three parts: %d - %+v - %v", len(parts), token, parts) } - payload, err := x.DecodeSegment(parts[1]) + payload, err := base64.RawURLEncoding.DecodeString(parts[1]) if err != nil { log.Fatalf("Unable to decode id token segment: %s", err) } @@ -181,8 +181,8 @@ func checkTokenResponse(token oauth2token) { } } - intro, resp, err := sdk.OAuth2Api.IntrospectOAuth2Token(context.Background()).Token(token.AccessToken).Execute() - defer resp.Body.Close() + intro, resp, err := sdk.OAuth2API.IntrospectOAuth2Token(context.Background()).Token(token.AccessToken).Execute() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to introspect OAuth2 token: %s", err) } @@ -209,7 +209,7 @@ func checkTokenResponse(token oauth2token) { log.Fatalf("ID Token does not seem to have three parts: %d - %+v - %v", len(parts), token, parts) } - payload, err := x.DecodeSegment(parts[1]) + payload, err := base64.RawURLEncoding.DecodeString(parts[1]) if err != nil { log.Fatalf("Unable to decode id token segment: %s", err) } diff --git a/test/mock-lcp/main.go b/test/mock-lcp/main.go index a3c3bc13d3a..f582a3018f1 100644 --- a/test/mock-lcp/main.go +++ b/test/mock-lcp/main.go @@ -24,8 +24,8 @@ func init() { func login(rw http.ResponseWriter, r *http.Request) { challenge := r.URL.Query().Get("login_challenge") - lr, resp, err := client.OAuth2Api.GetOAuth2LoginRequest(r.Context()).LoginChallenge(challenge).Execute() - defer resp.Body.Close() + lr, resp, err := client.OAuth2API.GetOAuth2LoginRequest(r.Context()).LoginChallenge(challenge).Execute() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to fetch clogin request: %s", err) } @@ -37,24 +37,24 @@ func login(rw http.ResponseWriter, r *http.Request) { remember = true } - vr, resp, err := client.OAuth2Api.AcceptOAuth2LoginRequest(r.Context()). + vr, resp, err := client.OAuth2API.AcceptOAuth2LoginRequest(r.Context()). LoginChallenge(challenge). AcceptOAuth2LoginRequest(hydra.AcceptOAuth2LoginRequest{ Subject: "the-subject", Remember: pointerx.Bool(remember), }).Execute() - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to execute request: %s", err) } redirectTo = vr.RedirectTo } else { - vr, resp, err := client.OAuth2Api.RejectOAuth2LoginRequest(r.Context()). + vr, resp, err := client.OAuth2API.RejectOAuth2LoginRequest(r.Context()). LoginChallenge(challenge). 
RejectOAuth2Request(hydra.RejectOAuth2Request{ Error: pointerx.String("invalid_request"), }).Execute() - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to execute request: %s", err) } @@ -69,8 +69,8 @@ func login(rw http.ResponseWriter, r *http.Request) { func consent(rw http.ResponseWriter, r *http.Request) { challenge := r.URL.Query().Get("consent_challenge") - o, resp, err := client.OAuth2Api.GetOAuth2ConsentRequest(r.Context()).ConsentChallenge(challenge).Execute() - defer resp.Body.Close() + o, resp, err := client.OAuth2API.GetOAuth2ConsentRequest(r.Context()).ConsentChallenge(challenge).Execute() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to fetch consent request: %s", err) } @@ -86,7 +86,7 @@ func consent(rw http.ResponseWriter, r *http.Request) { value = "rab" } - v, resp, err := client.OAuth2Api.AcceptOAuth2ConsentRequest(r.Context()). + v, resp, err := client.OAuth2API.AcceptOAuth2ConsentRequest(r.Context()). ConsentChallenge(challenge). AcceptOAuth2ConsentRequest(hydra.AcceptOAuth2ConsentRequest{ GrantScope: o.RequestedScope, @@ -96,16 +96,16 @@ func consent(rw http.ResponseWriter, r *http.Request) { IdToken: map[string]interface{}{"baz": value}, }, }).Execute() - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to execute request: %s", err) } redirectTo = v.RedirectTo } else { - v, resp, err := client.OAuth2Api.RejectOAuth2ConsentRequest(r.Context()). + v, resp, err := client.OAuth2API.RejectOAuth2ConsentRequest(r.Context()). ConsentChallenge(challenge). RejectOAuth2Request(hydra.RejectOAuth2Request{Error: pointerx.String("invalid_request")}).Execute() - defer resp.Body.Close() + defer resp.Body.Close() //nolint:errcheck if err != nil { log.Fatalf("Unable to execute request: %s", err) } diff --git a/x/audit.go b/x/audit.go deleted file mode 100644 index edfb50210b6..00000000000 --- a/x/audit.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x - -import ( - "net/http" - - "github.com/ory/x/logrusx" -) - -func LogAudit(r *http.Request, message interface{}, logger *logrusx.Logger) { - if logger == nil { - logger = logrusx.NewAudit("", "") - } - - logger = logger.WithRequest(r) - - if err, ok := message.(error); ok { - logger.WithError(err).Infoln("access denied") - return - } - - logger.Infoln("access allowed") -} diff --git a/x/audit_test.go b/x/audit_test.go deleted file mode 100644 index 6eecc8dcbf7..00000000000 --- a/x/audit_test.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x - -import ( - "bytes" - "fmt" - "net/http" - "testing" - - "github.com/pkg/errors" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/assert" - - "github.com/ory/x/logrusx" -) - -func TestLogAudit(t *testing.T) { - for k, tc := range []struct { - d string - message interface{} - expectContains []string - }{ - { - d: "This should log \"access allowed\" because no errors are given", - message: nil, - expectContains: []string{"msg=access allowed"}, - }, - { - d: "This should log \"access denied\" because an error is given", - message: errors.New("asdf"), - expectContains: []string{"msg=access denied"}, - }, - } { - t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { - r, err := http.NewRequest(http.MethodGet, 
"https://hydra/some/endpoint", nil) - if err != nil { - t.Fatal(err) - } - buf := bytes.NewBuffer([]byte{}) - l := logrusx.NewAudit("", "", logrusx.ForceLevel(logrus.TraceLevel)) - l.Logger.Out = buf - LogAudit(r, tc.message, l) - - t.Logf("%s", string(buf.Bytes())) - - assert.Contains(t, buf.String(), "audience=audit") - for _, expectContain := range tc.expectContains { - assert.Contains(t, buf.String(), expectContain) - } - }) - } -} diff --git a/x/authenticator.go b/x/authenticator.go deleted file mode 100644 index 193cc0d922e..00000000000 --- a/x/authenticator.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x - -import ( - "context" - "net/http" - "net/url" - - "github.com/ory/fosite" -) - -type ClientAuthenticatorProvider interface { - ClientAuthenticator() ClientAuthenticator -} - -type ClientAuthenticator interface { - AuthenticateClient(ctx context.Context, r *http.Request, form url.Values) (fosite.Client, error) -} diff --git a/x/clamp.go b/x/clamp.go new file mode 100644 index 00000000000..4bf9af45350 --- /dev/null +++ b/x/clamp.go @@ -0,0 +1,15 @@ +// Copyright © 2025 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package x + +// Clamp clamps val to be within the range [min, max] for any integer type. +func Clamp[T ~int | ~int8 | ~int16 | ~int32 | ~int64 | ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64](val, min, max T) T { + if val < min { + return min + } + if val > max { + return max + } + return val +} diff --git a/x/clean_sql.go b/x/clean_sql.go index 59628fb3f97..917e19842a8 100644 --- a/x/clean_sql.go +++ b/x/clean_sql.go @@ -6,17 +6,17 @@ package x import ( "testing" - "github.com/gobuffalo/pop/v6" + "github.com/ory/pop/v6" ) func DeleteHydraRows(t *testing.T, c *pop.Connection) { - t.Logf("Deleting hydra rows in database: %s", c.Dialect.Name()) for _, tb := range []string{ "hydra_oauth2_access", "hydra_oauth2_refresh", "hydra_oauth2_code", "hydra_oauth2_oidc", "hydra_oauth2_pkce", + "hydra_oauth2_device_auth_codes", "hydra_oauth2_flow", "hydra_oauth2_authentication_session", "hydra_oauth2_obfuscated_authentication_session", @@ -31,34 +31,3 @@ func DeleteHydraRows(t *testing.T, c *pop.Connection) { } } } - -func CleanSQLPop(t *testing.T, c *pop.Connection) { - t.Logf("Cleaning up database: %s", c.Dialect.Name()) - for _, tb := range []string{ - "hydra_oauth2_access", - "hydra_oauth2_refresh", - "hydra_oauth2_code", - "hydra_oauth2_oidc", - "hydra_oauth2_pkce", - "hydra_oauth2_flow", - "hydra_oauth2_authentication_session", - "hydra_oauth2_obfuscated_authentication_session", - "hydra_oauth2_logout_request", - "hydra_oauth2_jti_blacklist", - "hydra_oauth2_trusted_jwt_bearer_issuer", - "hydra_jwk", - "hydra_client", - // Migrations - "hydra_oauth2_authentication_consent_migration", - "hydra_client_migration", - "hydra_oauth2_migration", - "hydra_jwk_migration", - "networks", - "schema_migration", - } { - if err := c.RawQuery("DROP TABLE IF EXISTS " + tb).Exec(); err != nil { - t.Logf(`Unable to clean up table "%s": %s`, tb, err) - } - } - t.Logf("Successfully cleaned up database: %s", c.Dialect.Name()) -} diff --git a/x/doc_swagger.go b/x/doc_swagger.go index af7944ca09d..b178ae736da 100644 --- a/x/doc_swagger.go +++ b/x/doc_swagger.go @@ -4,10 +4,10 @@ package x // Empty responses are sent when, for example, resources are deleted. The HTTP status code for empty responses is -// typically 201. +// typically 204. 
// // swagger:response emptyResponse -type emptyResponse struct{} +type _ struct{} // Error // @@ -40,7 +40,7 @@ type errorOAuth2 struct { // Default Error Response // // swagger:response errorOAuth2Default -type errorOAuth2Default struct { +type _ struct { // in: body Body errorOAuth2 } @@ -48,7 +48,7 @@ type errorOAuth2Default struct { // Bad Request Error Response // // swagger:response errorOAuth2BadRequest -type errorOAuth2BadRequest struct { +type _ struct { // in: body Body errorOAuth2 } @@ -56,7 +56,7 @@ type errorOAuth2BadRequest struct { // Not Found Error Response // // swagger:response errorOAuth2NotFound -type errorOAuth2NotFound struct { +type _ struct { // in: body Body errorOAuth2 } diff --git a/x/error_enhancer.go b/x/error_enhancer.go index b5b6d7dd1ec..862dfa7d9d1 100644 --- a/x/error_enhancer.go +++ b/x/error_enhancer.go @@ -8,8 +8,8 @@ import ( "github.com/pkg/errors" - "github.com/ory/fosite" "github.com/ory/herodot" + "github.com/ory/hydra/v2/fosite" ) type enhancedError struct { diff --git a/x/error_enhancer_test.go b/x/error_enhancer_test.go index a86a0281603..f5c30565bb8 100644 --- a/x/error_enhancer_test.go +++ b/x/error_enhancer_test.go @@ -10,13 +10,11 @@ import ( "net/http" "testing" - "github.com/ory/x/errorsx" - "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" "github.com/ory/x/sqlcon" ) @@ -30,7 +28,7 @@ func TestErrorEnhancer(t *testing.T) { out: "{\"error\":\"Unable to locate the resource\",\"error_description\":\"\"}", }, { - in: errorsx.WithStack(sqlcon.ErrNoRows), + in: errors.WithStack(sqlcon.ErrNoRows), out: "{\"error\":\"Unable to locate the resource\",\"error_description\":\"\"}", }, { @@ -46,7 +44,7 @@ func TestErrorEnhancer(t *testing.T) { out: "{\"error\":\"invalid_request\",\"error_description\":\"The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. Make sure that the various parameters are correct, be aware of case sensitivity and trim your parameters. Make sure that the client you are using has exactly whitelisted the redirect_uri you specified.\"}", }, { - in: errorsx.WithStack(fosite.ErrInvalidRequest), + in: errors.WithStack(fosite.ErrInvalidRequest), out: "{\"error\":\"invalid_request\",\"error_description\":\"The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. Make sure that the various parameters are correct, be aware of case sensitivity and trim your parameters. Make sure that the client you are using has exactly whitelisted the redirect_uri you specified.\"}", }, } { diff --git a/x/errors.go b/x/errors.go index 229884a5d51..2d04dcc638e 100644 --- a/x/errors.go +++ b/x/errors.go @@ -6,7 +6,7 @@ package x import ( "net/http" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" "github.com/ory/x/logrusx" ) @@ -31,3 +31,10 @@ func LogError(r *http.Request, err error, logger *logrusx.Logger) { logger.WithRequest(r). 
WithError(err).Errorln("An error occurred") } + +func Must[T any](t T, err error) T { + if err != nil { + panic(err) + } + return t +} diff --git a/x/errors_test.go b/x/errors_test.go index 71de4e8d3fe..13c311b9aec 100644 --- a/x/errors_test.go +++ b/x/errors_test.go @@ -36,9 +36,9 @@ func TestLogError(t *testing.T) { l.Logger.Out = buf LogError(r, errors.New("asdf"), l) - t.Logf("%s", string(buf.Bytes())) + t.Logf("%s", buf.String()) - assert.True(t, strings.Contains(string(buf.Bytes()), "trace")) + assert.True(t, strings.Contains(buf.String(), "trace")) LogError(r, errors.Wrap(new(errStackTracer), ""), l) } diff --git a/x/events/events.go b/x/events/events.go new file mode 100644 index 00000000000..a62de0a0cc3 --- /dev/null +++ b/x/events/events.go @@ -0,0 +1,153 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package events + +import ( + "context" + "errors" + + otelattr "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/ory/herodot" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/otelx/semconv" +) + +const ( + // LoginAccepted will be emitted when the login UI accepts a login request. + LoginAccepted semconv.Event = "OAuth2LoginAccepted" + + // LoginRejected will be emitted when the login UI rejects a login request. + LoginRejected semconv.Event = "OAuth2LoginRejected" + + DeviceUserCodeAccepted semconv.Event = "OAuth2DeviceUserCodeAccepted" + + // ConsentAccepted will be emitted when the consent UI accepts a consent request. + ConsentAccepted semconv.Event = "OAuth2ConsentAccepted" + + // ConsentRejected will be emitted when the consent UI rejects a consent request. + ConsentRejected semconv.Event = "OAuth2ConsentRejected" + + // ConsentRevoked will be emitted when the user revokes a consent request. + ConsentRevoked semconv.Event = "OAuth2ConsentRevoked" + + // ClientCreated will be emitted when a client is created. + ClientCreated semconv.Event = "OAuth2ClientCreated" + + // ClientDeleted will be emitted when a client is deleted. + ClientDeleted semconv.Event = "OAuth2ClientDeleted" + + // ClientUpdated will be emitted when a client is updated. + ClientUpdated semconv.Event = "OAuth2ClientUpdated" + + // AccessTokenIssued will be emitted by requests to POST /oauth2/token in case the request was successful. + AccessTokenIssued semconv.Event = "OAuth2AccessTokenIssued" //nolint:gosec + + // TokenExchangeError will be emitted by requests to POST /oauth2/token in case the request was unsuccessful. + TokenExchangeError semconv.Event = "OAuth2TokenExchangeError" //nolint:gosec + + // AccessTokenInspected will be emitted by requests to POST /admin/oauth2/introspect. + AccessTokenInspected semconv.Event = "OAuth2AccessTokenInspected" //nolint:gosec + + // AccessTokenRevoked will be emitted by requests to POST /oauth2/revoke. + AccessTokenRevoked semconv.Event = "OAuth2AccessTokenRevoked" //nolint:gosec + + // RefreshTokenIssued will be emitted when a refresh token is issued. + RefreshTokenIssued semconv.Event = "OAuth2RefreshTokenIssued" //nolint:gosec + + // IdentityTokenIssued will be emitted when a refresh token is issued. 
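x.Must, added to x/errors.go above, follows the usual "must" convention of panicking instead of returning an error, which keeps static initialization and test fixtures short. A brief sketch of a typical call site; the URL and the choice of url.Parse are illustrative, not taken from this change:

```go
package main

import (
	"fmt"
	"net/url"

	"github.com/ory/hydra/v2/x"
)

func main() {
	// x.Must panics if the second return value is a non-nil error and
	// otherwise unwraps the first one.
	u := x.Must(url.Parse("https://hydra.example.org/admin"))
	fmt.Println(u.Host)
}
```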
+ IdentityTokenIssued semconv.Event = "OIDCIdentityTokenIssued" //nolint:gosec +) + +const ( + attributeKeyOAuth2ClientName = "OAuth2ClientName" + attributeKeyOAuth2ClientID = "OAuth2ClientID" + attributeKeyOAuth2Subject = "OAuth2Subject" + attributeKeyOAuth2GrantType = "OAuth2GrantType" + attributeKeyOAuth2ConsentRequestID = "OAuth2ConsentRequestID" + attributeKeyOAuth2TokenFormat = "OAuth2TokenFormat" //nolint:gosec + attributeKeyOAuth2RefreshTokenSignature = "OAuth2RefreshTokenSignature" //nolint:gosec + attributeKeyOAuth2AccessTokenSignature = "OAuth2AccessTokenSignature" //nolint:gosec + attributeKeyErrorReason = "ErrorReason" +) + +// WithTokenFormat emits the token format as part of the event. +func WithTokenFormat(format string) trace.EventOption { + return trace.WithAttributes(otelattr.String(attributeKeyOAuth2TokenFormat, format)) +} + +// WithGrantType emits the token format as part of the event. +func WithGrantType(grantType string) trace.EventOption { + return trace.WithAttributes(otelattr.String(attributeKeyOAuth2GrantType, grantType)) +} + +func ClientID(clientID string) otelattr.KeyValue { + return otelattr.String(attributeKeyOAuth2ClientID, clientID) +} + +func RefreshTokenSignature(signature string) otelattr.KeyValue { + return otelattr.String(attributeKeyOAuth2RefreshTokenSignature, signature) +} + +func AccessTokenSignature(signature string) otelattr.KeyValue { + return otelattr.String(attributeKeyOAuth2AccessTokenSignature, signature) +} + +func ConsentRequestID(id string) otelattr.KeyValue { + return otelattr.String(attributeKeyOAuth2ConsentRequestID, id) +} + +// WithClientID emits the client ID as part of the event. +func WithClientID(clientID string) trace.EventOption { + return trace.WithAttributes(ClientID(clientID)) +} + +// WithClientName emits the client name as part of the event. +func WithClientName(clientID string) trace.EventOption { + return trace.WithAttributes(otelattr.String(attributeKeyOAuth2ClientName, clientID)) +} + +// WithSubject emits the subject as part of the event. +func WithSubject(subject string) trace.EventOption { + return trace.WithAttributes(otelattr.String(attributeKeyOAuth2Subject, subject)) +} + +// WithSubject emits the consent request ID as part of the event. +func WithConsentRequestID(id string) trace.EventOption { + return trace.WithAttributes(ConsentRequestID(id)) +} + +// WithRequest emits the subject and client ID from the fosite request as part of the event. +func WithRequest(request fosite.Requester) trace.EventOption { + var attributes []otelattr.KeyValue + if client := request.GetClient(); client != nil { + attributes = append(attributes, otelattr.String(attributeKeyOAuth2ClientID, client.GetID())) + } + if session := request.GetSession(); session != nil { + attributes = append(attributes, otelattr.String(attributeKeyOAuth2Subject, session.GetSubject())) + } + + return trace.WithAttributes(attributes...) +} + +// WithError sets the Reason attribute according to the error given. +func WithError(err error) trace.EventOption { + if err == nil { + return trace.WithAttributes() + } + if rc := herodot.ReasonCarrier(nil); errors.As(err, &rc) && rc.Reason() != "" { // also works for fosite.RFC6749Error + return trace.WithAttributes(otelattr.String(attributeKeyErrorReason, rc.Reason())) + } + return trace.WithAttributes(otelattr.String(attributeKeyErrorReason, err.Error())) +} + +// Trace emits an event with the given attributes. 
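The helpers above build OpenTelemetry event options, and the Trace function documented on the last line attaches them to the span carried in the context. A hedged sketch of how a token handler might emit such an event, using only the exported names shown in x/events/events.go; the handler skeleton and tracer name are invented, and a real TracerProvider must be configured for the event to be recorded:

```go
package main

import (
	"context"

	"go.opentelemetry.io/otel"

	"github.com/ory/hydra/v2/x/events"
)

// issueToken is a made-up handler skeleton; only the events.Trace call mirrors
// how the new package is meant to be used.
func issueToken(ctx context.Context, clientID string) error {
	ctx, span := otel.Tracer("example").Start(ctx, "issueToken")
	defer span.End()

	// ... perform the token exchange ...

	// Attach a structured event to the active span using the option helpers
	// defined above.
	events.Trace(ctx, events.AccessTokenIssued,
		events.WithClientID(clientID),
		events.WithGrantType("authorization_code"),
		events.WithTokenFormat("jwt"),
	)
	return nil
}

func main() {
	_ = issueToken(context.Background(), "my-client")
}
```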
+func Trace(ctx context.Context, event semconv.Event, opts ...trace.EventOption) { + allOpts := append([]trace.EventOption{trace.WithAttributes(semconv.AttributesFromContext(ctx)...)}, opts...) + trace.SpanFromContext(ctx).AddEvent( + string(event), + allOpts..., + ) +} diff --git a/x/fosite_storer.go b/x/fosite_storer.go index 1afec037710..2fab0cd454a 100644 --- a/x/fosite_storer.go +++ b/x/fosite_storer.go @@ -7,24 +7,29 @@ import ( "context" "time" - "github.com/ory/fosite" - "github.com/ory/fosite/handler/oauth2" - "github.com/ory/fosite/handler/openid" - "github.com/ory/fosite/handler/pkce" - "github.com/ory/fosite/handler/rfc7523" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/fosite/handler/oauth2" + "github.com/ory/hydra/v2/fosite/handler/openid" + "github.com/ory/hydra/v2/fosite/handler/pkce" + "github.com/ory/hydra/v2/fosite/handler/rfc7523" + "github.com/ory/hydra/v2/fosite/handler/rfc8628" + "github.com/ory/hydra/v2/fosite/handler/verifiable" ) type FositeStorer interface { - fosite.Storage - oauth2.CoreStorage + fosite.ClientManager + oauth2.AuthorizeCodeStorage + oauth2.AccessTokenStorage + oauth2.RefreshTokenStorage + oauth2.TokenRevocationStorage openid.OpenIDConnectRequestStorage pkce.PKCERequestStorage rfc7523.RFC7523KeyStorage + rfc8628.DeviceAuthStorage + verifiable.NonceManager + oauth2.ResourceOwnerPasswordCredentialsGrantStorage - RevokeRefreshToken(ctx context.Context, requestID string) error - - RevokeAccessToken(ctx context.Context, requestID string) error - + // Hydra-specific storage utilities // flush the access token requests from the database. // no data will be deleted after the 'notAfter' timeframe. FlushInactiveAccessTokens(ctx context.Context, notAfter time.Time, limit int, batchSize int) error @@ -40,5 +45,10 @@ type FositeStorer interface { // DeleteOpenIDConnectSession deletes an OpenID Connect session. // This is duplicated from Ory Fosite to help against deprecation linting errors. - DeleteOpenIDConnectSession(ctx context.Context, authorizeCode string) error + // DeleteOpenIDConnectSession(ctx context.Context, authorizeCode string) error + + // Hydra-specific RFC8628 Device Auth capabilities + GetUserCodeSession(context.Context, string, fosite.Session) (fosite.DeviceRequester, error) + GetDeviceCodeSessionByRequestID(ctx context.Context, requestID string, requester fosite.Session) (fosite.DeviceRequester, string, error) + UpdateDeviceCodeSessionBySignature(ctx context.Context, requestID string, requester fosite.DeviceRequester) error } diff --git a/x/hasher.go b/x/hasher.go index 1e7bd37a6b0..22974975433 100644 --- a/x/hasher.go +++ b/x/hasher.go @@ -6,31 +6,18 @@ package x import ( "context" - "github.com/ory/fosite" - "github.com/ory/x/hasherx" - - "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" - "github.com/ory/x/errorsx" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/x/hasherx" + "github.com/ory/x/otelx" ) -const tracingComponent = "github.com/ory/hydra/x" - var _ fosite.Hasher = (*Hasher)(nil) -type HashAlgorithm string - -func (a HashAlgorithm) String() string { - return string(a) -} - -const ( - HashAlgorithmBCrypt = HashAlgorithm("bcrypt") - HashAlgorithmPBKDF2 = HashAlgorithm("pbkdf2") -) - // Hasher implements fosite.Hasher. 
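The reworked x/hasher.go below replaces manual span handling with a named error return plus otelx.End, which records the error on the span before ending it. A standalone sketch of that pattern, using the same otelx import the hunk introduces; the doWork operation is a placeholder:

```go
package main

import (
	"context"
	"errors"

	"go.opentelemetry.io/otel"

	"github.com/ory/x/otelx"
)

// doWork stands in for an instrumented operation. The named error return lets
// the deferred otelx.End call see whatever error the function eventually
// returns and attach it to the span.
func doWork(ctx context.Context) (err error) {
	_, span := otel.Tracer("example").Start(ctx, "doWork")
	defer otelx.End(span, &err)

	// ... real work would go here, returning its error through err ...
	return errors.New("boom")
}

func main() {
	_ = doWork(context.Background())
}
```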
type Hasher struct { + t TracingProvider c config bcrypt *hasherx.Bcrypt pbkdf2 *hasherx.PBKDF2 @@ -39,38 +26,44 @@ type Hasher struct { type config interface { hasherx.PBKDF2Configurator hasherx.BCryptConfigurator - GetHasherAlgorithm(ctx context.Context) HashAlgorithm + GetHasherAlgorithm(ctx context.Context) string } // NewHasher returns a new BCrypt instance. -func NewHasher(c config) *Hasher { +func NewHasher(t TracingProvider, c config) *Hasher { return &Hasher{ + t: t, c: c, bcrypt: hasherx.NewHasherBcrypt(c), pbkdf2: hasherx.NewHasherPBKDF2(c), } } -func (b *Hasher) Hash(ctx context.Context, data []byte) ([]byte, error) { - ctx, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "x.hasher.Hash") - defer span.End() +const ( + hashAlgorithmBCrypt = "bcrypt" + hashAlgorithmPBKDF2 = "pbkdf2" +) - switch b.c.GetHasherAlgorithm(ctx) { - case HashAlgorithmBCrypt: - return b.bcrypt.Generate(ctx, data) - case HashAlgorithmPBKDF2: +func (h *Hasher) Hash(ctx context.Context, data []byte) (_ []byte, err error) { + ctx, span := h.t.Tracer(ctx).Tracer().Start(ctx, "x.hasher.Hash") + defer otelx.End(span, &err) + + alg := h.c.GetHasherAlgorithm(ctx) + span.SetAttributes(attribute.String("algorithm", alg)) + + switch alg { + case hashAlgorithmBCrypt: + return h.bcrypt.Generate(ctx, data) + case hashAlgorithmPBKDF2: fallthrough default: - return b.pbkdf2.Generate(ctx, data) + return h.pbkdf2.Generate(ctx, data) } } -func (b *Hasher) Compare(ctx context.Context, hash, data []byte) error { - _, span := otel.GetTracerProvider().Tracer(tracingComponent).Start(ctx, "x.hasher.Hash") - defer span.End() +func (h *Hasher) Compare(ctx context.Context, hash, data []byte) (err error) { + ctx, span := h.t.Tracer(ctx).Tracer().Start(ctx, "x.hasher.Compare") + defer otelx.End(span, &err) - if err := hasherx.Compare(ctx, data, hash); err != nil { - return errorsx.WithStack(err) - } - return nil + return hasherx.Compare(ctx, data, hash) } diff --git a/x/hasher_test.go b/x/hasher_test.go index a91b9daa2a3..d97b5562b74 100644 --- a/x/hasher_test.go +++ b/x/hasher_test.go @@ -8,30 +8,31 @@ import ( "fmt" "testing" - "github.com/ory/x/hasherx" - "github.com/stretchr/testify/require" + + "github.com/ory/x/hasherx" + "github.com/ory/x/otelx" ) type hasherConfig struct { cost uint32 } -func (c hasherConfig) HasherPBKDF2Config(ctx context.Context) *hasherx.PBKDF2Config { +func (c hasherConfig) HasherPBKDF2Config(_ context.Context) *hasherx.PBKDF2Config { return &hasherx.PBKDF2Config{} } -func (c hasherConfig) HasherBcryptConfig(ctx context.Context) *hasherx.BCryptConfig { +func (c hasherConfig) HasherBcryptConfig(_ context.Context) *hasherx.BCryptConfig { return &hasherx.BCryptConfig{Cost: c.cost} } -func (c hasherConfig) GetHasherAlgorithm(ctx context.Context) HashAlgorithm { - return HashAlgorithmPBKDF2 -} +func (c hasherConfig) GetHasherAlgorithm(_ context.Context) string { return hashAlgorithmPBKDF2 } +func (c hasherConfig) Tracer(_ context.Context) *otelx.Tracer { return otelx.NewNoop(nil, nil) } func TestHasher(t *testing.T) { for _, cost := range []uint32{1, 8, 10} { - result, err := NewHasher(&hasherConfig{cost: cost}).Hash(context.Background(), []byte("foobar")) + c := &hasherConfig{cost: cost} + result, err := NewHasher(c, c).Hash(t.Context(), []byte("foobar")) require.NoError(t, err) require.NotEmpty(t, result) } @@ -39,7 +40,8 @@ func TestHasher(t *testing.T) { // TestBackwardsCompatibility confirms that hashes generated with v1.x work 
with v2.x. func TestBackwardsCompatibility(t *testing.T) { - h := NewHasher(new(hasherConfig)) + c := new(hasherConfig) + h := NewHasher(c, c) require.NoError(t, h.Compare(context.Background(), []byte("$2a$10$lsrJjLPOUF7I75s3339R2uwqpjSlYGfhFyg7YsPtrSoITVy5UF3B2"), []byte("secret"))) require.NoError(t, h.Compare(context.Background(), []byte("$2a$10$O1jZhd3U0azpLXwTu0cHHuTDWsBFnTJVbeHTADNQJWPR4Zqs8ATKS"), []byte("secret"))) require.Error(t, h.Compare(context.Background(), []byte("$2a$10$lsrJjLPOUF7I75s3339R2uwqpjSlYGfhFyg7YsPtrSoITVy5UF3B3"), []byte("secret"))) @@ -48,8 +50,9 @@ func TestBackwardsCompatibility(t *testing.T) { func BenchmarkHasher(b *testing.B) { for cost := uint32(1); cost <= 16; cost++ { b.Run(fmt.Sprintf("cost=%d", cost), func(b *testing.B) { - for n := 0; n < b.N; n++ { - result, err := NewHasher(&hasherConfig{cost: cost}).Hash(context.Background(), []byte("foobar")) + for range b.N { + c := &hasherConfig{cost: cost} + result, err := NewHasher(c, c).Hash(b.Context(), []byte("foobar")) require.NoError(b, err) require.NotEmpty(b, result) } diff --git a/x/int_to_bytes.go b/x/int_to_bytes.go new file mode 100644 index 00000000000..08805ae4de2 --- /dev/null +++ b/x/int_to_bytes.go @@ -0,0 +1,26 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package x + +import ( + "encoding/binary" + + "github.com/pkg/errors" +) + +// IntToBytes converts an int64 to a byte slice. It is the inverse of BytesToInt. +func IntToBytes(i int64) []byte { + b := make([]byte, 8) + binary.LittleEndian.PutUint64(b, uint64(i)) //nolint:gosec + + return b +} + +// BytesToInt converts a byte slice to an int64. It is the inverse of IntToBytes. +func BytesToInt(b []byte) (int64, error) { + if len(b) != 8 { + return 0, errors.New("byte slice must be 8 bytes long") + } + return int64(binary.LittleEndian.Uint64(b)), nil //nolint:gosec +} diff --git a/x/int_to_bytes_test.go b/x/int_to_bytes_test.go new file mode 100644 index 00000000000..c67f54cc5db --- /dev/null +++ b/x/int_to_bytes_test.go @@ -0,0 +1,52 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package x + +import ( + "math" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_toBytes_fromBytes(t *testing.T) { + for _, tc := range []struct { + name string + i int64 + }{ + { + name: "zero", + i: 0, + }, + { + name: "positive", + i: 1234567890, + }, + { + name: "negative", + i: -1234567890, + }, + { + name: "now", + i: time.Now().Unix(), + }, + { + name: "max", + i: math.MaxInt64, + }, + { + name: "min", + i: math.MinInt64, + }, + } { + t.Run("case="+tc.name, func(t *testing.T) { + bytes := IntToBytes(tc.i) + i, err := BytesToInt(bytes) + require.NoError(t, err) + assert.Equal(t, tc.i, i) + }) + } +} diff --git a/x/jwt.go b/x/jwt.go deleted file mode 100644 index 0204c2250bc..00000000000 --- a/x/jwt.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x - -import ( - "encoding/base64" - "strings" -) - -// Decode JWT specific base64url encoding with padding stripped -func DecodeSegment(seg string) ([]byte, error) { - if l := len(seg) % 4; l > 0 { - seg += strings.Repeat("=", 4-l) - } - - return base64.URLEncoding.DecodeString(seg) -} diff --git a/x/oauth2_test_client.go b/x/oauth2_test_client.go new file mode 100644 index 00000000000..97a9eddc01c --- /dev/null +++ b/x/oauth2_test_client.go @@ -0,0 +1,110 @@ +// Copyright © 2025 Ory Corp 
+// SPDX-License-Identifier: Apache-2.0 + +package x + +import ( + "context" + "fmt" + "net/http" + "net/http/cookiejar" + "net/url" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + "golang.org/x/oauth2" + + hydra "github.com/ory/hydra-client-go/v2" + "github.com/ory/x/ioutilx" + "github.com/ory/x/uuidx" +) + +func NewEmptyCookieJar(t testing.TB) *cookiejar.Jar { + c, err := cookiejar.New(&cookiejar.Options{}) + require.NoError(t, err) + return c +} + +func NewEmptyJarClient(t testing.TB) *http.Client { + return &http.Client{ + Jar: NewEmptyCookieJar(t), + CheckRedirect: func(req *http.Request, via []*http.Request) error { + //t.Logf("Redirect to %s", req.URL.String()) + + if len(via) >= 20 { + for k, v := range via { + t.Logf("Failed with redirect (%d): %s", k, v.URL.String()) + } + return errors.New("stopped after 20 redirects") + } + return nil + }, + } +} + +func GetExpectRedirect(t *testing.T, cl *http.Client, uri string) *url.URL { + resp, err := cl.Get(uri) + require.NoError(t, err) + require.Equalf(t, 3, resp.StatusCode/100, "status: %d\nresponse: %s", resp.StatusCode, ioutilx.MustReadAll(resp.Body)) + loc, err := resp.Location() + require.NoError(t, err) + return loc +} + +const ( + ClientCallbackURL = "https://client.ory/callback" + LoginURL = "https://ui.ory/login" + ConsentURL = "https://ui.ory/consent" +) + +func PerformAuthCodeFlow(ctx context.Context, t *testing.T, baseClient *http.Client, cfg *oauth2.Config, admin *hydra.APIClient, lr func(*testing.T, *hydra.OAuth2LoginRequest) hydra.AcceptOAuth2LoginRequest, cr func(*testing.T, *hydra.OAuth2ConsentRequest) hydra.AcceptOAuth2ConsentRequest, authCodeOpts ...oauth2.AuthCodeOption) *oauth2.Token { + var cl http.Client + if baseClient != nil { + cl = *baseClient + } + if cl.Jar == nil { + cl.Jar = NewEmptyCookieJar(t) + } + cl.CheckRedirect = func(*http.Request, []*http.Request) error { return http.ErrUseLastResponse } + + // start the auth code flow + state := uuidx.NewV4().String() + loc := GetExpectRedirect(t, &cl, cfg.AuthCodeURL(state, authCodeOpts...)) + require.Equal(t, LoginURL, fmt.Sprintf("%s://%s%s", loc.Scheme, loc.Host, loc.Path)) + + // get & submit the login request + lReq, _, err := admin.OAuth2API.GetOAuth2LoginRequest(ctx).LoginChallenge(loc.Query().Get("login_challenge")).Execute() + require.NoError(t, err) + + v, _, err := admin.OAuth2API.AcceptOAuth2LoginRequest(ctx). + LoginChallenge(lReq.Challenge). + AcceptOAuth2LoginRequest(lr(t, lReq)). + Execute() + require.NoError(t, err) + + loc = GetExpectRedirect(t, &cl, v.RedirectTo) + require.Equal(t, ConsentURL, fmt.Sprintf("%s://%s%s", loc.Scheme, loc.Host, loc.Path)) + + // get & submit the consent request + cReq, _, err := admin.OAuth2API.GetOAuth2ConsentRequest(ctx).ConsentChallenge(loc.Query().Get("consent_challenge")).Execute() + require.NoError(t, err) + + v, _, err = admin.OAuth2API.AcceptOAuth2ConsentRequest(ctx). + ConsentChallenge(cReq.Challenge). + AcceptOAuth2ConsentRequest(cr(t, cReq)). 
+ Execute() + require.NoError(t, err) + loc = GetExpectRedirect(t, &cl, v.RedirectTo) + // ensure we got redirected to the client callback URL + require.Equal(t, ClientCallbackURL, fmt.Sprintf("%s://%s%s", loc.Scheme, loc.Host, loc.Path)) + require.Equal(t, state, loc.Query().Get("state")) + + // exchange the code for a token + code := loc.Query().Get("code") + require.NotEmpty(t, code) + token, err := cfg.Exchange(ctx, code) + require.NoError(t, err) + + return token +} diff --git a/x/oauth2cors/cors.go b/x/oauth2cors/cors.go index 2069c2b8cd0..fb37e37091f 100644 --- a/x/oauth2cors/cors.go +++ b/x/oauth2cors/cors.go @@ -4,120 +4,142 @@ package oauth2cors import ( - "context" - "fmt" "net/http" "strings" - "github.com/ory/hydra/client" - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/oauth2" - "github.com/ory/hydra/x" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" "github.com/gobwas/glob" "github.com/rs/cors" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" ) func Middleware( - ctx context.Context, reg interface { x.RegistryLogger oauth2.Registry client.Registry }) func(h http.Handler) http.Handler { - opts, enabled := reg.Config().CORS(ctx, config.PublicInterface) - if !enabled { - return func(h http.Handler) http.Handler { - return h - } - } - - var alwaysAllow = len(opts.AllowedOrigins) == 0 - var patterns []glob.Glob - for _, o := range opts.AllowedOrigins { - if o == "*" { - alwaysAllow = true - } - // if the protocol (http or https) is specified, but the url is wildcard, use special ** glob, which ignore the '.' separator. - // This way g := glob.Compile("http://**") g.Match("http://google.com") returns true. - if splittedO := strings.Split(o, "://"); len(splittedO) != 1 && splittedO[1] == "*" { - o = fmt.Sprintf("%s://**", splittedO[0]) - } - g, err := glob.Compile(strings.ToLower(o), '.') - if err != nil { - reg.Logger().WithError(err).Fatalf("Unable to parse cors origin: %s", o) - } - - patterns = append(patterns, g) - } - - options := cors.Options{ - AllowedOrigins: opts.AllowedOrigins, - AllowedMethods: opts.AllowedMethods, - AllowedHeaders: opts.AllowedHeaders, - ExposedHeaders: opts.ExposedHeaders, - MaxAge: opts.MaxAge, - AllowCredentials: opts.AllowCredentials, - OptionsPassthrough: opts.OptionsPassthrough, - Debug: opts.Debug, - AllowOriginRequestFunc: func(r *http.Request, origin string) bool { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - if alwaysAllow { - return true - } - origin = strings.ToLower(origin) - for _, p := range patterns { - if p.Match(origin) { - return true - } + opts, enabled := reg.Config().CORSPublic(ctx) + if !enabled { + reg.Logger().Debug("not enhancing CORS per client, as CORS is disabled") + h.ServeHTTP(w, r) + return } - // pre-flight requests do not contain credentials (cookies, HTTP authorization) - // so we return true in all cases here. 
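PerformAuthCodeFlow, added in x/oauth2_test_client.go above, drives the full authorization-code dance for tests: it follows the redirect to the login UI, accepts the login and consent challenges via the admin API, asserts the final redirect back to the client callback, and exchanges the code. A sketch of a caller, assuming a Hydra instance whose endpoints and login/consent UI URLs line up with the constants above; every concrete address and credential here is hypothetical:

```go
package x_test

import (
	"context"
	"testing"

	"golang.org/x/oauth2"

	hydra "github.com/ory/hydra-client-go/v2"
	"github.com/ory/hydra/v2/x"
)

func TestAuthCodeFlowSketch(t *testing.T) {
	ctx := context.Background()

	// Hypothetical endpoints; a real test would point these at the Hydra
	// instance under test.
	adminCfg := hydra.NewConfiguration()
	adminCfg.Servers = hydra.ServerConfigurations{{URL: "http://127.0.0.1:4445"}}
	admin := hydra.NewAPIClient(adminCfg)

	conf := &oauth2.Config{
		ClientID:     "my-client",
		ClientSecret: "my-secret",
		RedirectURL:  x.ClientCallbackURL,
		Scopes:       []string{"openid", "offline_access"},
		Endpoint: oauth2.Endpoint{
			AuthURL:  "http://127.0.0.1:4444/oauth2/auth",
			TokenURL: "http://127.0.0.1:4444/oauth2/token",
		},
	}

	// The two callbacks decide how the login and consent challenges are accepted.
	token := x.PerformAuthCodeFlow(ctx, t, nil, conf, admin,
		func(t *testing.T, lr *hydra.OAuth2LoginRequest) hydra.AcceptOAuth2LoginRequest {
			return hydra.AcceptOAuth2LoginRequest{Subject: "the-subject"}
		},
		func(t *testing.T, cr *hydra.OAuth2ConsentRequest) hydra.AcceptOAuth2ConsentRequest {
			return hydra.AcceptOAuth2ConsentRequest{GrantScope: cr.RequestedScope}
		},
	)
	t.Logf("access token: %s", token.AccessToken)
}
```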
- if r.Method == http.MethodOptions { - return true - } - - username, _, ok := r.BasicAuth() - if !ok || username == "" { - token := fosite.AccessTokenFromRequest(r) - if token == "" { - return false + alwaysAllow := len(opts.AllowedOrigins) == 0 + patterns := make([]glob.Glob, 0, len(opts.AllowedOrigins)) + for _, o := range opts.AllowedOrigins { + if o == "*" { + alwaysAllow = true + break } - - session := oauth2.NewSessionWithCustomClaims("", reg.Config().AllowedTopLevelClaims(ctx)) - _, ar, err := reg.OAuth2Provider().IntrospectToken(ctx, token, fosite.AccessToken, session) + // if the protocol (http or https) is specified, but the url is wildcard, use special ** glob, which ignore the '.' separator. + // This way g := glob.Compile("http://**") g.Match("http://google.com") returns true. + if scheme, rest, found := strings.Cut(o, "://"); found && rest == "*" { + o = scheme + "://**" + } + g, err := glob.Compile(strings.ToLower(o), '.') if err != nil { - return false + reg.Logger().WithError(err).WithField("pattern", o).Error("Unable to parse CORS origin, ignoring it") + continue } - username = ar.GetClient().GetID() + patterns = append(patterns, g) } - cl, err := reg.ClientManager().GetConcreteClient(ctx, username) - if err != nil { - return false - } + options := cors.Options{ + AllowedOrigins: opts.AllowedOrigins, + AllowedMethods: opts.AllowedMethods, + AllowedHeaders: opts.AllowedHeaders, + ExposedHeaders: opts.ExposedHeaders, + MaxAge: opts.MaxAge, + AllowCredentials: opts.AllowCredentials, + OptionsPassthrough: opts.OptionsPassthrough, + Debug: opts.Debug, + AllowOriginRequestFunc: func(r *http.Request, origin string) bool { + ctx := r.Context() + if alwaysAllow { + return true + } + + origin = strings.ToLower(origin) + for _, p := range patterns { + if p.Match(origin) { + return true + } + } + + // pre-flight requests do not contain credentials (cookies, HTTP authorization) + // so we return true in all cases here. + if r.Method == http.MethodOptions { + return true + } + + var clientID string + + // if the client uses client_secret_post auth it will provide its client ID in form data + clientID = r.PostFormValue("client_id") + + // if the client uses client_secret_basic auth the client ID will be the username component + if clientID == "" { + clientID, _, _ = r.BasicAuth() + } + + // otherwise, this may be a bearer auth request, in which case we can introspect the token + if clientID == "" { + token := fosite.AccessTokenFromRequest(r) + if token == "" { + return false + } + + session := oauth2.NewSessionWithCustomClaims(ctx, reg.Config(), "") + _, ar, err := reg.OAuth2Provider().IntrospectToken(ctx, token, fosite.AccessToken, session) + if err != nil { + return false + } + + clientID = ar.GetClient().GetID() + } + + cl, err := reg.ClientManager().GetConcreteClient(ctx, clientID) + if err != nil { + return false + } + + for _, o := range cl.AllowedCORSOrigins { + if o == "*" { + return true + } + + // if the protocol (http or https) is specified, but the url is wildcard, use special ** glob, which ignore the '.' separator. + // This way g := glob.Compile("http://**") g.Match("http://google.com") returns true. 
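The comment above explains why a bare "<scheme>://*" origin is rewritten to "<scheme>://**" before compiling: with '.' registered as a separator, a single '*' cannot cross the dots in a host name. A small check of that behaviour against github.com/gobwas/glob, using example origins:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	origin := "http://google.com"

	// '*' stops at the '.' separator, so this pattern does not match.
	single := glob.MustCompile("http://*", '.')
	fmt.Println(single.Match(origin)) // false

	// '**' ignores the separator, which is why the middleware rewrites
	// "<scheme>://*" to "<scheme>://**" before compiling.
	double := glob.MustCompile("http://**", '.')
	fmt.Println(double.Match(origin)) // true
}
```

The same rewrite is applied twice in the middleware, once for globally configured origins and once for per-client AllowedCORSOrigins.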
+ if scheme, rest, found := strings.Cut(o, "://"); found && rest == "*" { + o = scheme + "://**" + } + + g, err := glob.Compile(strings.ToLower(o), '.') + if err != nil { + return false + } + if g.Match(origin) { + return true + } + } - for _, o := range cl.AllowedCORSOrigins { - if o == "*" { - return true - } - g, err := glob.Compile(strings.ToLower(o), '.') - if err != nil { return false - } - if g.Match(origin) { - return true - } + }, } - return false - }, + reg.Logger().Debug("enhancing CORS per client") + cors.New(options).Handler(h).ServeHTTP(w, r) + }) } - - return cors.New(options).Handler } diff --git a/x/oauth2cors/cors_test.go b/x/oauth2cors/cors_test.go index d083dc5f6bd..21f1c067d79 100644 --- a/x/oauth2cors/cors_test.go +++ b/x/oauth2cors/cors_test.go @@ -4,65 +4,105 @@ package oauth2cors_test import ( + "bytes" "context" "fmt" + "io" "net/http" "net/http/httptest" + "net/url" "testing" "time" - "github.com/ory/hydra/driver" - "github.com/ory/hydra/x/oauth2cors" - "github.com/ory/x/contextx" - - "github.com/ory/hydra/x" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ory/fosite" - "github.com/ory/hydra/client" - "github.com/ory/hydra/internal" - "github.com/ory/hydra/oauth2" + "github.com/ory/hydra/v2/client" + "github.com/ory/hydra/v2/driver" + "github.com/ory/hydra/v2/fosite" + "github.com/ory/hydra/v2/internal/testhelpers" + "github.com/ory/hydra/v2/oauth2" + "github.com/ory/hydra/v2/x" + "github.com/ory/hydra/v2/x/oauth2cors" + "github.com/ory/x/configx" + "github.com/ory/x/dbal" ) func TestOAuth2AwareCORSMiddleware(t *testing.T) { - r := internal.NewRegistryMemory(t, internal.NewConfigurationWithDefaults(), &contextx.Default{}) - token, signature, _ := r.OAuth2HMACStrategy().GenerateAccessToken(nil, nil) + ctx := context.Background() + dsn := dbal.NewSQLiteTestDatabase(t) + r := testhelpers.NewRegistrySQLFromURL(t, dsn, true, true) + token, signature, _ := r.OAuth2HMACStrategy().GenerateAccessToken(ctx, nil) + for k, tc := range []struct { - prep func(*testing.T, driver.Registry) + prep func(*testing.T, *driver.RegistrySQL) + configs map[string]any d string mw func(http.Handler) http.Handler code int header http.Header expectHeader http.Header method string + body io.Reader }{ { d: "should ignore when disabled", - prep: func(t *testing.T, r driver.Registry) {}, + prep: func(t *testing.T, r *driver.RegistrySQL) {}, code: http.StatusNotImplemented, header: http.Header{}, expectHeader: http.Header{}, }, { d: "should reject when basic auth but client does not exist and cors enabled", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://not-test-domain.com"}) + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo", "bar"))}}, expectHeader: http.Header{"Vary": {"Origin"}}, }, + { + d: "should reject when post auth client exists but origin not allowed", + configs: 
map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { + // Ignore unique violations + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-2", Secret: "bar", AllowedCORSOrigins: []string{"http://not-foobar.com"}}) + }, + code: http.StatusNotImplemented, + header: http.Header{"Origin": {"http://foobar.com"}, "Content-Type": {"application/x-www-form-urlencoded"}}, + expectHeader: http.Header{"Vary": {"Origin"}}, + method: http.MethodPost, + body: bytes.NewBufferString(url.Values{"client_id": []string{"foo-2"}}.Encode()), + }, + { + d: "should accept when post auth client exists and origin allowed", + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { + // Ignore unique violations + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-3", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}}) + }, + code: http.StatusNotImplemented, + header: http.Header{"Origin": {"http://foobar.com"}, "Content-Type": {"application/x-www-form-urlencoded"}}, + expectHeader: http.Header{"Access-Control-Allow-Credentials": []string{"true"}, "Access-Control-Allow-Origin": []string{"http://foobar.com"}, "Access-Control-Expose-Headers": []string{"Cache-Control, Expires, Last-Modified, Pragma, Content-Length, Content-Language, Content-Type"}, "Vary": []string{"Origin"}}, + method: http.MethodPost, + body: bytes.NewBufferString(url.Values{"client_id": {"foo-3"}}.Encode()), + }, { d: "should reject when basic auth client exists but origin not allowed", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://not-test-domain.com"}) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-2", Secret: "bar", AllowedCORSOrigins: []string{"http://not-foobar.com"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-2", Secret: "bar", AllowedCORSOrigins: []string{"http://not-foobar.com"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-2", "bar"))}}, @@ -70,11 +110,12 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept when basic auth client exists and origin allowed", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-3", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-3", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": 
{fmt.Sprintf("Basic %s", x.BasicAuth("foo-3", "bar"))}}, @@ -82,12 +123,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept when basic auth client exists and origin allowed", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{}) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-3", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-3", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-3", "bar"))}}, @@ -95,12 +137,27 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept when basic auth client exists and origin (with partial wildcard) is allowed per client", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{}) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { + // Ignore unique violations + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-4", Secret: "bar", AllowedCORSOrigins: []string{"http://*.foobar.com"}}) + }, + code: http.StatusNotImplemented, + header: http.Header{"Origin": {"http://foo.foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-4", "bar"))}}, + expectHeader: http.Header{"Access-Control-Allow-Credentials": []string{"true"}, "Access-Control-Allow-Origin": []string{"http://foo.foobar.com"}, "Access-Control-Expose-Headers": []string{"Cache-Control, Expires, Last-Modified, Pragma, Content-Length, Content-Language, Content-Type"}, "Vary": []string{"Origin"}}, + }, + { + d: "should accept when basic auth client exists and wildcard origin is allowed per client", + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-4", Secret: "bar", AllowedCORSOrigins: []string{"http://*.foobar.com"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-4", Secret: "bar", AllowedCORSOrigins: []string{"http://*"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foo.foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-4", "bar"))}}, @@ -108,12 +165,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept when basic auth client exists and origin (with full wildcard) is allowed globally", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"*"}) - + configs: map[string]any{ + "serve.public.cors.enabled": 
true, + "serve.public.cors.allowed_origins": []string{"*"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-5", Secret: "bar", AllowedCORSOrigins: []string{"http://barbar.com"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-5", Secret: "bar", AllowedCORSOrigins: []string{"http://barbar.com"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"*"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-5", "bar"))}}, @@ -121,12 +179,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept when basic auth client exists and origin (with partial wildcard) is allowed globally", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://*.foobar.com"}) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://*.foobar.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-6", Secret: "bar", AllowedCORSOrigins: []string{"http://barbar.com"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-6", Secret: "bar", AllowedCORSOrigins: []string{"http://barbar.com"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foo.foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-6", "bar"))}}, @@ -134,12 +193,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept when basic auth client exists and origin (with full wildcard) allowed per client", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://not-test-domain.com"}) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-7", Secret: "bar", AllowedCORSOrigins: []string{"*"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-7", Secret: "bar", AllowedCORSOrigins: []string{"*"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-7", "bar"))}}, @@ -147,9 +207,9 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should succeed on pre-flight request when token introspection fails", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://not-test-domain.com"}) + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {"Bearer 1234"}}, @@ -158,9 +218,9 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should fail 
when token introspection fails", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://not-test-domain.com"}) + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {"Bearer 1234"}}, @@ -168,18 +228,20 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should work when token introspection returns a session", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://not-test-domain.com"}) - sess := oauth2.NewSession("foo-9") + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://not-test-domain.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { + sess := oauth2.NewTestSession(t, "foo-9") sess.SetExpiresAt(fosite.AccessToken, time.Now().Add(time.Hour)) ar := fosite.NewAccessRequest(sess) - cl := &client.Client{LegacyClientID: "foo-9", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}} + cl := &client.Client{ID: "foo-9", Secret: "bar", AllowedCORSOrigins: []string{"http://foobar.com"}} ar.Client = cl // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), cl) - _ = r.OAuth2Storage().CreateAccessTokenSession(context.Background(), signature, ar) + _ = r.ClientManager().CreateClient(ctx, cl) + _ = r.OAuth2Storage().CreateAccessTokenSession(ctx, signature, ar) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foobar.com"}, "Authorization": {"Bearer " + token}}, @@ -187,13 +249,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept any allowed specified origin protocol", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://*", "https://*"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-11", Secret: "bar", AllowedCORSOrigins: []string{"*"}}) - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://*", "https://*"}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-11", Secret: "bar", AllowedCORSOrigins: []string{"*"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://foo.foobar.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-11", "bar"))}}, @@ -201,12 +263,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept client origin when basic auth client exists and origin is set at the client as well as the server", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://**.example.com"}) - + configs: map[string]any{ + 
"serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://**.example.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-12", Secret: "bar", AllowedCORSOrigins: []string{"http://myapp.example.biz"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-12", Secret: "bar", AllowedCORSOrigins: []string{"http://myapp.example.biz"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://myapp.example.biz"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-12", "bar"))}}, @@ -214,12 +277,13 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, { d: "should accept server origin when basic auth client exists and origin is set at the client as well as the server", - prep: func(t *testing.T, r driver.Registry) { - r.Config().MustSet(context.Background(), "serve.public.cors.enabled", true) - r.Config().MustSet(context.Background(), "serve.public.cors.allowed_origins", []string{"http://**.example.com"}) - + configs: map[string]any{ + "serve.public.cors.enabled": true, + "serve.public.cors.allowed_origins": []string{"http://**.example.com"}, + }, + prep: func(t *testing.T, r *driver.RegistrySQL) { // Ignore unique violations - _ = r.ClientManager().CreateClient(context.Background(), &client.Client{LegacyClientID: "foo-13", Secret: "bar", AllowedCORSOrigins: []string{"http://myapp.example.biz"}}) + _ = r.ClientManager().CreateClient(ctx, &client.Client{ID: "foo-13", Secret: "bar", AllowedCORSOrigins: []string{"http://myapp.example.biz"}}) }, code: http.StatusNotImplemented, header: http.Header{"Origin": {"http://client-app.example.com"}, "Authorization": {fmt.Sprintf("Basic %s", x.BasicAuth("foo-13", "bar"))}}, @@ -227,7 +291,7 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { }, } { t.Run(fmt.Sprintf("case=%d/description=%s", k, tc.d), func(t *testing.T) { - r.WithConfig(internal.NewConfigurationWithDefaults()) + r := testhelpers.NewRegistrySQLFromURL(t, dsn, false, true, driver.WithConfigOptions(configx.WithValues(tc.configs))) if tc.prep != nil { tc.prep(t, r) @@ -237,14 +301,14 @@ func TestOAuth2AwareCORSMiddleware(t *testing.T) { if tc.method != "" { method = tc.method } - req, err := http.NewRequest(method, "http://foobar.com/", nil) + req, err := http.NewRequest(method, "http://foobar.com/", tc.body) require.NoError(t, err) for k := range tc.header { req.Header.Set(k, tc.header.Get(k)) } res := httptest.NewRecorder() - oauth2cors.Middleware(context.Background(), r)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + oauth2cors.Middleware(r)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNotImplemented) })).ServeHTTP(res, req) require.NoError(t, err) diff --git a/x/pointer.go b/x/pointer.go deleted file mode 100644 index b415ad115da..00000000000 --- a/x/pointer.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x - -// ToPointer returns the pointer to the value. -func ToPointer[T any](val T) *T { - return &val -} - -// FromPointer returns the dereferenced value or if the pointer is nil the zero value. 
-func FromPointer[T any, TT *T](val *T) (zero T) { - if val == nil { - return zero - } - return *val -} diff --git a/x/redirect_uri.go b/x/redirect_uri.go index d4c01a3e6a2..8c5b53048b7 100644 --- a/x/redirect_uri.go +++ b/x/redirect_uri.go @@ -7,7 +7,7 @@ import ( "context" "net/url" - "github.com/ory/fosite" + "github.com/ory/hydra/v2/fosite" ) type redirectConfiguration interface { diff --git a/x/registry.go b/x/registry.go index 7d81e3a1ab9..0eacde91670 100644 --- a/x/registry.go +++ b/x/registry.go @@ -6,20 +6,18 @@ package x import ( "context" - "github.com/hashicorp/go-retryablehttp" - - "github.com/ory/x/httpx" - "github.com/ory/x/otelx" - + "github.com/gofrs/uuid" "github.com/gorilla/sessions" + "github.com/hashicorp/go-retryablehttp" "github.com/ory/herodot" + "github.com/ory/x/httpx" "github.com/ory/x/logrusx" + "github.com/ory/x/otelx" ) type RegistryLogger interface { Logger() *logrusx.Logger - AuditLogger() *logrusx.Logger } type RegistryWriter interface { @@ -37,3 +35,15 @@ type TracingProvider interface { type HTTPClientProvider interface { HTTPClient(ctx context.Context, opts ...httpx.ResilientOptions) *retryablehttp.Client } + +type Networker interface { + NetworkID(ctx context.Context) uuid.UUID +} + +type NetworkProvider interface { + Networker() Networker +} + +type Transactor interface { + Transaction(ctx context.Context, f func(ctx context.Context) error) error +} diff --git a/x/router.go b/x/router.go index aefe82a6a6c..7ba499cac23 100644 --- a/x/router.go +++ b/x/router.go @@ -4,24 +4,19 @@ package x import ( - "context" - "net/url" - - "github.com/julienschmidt/httprouter" - "github.com/ory/x/httprouterx" - + "github.com/ory/x/prometheusx" "github.com/ory/x/serverx" ) -func NewRouterPublic() *httprouterx.RouterPublic { - router := httprouter.New() - router.NotFound = serverx.DefaultNotFoundHandler - return httprouterx.NewRouterPublic() +func NewRouterPublic(metricsHandler *prometheusx.MetricsManager) *httprouterx.RouterPublic { + router := httprouterx.NewRouterPublic(metricsHandler) + router.Handler("", "/", serverx.DefaultNotFoundHandler) + return router } -func NewRouterAdmin(f func(context.Context) *url.URL) *httprouterx.RouterAdmin { - router := httprouterx.NewRouterAdminWithPrefix("/admin", f) - router.NotFound = serverx.DefaultNotFoundHandler +func NewRouterAdmin(metricsHandler *prometheusx.MetricsManager) *httprouterx.RouterAdmin { + router := httprouterx.NewRouterAdminWithPrefix(metricsHandler) + router.Handler("", "/", serverx.DefaultNotFoundHandler) return router } diff --git a/x/sighash.go b/x/sighash.go new file mode 100644 index 00000000000..00069c6f998 --- /dev/null +++ b/x/sighash.go @@ -0,0 +1,15 @@ +// Copyright © 2024 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package x + +import ( + "crypto/sha512" + "fmt" +) + +// SignatureHash hashes the signature to prevent errors where the signature is +// longer than 128 characters (and thus doesn't fit into the pk). 
+func SignatureHash(signature string) string { + return fmt.Sprintf("%x", sha512.Sum384([]byte(signature))) +} diff --git a/x/sqlx.go b/x/sqlx.go index 7ca0e5a727d..335ff0219de 100644 --- a/x/sqlx.go +++ b/x/sqlx.go @@ -12,9 +12,7 @@ import ( "github.com/pkg/errors" - "github.com/ory/x/errorsx" - - jose "gopkg.in/square/go-jose.v2" + jose "github.com/go-jose/go-jose/v3" ) // swagger:type JSONWebKeySet @@ -28,13 +26,13 @@ func (n *JoseJSONWebKeySet) Scan(value interface{}) error { if len(v) == 0 { return nil } - return errorsx.WithStack(json.Unmarshal([]byte(v), n)) + return errors.WithStack(json.Unmarshal([]byte(v), n)) } func (n *JoseJSONWebKeySet) Value() (driver.Value, error) { value, err := json.Marshal(n) if err != nil { - return nil, errorsx.WithStack(err) + return nil, errors.WithStack(err) } return string(value), nil } @@ -71,6 +69,8 @@ func (ns *Duration) UnmarshalJSON(data []byte) error { } // swagger:model NullDuration +// +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions type swaggerNullDuration string // NullDuration represents a nullable JSON and SQL compatible time.Duration. diff --git a/x/swagger/genericError.go b/x/swagger/genericError.go new file mode 100644 index 00000000000..43005060b91 --- /dev/null +++ b/x/swagger/genericError.go @@ -0,0 +1,12 @@ +// Copyright © 2023 Ory Corp +// SPDX-License-Identifier: Apache-2.0 + +package swagger + +import "github.com/ory/herodot" + +// swagger:model genericError +// +//nolint:deadcode,unused +//lint:ignore U1000 Used to generate Swagger and OpenAPI definitions +type GenericError struct{ herodot.DefaultError } diff --git a/x/test_helpers.go b/x/test_helpers.go index 65312cd5da3..666e8a048a6 100644 --- a/x/test_helpers.go +++ b/x/test_helpers.go @@ -4,7 +4,7 @@ package x import ( - "github.com/ory/fosite/storage" + "github.com/ory/hydra/v2/fosite/storage" ) func FositeStore() *storage.MemoryStore { diff --git a/x/tls_termination.go b/x/tls_termination.go deleted file mode 100644 index 6d93494b9ba..00000000000 --- a/x/tls_termination.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x - -import ( - "net" - "net/http" - "strings" - - "github.com/ory/x/errorsx" - - "github.com/pkg/errors" - "github.com/urfave/negroni" - - "github.com/ory/x/healthx" - prometheus "github.com/ory/x/prometheusx" - "github.com/ory/x/stringsx" -) - -func MatchesRange(r *http.Request, ranges []string) error { - remoteIP, _, err := net.SplitHostPort(r.RemoteAddr) - if err != nil { - return errorsx.WithStack(err) - } - - check := []string{remoteIP} - for _, fwd := range stringsx.Splitx(r.Header.Get("X-Forwarded-For"), ",") { - check = append(check, strings.TrimSpace(fwd)) - } - - for _, rn := range ranges { - _, cidr, err := net.ParseCIDR(rn) - if err != nil { - return errorsx.WithStack(err) - } - - for _, ip := range check { - addr := net.ParseIP(ip) - if cidr.Contains(addr) { - return nil - } - } - } - return errors.Errorf("neither remote address nor any x-forwarded-for values match CIDR ranges %v: %v, ranges, check)", ranges, check) -} - -type tlsRegistry interface { - RegistryLogger - RegistryWriter -} - -type tlsConfig interface { - Enabled() bool - AllowTerminationFrom() []string -} - -func RejectInsecureRequests(reg tlsRegistry, c tlsConfig) negroni.HandlerFunc { - return func(rw http.ResponseWriter, r 
*http.Request, next http.HandlerFunc) { - if r.TLS != nil || - !c.Enabled() || - r.URL.Path == healthx.AliveCheckPath || - r.URL.Path == healthx.ReadyCheckPath || - r.URL.Path == prometheus.MetricsPrometheusPath { - next.ServeHTTP(rw, r) - return - } - - if len(c.AllowTerminationFrom()) == 0 { - reg.Logger().WithRequest(r).WithError(errors.New("TLS termination is not enabled")).Error("Could not serve http connection") - reg.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.New("can not serve request over insecure http")) - return - } - - ranges := c.AllowTerminationFrom() - if err := MatchesRange(r, ranges); err != nil { - reg.Logger().WithRequest(r).WithError(err).Warnln("Could not serve http connection") - reg.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.New("can not serve request over insecure http")) - return - } - - proto := r.Header.Get("X-Forwarded-Proto") - if proto == "" { - reg.Logger().WithRequest(r).WithError(errors.New("X-Forwarded-Proto header is missing")).Error("Could not serve http connection") - reg.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.New("can not serve request over insecure http")) - return - } else if proto != "https" { - reg.Logger().WithRequest(r).WithError(errors.New("X-Forwarded-Proto header is missing")).Error("Could not serve http connection") - reg.Writer().WriteErrorCode(rw, r, http.StatusBadGateway, errors.Errorf("expected X-Forwarded-Proto header to be https but got: %s", proto)) - return - } - - next.ServeHTTP(rw, r) - } -} diff --git a/x/tls_termination_test.go b/x/tls_termination_test.go deleted file mode 100644 index 92f2fd6f27b..00000000000 --- a/x/tls_termination_test.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright © 2022 Ory Corp -// SPDX-License-Identifier: Apache-2.0 - -package x_test - -import ( - "context" - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/ory/hydra/driver/config" - "github.com/ory/hydra/internal" - . 
"github.com/ory/hydra/x" - "github.com/ory/x/contextx" -) - -func panicHandler(w http.ResponseWriter, r *http.Request) { - panic("should not have been called") -} - -func noopHandler(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNoContent) -} - -func TestDoesRequestSatisfyTermination(t *testing.T) { - c := internal.NewConfigurationWithDefaultsAndHTTPS() - r := internal.NewRegistryMemory(t, c, &contextx.Default{}) - - t.Run("case=tls-termination-disabled", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, "") - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{Header: http.Header{}, URL: new(url.URL)}, panicHandler) - assert.EqualValues(t, http.StatusBadGateway, res.Code) - }) - - // change: x-forwarded-proto is checked after cidr, therefore it will never actually test header - t.Run("case=missing-x-forwarded-proto", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "127.0.0.1:123", - Header: http.Header{}, - URL: new(url.URL)}, - panicHandler, - ) - assert.EqualValues(t, http.StatusBadGateway, res.Code) - }) - - // change: x-forwarded-proto is checked after cidr, therefor it will never actually test header with "http" - t.Run("case=x-forwarded-proto-is-http", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "127.0.0.1:123", - Header: http.Header{ - "X-Forwarded-Proto": []string{"http"}, - }, URL: new(url.URL)}, - panicHandler, - ) - assert.EqualValues(t, http.StatusBadGateway, res.Code) - }) - - t.Run("case=missing-x-forwarded-for", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{Header: http.Header{"X-Forwarded-Proto": []string{"https"}}, URL: new(url.URL)}, panicHandler) - assert.EqualValues(t, http.StatusBadGateway, res.Code) - }) - - t.Run("case=remote-not-in-cidr", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{"X-Forwarded-Proto": []string{"https"}}, URL: new(url.URL)}, - panicHandler, - ) - assert.EqualValues(t, http.StatusBadGateway, res.Code) - }) - - t.Run("case=remote-and-forwarded-not-in-cidr", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{ - "X-Forwarded-Proto": []string{"https"}, - "X-Forwarded-For": []string{"227.0.0.1"}, - }, URL: new(url.URL)}, - panicHandler, - ) - assert.EqualValues(t, 
http.StatusBadGateway, res.Code) - }) - - t.Run("case=remote-matches-cidr", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "127.0.0.1:123", - Header: http.Header{ - "X-Forwarded-Proto": []string{"https"}, - }, URL: new(url.URL)}, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - // change: cidr and x-forwarded-proto headers are irrelevant for this test - t.Run("case=passes-because-health-alive-endpoint", func(t *testing.T) { - c.MustSet(context.Background(), config.AdminInterface.Key(config.KeySuffixTLSAllowTerminationFrom), []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.AdminInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{}, - URL: &url.URL{Path: "/health/alive"}, - }, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - // change: cidr and x-forwarded-proto headers are irrelevant for this test - t.Run("case=passes-because-health-ready-endpoint", func(t *testing.T) { - c.MustSet(context.Background(), config.AdminInterface.Key(config.KeySuffixTLSAllowTerminationFrom), []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.AdminInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{}, - URL: &url.URL{Path: "/health/alive"}, - }, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - t.Run("case=forwarded-matches-cidr", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.2:123", - Header: http.Header{ - "X-Forwarded-For": []string{"227.0.0.1, 127.0.0.1, 227.0.0.2"}, - "X-Forwarded-Proto": []string{"https"}, - }, URL: new(url.URL)}, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - t.Run("case=forwarded-matches-cidr-without-spaces", func(t *testing.T) { - c.MustSet(context.Background(), config.KeyTLSAllowTerminationFrom, []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.2:123", - Header: http.Header{ - "X-Forwarded-For": []string{"227.0.0.1,127.0.0.1,227.0.0.2"}, - "X-Forwarded-Proto": []string{"https"}, - }, URL: new(url.URL)}, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - // test: in case http is forced request should be accepted - t.Run("case=forced-http", func(t *testing.T) { - c := internal.NewConfigurationWithDefaults() - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.PublicInterface))(res, &http.Request{Header: http.Header{}, URL: new(url.URL)}, noopHandler) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - // test: prometheus endpoint should accept request - t.Run("case=passes-with-tls-upstream-on-metrics-prometheus-endpoint", func(t *testing.T) { - c.MustSet(context.Background(), 
config.AdminInterface.Key(config.KeySuffixTLSAllowTerminationFrom), []string{"126.0.0.1/24", "127.0.0.1/24"}) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.AdminInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{}, - URL: &url.URL{Path: "/metrics/prometheus"}, - }, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - // test: prometheus endpoint should accept request because TLS is disabled - t.Run("case=passes-with-tls-disabled-on-admin-endpoint", func(t *testing.T) { - c.MustSet(context.Background(), config.AdminInterface.Key(config.KeySuffixTLSEnabled), false) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.AdminInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{ - "X-Forwarded-Proto": []string{"http"}, - }, - URL: &url.URL{Path: "/foo"}, - }, - noopHandler, - ) - assert.EqualValues(t, http.StatusNoContent, res.Code) - }) - - // test: prometheus endpoint should not accept request because TLS is enabled - t.Run("case=fails-with-tls-enabled-on-admin-endpoint", func(t *testing.T) { - c.MustSet(context.Background(), config.AdminInterface.Key(config.KeySuffixTLSEnabled), true) - - res := httptest.NewRecorder() - RejectInsecureRequests(r, c.TLS(context.Background(), config.AdminInterface))(res, &http.Request{ - RemoteAddr: "227.0.0.1:123", - Header: http.Header{ - "X-Forwarded-Proto": []string{"http"}, - }, - URL: &url.URL{Path: "/foo"}, - }, - panicHandler, - ) - assert.EqualValues(t, http.StatusBadGateway, res.Code) - }) -}
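
Note on the middleware test refactor above: each case now declares a `configs` map and builds its own registry via `testhelpers.NewRegistrySQLFromURL(t, dsn, false, true, driver.WithConfigOptions(configx.WithValues(tc.configs)))` instead of mutating one shared configuration with `MustSet`. The sketch below illustrates that per-case configuration pattern in isolation using only the standard library; the handler, package name, and config keys are hypothetical stand-ins, not Hydra's actual middleware or helpers.

package corsmiddleware

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

// newMiddleware is a stand-in for the middleware under test. It is built from
// a plain map, mirroring the per-case "configs" field used in the diff above.
func newMiddleware(configs map[string]any) http.Handler {
	enabled, _ := configs["cors.enabled"].(bool)
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if enabled {
			w.Header().Set("Access-Control-Allow-Origin", r.Header.Get("Origin"))
		}
		w.WriteHeader(http.StatusNotImplemented)
	})
}

func TestPerCaseConfiguration(t *testing.T) {
	for _, tc := range []struct {
		d           string
		configs     map[string]any
		expectAllow bool
	}{
		{d: "disabled", configs: map[string]any{"cors.enabled": false}, expectAllow: false},
		{d: "enabled", configs: map[string]any{"cors.enabled": true}, expectAllow: true},
	} {
		t.Run(tc.d, func(t *testing.T) {
			// Every case constructs its own handler from its own configs map,
			// so no configuration state leaks between cases.
			h := newMiddleware(tc.configs)
			req := httptest.NewRequest(http.MethodGet, "http://example.com/", nil)
			req.Header.Set("Origin", "http://example.com")
			res := httptest.NewRecorder()
			h.ServeHTTP(res, req)
			if got := res.Header().Get("Access-Control-Allow-Origin") != ""; got != tc.expectAllow {
				t.Fatalf("case %q: expected allow=%v, got %v", tc.d, tc.expectAllow, got)
			}
		})
	}
}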
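
Note on the interfaces added to x/registry.go: `Transaction(ctx, f)` takes a callback that receives a (possibly transaction-scoped) context. A minimal illustrative implementation of that shape over database/sql could look as follows; the context key, type names, and helper are invented for the sketch and are not taken from Hydra's persistence layer.

package txsketch

import (
	"context"
	"database/sql"
)

// txKey is a private context key used to carry the open transaction.
type txKey struct{}

// SQLTransactor satisfies the Transactor shape from x/registry.go:
// Transaction runs f inside a transaction and commits or rolls back
// depending on f's error.
type SQLTransactor struct {
	DB *sql.DB
}

func (t *SQLTransactor) Transaction(ctx context.Context, f func(ctx context.Context) error) error {
	tx, err := t.DB.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Make the transaction available to callees through the context.
	if err := f(context.WithValue(ctx, txKey{}, tx)); err != nil {
		_ = tx.Rollback()
		return err
	}
	return tx.Commit()
}

// txFromContext lets persistence code pick up the transaction if one is set.
func txFromContext(ctx context.Context) (*sql.Tx, bool) {
	tx, ok := ctx.Value(txKey{}).(*sql.Tx)
	return tx, ok
}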
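
Note on the new x/sighash.go helper: SHA-512/384 yields a 48-byte digest, which hex-encodes to 96 characters, so any token signature, however long, fits the 128-character key column the doc comment refers to. A minimal standalone sketch of that length argument (not part of the repository, it only mirrors the helper shown in the diff):

package main

import (
	"crypto/sha512"
	"fmt"
)

// signatureHash mirrors x.SignatureHash: reduce an arbitrarily long token
// signature to a fixed-length hex string that fits a 128-character key column.
func signatureHash(signature string) string {
	return fmt.Sprintf("%x", sha512.Sum384([]byte(signature)))
}

func main() {
	long := make([]byte, 1024) // a signature far longer than 128 characters
	h := signatureHash(string(long))
	fmt.Println(len(h)) // prints 96: 48 digest bytes, two hex characters each
}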